text
stringlengths
29
850k
import logging import os import hashlib from functools import wraps from taskflow import engines import sys sys.path.append('../recovery') from base import AnsibleTask, ShellTask, LinearFlowCreator, UnorderedFlowCreator sys.path.append("../db") from db_Dao import DRGlanceDao, DRNovaDao def task_list(fn): @wraps(fn) def wrapper(self, tasks, *args, **kwargs): task = fn(self, *args, **kwargs) task.add_result_handler(self.result_handler) tasks.append(task) return wrapper class RollbackHandler(object): def __init__(self): self.logger = logging.getLogger("RecoveryHandler") self.logger.info('Init RecoveryHandler') result_handler = ResultHandler() self.glance_handler = GlanceHandler(result_handler) self.nova_handler = NovaHandler(result_handler) def start(self, *req, **kwargs): self.logger = logging.getLogger("RecoveryHandler:start") self.logger.info("--- Hello Recovery ---") flow = self.prepare() eng = engines.load(flow) eng.run() results = eng.storage.fetch_all() print results return ['Hello Recovery'] def prepare(self): flows = [self.glance_handler.prepare(), self.nova_handler.prepare()] flow = UnorderedFlowCreator().create('restore', flows) return LinearFlowCreator().create('DR_restore', [self.nova_handler.stop_vm_task[0], flow] + self.glance_handler.drbd_tasks) class RecoveryError(Exception): pass class ResultHandler(object): def __init__(self): pass def analyze(self, name, result): for host in result['dark']: print 'Error in Task "%s": %s' % (name, result['dark'][host]['msg']) raise RecoveryError('Error in Task "%s": %s' % (name, result['dark'][host]['msg'])) for host in result['contacted']: self.analyze_result_for_host(name, result['contacted'][host]) def analyze_result_for_host(self, name, result): if 'msg' in result and result['msg'] != '': print 'Error in Task "%s": %s' % (name, result['msg']) if 'service-start' in name: raise RecoveryError('Error in Task "%s": %s' % (name, result['msg'])) if 'stderr' in result and result['stderr'] != '': print 'Error in Task "%s": %s' % (name, result['stderr']) if 'role_change' in name and 'State change failed' in result['stderr']: raise RecoveryError('Error in Task "%s": %s' % (name, result['stderr'])) if 'stdout' in result and result['stdout'] != '': print 'Output in Task "%s": %s' % (name, result['stdout']) class ComponentHandler(object): def __init__(self, component, hosts, disc, result_handler): self.component = component self.hosts = hosts self.disc = disc self.config = None self.disc_tasks = [] self.result_handler = result_handler self.restore_tasks =[] @task_list def create_role_change_task(self): drbd = 'openstack' #config return ShellTask('%s_role_change' % drbd, self.hosts, 'drbdadm secondary %s' % drbd) @task_list def create_disconnect_task(self): drbd = 'openstack' #config return ShellTask('%s_disconnect' % drbd, self.hosts, 'drbdadm disconnect %s' % drbd) @task_list def create_network_up_task(self): return ShellTask('network_neo4_up', self.hosts, 'ifconfig eno4 up') @task_list def create_connect_task(self): drbd = 'openstack' #config return ShellTask('%s_connect' % drbd, self.hosts, 'drbdadm -- --discard-my-data connect %s' % drbd) def prepare(self): self.create_tasks() return self.create_flow() def create_tasks(self): raise NotImplementedError() def create_flow(self): raise NotImplementedError() def analyze(self): raise NotImplementedError() class GlanceHandler(ComponentHandler): def __init__(self, result_handler): controllers = ['10.175.150.16'] #config super(GlanceHandler, self).__init__('glance', controllers, 'drbd0', result_handler) 
self.db = DRGlanceDao() self.drbd_tasks = [] @task_list def create_restore_backup_task(self): return ShellTask('%s_fs_restore' % self.component, self.hosts, 'chdir=/var/lib mv %sbak %s' % (self.component, self.component)) @task_list def create_remove_task(self): return ShellTask('%s_fs_remove' % self.component, self.hosts, 'chdir=/var/lib rm -rf %s' % self.component) @task_list def create_umount_task(self): return AnsibleTask('%s_fs_umount' % self.component, self.hosts, 'mount', 'src=/dev/%s name=/var/lib/%s fstype=xfs state=unmounted' % (self.disc, self.component)) def create_tasks(self): self.create_umount_task(self.disc_tasks) self.create_remove_task(self.disc_tasks) self.create_restore_backup_task(self.disc_tasks) self.create_disconnect_task(self.drbd_tasks) self.create_role_change_task(self.drbd_tasks) self.create_network_up_task(self.drbd_tasks) self.create_connect_task(self.drbd_tasks) def create_flow(self): return LinearFlowCreator().create('glance_op', self.disc_tasks + self.restore_tasks) class NovaHandler(ComponentHandler): def __init__(self, result_handler): nodes = ['10.175.150.16'] #config super(NovaHandler, self).__init__('nova', nodes, 'drbd1', result_handler) self.db = DRNovaDao() self.instance_tasks = {} self.base_tasks = {} self.stop_vm_task = [] self.instance_ids = [] @task_list def create_restore_backup_task(self): return ShellTask('%s_fs_restore' % self.component, self.hosts, 'chdir=/var/lib/%s mv instancesbak instances' % self.component) @task_list def create_remove_task(self): return ShellTask('%s_fs_remove' % self.component, self.hosts, 'chdir=/var/lib/%s rm -rf instances' % self.component) @task_list def create_rebase_task(self, host, instance_uuid_local, base_uuid_local): return ShellTask('rebase', host, 'chdir=/var/lib/nova/instances/%s qemu-img -u -b /var/lib/nova/instances/_base/%s disk' % (instance_uuid_local, base_uuid_local)) @task_list def create_umount_task(self): return AnsibleTask('%s_fs_umount' % self.component, self.hosts, 'mount', 'src=/dev/%s name=/var/lib/%s/instances fstype=xfs state=unmounted' % (self.disc, self.component)) @task_list def create_vm_stop_task(self): controllers = ['10.175.150.16'] #config return ShellTask('vm_stop', [controllers[0]], 'python /home/eshufan/scripts/nova_stop_vm.py --instance_ids %s' % ','.join(self.instance_ids)) def create_tasks(self): for (instance_uuid_primary, instance_uuid_local, image_uuid_primary, image_uuid_local, host_primary, host_local) in self.db.get_all_uuids_node():#[('', 'f6158ecb-18ca-4295-b3dd-3d7e0f7394d2', '10.175.150.16')]: print (instance_uuid_primary, instance_uuid_local, image_uuid_local) self.instance_ids.append(instance_uuid_local) self.create_vm_stop_task(self.stop_vm_task) self.create_umount_task(self.disc_tasks) self.create_remove_task(self.disc_tasks) self.create_restore_backup_task(self.disc_tasks) def create_flow(self): return LinearFlowCreator().create('nova_op', self.disc_tasks + self.restore_tasks) if __name__ == '__main__': rollback = RollbackHandler() rollback.start()
Just like any other means of client capture and client retention, the look, feel, and function of your website is just as important as that of your printed sales materials. We understand this fact and work with each business and their needs to develop unique web solutions specific to each client. Whether you’re a retailer and need an online eCommerce solution, a medical provider who needs to schedule appointments online, a manufacture who needs resource available to clients, or a musician who needs map-software linked to his gigs, we can help you design and maintain it. Because the needs of our clients are just as diverse as our clients themselves, we offer different Ardmore web design options to maximize your presence on the web, while minimizing your costs. If you need it, we can build you a fully customized website from scratch. But if you want the look, function, and feel of a customized website on a small business budget, we have the perfect solution for that, too. Choose us for Ardmore web design, Ardmore SEO and Ardmore Search Engine Optimization. WHY CHOOSE KNUCKLEHEAD FOR YOUR ARDMORE WEB DESIGN & ARDMORE SEO? WHO USES KNUCKLEHEAD FOR THIER ARDMORE WEB DESIGN & ARDMORE SEO? Call us at (484) 351-8052 with any questions about your Ardmore web design or Ardmore SEO project, or fill out our contact form here to send us a message.
#!/usr/bin/env python ''' connect as a client to two tcpip ports on localhost with mavlink packets. pass them both directions, and show packets in human-readable format on-screen. this is useful if * you have two SITL instances you want to connect to each other and see the comms. * you have any tcpip based mavlink happening, and want something better than tcpdump hint: * you can use netcat/nc to do interesting redorection things with each end if you want to. Copyright Sept 2012 David "Buzz" Bussenschutt Released under GNU GPL version 3 or later ''' import sys, time, os, struct from pymavlink import mavutil #from pymavlink import mavlinkv10 as mavlink from argparse import ArgumentParser parser = ArgumentParser(description=__doc__) parser.add_argument("srcport", type=int) parser.add_argument("dstport", type=int) args = parser.parse_args() msrc = mavutil.mavlink_connection('tcp:localhost:{}'.format(args.srcport), planner_format=False, notimestamps=True, robust_parsing=True) mdst = mavutil.mavlink_connection('tcp:localhost:{}'.format(args.dstport), planner_format=False, notimestamps=True, robust_parsing=True) # simple basic byte pass through, no logging or viewing of packets, or analysis etc #while True: # # L -> R # m = msrc.recv(); # mdst.write(m); # # R -> L # m2 = mdst.recv(); # msrc.write(m2); # similar to the above, but with human-readable display of packets on stdout. # in this use case we abuse the self.logfile_raw() function to allow # us to use the recv_match function ( whch is then calling recv_msg ) , to still get the raw data stream # which we pass off to the other mavlink connection without any interference. # because internally it will call logfile_raw.write() for us. # here we hook raw output of one to the raw input of the other, and vice versa: msrc.logfile_raw = mdst mdst.logfile_raw = msrc while True: # L -> R l = msrc.recv_match(); if l is not None: l_last_timestamp = 0 if l.get_type() != 'BAD_DATA': l_timestamp = getattr(l, '_timestamp', None) if not l_timestamp: l_timestamp = l_last_timestamp l_last_timestamp = l_timestamp print("--> %s.%02u: %s\n" % ( time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(l._timestamp)), int(l._timestamp*100.0)%100, l)) # R -> L r = mdst.recv_match(); if r is not None: r_last_timestamp = 0 if r.get_type() != 'BAD_DATA': r_timestamp = getattr(r, '_timestamp', None) if not r_timestamp: r_timestamp = r_last_timestamp r_last_timestamp = r_timestamp print("<-- %s.%02u: %s\n" % ( time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(r._timestamp)), int(r._timestamp*100.0)%100, r))
Would you see an enlarged right heart on a chest CT scan without contrast and CT scan x ray? What is better CT scan or mir to further diagnose right superior mediastinum widening in the chest x ray? How are an ordinary xray, a CT scan and a MRI scan different? See: Radiologyinfo.Org for discussion of radiological studies. Can a x ray show spinal cancer? SOFT TISSUE US performed of the left hip demonstrates a hypoechoic, non-specific 17 mm. x 10 mm. x 6 mm. abnormality, Surgiacal Eval meaning? How specific is CT scan to confirm avn in a bone in compare to MRI and bone scan? ? How do bone scan and a pet scan differ? Mri & bone scan impression is suspecious metastatis, bmd show osteopenia spiral, m.Band negative, s.Cortisol 30 with dexona 1 mg, biopsy needed ? Is pa and lateral view of the chest an xray or CT scan?
""" This is specific meta-algorithm based on the idea of cross-validation. """ from __future__ import division, print_function, absolute_import import numpy from sklearn import clone from six.moves import zip from . import utils from sklearn.cross_validation import KFold from sklearn.utils.validation import check_random_state from .factory import train_estimator from ..estimators.interface import Classifier, Regressor from ..estimators.utils import check_inputs import pandas __author__ = 'Tatiana Likhomanenko, Alex Rogozhnikov' __all__ = ['FoldingClassifier', 'FoldingRegressor'] from .utils import get_classifier_probabilities, get_classifier_staged_proba, get_regressor_prediction, \ get_regressor_staged_predict class FoldingBase(object): """ Base class for FoldingClassifier and FoldingRegressor """ def __init__(self, base_estimator, n_folds=2, random_state=None, features=None, parallel_profile=None): self.estimators = [] self.parallel_profile = parallel_profile self.n_folds = n_folds self.base_estimator = base_estimator self._folds_indices = None self.random_state = random_state self._random_number = None # setting features directly self.features = features def _get_folds_column(self, length): """ Return special column with indices of folds for all events. """ if self._random_number is None: self._random_number = check_random_state(self.random_state).randint(0, 100000) folds_column = numpy.zeros(length) for fold_number, (_, folds_indices) in enumerate( KFold(length, self.n_folds, shuffle=True, random_state=self._random_number)): folds_column[folds_indices] = fold_number return folds_column def _prepare_data(self, X, y, sample_weight): raise NotImplementedError('To be implemented in descendant') def fit(self, X, y, sample_weight=None): """ Train the classifier, will train several base classifiers on overlapping subsets of training dataset. :param X: pandas.DataFrame of shape [n_samples, n_features] :param y: labels of events - array-like of shape [n_samples] :param sample_weight: weight of events, array-like of shape [n_samples] or None if all weights are equal """ if hasattr(self.base_estimator, 'features'): assert self.base_estimator.features is None, \ 'Base estimator must have None features! Use features parameter in Folding instead' self.train_length = len(X) X, y, sample_weight = self._prepare_data(X, y, sample_weight) folds_column = self._get_folds_column(len(X)) for _ in range(self.n_folds): self.estimators.append(clone(self.base_estimator)) if sample_weight is None: weights_iterator = [None] * self.n_folds else: weights_iterator = (sample_weight[folds_column != index] for index in range(self.n_folds)) result = utils.map_on_cluster(self.parallel_profile, train_estimator, range(len(self.estimators)), self.estimators, (X.iloc[folds_column != index, :].copy() for index in range(self.n_folds)), (y[folds_column != index] for index in range(self.n_folds)), weights_iterator) for status, data in result: if status == 'success': name, classifier, spent_time = data self.estimators[name] = classifier else: print('Problem while training on the node, report:\n', data) return self def _folding_prediction(self, X, prediction_function, vote_function=None): """ Supplementary function to predict (labels, probabilities, values) :param X: dataset to predict :param prediction_function: function(classifier, X) -> prediction :param vote_function: if using averaging over predictions of folds, this function shall be passed. 
For instance: lambda x: numpy.mean(x, axis=0), which means averaging result over all folds. Another useful option is lambda x: numpy.median(x, axis=0) """ X = self._get_features(X) if vote_function is not None: print('KFold prediction with voting function') results = [] for estimator in self.estimators: results.append(prediction_function(estimator, X)) # results: [n_classifiers, n_samples, n_dimensions], reduction over 0th axis results = numpy.array(results) return vote_function(results) else: if len(X) != self.train_length: print('KFold prediction using random classifier (length of data passed not equal to length of train)') else: print('KFold prediction using folds column') folds_column = self._get_folds_column(len(X)) parts = [] for fold in range(self.n_folds): parts.append(prediction_function(self.estimators[fold], X.iloc[folds_column == fold, :])) result_shape = [len(X)] + list(numpy.shape(parts[0])[1:]) results = numpy.zeros(shape=result_shape) folds_indices = [numpy.where(folds_column == fold)[0] for fold in range(self.n_folds)] for fold, part in enumerate(parts): results[folds_indices[fold]] = part return results def _staged_folding_prediction(self, X, prediction_function, vote_function=None): X = self._get_features(X) if vote_function is not None: print('Using voting KFold prediction') iterators = [prediction_function(estimator, X) for estimator in self.estimators] for fold_prob in zip(*iterators): result = numpy.array(fold_prob) yield vote_function(result) else: if len(X) != self.train_length: print('KFold prediction using random classifier (length of data passed not equal to length of train)') else: print('KFold prediction using folds column') folds_column = self._get_folds_column(len(X)) iterators = [prediction_function(self.estimators[fold], X.iloc[folds_column == fold, :]) for fold in range(self.n_folds)] folds_indices = [numpy.where(folds_column == fold)[0] for fold in range(self.n_folds)] for stage_results in zip(*iterators): result_shape = [len(X)] + list(numpy.shape(stage_results[0])[1:]) result = numpy.zeros(result_shape) for fold in range(self.n_folds): result[folds_indices[fold]] = stage_results[fold] yield result def _get_feature_importances(self): """ Get features importance :return: pandas.DataFrame with column effect and `index=features` """ importances = numpy.sum([est.feature_importances_ for est in self.estimators], axis=0) # to get train_features, not features one_importances = self.estimators[0].get_feature_importances() return pandas.DataFrame({'effect': importances / numpy.max(importances)}, index=one_importances.index) class FoldingRegressor(FoldingBase, Regressor): """ This meta-regressor implements folding algorithm: * training data is splitted into n equal parts; * we train n regressors, each one is trained using n-1 folds To build unbiased predictions for data, pass the **same** dataset (with same order of events) as in training to `predict` or `staged_predict`, in which case classifier will use to predict each event that base classifier which didn't use that event during training. To use information from not one, but several estimators during predictions, provide appropriate voting function. 
Examples of voting function: >>> voting = lambda x: numpy.mean(x, axis=0) >>> voting = lambda x: numpy.median(x, axis=0) Parameters: ----------- :param sklearn.BaseEstimator base_estimator: base classifier, which will be used for training :param int n_folds: count of folds :param features: features used in training :type features: None or list[str] :param parallel_profile: profile for IPython cluster, None to compute locally. :type parallel_profile: None or str :param random_state: random state for reproducibility :type random_state: None or int or RandomState """ def _prepare_data(self, X, y, sample_weight): X = self._get_features(X) y_shape = numpy.shape(y) self.n_outputs_ = 1 if len(y_shape) < 2 else y_shape[1] return check_inputs(X, y, sample_weight=sample_weight, allow_multiple_targets=True) def predict(self, X, vote_function=None): """ Get predictions. To get unbiased predictions on training dataset, pass training data (with same order of events) and vote_function=None. :param X: pandas.DataFrame of shape [n_samples, n_features] :param vote_function: function to combine prediction of folds' estimators. If None then folding scheme is used. Parameters: numpy.ndarray [n_classifiers, n_samples] :type vote_function: None or function :rtype: numpy.array of shape [n_samples, n_outputs] """ return self._folding_prediction(X, prediction_function=get_regressor_prediction, vote_function=vote_function) def staged_predict(self, X, vote_function=None): """ Get predictions after each iteration of base estimator. To get unbiased predictions on training dataset, pass training data (with same order of events) and vote_function=None. :param X: pandas.DataFrame of shape [n_samples, n_features] :param vote_function: function to combine prediction of folds' estimators. If None then folding scheme is used. Parameters: numpy.ndarray [n_classifiers, n_samples] :type vote_function: None or function :rtype: sequence of numpy.array of shape [n_samples, n_outputs] """ return self._folding_prediction(X, prediction_function=get_regressor_staged_predict, vote_function=vote_function) def get_feature_importances(self): """ Get features importance :rtype: pandas.DataFrame with column effect and `index=features` """ return self._get_feature_importances() @property def feature_importances_(self): """Sklearn-way of returning feature importance. This returned as numpy.array, assuming that initially passed train_features=None """ return self.get_feature_importances().ix[self.features, 'effect'].values class FoldingClassifier(FoldingBase, Classifier): """ This meta-classifier implements folding algorithm: * training data is splitted into n equal parts; * we train n classifiers, each one is trained using n-1 folds To build unbiased predictions for data, pass the **same** dataset (with same order of events) as in training to `predict`, `predict_proba` or `staged_predict_proba`, in which case classifier will use to predict each event that base classifier which didn't use that event during training. To use information from not one, but several estimators during predictions, provide appropriate voting function. 
Examples of voting function: >>> voting = lambda x: numpy.mean(x, axis=0) >>> voting = lambda x: numpy.median(x, axis=0) Parameters: ----------- :param sklearn.BaseEstimator base_estimator: base classifier, which will be used for training :param int n_folds: count of folds :param features: features used in training :type features: None or list[str] :param parallel_profile: profile for IPython cluster, None to compute locally. :type parallel_profile: None or str :param random_state: random state for reproducibility :type random_state: None or int or RandomState """ def _prepare_data(self, X, y, sample_weight): X = self._get_features(X) self._set_classes(y) return check_inputs(X, y, sample_weight=sample_weight, allow_multiple_targets=True) def predict(self, X, vote_function=None): """ Predict labels. To get unbiased predictions on training dataset, pass training data (with same order of events) and vote_function=None. :param X: pandas.DataFrame of shape [n_samples, n_features] :param vote_function: function to combine prediction of folds' estimators. If None then folding scheme is used. :type vote_function: None or function :rtype: numpy.array of shape [n_samples] """ return numpy.argmax(self.predict_proba(X, vote_function=vote_function), axis=1) def predict_proba(self, X, vote_function=None): """ Predict probabilities. To get unbiased predictions on training dataset, pass training data (with same order of events) and vote_function=None. :param X: pandas.DataFrame of shape [n_samples, n_features] :param vote_function: function to combine prediction of folds' estimators. If None then folding scheme is used. :type vote_function: None or function :rtype: numpy.array of shape [n_samples, n_classes] """ result = self._folding_prediction(X, prediction_function=get_classifier_probabilities, vote_function=vote_function) return result / numpy.sum(result, axis=1, keepdims=True) def staged_predict_proba(self, X, vote_function=None): """ Predict probabilities after each stage of base_estimator. To get unbiased predictions on training dataset, pass training data (with same order of events) and vote_function=None. :param X: pandas.DataFrame of shape [n_samples, n_features] :param vote_function: function to combine prediction of folds' estimators. If None then folding scheme is used. :type vote_function: None or function :rtype: sequence of numpy.arrays of shape [n_samples, n_classes] """ for proba in self._staged_folding_prediction(X, prediction_function=get_classifier_staged_proba, vote_function=vote_function): yield proba / numpy.sum(proba, axis=1, keepdims=True) def get_feature_importances(self): """ Get features importance :rtype: pandas.DataFrame with column effect and `index=features` """ return self._get_feature_importances() @property def feature_importances_(self): """Sklearn-way of returning feature importance. This returned as numpy.array, assuming that initially passed train_features=None """ return self.get_feature_importances().ix[self.features, 'effect'].values
Jérôme Minière in Montreal or Quebec City? Inform me about the next events for Jérôme Minière. Enroll yourself to our priority waiting list for Jérôme Minière to be alerted when new tickets are available. Filling this form doesn't engage you to any purchase.
#!/usr/bin/env python from math import * MAX_TRI = 9999999L triangles = [] def next_pos(mn, pos): if mn > triangles[MAX_TRI - 1]: return -1 else: maxv = MAX_TRI - 1 minv = pos mid = minv + (maxv - minv) / 2 while triangles[mid] != mn and minv < maxv: if triangles[mid] < mn : minv = mid + 1 else : maxv = mid - 1 mid = minv + (maxv - minv) / 2 return mid def gen_triangles(offset): triangles[:] = [] i = 1L + offset * MAX_TRI bound = i + MAX_TRI print "Generating %i through %i " % (i, bound) while i <= bound: triangles.append((i * (i + 1L)) / 2L) i += 1L print "Max value = %i " % (triangles[MAX_TRI - 1]) def pe321(): offset = pos = tn = total = count = mn = 0L n = 1L while count != 40L: mn = 2L * n + n * n while mn % 3L != 0L and mn % 9L != 1L: n += 1L mn = 2L * n + n * n tn = 1L + 8L * mn if sqrt(tn) % 1 == 0: count += 1L total += n print "%i.\tM(%i) = %i is a triangule number" % (count, n, mn) # if n == 1L: n += 1L # else: # n += tn / (2 * (n * n)) #else: # n += 1L print "The sum of the first %i terms = %i" % (count, total) pe321()
is high-speed and high-accurate simulator of LSI structure with graphical user interface. Memory 256MB without 3D options. (but according to the size of simulation). Graphic board for OpenGL is recommended.
# -*- coding: utf-8 -*- # Copyright (c) 2015 - Juha Ruotsalainen # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """Plugin which replaces certain characters in album names. Rules for conversion are specified in sigal.conf.py -file in an array of tuples by the name of 'album_title_mangler'. E.g. album_title_mangler = [('__','-'),('_',' ')] This example will first convert all double underscores into a single dash, then convert all single underscores into a single space. You can add as many tuples as you want, just remember: the order is important, mangling rules are processed in first-come-first-serve -order. Btw, the given example above is the default name mangling rule. """ import collections import logging import os.path from sigal import signals logger = logging.getLogger(__name__) orderedDict = collections.OrderedDict([('__', '-'), ('_', ' ')]) def process_album(album): '''Process an album title with the predefined rules set in orderedDict.''' toMangle = album.title for key in orderedDict.keys(): value = orderedDict[key] toMangle = toMangle.replace(key, value) album.title = toMangle logger.info("Album name mangled to '%s'." % toMangle) def register(settings): global orderedDict pluginName = os.path.splitext(__name__)[-1][1:] try: dictInput = settings[pluginName] od = collections.OrderedDict(dictInput) orderedDict = od logger.info( "Using the following name mangling rules: %s", orderedDict) except: # Settings didn't contain input. Use the default. logger.info("Using the default name mangling rules: %s", orderedDict) signals.album_initialized.connect(process_album)
Your privacy is important to us at cocoasharp.org . To better protect your privacy we provide this notice explaining our online information practices and the choices you can make about the way your information is collected and used. To make this notice easy to find, we make it available on our homepage and at every point where personally identifiable information may be requested.
# -*- coding: utf8 -*- import re import os # ... list_keywords_decs = ["integer", "real", "logical", "type", "class"] dict_keywords_re = {} for word in list_keywords_decs: # pattern = r"[ ]*\b" + word + r"\b\s*::\s*" pattern = r"[ ]*" + word + r"[^:]*::\s*(.+)" word_re = re.compile(pattern, re.DOTALL | re.I) dict_keywords_re[word] = word_re # ... # ... def extract_blocks(word, TAG): pattern_start = word + "\s+" + TAG pattern_end = "end\s+" + word + r"\s+\b" + TAG + r"\b" pattern = pattern_start + "(.*?)" + pattern_end word_re = re.compile(pattern, re.DOTALL | re.I) return word_re # ... # ... def extract_signature(): pattern = r"[^:]*\(.+\)" # pattern ="\(.+\)" # pattern =r"\s*\(\s*.*\)" word_re = re.compile(pattern, re.I) return word_re # ... # ... def extract_arguments(): pattern ="([\w]*),*" word_re = re.compile(pattern, re.I) return word_re # ... # ... def get_names_subroutine(text_in): # ... find subroutine names keyword = "subroutine" pattern = r"\b" + keyword + r"\b.*\(" word_re = re.compile(pattern, re.I) text = word_re.findall(text_in.lower(), re.I) list_names = [] for t in text: list_s = [s for s in t.split(keyword) if len(s) > 0] for s in list_s: list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0] list_names.append(list_d[0]) # print ("+++++ subroutine-names :", list_names) list_names = [name.lower() for name in list_names if len(name) > 0] return list_names # ... # ... def get_names_function(text_in): # ... find function names keyword = "function" pattern = r"\b" + keyword + r"\b.*\(" word_re = re.compile(pattern, re.I) text = word_re.findall(text_in.lower(), re.I) list_names = [] for t in text: list_s = [s for s in t.split(keyword) if len(s) > 0] for s in list_s: list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0] list_names.append(list_d[0]) # print ("+++++ function-names :", list_names) list_names = [name.lower() for name in list_names if len(name) > 0] return list_names # ... # ... def get_names_module(text_in): # ... find module names keyword = "module" pattern = r"\b" + keyword + r"\b.*" word_re = re.compile(pattern, re.I) text = word_re.findall(text_in.lower(), re.I) list_names = [] for t in text: list_s = [s for s in t.split(keyword) if len(s) > 0] # print ("++ list_s :", list_s) for s in list_s: list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0] list_names.append(list_d[0]) set_names = set(list_names) list_names = list(set_names) list_names = [name.lower() for name in list_names if len(name) > 0] # print ("+++++ modules-names :", list_names) return list_names # ... # ... def get_calls_subroutine(source): _re = extract_subroutine_call() return _re.findall(source, re.I) # ... # ... def get_calls_function(source): _re = extract_function_call() return _re.findall(source, re.I) # ... # ... def extract_subroutine_call(): pattern = r"\bcall\s+(\w+)\(" word_re = re.compile(pattern, re.I) return word_re # ... # ... def extract_function_call(): pattern = r"\b(\w+)\(" word_re = re.compile(pattern, re.I) return word_re # ... # ... def extract_contains(): pattern = r"\bcontains\b" word_re = re.compile(pattern, re.I) return word_re # ... # ... def get_declarations_calls(source): text = source _re = extract_contains() condition = (len(_re.findall(text, re.I)) > 0) dict_decl = {} dict_calls = {} if condition: list_code = _re.split(text) # ... get calls - subroutines calls_sub = get_calls_subroutine(list_code[0]) # ... # ... 
get calls - functions _calls_fun = get_calls_function(list_code[0]) calls_fun = [s for s in _calls_fun if s not in calls_sub] # ... # ... put back the other contains list_code_new = [] code = list_code[1] list_code_new.append(r"\tcontains \n") list_code_new.append(code) text = ''.join(list_code_new[:]) # ... # ... get declaration - subroutines names_sub = get_names_subroutine(text) # ... # ... get declaration - functions names_fun = get_names_function(text) # ... # ... dict_decl["subroutine"] = names_sub dict_decl["function"] = names_fun # ... # ... dict_calls["subroutine"] = calls_sub dict_calls["function"] = calls_fun # ... else: # ... get calls - subroutines calls_sub = get_calls_subroutine(text) # ... # ... get calls - functions _calls_fun = get_calls_function(text) calls_fun = [s for s in _calls_fun if s not in calls_sub] # ... # ... dict_decl["subroutine"] = [] dict_decl["function"] = [] # ... # ... dict_calls["subroutine"] = calls_sub dict_calls["function"] = calls_fun # ... return dict_decl, dict_calls # ... # ... def get_signature_from_text(source): _re = extract_signature() m = _re.match(source) t = m.group(0) return t # ... # ... def get_arguments_from_text(source): text = source try: data = extract_arguments().findall(text.lstrip(), re.I) arguments = [b.rstrip().lstrip() for b in data if len(b) > 0] except: arguments = [] return arguments # ... # ... def get_declarations_variables(source, constructor): _source = source.lower() list_var = [] for word in list_keywords_decs: pattern = r"[ ]*" + word + r"[^:]*::\s*(.+)" pattern = r"[ ]*" + word + r"(.*dimension)?.*::\s*(.+)" _re = re.compile(pattern,re.I) _vars_name = _re.findall(_source, re.I) print _vars_name try: _vars_arg,_vars_name = zip(*_vars_name) except: _vars_name = [] ; _vars_arg = None print _vars_name, " ---- ",_vars_arg # _vars_name = _re.match(_source).group(-1) if len(_vars_name) > 0: for _vars, _args in zip(_vars_name, _vars_arg): for var_name in _vars.split(','): args = _args.split(',') args = [s.strip() for s in args if len(s)>0] print var_name, " --- ARGS :", args var = constructor(name=var_name.strip(), \ dtype=word, \ attributs=args) list_var.append(var) return list_var # ...
Rapid and effective ignition of pyrotechnic countermeasure decoy flares is vitally important to the safety of expensive military platforms such as aircraft. QinetiQ is conducting experimental and theoretical research into pyrotechnic countermeasure decoy flares. A key part of this work is the development and application of improved ignition models to improve the understanding of the ignition processes occurring for these flares. Typically, these flares are wrapped in a material such as foil. Until this material bursts or ruptures, the flare can be considered to be effectively a closed system. Therefore it is possible to use gun internal ballistics models to investigate the ignition and combustion processes in these flares. Previous work validated the computer models for conditions of atmospheric pressure and 21ºC. This paper describes computer modelling and experiments conducted to validate predictions at pressures equivalent to those at 20000 and 40000 feet.
import cv2, time, sys, threading, os, json from PyQt4 import QtCore, QtGui, QtOpenGL from constants import * class SliderWidget(QtGui.QWidget): ''' This widget wraps a single parameter in the TunerWindow. Name, value, min, max, interval are stored in this object. Three gui elements are included to display the information of the parameter: 1) QLabel showing name 2) QLabel showing value 3) QSlider ''' def __init__(self, parent, name, min, max, value, interval): super(SliderWidget, self).__init__(parent) self.parent = parent self.name = name self.min = min self.max = max self.value = value self.interval = interval self.QLabel_name = QtGui.QLabel(self) # QLabel showing name self.QLabel_value = QtGui.QLabel(self) # QLabel showing value self.QSlider = QtGui.QSlider(QtCore.Qt.Horizontal, self) # QSlider # Create and set H box layout self.hbox = QtGui.QHBoxLayout() self.setLayout(self.hbox) self.hbox.addWidget(self.QLabel_name) self.hbox.addWidget(self.QLabel_value) self.hbox.addWidget(self.QSlider) self.QLabel_name.setText(name) self.QLabel_value.setText(str(value)) self.QSlider.setMinimum(min) self.QSlider.setMaximum(max) self.QSlider.setValue(value) self.QSlider.setSingleStep(interval) self.QSlider.setTickInterval(interval) self.QSlider.setTickPosition(QtGui.QSlider.TicksBelow) self.QSlider.sliderReleased.connect(self.slider_released) def slider_released(self): ''' User invoked action (mouse release event) => Notify the parent object ''' value = self.QSlider.value() # Round the value to fit the interval value = value - self.min value = round( value / float(self.interval) ) * self.interval value = int( value + self.min ) self.value = value self.QSlider.setValue(value) self.QLabel_value.setText(str(value)) # Notify the parent that the user changed the value with mouse. # Let the parent decide what to do with the gui event. self.parent.user_changed_value(self.name, value) def set_value(self, value): ''' Set the value of self.QSlider and self.QLabel_value Note that this only sets the displayed value without invoking any downstream action This method is not invoked by user interaction This method is only for displaying value ''' if value >= self.min and value <= self.max: self.value = value self.QSlider.setValue(value) self.QLabel_value.setText(str(value)) class TunerWindow(QtGui.QWidget): ''' A gui template window for tuning parameters. This class does not contain any business logic. All it does is to provide an interface to adjust parameters through gui. Each parameter is wrapped in a 'block' of SliderWidget object. Properties (name, min, max, value, interval) of each parameter is stored in the SliderWidget object. ''' def __init__(self): super(TunerWindow, self).__init__() self.vbox = QtGui.QVBoxLayout() self.setLayout(self.vbox) self.widgets = {} # a dictionary of widgets, indexed by the name of each parameter def add_parameter(self, name, min, max, value, interval): ''' Add a new SliderWidget object holding all information of the new parameter. 
''' widget = SliderWidget(parent = self, name = name, min = min, max = max, value = value, interval = interval) # Add the widget to the dictionary self.widgets[name] = widget # Insert the widget to the last row of the V box self.vbox.insertWidget(len(self.vbox), widget) def add_widget(self, widget): ''' Insert QWidget object to the last row of self.vbox (QVBoxLayout) ''' self.vbox.insertWidget(len(self.vbox), widget) def set_parameter(self, name, value): ''' Set the widget slider value ''' # If the name is not present in self.parameters then do nothing if self.widgets.get(name, None) is None: return self.widgets[name].set_value(value) def user_changed_value(self, name, value): ''' To be overridden. Decides what to do when the child widget slider_released() method is called... which is invoked upon user mouse action ''' pass class CameraTunerWindow(TunerWindow): ''' Inherits from the TunerWindow class. The business logics for the camera imaging parameters is specified in this class. This class manages the transfer of camera parameters to the core object. ''' def __init__(self, controller, which_cam, paired, parent): super(CameraTunerWindow, self).__init__() self.controller = controller self.which_cam = which_cam self.parent = parent self.setWindowIcon(QtGui.QIcon('icons/windu_vision.png')) self.setMinimumWidth(600) title = {CAM_R: 'Right Camera' , CAM_L: 'Left Camera' , CAM_E: 'Ambient Camera'} self.setWindowTitle(title[which_cam]) self.__init__load_parameters() if paired: # If this CameraTunerWindow object is paired to another camera, e.g. left and right cameras # then add a check box for toggling the synchronization of the two cameras self.sync_box = QtGui.QCheckBox(parent=self) self.sync_box.setText('Sync Control') self.sync_box.toggled.connect(self.user_changed_sync) self.add_widget(self.sync_box) def __init__load_parameters(self): ''' Load parameters from the .json file, and set the values of the QSliders ''' filepath = 'parameters/' + self.which_cam + '.json' with open(filepath, 'r') as fh: P = json.loads(fh.read()) self.add_parameter(name='brightness' , min=0 , max=255 , value=P['brightness' ], interval=5 ) self.add_parameter(name='contrast' , min=0 , max=255 , value=P['contrast' ], interval=5 ) self.add_parameter(name='saturation' , min=0 , max=255 , value=P['saturation' ], interval=5 ) self.add_parameter(name='gain' , min=0 , max=127 , value=P['gain' ], interval=1 ) self.add_parameter(name='exposure' , min=-7 , max=-1 , value=P['exposure' ], interval=1 ) self.add_parameter(name='white_balance' , min=3000, max=6500, value=P['white_balance'], interval=100) self.add_parameter(name='focus' , min=0 , max=255 , value=P['focus' ], interval=5 ) self.isManual = {} for name in ['brightness', 'contrast', 'saturation', 'gain', 'exposure', 'white_balance', 'focus']: self.isManual[name] = True def user_changed_sync(self): ''' User (mouse action) check or uncheck the self.sync_box ''' self.parent.user_changed_sync(self.which_cam, self.sync_box.isChecked()) def set_sync(self, isChecked): ''' Accessed by external object to set the state of self.sync_box ''' self.sync_box.setChecked(isChecked) def user_changed_value(self, name, value): ''' Called by the child widget method slider_released(). Transfers parameters to the core object via the controller. ''' self.parent.user_changed_value(self.which_cam, name, value) self.apply_parameter(name, value) def apply_parameter(self, name, value): ''' Apply the camera parameter value to the core object throught the controller i.e. 
configuring the camera hardware ''' # Decides whether or not to apply the parameter to configure the camera hardware if not self.isManual[name]: return data = {'which_cam': self.which_cam, 'parameters': {name: value}} self.controller.call_method( method_name = 'apply_camera_parameters', arg = data ) def auto_cam_resumed(self): ''' Auto camera tuning mainly works on gain and exposure So set these two parameters to isManual = False... to prevent user from changing it ''' for name in ['gain', 'exposure']: self.isManual[name] = False def auto_cam_paused(self): ''' Change gain and exposure back to isManual = True ''' for name in ['gain', 'exposure']: self.isManual[name] = True class CameraTunerWindowSet(object): ''' This class possesses the three CameraTunerWindow: CAM_R, CAM_L, CAM_E This class should have the basic methods (interface) that the child CameraTunerWindow has, e.g. show(), hide(), close() ... ''' def __init__(self, controller): # Instantiate three CameraTunerWindow objects # Collect them in a dictionary self.windows = {} self.windows[CAM_R] = CameraTunerWindow(controller, CAM_R, paired=True , parent=self) self.windows[CAM_L] = CameraTunerWindow(controller, CAM_L, paired=True , parent=self) self.windows[CAM_E] = CameraTunerWindow(controller, CAM_E, paired=False, parent=self) self.isSync = False def show(self): for i, win in enumerate(self.windows.values()): win.move(200+200*i, 200) win.show() def hide(self): for win in self.windows.values(): win.hide() def close(self): for win in self.windows.values(): win.close() def set_parameter(self, which_cam, name, value): self.windows[which_cam].set_parameter(name, value) def auto_cam_resumed(self): for win in self.windows.values(): win.auto_cam_resumed() def auto_cam_paused(self): for win in self.windows.values(): win.auto_cam_paused() def user_changed_value(self, which_cam, name, value): if which_cam == CAM_L and self.isSync: self.windows[CAM_R].set_parameter(name, value) self.windows[CAM_R].apply_parameter(name, value) elif which_cam == CAM_R and self.isSync: self.windows[CAM_L].set_parameter(name, value) self.windows[CAM_L].apply_parameter(name, value) def user_changed_sync(self, which_cam, isChecked): if which_cam == CAM_L: self.windows[CAM_R].set_sync(isChecked) if which_cam == CAM_R: self.windows[CAM_L].set_sync(isChecked) self.isSync = isChecked class DepthTunerWindow(TunerWindow): ''' Inherits from the TunerWindow class. The business logics for the actual depth parameters to be tuned is specified in this class. This class also manages the transfer of depth parameters to the core object. ''' def __init__(self, controller): super(DepthTunerWindow, self).__init__() self.controller = controller self.setWindowIcon(QtGui.QIcon('icons/windu_vision.png')) self.setWindowTitle('Stereo Depth Parameters') self.setMinimumWidth(600) self.add_parameter(name='ndisparities', min=0, max=160, value=32, interval=16) self.add_parameter(name='SADWindowSize', min=5, max=105, value=31, interval=2) def user_changed_value(self): ''' Transfers parameters to the core object via the controller. ''' parms = {} for p in self.parameters.values(): parms[p.name] = p.value self.controller.call_method( method_name = 'apply_depth_parameters', arg = parms )
Fig & Olive is one of those restaurants that just makes you feel you’re in the South of France, no matter which location you’re at. I credit it to Laurent whose love for simple elegant French food is infectious. Recently, they published a long overdue cookbook and I got to raise a glass to celebrate with them for the occasion. The dish that stood out to me the most that night was this Citrus Scallops En Papillote With Herb Emulsion. It’s a dish you can make with winter ingredients so it brings bright flavor to even the most wintery day. The original recipe has salmon in addition to the scallops but I am partial to it with all scallops. For Herb Emulsion: Blend the herbs with salt, pepper, and olive oil with a mortar and pestle or a blender until smooth. For The Papillotes: Place a pot on medium-low heat with olive oil; gently cook the fennel, ginger, spring onions, and liqueur, salt, and pepper, covered, until soft, about 15 to 20 minutes. Chill. Heat an oven to 400°F and arrange a rack in the middle. Cut baking parchment into 4 (12-inch) squares; divide fennel mixture among pieces of parchment. Season the scallops with salt and pepper. Place 4 scallops on each portion. Divide the citrus and tomato halves; add 1 teaspoon herb emulsion to each. Bring the sides of the parchment up over the filling and tie together tightly with butcher’s string, sealing well. Place the papillotes on a rimmed baking sheet and bake for 15 to 20 minutes.Transfer to a platter and open carefully to release the steam in front of your guests.
# Copyright (c) 2014 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi import colander import deform from pyramid.view import view_config from pyramid.threadlocal import get_current_registry import html_diff_wrapper from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS from pontus.form import FormView from pontus.view_operation import MultipleView from pontus.schema import Schema, omit from pontus.view import BasicView from pontus.default_behavior import Cancel from pontus.widget import TextInputWidget from novaideo.content.processes.amendment_management.behaviors import ( SubmitAmendment) from novaideo.content.amendment import Amendment, Intention from novaideo.widget import ( DragDropSelect2Widget, DragDropSequenceWidget, DragDropMappingWidget, LimitedTextAreaWidget) from novaideo import _ from novaideo.utilities.amendment_viewer import IAmendmentViewer def get_default_explanations_groups(context): explanations = dict(context.explanations) groups = [] grouped_explanations = [] for explanation in explanations.values(): if not(explanation['oid'] in grouped_explanations): group = [e for e in explanations.values() if Intention.eq(explanation['intention'], e['intention'])] grouped_explanations.extend([e['oid'] for e in group]) groups.append(group) if len(grouped_explanations) == len(explanations): break return groups @colander.deferred def explanations_choice(node, kw): context = node.bindings['context'] values = [(i['oid'], i['oid']) for i in context.explanations.values()] return DragDropSelect2Widget(values=values, item_css_class="col-md-4", multiple=True) class ExplanationGroupSchema(Schema): title = colander.SchemaNode( colander.String(), missing="", widget=TextInputWidget(css_class="title-select-item", item_css_class="col-md-4", readonly=True) ) explanations = colander.SchemaNode( colander.Set(), widget=explanations_choice, missing=[], default=[], title=_('Improvements'), ) justification = colander.SchemaNode( colander.String(), widget=LimitedTextAreaWidget(limit=350, css_class="justification-select-item", item_css_class="col-md-4", placeholder=_("Justification")), missing="", title=_("Justification") ) @colander.deferred def groups_widget(node, kw): context = node.bindings['context'] return DragDropSequenceWidget( item_css_class="explanation-groups", item_title_template=context.title+'-', node_description=_("To do so, you can drag-and-drop your improvements " "from one amendment to the other, add amendments or " "suppress the empty ones."), max_len=len(context.explanations)) class ExplanationGroupsSchema(Schema): groups = colander.SchemaNode( colander.Sequence(), omit(ExplanationGroupSchema(name='Amendment', widget=DragDropMappingWidget()), ['_csrf_token_']), widget=groups_widget, title=_('Group your improvements into amendments') ) single_amendment = colander.SchemaNode( colander.Boolean(), widget=deform.widget.CheckboxWidget(css_class="single-amendment-control"), label=_('Group the improvements into a single amendment'), title='', missing=False ) justification = colander.SchemaNode( colander.String(), widget=LimitedTextAreaWidget(limit=350, item_css_class="justification-amendment hide-bloc", placeholder=_("Justification")), missing="", title=_("Justification") ) class SubmitAmendmentViewStudyReport(BasicView): title = _('Alert for publication') name = 'alertforpublication' template = 'novaideo:views/amendment_management/templates/alert_amendment_submit.pt' readonly_explanation_template = 
'novaideo:views/amendment_management/templates/readonly/submit_explanation_item.pt' def update(self): result = {} amendment_viewer = get_current_registry().getUtility( IAmendmentViewer, 'amendment_viewer') souptextdiff, explanations = amendment_viewer.get_explanation_diff( self.context, self.request) amendment_viewer.add_details(explanations, self.context, self.request, souptextdiff, self.readonly_explanation_template) text_diff = html_diff_wrapper.soup_to_text(souptextdiff) not_published_ideas = [] if not self.request.moderate_ideas: not_published_ideas = [i for i in self.context.get_used_ideas() if 'published' not in i.state] values = {'context': self.context, 'explanationtext': text_diff, 'not_published_ideas': not_published_ideas} body = self.content(args=values, template=self.template)['body'] item = self.adapt_item(body, self.viewid) result['coordinates'] = {self.coordinates: [item]} return result class SubmitAmendmentView(FormView): title = _('Submit') name = 'submitamendmentform' formid = 'formsubmitamendment' schema = ExplanationGroupsSchema() behaviors = [SubmitAmendment, Cancel] validate_behaviors = False css_class = 'panel-transparent' def default_data(self): groups = get_default_explanations_groups(self.context) data = {'groups': []} i = 1 for group in groups: justification = ''.join(list(set([e['intention']['comment'] for e in group]))) group_data = {'title': self.context.title +'-'+str(i), 'explanations': [str(e['oid']) for e in group], 'justification': justification} data['groups'].append(group_data) i += 1 return data @view_config( name='submitamendment', context=Amendment, renderer='pontus:templates/views_templates/grid.pt', ) class SubmitAmendmentViewMultipleView(MultipleView): title = _('Prepare amendments') name = 'submitamendment' viewid = 'submitamendment' template = 'daceui:templates/mergedmultipleview.pt' views = (SubmitAmendmentViewStudyReport, SubmitAmendmentView) behaviors = [SubmitAmendment] validators = [SubmitAmendment.get_validator()] requirements = {'css_links': ['novaideo:static/css/organize_amendments.css'], 'js_links': ['novaideo:static/js/organize_amendments.js', 'novaideo:static/js/jquery.elastic.source.js']} css_class = 'panel-transparent' DEFAULTMAPPING_ACTIONS_VIEWS.update( {SubmitAmendment: SubmitAmendmentViewMultipleView})
Volleyball requires lots of dynamic movement, leaving the joints susceptible to strain and injury. Knee, ankle and elbow injuries are especially common. When lunging, blocking, jumping, serving or spiking, volleyball players need to change direction quickly in order to execute powerful, precise moves. One wrong landing, a rolled ankle, or the accumulation of these movements over time can take a player out of the game. Bauerfeind’s medical-grade compression promotes healing blood flow and proprioception to reduce pain, swelling, and reinjury. Whether you are recovering from injury or trying to ensure your best performance, Bauerfeind’s braces and supports stabilize the joints without limiting mobility, so you can rely on your body to get you through the game. What are the most common injuries in volleyball players? Some of the most common volleyball injuries include: shoulder tendinitis, ankle sprain, knee sprain, jumper’s knee, pulled muscles, ligament tears, and injuries to the wrist and forearms. Bauerfeind offers medical-grade compression braces, supports and sleeves to treat or prevent these injuries. Ask your athletic trainer or healthcare provider for more information on which product may be right for you. How can I prevent or avoid injury while playing volleyball? Wearing a brace or support from Bauerfeind can help to reduce your chances of injury by providing enhanced stability, proprioception and medical-grade compression. The standard first-line treatment for acute injury such as an ankle sprain is RICE Therapy - Rest, Ice, Compression and Elevation. Bauerfeind braces and supports provide the compression component in this treatment. As always, consult your healthcare provider to create and manage your recovery plan.
""" model output """ import logging from pathlib import Path import pickle from hic import flow import numpy as np from sklearn.externals import joblib from . import workdir, cachedir, systems, expt from .design import Design # TODO move this symmetric cumulant code to hic def csq(x): """ Return the absolute square |x|^2 of a complex array. """ return (x*x.conj()).real def corr2(Qn, M): """ Compute the two-particle correlation <v_n^2>. """ return (csq(Qn) - M).sum() / (M*(M - 1)).sum() def symmetric_cumulant(events, m, n): """ Compute the symmetric cumulant SC(m, n). """ M = np.asarray(events['M'], dtype=float) Q = dict(enumerate(events['Qn'].T, start=1)) cm2n2 = ( csq(Q[m]) * csq(Q[n]) - 2*(Q[m+n] * Q[m].conj() * Q[n].conj()).real - 2*(Q[m] * Q[m-n].conj() * Q[n].conj()).real + csq(Q[m+n]) + csq(Q[m-n]) - (M - 4)*(csq(Q[m]) + csq(Q[n])) + M*(M - 6) ).sum() / (M*(M - 1)*(M - 2)*(M - 3)).sum() cm2 = corr2(Q[m], M) cn2 = corr2(Q[n], M) return cm2n2 - cm2*cn2 # fully specify numeric data types, including endianness and size, to # ensure consistency across all machines float_t = '<f8' int_t = '<i8' complex_t = '<c16' class ModelData: """ Helper class for event-by-event model data. Reads binary data files and computes centrality-binned observables. """ dtype = np.dtype([ ('initial_entropy', float_t), ('mult_factor', float_t), ('nsamples', int_t), ('dNch_deta', float_t), ('dN_dy', [(s, float_t) for s in ['pion', 'kaon', 'proton']]), ('mean_pT', [(s, float_t) for s in ['pion', 'kaon', 'proton']]), ('M', int_t), ('Qn', complex_t, 6), ]) def __init__(self, *files): # read each file using the above dtype and treat each as a minimum-bias # event sample def load_events(f): logging.debug('loading %s', f) d = np.fromfile(str(f), dtype=self.dtype) d.sort(order='dNch_deta') return d self.events = [load_events(f) for f in files] def observables_like(self, data, *keys): """ Compute the same centrality-binned observables as contained in `data` with the same nested dict structure. This function calls itself recursively, each time prepending to `keys`. """ try: x = data['x'] cent = data['cent'] except KeyError: return { k: self.observables_like(v, k, *keys) for k, v in data.items() } def _compute_bin(): """ Choose a function to compute the current observable for a single centrality bin. """ obs_stack = list(keys) obs = obs_stack.pop() if obs == 'dNch_deta': return lambda events: events[obs].mean() if obs == 'dN_dy': species = obs_stack.pop() return lambda events: events[obs][species].mean() if obs == 'mean_pT': species = obs_stack.pop() return lambda events: np.average( events[obs][species], weights=events['dN_dy'][species] ) if obs.startswith('vn'): n = obs_stack.pop() k = 4 if obs == 'vn4' else 2 return lambda events: flow.Cumulant( events['M'], *events['Qn'].T[1:] ).flow(n, k, imaginary='zero') if obs.startswith('sc'): mn = obs_stack.pop() return lambda events: symmetric_cumulant(events, *mn) compute_bin = _compute_bin() def compute_all_bins(events): n = events.size bins = [ events[int((1 - b/100)*n):int((1 - a/100)*n)] for a, b in cent ] return list(map(compute_bin, bins)) return dict( x=x, cent=cent, Y=np.array(list(map(compute_all_bins, self.events))).squeeze() ) def observables(system, map_point=False): """ Compute model observables for the given system to match the corresponding experimental data. 
""" if map_point: files = [Path('map', system)] cachefile = Path(system + '_map') else: # expected filenames for each design point files = [Path(system, p) for p in Design(system).points] cachefile = Path(system) files = [workdir / 'model_output' / f.with_suffix('.dat') for f in files] cachefile = cachedir / 'model' / cachefile.with_suffix('.pkl') if cachefile.exists(): # use the cache unless any of the model data files are newer # this DOES NOT check any other logical dependencies, e.g. the # experimental data # to force recomputation, delete the cache file mtime = cachefile.stat().st_mtime if all(f.stat().st_mtime < mtime for f in files): logging.debug('loading observables cache file %s', cachefile) return joblib.load(cachefile) else: logging.debug('cache file %s is older than event data', cachefile) else: logging.debug('cache file %s does not exist', cachefile) logging.info( 'loading %s%s event data and computing observables', system, '_map' if map_point else '' ) data = expt.data[system] # identified particle data are not yet available for PbPb5020 # create dummy entries for these observables so that they are computed for # the model if system == 'PbPb5020': data = dict( ((obs, expt.data['PbPb2760'][obs]) for obs in ['dN_dy', 'mean_pT']), **data ) # also compute "extra" data for the MAP point if map_point: data = dict(expt.extra_data[system], **data) # flow correlations and central flow not yet available for PbPb5020 if system == 'PbPb5020': data = dict( ((obs, expt.extra_data['PbPb2760'][obs]) for obs in ['sc', 'sc_central', 'vn_central']), **data ) data = ModelData(*files).observables_like(data) logging.info('writing cache file %s', cachefile) cachefile.parent.mkdir(exist_ok=True) joblib.dump(data, cachefile, protocol=pickle.HIGHEST_PROTOCOL) return data data = {s: observables(s) for s in systems} map_data = {s: observables(s, map_point=True) for s in systems} if __name__ == '__main__': from pprint import pprint print('design:') pprint(data) print('map:') pprint(map_data)
Supplied with a large 3L bowl, this model also has a powerful, well-ventilated motor, making it perfect for sustained use. It offers 26 speeds between 120 and 16,000rpm and 1,500 watts of heating power. Hotmix Pro is a range of programmable food processors that blend, mince, mix, and cook all at the same time: an essential resource for creating even, consistent sauces, creams, soups and marmalades without needing to keep a constant eye on the process.
Accurate: adjustable working temperature between -24°C and +190°C (depending on model).
Multi-purpose: 26 speeds to cover all needs.
Innovative: LCD display for standard recipes; create your own recipes using an SD card and a downloadable PC programme, or programme your recipes manually.
Effective: professional motor runs for 4-12 hours (depending on model).
Hardwearing: body, bowl, and blades in stainless steel, easy to clean.
Practical: synthetic lid with an opening for adding ingredients while the machine is working.
Safe: 4 shock-absorbing and stabilizing feet reduce vibrations for quiet running.
import httplib as http from dataverse import Connection from framework.exceptions import HTTPError from website.addons.dataverse import settings def connect(username, password, host=settings.HOST): connection = Connection( username=username, password=password, host=host, disable_ssl=not settings.VERIFY_SSL, ) return connection if connection.connected else None def connect_from_settings(user_settings): return connect( user_settings.dataverse_username, user_settings.dataverse_password ) if user_settings else None def connect_or_403(username, password, host=settings.HOST): connection = Connection( username=username, password=password, host=host, disable_ssl=not settings.VERIFY_SSL, ) if connection.status == http.FORBIDDEN: raise HTTPError(http.FORBIDDEN) return connection if connection.connected else None def connect_from_settings_or_403(user_settings): return connect_or_403( user_settings.dataverse_username, user_settings.dataverse_password ) if user_settings else None def delete_file(file): study = file.study study.delete_file(file) def upload_file(study, filename, content): study.upload_file(filename, content) def get_file(study, filename, released=False): return study.get_file(filename, released) def get_file_by_id(study, file_id, released=False): return study.get_file_by_id(file_id, released) def get_files(study, released=False): return study.get_files(released) def release_study(study): return study.release() def get_studies(dataverse): if dataverse is None: return [], [] accessible_studies = [] bad_studies = [] # Currently none, but we may filter some out for s in dataverse.get_studies(): accessible_studies.append(s) return accessible_studies, bad_studies def get_study(dataverse, hdl): if dataverse is None: return study = dataverse.get_study_by_doi(hdl) try: if study.get_state() == 'DEACCESSIONED': raise HTTPError(http.GONE) return study except UnicodeDecodeError: raise HTTPError(http.NOT_ACCEPTABLE) def get_dataverses(connection): if connection is None: return [] dataverses = connection.get_dataverses() released_dataverses = [d for d in dataverses if d.is_released] return released_dataverses def get_dataverse(connection, alias): if connection is None: return dataverse = connection.get_dataverse(alias) return dataverse if dataverse and dataverse.is_released else None
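A hedged usage sketch of the helpers above; the credentials, dataverse alias, and study handle are placeholders, not real values.

# illustration only: placeholder credentials, alias, and handle
connection = connect_or_403('jdoe', 's3cret')
if connection is not None:
    dataverse = get_dataverse(connection, 'example-alias')
    if dataverse is not None:
        studies, bad_studies = get_studies(dataverse)
        study = get_study(dataverse, 'doi:10/EXAMPLE')
        if study is not None:
            upload_file(study, 'readme.txt', 'hello dataverse')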
Browse businesses looking to expand in and around Harrington. Located in the First State, Harrington had a population of 3,643 as of the 2010 census. Are you looking to own your own franchise business in or around Harrington, DE? Well, you are in luck! Browse any of the multiple franchises shown below to request FREE information about the opportunities that interest you, or click here to see the full list of franchises looking to expand throughout Delaware. Listed below you will find population, income and education statistics, as well as other pertinent information, for Harrington, DE. When compared to the state's average median age of 39.1 years, Harrington's median age is 9.5 years younger. When compared to the median income of individuals working in Delaware, Harrington's average income is $12,621 less than that of the state. Harrington has 11% fewer citizens with master's degrees or higher than the Delaware average.
import re from streamlink.plugin import Plugin from streamlink.plugin.api import useragents from streamlink.stream import HLSStream, RTMPStream, HTTPStream API_URL = "https://api-dsa.17app.co/api/v1/liveStreams/getLiveStreamInfo" _url_re = re.compile(r"https://17.live/live/(?P<channel>[^/&?]+)") _status_re = re.compile(r'\\"closeBy\\":\\"\\"') _rtmp_re = re.compile(r'\\"url\\"\s*:\s*\\"(.+?)\\"') class App17(Plugin): @classmethod def can_handle_url(cls, url): return _url_re.match(url) def _get_streams(self): match = _url_re.match(self.url) channel = match.group("channel") self.session.http.headers.update({'User-Agent': useragents.CHROME}) payload = '{"liveStreamID": "%s"}' % (channel) res = self.session.http.post(API_URL, data=payload) status = _status_re.search(res.text) if not status: self.logger.info("Stream currently unavailable.") return http_url = _rtmp_re.search(res.text).group(1) http_url = http_url.replace("http:", "https:") yield "live", HTTPStream(self.session, http_url) if 'pull-rtmp' in http_url: url = http_url.replace("https:", "rtmp:").replace(".flv", "") stream = RTMPStream(self.session, { "rtmp": url, "live": True }) yield "live", stream if 'wansu-' in http_url: url = http_url.replace(".flv", "/playlist.m3u8") for stream in HLSStream.parse_variant_playlist(self.session, url).items(): yield stream else: url = http_url.replace("live-hdl", "live-hls").replace(".flv", ".m3u8") yield "live", HLSStream(self.session, url) __plugin__ = App17
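Assuming the plugin is on Streamlink's plugin path, resolving a channel could look like the sketch below; the channel name is a placeholder.

from streamlink import Streamlink

session = Streamlink()
# the URL shape matches _url_re above; "example" is a made-up channel
streams = session.streams("https://17.live/live/example")
for name in sorted(streams):
    print(name)  # e.g. "live"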
The average annual and hourly salaries for logisticians in the District of Columbia are shown in Table 1 and Table 2, respectively. A comparison of salary statistics for logisticians across District of Columbia metropolitan areas is shown in Table 3. The salary statistics are based on the national compensation survey conducted by the U.S. Bureau of Labor Statistics in 2017 and published in April 2018. Table 1 shows the average annual salary for logisticians in the District of Columbia at five percentile levels. The average annual salary at the 90th percentile (the top 10 percent of the highest paid) is $154,830. The median (50th percentile) annual salary is $105,610. The average annual salary for the bottom 10 percent is $60,780.
import argparse
import os


class ArgParser():
    def __init__(self):
        self.parser = argparse.ArgumentParser(description=None)
        self._add_arguments()

    def parse_args(self):
        return self.parser.parse_args()

    def _add_arguments(self):
        self.parser.add_argument("--hostname", type=str, default="localhost", help="Hostname")
        self.parser.add_argument("--st-port-num", type=int, default=2222, help="Starting port number for processes")
        self.parser.add_argument("--job-name", type=str, default="worker", help="One of 'ps' or 'worker'")
        self.parser.add_argument("--task-index", type=int, default=0, help="Task index within a job")
        self.parser.add_argument("--ps-hosts-num", type=int, default=1, help="The number of parameter servers")
        self.parser.add_argument("--worker-hosts-num", type=int, default=1, help="The number of workers")
        self.parser.add_argument('--algo-name', default="a3c", help='Name of algorithm. For list, see README')
        self.parser.add_argument('--log-dir', default=os.getcwd() + "/tmp", help='Log directory path')
        self.parser.add_argument('--env-id', default="PongNoFrameskip-v4", help='Environment id')
        self.parser.add_argument('--max-bootstrap-length', default=20, type=int,
                                 help='Max length of trajectory before bootstrapping')
        self.parser.add_argument('--max-master-time-step', default=999999999999999, type=int,
                                 help='Max number of time steps to train')
        self.parser.add_argument('--max-clock-limit', default=0, type=float, help='Max clock limit to train')
        self.parser.add_argument('--anneal-learning-rate', action='store_true',
                                 help='Flag for whether to anneal the learning rate')
        self.parser.add_argument('--anneal-by-clock', action='store_true',
                                 help='Flag to anneal the learning rate by clock time')
        self.parser.add_argument('--use-gpu', action='store_true', help='Flag to use GPU')

        def conv_layer_type(inpt):
            try:
                tup = eval(inpt)
                return tup
            except:
                raise argparse.ArgumentTypeError(
                    "Type in a list of 3-valued tuples, e.g. [(16, 8, 4), (32, 4, 2)], "
                    "where the first value is the number of filters, the second the 1-dim size "
                    "of the squared filter, and the third the stride value")

        self.parser.add_argument('--convs', nargs='*', default=[(32, 8, 4), (64, 4, 2), (64, 3, 1)],
                                 help="Convolutional layer specification", type=conv_layer_type)
        self.parser.add_argument('--hiddens', nargs='*', type=int, default=[512],
                                 help="Hidden layer specification: type in a list of integers, e.g. [256 256], "
                                      "where each element denotes the hidden layer node sizes in the order given")
        self.parser.add_argument('--replay-buffer-size', default=1000000, type=int, help='Replay memory size')
        self.parser.add_argument('--exploration-fraction', default=0.1, type=float,
                                 help='Exploration fraction, after which final eps is used')
        self.parser.add_argument('--exploration-final-eps', default=0.05, type=float,
                                 help='Final exploration eps, used after exploration fraction * max time steps')
        self.parser.add_argument('--replay-start-size', default=50000, type=int,
                                 help='Random-policy timesteps before actual learning begins')
        self.parser.add_argument('--train-update-freq', default=5, type=int,
                                 help='Number of actions between successive SGD updates')
        self.parser.add_argument('--minibatch-size', default=32, type=int, help='Minibatch size for SGD')
        self.parser.add_argument('--target-network-update-freq', default=10000, type=int,
                                 help='Target network update frequency to stabilize learning')
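Typical use is just constructing the parser and reading the namespace; argparse maps each --flag-name to an underscored attribute, and the flag values below are illustrative.

# e.g. python main.py --job-name worker --task-index 1 --algo-name a3c
args = ArgParser().parse_args()
print(args.job_name, args.task_index, args.algo_name, args.env_id)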
This ad traffic training classroom supported public access training programmes. These have now moved to in-company courses, so if your team are interested then ask us about the details for you and your team. Try to include the title of the Academy Lesson that your question relates to (if there is one). Putting this at the start will help other participants find the topics they are interested in. The classroom is open for one month following your Academy, and materials will stay here as a reference point for you for a further year. In our operations Academies we run small workshops to help you find solutions to key problems. It's a great way to bring you together and to make the training really specific to the challenge you face in your company today. On the most recent Academy we had an interesting ideas-generation session about some of those challenges, and as part of the discussion took notes about the key issues. Here you can download our summary. This PDF covers five key topics and how Academy participants have collectively thought about solutions to the challenges. If you have extra comments then why not post them here?

Christopher Hogg has been working in online marketing since late 1999, starting as a pan-European account manager for Engage Technologies, part of the CMGi group. There he was charged with over £1.5m in billings, including the largest European online agency of the day, Carat Interactive in Paris. Before the dotcom bubble burst he went to work at the BBC as an assistant producer in Radio Drama, and then returned to the online environment in 2003 in a senior account role at the outsourced advertising operations company Traffic-maid. 2004 saw him move back into the technology arena as Senior Account Director for Adtech AG, a German-based ad server now owned by AOL. His accounts included Sky, RBI, Emap and National Magazines.

Neat and clean ad server administration is the key to efficiency, productivity and optimal campaign performance for every trafficker. It impacts every aspect of your day-to-day work, from inventory management to trafficking, ad delivery and reporting. Have a contingency: if one site is full, can you use behavioural targeting to deliver the extra volume? Can run-of-site be used to deliver part of the volume? Explore ways to extend the campaign duration, or upweight the delivery rate within the ad server. Look at how different creative executions could deliver greater response. Consider further campaign optimisation techniques. Are you having problems trying to set up new types of creative you haven't used before? Don't be shy of calling; email isn't the only route! Review your specs quarterly: are there some new things that should now be on them? Are there some common challenges your team all share? Have a standard structure for copy-chasing emails, maybe at 7 days and 2 days? Consider who has the control and responsibility for copy-chasing.

Elizabeth Townsend has been leading online advertising operations since 1999, first as an Ad Operations Executive at Yahoo! and now as Head of Online Advertising Production at the Financial Times, where she manages the global online ads production team for the FT website. Elizabeth sits on our Academy Steering Board and we asked her for a few extra hints and tips to share on how to run ad operations. Since 2000 Geisla Turner has been exploring online advertising operations and now runs the ad ops team at CNET Networks in London.
Geisla sits on our Academy Steering Board, and when she dropped by at our last Academy we asked her for a few extra hints and tips to share on how to approach inventory forecasting. Remember that things are never 100%: you can't be perfect in forecasting, but you can be methodical and conservative. And always factor in that contingency so there's no danger you won't have the inventory that's been booked.

Designed for newcomers to the world of online ad trafficking, getting you up to speed with ad traffic processes and giving you a framework you can apply to future campaigns. This hands-on session for advertising operations teams teaches the basics of advertising trafficking and scheduling. It's an orientation to the principles and theory, and a roadmap for applying them to your workflow. You'll see how popular toolkits work and get a clear view of how trafficking fits into the bigger picture. Danny Meadows-Klue provides an overview of guidelines and best practice, developed as part of the standards programme for IAB Europe. Download the slideshow summary of IAB Europe online ad standards, check out the standards on the website, and ask the team for more information.

Alongside our Digital Management Academy we run more than 40 other Academies to help marketers of all levels of experience get the most from the internet and the new marketing tools. Download the Digital Training Academy prospectus that lists what we're running this term, and email Martin Kilinski ([email protected]) with the sorts of topics you feel your team could benefit from more help with.

If you've logged on to this online learning centre then the chances are that you're thinking about joining our Digital Ad Trafficking Academy soon. I'd like to introduce myself: I'm Danny, and I'll be your trainer. It was back in 1994 that I got involved in Internet media and web marketing, and by 1996 I'd started helping other people, especially media sales people, make the transition into digital media and digital marketing. Our team really enjoys working with you on the Digital Ad Trafficking Academy programme.
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Importing and Exporting """ from __future__ import unicode_literals from __future__ import absolute_import import six from mathics.core.expression import Expression, from_python, strip_context from mathics.builtin.base import Builtin, Predefined, Symbol, String from mathics.builtin.options import options_to_rules from .pymimesniffer import magic import mimetypes import sys from itertools import chain import urllib try: import urllib.request as urllib2 from urllib.error import HTTPError, URLError except ImportError: import urllib2 from urllib2 import HTTPError, URLError mimetypes.add_type('application/vnd.wolfram.mathematica.package', '.m') # Seems that JSON is not registered on the mathics.net server, so we do it manually here. # Keep in mind that mimetypes has system-dependent aspects (it inspects "/etc/mime.types" and other files). mimetypes.add_type('application/json', '.json') # TODO: Add more file formats mimetype_dict = { 'application/dicom': 'DICOM', 'application/dbase': 'DBF', 'application/dbf': 'DBF', 'application/eps': 'EPS', 'application/fits': 'FITS', 'application/json': 'JSON', 'application/mathematica': 'NB', 'application/mdb': 'MDB', 'application/mbox': 'MBOX', 'application/msaccess': 'MDB', 'application/octet-stream': 'OBJ', 'application/pdf': 'PDF', 'application/pcx': 'PCX', 'application/postscript': 'EPS', 'application/rss+xml': 'RSS', 'application/rtf': 'RTF', 'application/sla': 'STL', 'application/tga': 'TGA', 'application/vnd.google-earth.kml+xml': 'KML', 'application/vnd.ms-excel': 'XLS', 'application/vnd.ms-pki.stl': 'STL', 'application/vnd.oasis.opendocument.spreadsheet': 'ODS', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'XLSX', # nopep8 'application/vnd.sun.xml.calc': 'SXC', 'application/vnd.msaccess': 'MDB', 'application/vnd.wolfram.cdf': 'CDF', 'application/vnd.wolfram.cdf.text': 'CDF', 'application/vnd.wolfram.mathematica.package': 'Package', 'application/xhtml+xml': 'XHTML', 'application/xml': 'XML', 'application/x-3ds': '3DS', 'application/x-cdf': 'NASACDF', 'application/x-eps': 'EPS', 'application/x-flac': 'FLAC', 'application/x-font-bdf': 'BDF', 'application/x-hdf': 'HDF', 'application/x-msaccess': 'MDB', 'application/x-netcdf': 'NetCDF', 'application/x-shockwave-flash': 'SWF', 'application/x-tex': 'TeX', # Also TeX 'audio/aiff': 'AIFF', 'audio/basic': 'AU', # Also SND 'audio/midi': 'MIDI', 'audio/x-aifc': 'AIFF', 'audio/x-aiff': 'AIFF', 'audio/x-flac': 'FLAC', 'audio/x-wav': 'WAV', 'chemical/seq-na-genbank': 'GenBank', 'chemical/seq-aa-fasta': 'FASTA', 'chemical/seq-na-fasta': 'FASTA', 'chemical/seq-na-fastq': 'FASTQ', 'chemical/seq-na-sff': 'SFF', 'chemical/x-cif': 'CIF', 'chemical/x-daylight-smiles': 'SMILES', 'chemical/x-hin': 'HIN', 'chemical/x-jcamp-dx': 'JCAMP-DX', 'chemical/x-mdl-molfile': 'MOL', 'chemical/x-mdl-sdf': 'SDF', 'chemical/x-mdl-sdfile': 'SDF', 'chemical/x-mdl-tgf': 'TGF', 'chemical/x-mmcif': 'CIF', 'chemical/x-mol2': 'MOL2', 'chemical/x-mopac-input': 'Table', 'chemical/x-pdb': 'PDB', 'chemical/x-xyz': 'XYZ', 'image/bmp': 'BMP', 'image/eps': 'EPS', 'image/fits': 'FITS', 'image/gif': 'GIF', 'image/jp2': 'JPEG2000', 'image/jpeg': 'JPEG', 'image/pbm': 'PNM', 'image/pcx': 'PCX', 'image/pict': 'PICT', 'image/png': 'PNG', 'image/svg+xml': 'SVG', 'image/tga': 'TGA', 'image/tiff': 'TIFF', 'image/vnd.dxf': 'DXF', 'image/vnd.microsoft.icon': 'ICO', 'image/x-3ds': '3DS', 'image/x-dxf': 'DXF', 'image/x-exr': 'OpenEXR', 'image/x-icon': 'ICO', 'image/x-ms-bmp': 'BMP', 
'image/x-pcx': 'PCX', 'image/x-portable-anymap': 'PNM', 'image/x-portable-bitmap': 'PBM', 'image/x-portable-graymap': 'PGM', 'image/x-portable-pixmap': 'PPM', 'image/x-xbitmap': 'XBM', 'model/x3d+xml': 'X3D', 'model/vrml': 'VRML', 'model/x-lwo': 'LWO', 'model/x-pov': 'POV', 'text/calendar': 'ICS', 'text/comma-separated-values': 'CSV', 'text/csv': 'CSV', 'text/html': 'HTML', 'text/mathml': 'MathML', 'text/plain': 'Text', 'text/rtf': 'RTF', 'text/scriptlet': 'SCT', 'text/tab-separated-values': 'TSV', 'text/texmacs': 'Text', 'text/vnd.graphviz': 'DOT', 'text/x-csrc': 'C', 'text/x-tex': 'TeX', 'text/x-vcalendar': 'VCS', 'text/x-vcard': 'VCF', 'text/xml': 'XML', 'video/avi': 'AVI', 'video/quicktime': 'QuickTime', 'video/x-flv': 'FLV', # None: 'Binary', } IMPORTERS = {} EXPORTERS = {} def _importer_exporter_options(available_options, options, evaluation): stream_options = [] custom_options = [] if available_options and available_options.has_form('List', None): for name in available_options.leaves: if isinstance(name, String): py_name = name.get_string_value() elif isinstance(name, Symbol): py_name = strip_context(name.get_name()) else: py_name = None if py_name: value = Builtin.get_option(options, py_name, evaluation) if value is not None: expr = Expression('Rule', String(py_name), value) if py_name == 'CharacterEncoding': stream_options.append(expr) else: custom_options.append(expr) return stream_options, custom_options class ImportFormats(Predefined): """ <dl> <dt>'$ImportFormats' <dd>returns a list of file formats supported by Import. </dl> >> $ImportFormats = {...CSV,...JSON,...Text...} """ name = '$ImportFormats' def evaluate(self, evaluation): return Expression('List', *sorted(IMPORTERS.keys())) class ExportFormats(Predefined): """ <dl> <dt>'$ExportFormats' <dd>returns a list of file formats supported by Export. </dl> >> $ExportFormats = {...CSV,...SVG,...Text...} """ name = '$ExportFormats' def evaluate(self, evaluation): return Expression('List', *sorted(EXPORTERS.keys())) class RegisterImport(Builtin): """ <dl> <dt>'RegisterImport["$format$", $defaultFunction$]' <dd>register '$defaultFunction$' as the default function used when importing from a file of type '"$format$"'. <dt>'RegisterImport["$format$", {"$elem1$" :> $conditionalFunction1$, "$elem2$" :> $conditionalFunction2$, ..., $defaultFunction$}]' <dd>registers multiple elements ($elem1$, ...) and their corresponding converter functions ($conditionalFunction1$, ...) in addition to the $defaultFunction$. <dt>'RegisterImport["$format$", {"$conditionalFunctions$, $defaultFunction$, "$elem3$" :> $postFunction3$, "$elem4$" :> $postFunction4$, ...}]' <dd>also registers additional elements ($elem3$, ...) whose converters ($postFunction3$, ...) act on output from the low-level funcions. </dl> First, define the default function used to import the data. >> ExampleFormat1Import[filename_String] := Module[{stream, head, data}, stream = OpenRead[filename]; head = ReadList[stream, String, 2]; data = Partition[ReadList[stream, Number], 2]; Close[stream]; {"Header" -> head, "Data" -> data}] 'RegisterImport' is then used to register the above function to a new data format. 
>> ImportExport`RegisterImport["ExampleFormat1", ExampleFormat1Import] >> FilePrint["ExampleData/ExampleData.txt"] | Example File Format | Created by Angus | 0.629452 0.586355 | 0.711009 0.687453 | 0.246540 0.433973 | 0.926871 0.887255 | 0.825141 0.940900 | 0.847035 0.127464 | 0.054348 0.296494 | 0.838545 0.247025 | 0.838697 0.436220 | 0.309496 0.833591 >> Import["ExampleData/ExampleData.txt", {"ExampleFormat1", "Elements"}] = {Data, Header} >> Import["ExampleData/ExampleData.txt", {"ExampleFormat1", "Header"}] = {Example File Format, Created by Angus} Conditional Importer: >> ExampleFormat2DefaultImport[filename_String] := Module[{stream, head}, stream = OpenRead[filename]; head = ReadList[stream, String, 2]; Close[stream]; {"Header" -> head}] >> ExampleFormat2DataImport[filename_String] := Module[{stream, data}, stream = OpenRead[filename]; Skip[stream, String, 2]; data = Partition[ReadList[stream, Number], 2]; Close[stream]; {"Data" -> data}] >> ImportExport`RegisterImport["ExampleFormat2", {"Data" :> ExampleFormat2DataImport, ExampleFormat2DefaultImport}] >> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Elements"}] = {Data, Header} >> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Header"}] = {Example File Format, Created by Angus} >> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Data"}] // Grid = 0.629452 0.586355 . . 0.711009 0.687453 . . 0.24654 0.433973 . . 0.926871 0.887255 . . 0.825141 0.9409 . . 0.847035 0.127464 . . 0.054348 0.296494 . . 0.838545 0.247025 . . 0.838697 0.43622 . . 0.309496 0.833591 """ context = 'ImportExport`' attributes = ('Protected', 'ReadProtected') # XXX OptionsIssue options = { 'Path': 'Automatic', 'FunctionChannels': '{"FileNames"}', 'Sources': 'None', 'DefaultElement': 'Automatic', 'AvailableElements': 'None', 'Options': '{}', 'OriginalChannel': 'False', 'BinaryFormat': 'False', 'Encoding': 'False', 'Extensions': '{}', 'AlphaChannel': 'False', } rules = { 'ImportExport`RegisterImport[formatname_String, function_]': 'ImportExport`RegisterImport[formatname, function, {}]', } def apply(self, formatname, function, posts, evaluation, options): '''ImportExport`RegisterImport[formatname_String, function_, posts_, OptionsPattern[ImportExport`RegisterImport]]''' if function.has_form('List', None): leaves = function.get_leaves() else: leaves = [function] if not (len(leaves) >= 1 and isinstance(leaves[-1], Symbol) and all(x.has_form('RuleDelayed', None) for x in leaves[:-1])): # TODO: Message return Symbol('$Failed') conditionals = { elem.get_string_value(): expr for (elem, expr) in (x.get_leaves() for x in leaves[:-1])} default = leaves[-1] posts = {} IMPORTERS[formatname.get_string_value()] = (conditionals, default, posts, options) return Symbol('Null') class RegisterExport(Builtin): """ <dl> <dt>'RegisterExport["$format$", $func$]' <dd>register '$func$' as the default function used when exporting from a file of type '"$format$"'. </dl> Simple text exporter >> ExampleExporter1[filename_, data_, opts___] := Module[{strm = OpenWrite[filename], char = data}, WriteString[strm, char]; Close[strm]] >> ImportExport`RegisterExport["ExampleFormat1", ExampleExporter1] >> Export["sample.txt", "Encode this string!", "ExampleFormat1"]; >> FilePrint["sample.txt"] | Encode this string! 
#> DeleteFile["sample.txt"] Very basic encrypted text exporter >> ExampleExporter2[filename_, data_, opts___] := Module[{strm = OpenWrite[filename], char}, (* TODO: Check data *) char = FromCharacterCode[Mod[ToCharacterCode[data] - 84, 26] + 97]; WriteString[strm, char]; Close[strm]] >> ImportExport`RegisterExport["ExampleFormat2", ExampleExporter2] >> Export["sample.txt", "encodethisstring", "ExampleFormat2"]; >> FilePrint["sample.txt"] | rapbqrguvffgevat #> DeleteFile["sample.txt"] """ context = 'ImportExport`' options = { 'Path': 'Automatic', 'FunctionChannels': '{"FileNames"}', 'Sources': 'None', 'DefaultElement': 'None', 'AvailableElements': 'None', 'Options': '{}', 'OriginalChannel': 'False', 'BinaryFormat': 'False', 'Encoding': 'False', 'Extensions': '{}', 'AlphaChannel': 'False', } def apply(self, formatname, function, evaluation, options): '''ImportExport`RegisterExport[formatname_String, function_, OptionsPattern[ImportExport`RegisterExport]]''' EXPORTERS[formatname.get_string_value()] = (function, options) return Symbol('Null') class FetchURL(Builtin): ''' #> Quiet[FetchURL["https:////", {}]] = $Failed #> Quiet[FetchURL["http://mathics.org/url_test_case", {}]] = $Failed ''' messages = { 'httperr': '`1` could not be retrieved; `2`.', } def apply(self, url, elements, evaluation, options={}): 'FetchURL[url_String, elements_, OptionsPattern[]]' import tempfile import os py_url = url.get_string_value() temp_handle, temp_path = tempfile.mkstemp(suffix='') try: # some pages need cookies or they will end up in an infinite redirect (i.e. HTTP 303) # loop, which prevents the page from getting loaded. f = urllib2.build_opener(urllib2.HTTPCookieProcessor).open(py_url) try: if sys.version_info >= (3, 0): content_type = f.info().get_content_type() else: content_type = f.headers['content-type'] os.write(temp_handle, f.read()) finally: f.close() # on some OS (e.g. Windows) all writers need to be closed before another # reader (e.g. Import._import) can access it. so close the file here. os.close(temp_handle) def determine_filetype(): return mimetype_dict.get(content_type) result = Import._import(temp_path, determine_filetype, elements, evaluation, options) except HTTPError as e: evaluation.message( 'FetchURL', 'httperr', url, 'the server returned an HTTP status code of %s (%s)' % (e.code, str(e.reason))) return Symbol('$Failed') except URLError as e: # see https://docs.python.org/3/howto/urllib2.html if hasattr(e, 'reason'): evaluation.message('FetchURL', 'httperr', url, str(e.reason)) elif hasattr(e, 'code'): evaluation.message('FetchURL', 'httperr', url, 'server returned %s' % e.code) return Symbol('$Failed') except ValueError as e: evaluation.message('FetchURL', 'httperr', url, str(e)) return Symbol('$Failed') finally: os.unlink(temp_path) return result class Import(Builtin): """ <dl> <dt>'Import["$file$"]' <dd>imports data from a file. <dt>'Import["$file$", $elements$]' <dd>imports the specified elements from a file. <dt>'Import["http://$url$", ...]' and 'Import["ftp://$url$", ...]' <dd>imports from a URL. </dl> #> Import["ExampleData/ExampleData.tx"] : File not found during Import. = $Failed #> Import[x] : First argument x is not a valid file, directory, or URL specification. 
= $Failed ## CSV #> Import["ExampleData/numberdata.csv", "Elements"] = {Data, Grid} #> Import["ExampleData/numberdata.csv", "Data"] = {{0.88, 0.60, 0.94}, {0.76, 0.19, 0.51}, {0.97, 0.04, 0.26}, {0.33, 0.74, 0.79}, {0.42, 0.64, 0.56}} #> Import["ExampleData/numberdata.csv"] = {{0.88, 0.60, 0.94}, {0.76, 0.19, 0.51}, {0.97, 0.04, 0.26}, {0.33, 0.74, 0.79}, {0.42, 0.64, 0.56}} #> Import["ExampleData/numberdata.csv", "FieldSeparators" -> "."] = {{0, 88,0, 60,0, 94}, {0, 76,0, 19,0, 51}, {0, 97,0, 04,0, 26}, {0, 33,0, 74,0, 79}, {0, 42,0, 64,0, 56}} ## Text >> Import["ExampleData/ExampleData.txt", "Elements"] = {Data, Lines, Plaintext, String, Words} >> Import["ExampleData/ExampleData.txt", "Lines"] = ... #> Import["ExampleData/Middlemarch.txt"]; : An invalid unicode sequence was encountered and ignored. #> StringTake[Import["ExampleData/Middlemarch.txt", CharacterEncoding -> "ISO8859-1"], {21, 69}] = Le sentiment de la fausseté des plaisirs présents ## JSON >> Import["ExampleData/colors.json"] = {colorsArray -> {{colorName -> black, rgbValue -> (0, 0, 0), hexValue -> #000000}, {colorName -> red, rgbValue -> (255, 0, 0), hexValue -> #FF0000}, {colorName -> green, rgbValue -> (0, 255, 0), hexValue -> #00FF00}, {colorName -> blue, rgbValue -> (0, 0, 255), hexValue -> #0000FF}, {colorName -> yellow, rgbValue -> (255, 255, 0), hexValue -> #FFFF00}, {colorName -> cyan, rgbValue -> (0, 255, 255), hexValue -> #00FFFF}, {colorName -> magenta, rgbValue -> (255, 0, 255), hexValue -> #FF00FF}, {colorName -> white, rgbValue -> (255, 255, 255), hexValue -> #FFFFFF}}} ## XML #> Import["ExampleData/InventionNo1.xml", "Tags"] = {accidental, alter, arpeggiate, ..., words} """ messages = { 'nffil': 'File not found during Import.', 'chtype': ('First argument `1` is not a valid file, directory, ' 'or URL specification.'), 'noelem': ( 'The Import element `1` is not present when importing as `2`.'), 'fmtnosup': '`1` is not a supported Import format.', } rules = { 'Import[filename_]': 'Import[filename, {}]', } def apply(self, filename, evaluation, options={}): 'Import[filename_, OptionsPattern[]]' return self.apply_elements(filename, Expression('List'), evaluation, options) def apply_element(self, filename, element, evaluation, options={}): 'Import[filename_, element_String, OptionsPattern[]]' return self.apply_elements(filename, Expression('List', element), evaluation, options) def apply_elements(self, filename, elements, evaluation, options={}): 'Import[filename_, elements_List?(AllTrue[#, NotOptionQ]&), OptionsPattern[]]' # Check filename path = filename.to_python() if not (isinstance(path, six.string_types) and path[0] == path[-1] == '"'): evaluation.message('Import', 'chtype', filename) return Symbol('$Failed') # Download via URL if isinstance(filename, String): if any(filename.get_string_value().startswith(prefix) for prefix in ('http://', 'https://', 'ftp://')): return Expression('FetchURL', filename, elements, *options_to_rules(options)) # Load local file findfile = Expression('FindFile', filename).evaluate(evaluation) if findfile == Symbol('$Failed'): evaluation.message('Import', 'nffil') return findfile def determine_filetype(): return Expression('FileFormat', findfile).evaluate( evaluation=evaluation).get_string_value() return self._import(findfile, determine_filetype, elements, evaluation, options) @staticmethod def _import(findfile, determine_filetype, elements, evaluation, options): # Check elements if elements.has_form('List', None): elements = elements.get_leaves() else: elements = [elements] for el 
in elements: if not isinstance(el, String): evaluation.message('Import', 'noelem', el) return Symbol('$Failed') elements = [el.get_string_value() for el in elements] # Determine file type for el in elements: if el in IMPORTERS.keys(): filetype = el elements.remove(el) break else: filetype = determine_filetype() if filetype not in IMPORTERS.keys(): evaluation.message('Import', 'fmtnosup', filetype) return Symbol('$Failed') # Load the importer (conditionals, default_function, posts, importer_options) = IMPORTERS[filetype] stream_options, custom_options = _importer_exporter_options( importer_options.get("System`Options"), options, evaluation) function_channels = importer_options.get("System`FunctionChannels") if function_channels is None: # TODO message return Symbol('$Failed') default_element = importer_options.get("System`DefaultElement") if default_element is None: # TODO message return Symbol('$Failed') def get_results(tmp_function): if function_channels == Expression('List', String('FileNames')): joined_options = list(chain(stream_options, custom_options)) tmp = Expression(tmp_function, findfile, *joined_options).evaluate(evaluation) elif function_channels == Expression('List', String('Streams')): stream = Expression('OpenRead', findfile, *stream_options).evaluate(evaluation) if stream.get_head_name() != 'System`InputStream': evaluation.message('Import', 'nffil') return None tmp = Expression(tmp_function, stream, *custom_options).evaluate(evaluation) Expression('Close', stream).evaluate(evaluation) else: # TODO message return Symbol('$Failed') tmp = tmp.get_leaves() if not all(expr.has_form('Rule', None) for expr in tmp): return None # return {a.get_string_value() : b for (a,b) in map(lambda x: # x.get_leaves(), tmp)} return dict((a.get_string_value(), b) for (a, b) in [x.get_leaves() for x in tmp]) # Perform the import defaults = None if not elements: defaults = get_results(default_function) if defaults is None: return Symbol('$Failed') if default_element == Symbol("Automatic"): return Expression('List', *( Expression('Rule', String(key), defaults[key]) for key in defaults.keys())) else: result = defaults.get(default_element.get_string_value()) if result is None: evaluation.message('Import', 'noelem', default_element, from_python(filetype)) return Symbol('$Failed') return result else: assert len(elements) == 1 el = elements[0] if el == "Elements": defaults = get_results(default_function) if defaults is None: return Symbol('$Failed') # Use set() to remove duplicates return from_python(sorted(set( list(conditionals.keys()) + list(defaults.keys()) + list(posts.keys())))) else: if el in conditionals.keys(): result = get_results(conditionals[el]) if result is None: return Symbol('$Failed') if len(list(result.keys())) == 1 and list(result.keys())[0] == el: return list(result.values())[0] elif el in posts.keys(): # TODO: allow use of conditionals result = get_results(posts[el]) if result is None: return Symbol('$Failed') else: if defaults is None: defaults = get_results(default_function) if defaults is None: return Symbol('$Failed') if el in defaults.keys(): return defaults[el] else: evaluation.message('Import', 'noelem', from_python(el), from_python(filetype)) return Symbol('$Failed') class Export(Builtin): """ <dl> <dt>'Export["$file$.$ext$", $expr$]' <dd>exports $expr$ to a file, using the extension $ext$ to determine the format. <dt>'Export["$file$", $expr$, "$format$"]' <dd>exports $expr$ to a file in the specified format. 
<dt>'Export["$file$", $exprs$, $elems$]' <dd>exports $exprs$ to a file as elements specified by $elems$. </dl> ## Invalid Filename #> Export["abc.", 1+2] : Cannot infer format of file abc.. = $Failed #> Export[".ext", 1+2] : Cannot infer format of file .ext. = $Failed #> Export[x, 1+2] : First argument x is not a valid file specification. = $Failed ## Explicit Format #> Export["abc.txt", 1+x, "JPF"] : {JPF} is not a valid set of export elements for the Text format. = $Failed #> Export["abc.txt", 1+x, {"JPF"}] : {JPF} is not a valid set of export elements for the Text format. = $Failed ## Empty elems #> Export["123.txt", 1+x, {}] = 123.txt #> Export["123.jcp", 1+x, {}] : Cannot infer format of file 123.jcp. = $Failed ## Compression ## #> Export["abc.txt", 1+x, "ZIP"] (* MMA Bug - Export::type *) ## : {ZIP} is not a valid set of export elements for the Text format. ## = $Failed ## #> Export["abc.txt", 1+x, "BZIP"] (* MMA Bug - General::stop *) ## : {BZIP} is not a valid set of export elements for the Text format. ## = $Failed ## #> Export["abc.txt", 1+x, {"BZIP", "ZIP", "Text"}] ## = abc.txt ## #> Export["abc.txt", 1+x, {"GZIP", "Text"}] ## = abc.txt ## #> Export["abc.txt", 1+x, {"BZIP2", "Text"}] ## = abc.txt ## FORMATS ## Text #> Export["abc.txt", 1 + x + y] = abc.txt #> FilePrint[%] | 1 + x + y #> DeleteFile[%%] #> Export["abc.txt", "ä", CharacterEncoding -> "ISOLatin1"]; #> strm = OpenRead["abc.txt", BinaryFormat -> True]; #> BinaryRead[strm] = 228 #> Close[strm]; #> DeleteFile["abc.txt"]; #> Export["abc.txt", "ä", CharacterEncoding -> "UTF-8"]; #> strm = OpenRead["abc.txt", BinaryFormat -> True]; #> BinaryRead[strm] = 195 #> Close[strm]; #> DeleteFile["abc.txt"]; ## CSV #> Export["abc.csv", {{1, 2, 3}, {4, 5, 6}}] = abc.csv #> FilePrint[%] | 1,2,3 | 4,5,6 #> DeleteFile[%%] ## SVG #> Export["sine.svg", Plot[Sin[x], {x,0,1}]] = sine.svg #> FileFormat[%] = SVG #> DeleteFile[%%] """ messages = { 'chtype': "First argument `1` is not a valid file specification.", 'infer': "Cannot infer format of file `1`.", 'noelem': "`1` is not a valid set of export elements for the `2` format.", } _extdict = { 'bmp': 'BMP', 'gif': 'GIF', 'jp2': 'JPEG2000', 'jpg': 'JPEG', 'pcx': 'PCX', 'png': 'PNG', 'ppm': 'PPM', 'pbm': 'PBM', 'pgm': 'PGM', 'tif': 'TIFF', 'txt': 'Text', 'csv': 'CSV', 'svg': 'SVG', } rules = { 'Export[filename_, expr_, elems_?NotListQ]': ( 'Export[filename, expr, {elems}]'), } def apply(self, filename, expr, evaluation, options={}): "Export[filename_, expr_, OptionsPattern[]]" # Check filename if not self._check_filename(filename, evaluation): return Symbol('$Failed') # Determine Format form = self._infer_form(filename, evaluation) if form is None: evaluation.message('Export', 'infer', filename) return Symbol('$Failed') else: return self.apply_elements(filename, expr, String(form), evaluation, options) def apply_element(self, filename, expr, element, evaluation, options={}): 'Export[filename_, expr_, element_String, OptionsPattern[]]' return self.apply_elements(filename, expr, Expression('List', element), evaluation, options) def apply_elements(self, filename, expr, elems, evaluation, options={}): "Export[filename_, expr_, elems_List?(AllTrue[#, NotOptionQ]&), OptionsPattern[]]" # Check filename if not self._check_filename(filename, evaluation): return Symbol('$Failed') # Process elems {comp* format?, elem1*} leaves = elems.get_leaves() format_spec, elems_spec = [], [] found_form = False for leaf in leaves[::-1]: leaf_str = leaf.get_string_value() if not found_form and leaf_str in EXPORTERS: 
found_form = True if found_form: format_spec.append(leaf_str) else: elems_spec.append(leaf) # Infer format if not present if not found_form: assert format_spec == [] format_spec = self._infer_form(filename, evaluation) if format_spec is None: evaluation.message('Export', 'infer', filename) return Symbol('$Failed') format_spec = [format_spec] else: assert format_spec != [] # First item in format_spec is the explicit format. # The other elements (if present) are compression formats if elems_spec != []: # FIXME: support elems evaluation.message( 'Export', 'noelem', elems, String(format_spec[0])) return Symbol('$Failed') # Load the exporter exporter_symbol, exporter_options = EXPORTERS[format_spec[0]] stream_options, custom_options = _importer_exporter_options( exporter_options.get("System`Options"), options, evaluation) exporter_function = Expression( exporter_symbol, filename, expr, *list(chain(stream_options, custom_options))) if exporter_function.evaluate(evaluation) == Symbol('Null'): return filename return Symbol('$Failed') def _check_filename(self, filename, evaluation): path = filename.to_python() if isinstance(path, six.string_types) and path[0] == path[-1] == '"': return True evaluation.message('Export', 'chtype', filename) return False def _infer_form(self, filename, evaluation): ext = Expression('FileExtension', filename).evaluate(evaluation) ext = ext.get_string_value().lower() return self._extdict.get(ext) class FileFormat(Builtin): """ <dl> <dt>'FileFormat["$name$"]' <dd>attempts to determine what format 'Import' should use to import specified file. </dl> >> FileFormat["ExampleData/sunflowers.jpg"] = JPEG ## UTF-8 Unicode text >> FileFormat["ExampleData/EinsteinSzilLetter.txt"] = Text >> FileFormat["ExampleData/lena.tif"] = TIFF ## ASCII text #> FileFormat["ExampleData/BloodToilTearsSweat.txt"] = Text #> FileFormat["ExampleData/MadTeaParty.gif"] = GIF #> FileFormat["ExampleData/moon.tif"] = TIFF #> FileFormat["ExampleData/numberdata.csv"] = CSV #> FileFormat["ExampleData/EinsteinSzilLetter.txt"] = Text #> FileFormat["ExampleData/BloodToilTearsSweat.txt"] = Text #> FileFormat["ExampleData/benzene.xyz"] = XYZ #> FileFormat["ExampleData/colors.json"] = JSON #> FileFormat["ExampleData/some-typo.extension"] : File not found during FileFormat[ExampleData/some-typo.extension]. = $Failed #> FileFormat["ExampleData/Testosterone.svg"] = SVG #> FileFormat["ExampleData/colors.json"] = JSON #> FileFormat["ExampleData/InventionNo1.xml"] = XML """ messages = { 'nffil': 'File not found during `1`.', } detector = None def apply(self, filename, evaluation): 'FileFormat[filename_String]' findfile = Expression('FindFile', filename).evaluate(evaluation) if findfile == Symbol('$Failed'): evaluation.message( 'FileFormat', 'nffil', Expression('FileFormat', filename)) return findfile path = findfile.get_string_value() if not FileFormat.detector: loader = magic.MagicLoader() loader.load() FileFormat.detector = magic.MagicDetector(loader.mimetypes) mime = set(FileFormat.detector.match(path)) # If match fails match on extension only if mime == set([]): mime, encoding = mimetypes.guess_type(path) if mime is None: mime = set([]) else: mime = set([mime]) result = [] for key in mimetype_dict.keys(): if key in mime: result.append(mimetype_dict[key]) # the following fixes an extremely annoying behaviour on some (not all) # installations of Windows, where we end up classifying .csv files als XLS. 
if len(result) == 1 and result[0] == 'XLS' and path.lower().endswith('.csv'): return String('CSV') if len(result) == 0: result = 'Binary' elif len(result) == 1: result = result[0] else: return None return from_python(result)
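As a standalone illustration of the fallback order in FileFormat.apply above (magic sniffing first, then extension-based guessing), here is a minimal sketch that reuses mimetype_dict; guess_format is a hypothetical helper, not part of the module.

import mimetypes

def guess_format(path):
    # extension-only fallback: map the guessed MIME type to an
    # Import format name via mimetype_dict defined above
    mime, _encoding = mimetypes.guess_type(path)
    return mimetype_dict.get(mime, 'Binary')

print(guess_format('colors.json'))  # JSON
print(guess_format('notes.txt'))    # Text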
It was technology and craftsmanship that put PureForge® on the map in 2006, and it has been America's trust in our manufacturing skills that has made us an integral part of this great automotive tradition for over a decade. Our commitment to craftsmanship, to solidifying your trust and to building the finest American-made brakes in the automotive industry, is stronger than ever. As proof of that, all PureForge® rotors are now covered by our limited 5-year/200,000-mile warranty (whichever comes first). It's our promise to you, the original purchaser, that your PureForge®-equipped vehicle will be free from defects in material and workmanship. We're honored you've chosen PureForge®.

This warranty does not cover: use of non-approved pad material; unauthorized adjustments, repairs, or modifications.

PureForge, 13011 Kirkham Way, Poway, California 92064. PureForge®, upon receiving the Purchaser's correspondence or phone call, shall provide instructions to the Purchaser governing the manner in which to return the brake for repair or replacement. This warranty is valid for the original Purchaser and original VIN-approved vehicle(s) only and is non-transferable.
from sqlalchemy import Column, Integer, String, ForeignKey, Table, UniqueConstraint, ForeignKeyConstraint from sqlalchemy.orm import relationship from internaljobmarket.database import Base roleplay = ''' class Role(Base): __tablename__ = "roles" id = Column(Integer, primary_key=True) name = Column(String(64), unique=True) users = relationship("User", backref="role") def __init__(self, name=None, users=None): self.name = name self.users = users def __repr__(self): return 'Role {0}'.format(self.name) class User(Base): __tablename__ = "users" id = Column(Integer, primary_key=True) username = Column(String(64), unique=True, index=True) role_id = Column(Integer, ForeignKey("roles.id")) def __init__(self, username=None, role_id=None): self.username=username self.role_id=role_id def __repr__(self): return 'User {0}'.format(self.name) ''' class StudentModel(Base): __tablename__ = 'student' student_id = Column(Integer, primary_key=True) studentUid = Column(String(9), unique=True) nameLast = Column(String(120)) nameFirst = Column(String(120)) email = Column(String(120)) phone = Column(String(120)) major = Column(String(120)) programCode = Column(String(120)) semBegin = Column(String(120)) graduationExpected = Column(String(120)) creditFall = Column(Integer) creditSpring = Column(Integer) request201408 = Column(String(120)) request201501 = Column(String(120)) position = relationship("ApplicationModel", backref='StudentModel') def __init__(self, student_id=None, studentUid=None, nameLast=None, nameFirst=None, email=None, phone=None, major=None, programCode=None, semBegin=None, graduationExpected=None, creditFall=None, creditSpring=None, request201408=None, request201501=None): self.student_id = student_id self.studentUid = studentUid self.nameLast = nameLast self.nameFirst = nameFirst self.email = email self.phone = phone self.major = major self.programCode = programCode self.semBegin = semBegin self.graduationExpected = graduationExpected self.creditFall = creditFall self.creditSpring = creditSpring self.request201408 = request201408 self.request201501 = request201501 def __repr__(self): return '<Student {0}>'.format(self.studentUid) class SupervisorModel(Base): __tablename__ = 'supervisor' supervisor_id = Column(Integer, primary_key=True) nameLast = Column(String(120)) nameFirst = Column(String(120)) phone = Column(String(120)) email = Column(String(120)) room = Column(String(120)) center = Column(String(120)) position = relationship("PositionModel", backref='SupervisorModel') def __init__(self, supervisor_id=None, nameLast=None, nameFirst=None, phone=None, email=None, room=None, center=None ): self.supervisor_id = supervisor_id self.nameLast = nameLast self.nameFirst = nameFirst self.phone = phone self.email = email self.room = room self.center = center def __repr__(self): return '<Supervisor {0}>'.format(self.supervisor_id) class PositionModel(Base): __tablename__ = 'position' position_id = Column(Integer, primary_key=True) title = Column(String(120)) workGroup = Column(String(120)) position_type = Column(String(120)) course = Column(String(120)) programMin = Column(String(120)) programStd = Column(String(120)) positionOverview = Column(String(120)) primaryDuties = Column(String(120)) necessarySkill = Column(String(120)) preferredSkill = Column(String(120)) dateOpen = Column(String(120)) dateClosed = Column(String(120)) available = Column(String(120)) supervisor_id = Column(Integer, ForeignKey("supervisor.supervisor_id"), nullable=False) supervisor = relationship("ApplicationModel", 
backref='PositionModel')
    superv = relationship("SupervisorModel",
                          primaryjoin=supervisor_id == SupervisorModel.supervisor_id,
                          viewonly=True)
    #application = relationship("application", backref='position')

    def __init__(self, position_id=None, title=None, workGroup=None,
                 position_type=None, course=None, programMin=None,
                 programStd=None, positionOverview=None, primaryDuties=None,
                 necessarySkill=None, preferredSkill=None, dateOpen=None,
                 dateClosed=None, available=None, supervisor_id=None):
        self.position_id = position_id
        self.title = title
        self.workGroup = workGroup
        self.position_type = position_type
        self.course = course
        self.programMin = programMin
        self.programStd = programStd
        self.positionOverview = positionOverview
        self.primaryDuties = primaryDuties
        self.necessarySkill = necessarySkill
        self.preferredSkill = preferredSkill
        self.dateOpen = dateOpen
        self.dateClosed = dateClosed
        self.available = available
        self.supervisor_id = supervisor_id

    def __repr__(self):
        return '<Position {0}>'.format(self.position_id)


class ApplicationModel(Base):
    'Many-to-many association table'
    __tablename__ = 'app_main'
    app_id = Column(Integer, primary_key=True)
    position_id = Column(Integer, ForeignKey('position.position_id'), nullable=False)
    student_id = Column(Integer, ForeignKey('student.student_id'), nullable=False)
    student = relationship('StudentModel', primaryjoin=student_id == StudentModel.student_id)
    offer = relationship('OfferModel', backref='ApplicationModel')
    UniqueConstraint('position_id', 'student_id', name='unique_app')

    def __init__(self, app_id=None, student_id=None, position_id=None):
        self.app_id = app_id
        self.position_id = position_id
        self.student_id = student_id

    def __repr__(self):
        return '<Application {0}>'.format(self.app_id)


class OfferModel(Base):
    "This is a one-to-one from Application w/ Y or N"
    #This can rely on the application id completely
    __tablename__ = 'offer'
    offer_id = Column(Integer, primary_key=True)
    app_id = Column(Integer, ForeignKey('app_main.app_id'), nullable=False)
    offerMade = Column(String(120))
    offer_date = Column(String(120))
    response = Column(String(120))
    response_date = Column(String(120))
    available = Column(String(120))
    application = relationship('ApplicationModel', primaryjoin=app_id == ApplicationModel.app_id)

    def __init__(self, offer_id=None, app_id=None, offerMade=None,
                 offer_date=None, response=None, response_date=None,
                 available=None):
        self.offer_id = offer_id
        self.app_id = app_id
        self.offerMade = offerMade
        self.offer_date = offer_date
        self.response = response
        self.response_date = response_date
        self.available = available

    def __repr__(self):
        return '<Offer {0}>'.format(self.offer_id)

#The applications table is a many-to-many relationship
#https://pythonhosted.org/Flask-SQLAlchemy/models.html
#suggests using an explicit table
#http://docs.sqlalchemy.org/en/rel_0_9/orm/relationships.html#relationships-many-to-many
#http://docs.sqlalchemy.org/en/rel_0_9/core/constraints.html?highlight=constraints
#http://stackoverflow.com/questions/10059345/sqlalchemy-unique-across-multiple-columns
#This table should have a composite primary key for student_id & position_id,
#which would eliminate the need for the unique constraint.
#However, what if a person rescinds an application and then reapplies?
#Do we allow this? If so, we need to add a submission time/date stamp.
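A minimal sketch of wiring these models together; it assumes internaljobmarket.database exposes a scoped db_session alongside Base (a common Flask declarative layout), and every field value is a placeholder.

from internaljobmarket.database import db_session  # assumed to exist

supervisor = SupervisorModel(supervisor_id=1, nameLast='Doe',
                             nameFirst='Jane', email='jane@example.edu')
position = PositionModel(position_id=1, title='Lab Assistant',
                         supervisor_id=supervisor.supervisor_id)
student = StudentModel(student_id=1, studentUid='123456789',
                       nameLast='Roe', nameFirst='Rick')
application = ApplicationModel(app_id=1, student_id=student.student_id,
                               position_id=position.position_id)
db_session.add_all([supervisor, position, student, application])
db_session.commit()

# follow the viewonly join defined on PositionModel
print(position.superv.nameLast)  # Doe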
Get the Samsung Galaxy S9 Transparent Soft TPU Back Cover Case (a high-quality product)! I am so satisfied with all the products sold by 7Juz - Your Shopping Place. I got a NILLKIN product at an affordable price. This can't be forgotten. You should buy the product before its promotion expires. I like the packaging and I love the material of the product. The Back Cover Case from NILLKIN is very transparent. It's completely perfect! I love the material of the product I ordered: it's soft TPU. How does 7Juz - Your Shopping Place provide so many products at cheap prices? It's really amazing. I'm going to choose other things here. Two thumbs up!

No words can describe this Transparent Samsung Galaxy S9 Back Cover Case; all I can say is, won't you feel regret if you don't order this great product? My Back Cover Case has arrived. It's really great. The product is transparent. I absolutely love this NILLKIN product. I would never forget 7Juz - Your Shopping Place, since it contains a lot of things that are great to buy! Best of all, there are numerous cheap Back Cover Cases we can acquire! The style can be categorized as transparent. All of the products listed here are described according to fact.

Would you like to know about NILLKIN products? We sell the Samsung Galaxy S9 Soft TPU Back Cover Case here. Here you may find out where to buy the Samsung Galaxy S9 Transparent Back Cover Case, which is produced by NILLKIN. We understand your needs, which is why we recommend the Transparent Samsung Galaxy S9 Back Cover Case. This is amazing, since it has already been bought by many people around the world. We offer free shipping here. The best stuff must come from the best place. We are sure this product will be the most stunning thing you have ever bought. We only stock NILLKIN on this page, since it is the top Back Cover Case brand! Finally, this Samsung Galaxy S9 Back Cover Case product is posted at 7Juz - Your Shopping Place. Besides this one, we also offer the Samsung Galaxy S9 Soft TPU Back Cover Case for you. The best product that you have never found before. Best of all, you can buy this product without a problem. If you aren't satisfied, you can return the order anytime, since we provide refunds. Don't think too hard; there is no cost for shipping. Find the best cheap product that you can hardly find anywhere else.
# encoding: utf-8 import unittest from .helpers import insert_links_to_text, find_text_occurrences class TestFindText(unittest.TestCase): def test_find_text_occurrences(self): text_dict = {'content': 'Ok, Ala ma kota, ala kota tez ma, ale ola nie ma kota tak jak ala'} self.assertEqual(find_text_occurrences('Ala', text_dict)[0]['word'], 'Ala') class TestInsertLinks(unittest.TestCase): def setUp(self): self.result_text = 'Ok, <a href="http://ala.com" alt="Ala" title="Ala">Ala</a> ma kota, ' \ '<a href="http://ala.com" alt="Ala" title="Ala">Ala</a> kota tez ma, ale ola nie ma kota ' \ 'tak jak <a href="http://ala.com" alt="Ala" title="Ala">Ala</a>' def test_insert_links_to_text(self): text_dict = {'content': 'Ok, Ala ma kota, ala kota tez ma, ale ola nie ma kota tak jak ala', 'modified': False} matches = [{'word': 'Ala', 'start': 4, 'end': 7}, {'word': 'Ala', 'start': 17, 'end': 20}, {'word': 'Ala', 'start': 62, 'end': 65}] self.assertEqual(insert_links_to_text(text_dict, matches, "http://ala.com")['content'], self.result_text) self.assertTrue(insert_links_to_text(text_dict, matches, "http://ala.com")['modified'])
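The helpers themselves aren't shown here; a minimal sketch of find_text_occurrences that stays consistent with these tests (case-insensitive matching, offsets reported against the raw content) might look like this.

import re

def find_text_occurrences(word, text_dict):
    # every case-insensitive occurrence of `word`, reported with the
    # query's casing and its start/end offsets in the content
    return [{'word': word, 'start': m.start(), 'end': m.end()}
            for m in re.finditer(re.escape(word), text_dict['content'],
                                 re.IGNORECASE)]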
Oct 12, 2015: Last week WorldLoop’s Project Manager, Luc Severi, found himself on the Akouedo dumpsite in Abidjan, Ivory Coast. As part of WorldLoop’s work in the European Union’s Horizon 2020 E-Waste Implementation Toolkit (EWIT) project, Severi, together with the other EWIT consortium members, visited stakeholders to assess the e-waste management system in place today in Abidjan. Several different markets exist around the city, each with its own specialty (e.g. aluminium reuse, end-of-life vehicles, electronics). The electronics market brings employment to more than 800 people, who are trained informally on the job through word-of-mouth knowledge transfer. Fractions are separated using manual dismantling techniques and find their way to the local markets. Because the cooperative isn’t legally registered, international markets aren’t currently accessible, so those fractions that could benefit from a Best-of-2-Worlds solution are being stored. The visits to Akouedo and Afecamci were an excellent starting point for the two-day EWIT workshop, where roundtable discussions took place on e-waste legislation and Extended Producer Responsibility (EPR) as a basic principle of the system. EWIT started early this year, and Abidjan was the fourth of the four “Twin Cities Workshops” (the EWIT consortium has already successfully completed workshops in Choma, Zambia; Johannesburg, South Africa; and Kisii, Kenya). The workshops compare the two cities (in this case Abidjan and Antwerp, Belgium) and develop a master plan which can be implemented in the African municipalities. WorldLoop has provided its expertise and shared its knowledge on e-waste. Barbara Toorens, WorldLoop’s Director of External and Partner Relations, will head to Rome in November to participate in the next EWIT workshop on financing and legislation models.

Abidjan twin city Antwerp, presenting on e-waste management. WorldLoop presenting on e-waste management in Africa.
import unittest

from Tank_Test import TankTestCase
import yandextank.core as tankcore


class TankCoreTestCase(TankTestCase):
    def setUp(self):
        self.foo = tankcore.TankCore()

    def tearDown(self):
        del self.foo
        self.foo = None

    def test_tankcorefail(self):
        paths = ['config_err/load_err.conf']
        self.foo.load_configs(paths)
        try:
            self.foo.load_plugins()
            self.fail()
        except ImportError:
            pass

    def test_tankcore(self):
        paths = ['config/load.conf']
        self.foo.load_configs(paths)
        self.assertEquals('passed', self.foo.get_option('dotted', 'test'))
        self.foo.load_plugins()
        self.foo.plugins_configure()
        self.foo.plugins_prepare_test()
        self.foo.plugins_start_test()
        self.foo.wait_for_finish()
        self.foo.add_artifact_file(__file__, 1)
        self.foo.plugins_end_test(0)
        self.foo.plugins_post_process(0)

    def test_strstplit(self):
        str1 = '-Jtarget.address=www.yandex.ru -Jtarget.port=26 -J "load_profile=const(1,60s) line(0,1000,10m)"'
        arr1 = tankcore.splitstring(str1)
        self.assertEquals(len(arr1), 5)


if __name__ == '__main__':
    unittest.main()
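test_tankcore above doubles as documentation of the TankCore lifecycle. A condensed sketch of the same call sequence outside the test harness follows; the config path is an assumption pointing at the test fixtures, and the ordering is taken directly from the test:

import yandextank.core as tankcore

core = tankcore.TankCore()
core.load_configs(['config/load.conf'])   # read options first
core.load_plugins()                       # then instantiate plugins
core.plugins_configure()
core.plugins_prepare_test()
core.plugins_start_test()
core.wait_for_finish()
retcode = 0
core.plugins_end_test(retcode)
core.plugins_post_process(retcode)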
You need protein to build muscle, and whey protein is the fastest way to get it to your muscles during and after a workout. It is great for recovery and provides important amino acids that support training. You can take whey protein before or after workouts for the best results; used after exercise, it stimulates muscle growth more than any other form of protein. Slow-carbohydrate foods may also supply minerals such as potassium, magnesium and calcium. These carbs break down slowly and provide steady energy for the gym, so they are best eaten before exercise. Simple sugars such as dextrose and maltodextrin work too, but they can be a bit expensive; natural alternatives include whey protein blended with rice milk or a banana, as well as oatmeal, dates, figs, raisins, or even sweet potatoes. Studies show that creatine is an excellent supplement because it increases lean muscle mass and strength. Take it every day, even on rest days, before or after your workout, at a recommended dose of 5 g/day; it provides energy for intense workouts and helps you train beyond your normal level. A great benefit of fish oil supplements is that they are a very effective anti-inflammatory for muscle recovery, comparable to ibuprofen but without the harmful side effects. The recommended dosage for gym-goers is 2–4 g per day; most people do not get enough omega-3 and omega-6 in their diet, and fish oil is a great source of both. Beta-alanine helps buffer muscle acidity, the burning sensation you feel during heavy training. Without it your muscles tire sooner and your workouts are cut short; with it you gain power faster. On training days, take it about 30 minutes before exercise; on rest days, take it whenever convenient. The result is enhanced strength: more lean body mass and less body fat.
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.template import RequestContext

from clients.models import Client, Dependent


def index(request):
    context = RequestContext(request)
    client_list = Client.objects.all()
    client_dict = {'clients': client_list}
    return render_to_response('clients/index.html', client_dict, context)


def clientView(request, client_id):
    context = RequestContext(request)
    client = Client.objects.get(id=client_id)
    insurance = client.insurance_set.all()
    dependents = client.dependents.all()
    spouse = None
    children = []
    for dependent in dependents:
        if dependent.relationship == Dependent.SPOUSE:
            spouse = dependent
        else:
            children.append(dependent)
    context_dict = {'client': client, 'client_insurance': insurance,
                    'spouse': spouse, 'children': children}
    print context_dict
    return render_to_response('clients/client.html', context_dict, context)
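A minimal URLconf sketch for wiring these views up; the regex patterns and route names are assumptions, not part of the original app (the render_to_response/RequestContext style dates this to an older Django, where django.conf.urls.url is the idiomatic choice):

# clients/urls.py (hypothetical)
from django.conf.urls import url

from clients import views

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^(?P<client_id>\d+)/$', views.clientView, name='client'),
]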
Low income car insurance dmv NY. AUTO INSURANCE HERE! Compare CHEAP Auto Insurance Policies for FREE! New York's requirements set the minimum cover you must carry, but the policy you choose determines what you actually pay if something happens on the road. Most people know that a higher deductible lowers the premium; in the long run, higher deductibles can save you money if you can afford the out-of-pocket cost of a claim. Shopping around is the single best way to pay less: different insurers present different schemes, and comparing quotes online costs nothing. A good credit score also matters, since insurers check your credit report when setting rates, so keeping it clean helps keep your premium down. Watch the excess on any policy you consider, and remember that the cheapest quote is not always the best deal: look at the full range of benefits, from breakdown cover to comprehensive protection against events such as earthquakes, before you sign. If you are financing the car, the lender will usually require more than the state minimum, so factor that into your budget. Finally, review your policy from time to time; your circumstances change and so do insurers' offers, so the low income car insurance dmv NY policy that suited you last year may not be the best value today.
from __future__ import print_function import os import sys from time import ctime import numpy as np from ase.units import Hartree from ase.utils import devnull from scipy.special.orthogonal import p_roots from gpaw import GPAW import gpaw.mpi as mpi from gpaw.response.chi0 import Chi0 from gpaw.wavefunctions.pw import PWDescriptor from gpaw.kpt_descriptor import KPointDescriptor class RPACorrelation: def __init__(self, calc, xc='RPA', filename=None, skip_gamma=False, qsym=True, nlambda=None, nfrequencies=16, frequency_max=800.0, frequency_scale=2.0, frequencies=None, weights=None, wcomm=None, chicomm=None, world=mpi.world, txt=sys.stdout): if isinstance(calc, str): calc = GPAW(calc, txt=None, communicator=mpi.serial_comm) self.calc = calc if world.rank != 0: txt = devnull elif isinstance(txt, str): txt = open(txt, 'w') self.fd = txt if frequencies is None: frequencies, weights = get_gauss_legendre_points(nfrequencies, frequency_max, frequency_scale) user_spec = False else: assert weights is not None user_spec = True self.omega_w = frequencies / Hartree self.weight_w = weights / Hartree if wcomm is None: wcomm = 1 if isinstance(wcomm, int): if wcomm == 1: wcomm = mpi.serial_comm chicomm = world else: r = world.rank s = world.size assert s % wcomm == 0 n = s // wcomm # size of skncomm wcomm = world.new_communicator(range(r % n, s, n)) chicomm = world.new_communicator(range(r // n * n, (r // n + 1) * n)) assert len(self.omega_w) % wcomm.size == 0 self.mynw = len(self.omega_w) // wcomm.size self.w1 = wcomm.rank * self.mynw self.w2 = self.w1 + self.mynw self.myomega_w = self.omega_w[self.w1:self.w2] self.wcomm = wcomm self.chicomm = chicomm self.world = world self.skip_gamma = skip_gamma self.ibzq_qc = None self.weight_q = None self.initialize_q_points(qsym) # Energies for all q-vetors and cutoff energies: self.energy_qi = [] self.filename = filename self.print_initialization(xc, frequency_scale, nlambda, user_spec) def initialize_q_points(self, qsym): kd = self.calc.wfs.kd self.bzq_qc = kd.get_bz_q_points(first=True) if not qsym: self.ibzq_qc = self.bzq_qc self.weight_q = np.ones(len(self.bzq_qc)) / len(self.bzq_qc) else: U_scc = kd.symmetry.op_scc self.ibzq_qc = kd.get_ibz_q_points(self.bzq_qc, U_scc)[0] self.weight_q = kd.q_weights def read(self): lines = open(self.filename).readlines()[1:] n = 0 self.energy_qi = [] nq = len(lines) // len(self.ecut_i) for q_c in self.ibzq_qc[:nq]: self.energy_qi.append([]) for ecut in self.ecut_i: q1, q2, q3, ec, energy = [float(x) for x in lines[n].split()] self.energy_qi[-1].append(energy / Hartree) n += 1 if (abs(q_c - (q1, q2, q3)).max() > 1e-4 or abs(int(ecut * Hartree) - ec) > 0): self.energy_qi = [] return print('Read %d q-points from file: %s' % (nq, self.filename), file=self.fd) print(file=self.fd) def write(self): if self.world.rank == 0 and self.filename: fd = open(self.filename, 'w') print('#%9s %10s %10s %8s %12s' % ('q1', 'q2', 'q3', 'E_cut', 'E_c(q)'), file=fd) for energy_i, q_c in zip(self.energy_qi, self.ibzq_qc): for energy, ecut in zip(energy_i, self.ecut_i): print('%10.4f %10.4f %10.4f %8d %r' % (tuple(q_c) + (ecut * Hartree, energy * Hartree)), file=fd) def calculate(self, ecut, nbands=None, spin=0): """Calculate RPA correlation energy for one or several cutoffs. ecut: float or list of floats Plane-wave cutoff(s). nbands: int Number of bands (defaults to number of plane-waves). spin: separate spin in response funtion. 
(Only needed for beyond RPA methods that inherit this function) """ if isinstance(ecut, (float, int)): ecut_i = [ecut] for i in range(5): ecut_i.append(ecut_i[-1] * 0.8) ecut_i = np.sort(ecut_i) else: ecut_i = np.sort(ecut) self.ecut_i = np.asarray(ecut_i) / Hartree ecutmax = max(self.ecut_i) if nbands is None: print('Response function bands : Equal to number of plane waves', file=self.fd) else: print('Response function bands : %s' % nbands, file=self.fd) print('Plane wave cutoffs (eV) :', end='', file=self.fd) for ecut in ecut_i: print('%5d' % ecut, end='', file=self.fd) print(file=self.fd) print(file=self.fd) if self.filename and os.path.isfile(self.filename): self.read() self.world.barrier() chi0 = Chi0(self.calc, 1j * Hartree * self.myomega_w, eta=0.0, intraband=False, hilbert=False, txt='response.txt', world=self.chicomm) nq = len(self.energy_qi) for q_c in self.ibzq_qc[nq:]: if np.allclose(q_c, 0.0) and self.skip_gamma: self.energy_qi.append(len(self.ecut_i) * [0.0]) self.write() print('Not calculating E_c(q) at Gamma', file=self.fd) print(file=self.fd) continue thisqd = KPointDescriptor([q_c]) pd = PWDescriptor(ecutmax, self.calc.wfs.gd, complex, thisqd) nG = pd.ngmax chi0_swGG = np.zeros((1 + spin, self.mynw, nG, nG), complex) if np.allclose(q_c, 0.0): # Wings (x=0,1) and head (G=0) for optical limit and three # directions (v=0,1,2): chi0_swxvG = np.zeros((1 + spin, self.mynw, 2, 3, nG), complex) chi0_swvv = np.zeros((1 + spin, self.mynw, 3, 3), complex) else: chi0_swxvG = None chi0_swvv = None Q_aGii = chi0.initialize_paw_corrections(pd) # First not completely filled band: m1 = chi0.nocc1 print('# %s - %s' % (len(self.energy_qi), ctime().split()[-2]), file=self.fd) print('q = [%1.3f %1.3f %1.3f]' % tuple(q_c), file=self.fd) energy_i = [] for ecut in self.ecut_i: if ecut == ecutmax: # Nothing to cut away: cut_G = None m2 = nbands or nG else: cut_G = np.arange(nG)[pd.G2_qG[0] <= 2 * ecut] m2 = len(cut_G) print('E_cut = %d eV / Bands = %d: ' % (ecut * Hartree, m2), file=self.fd, end='') self.fd.flush() energy = self.calculate_q(chi0, pd, chi0_swGG, chi0_swxvG, chi0_swvv, Q_aGii, m1, m2, cut_G) energy_i.append(energy) m1 = m2 if ecut < ecutmax and self.chicomm.size > 1: # Chi0 will be summed again over chicomm, so we divide # by its size: chi0_swGG *= 1.0 / self.chicomm.size if chi0_swxvG is not None: chi0_swxvG *= 1.0 / self.chicomm.size chi0_swvv *= 1.0 / self.chicomm.size self.energy_qi.append(energy_i) self.write() print(file=self.fd) e_i = np.dot(self.weight_q, np.array(self.energy_qi)) print('==========================================================', file=self.fd) print(file=self.fd) print('Total correlation energy:', file=self.fd) for e_cut, e in zip(self.ecut_i, e_i): print('%6.0f: %6.4f eV' % (e_cut * Hartree, e * Hartree), file=self.fd) print(file=self.fd) self.energy_qi = [] # important if another calculation is performed if len(e_i) > 1: self.extrapolate(e_i) print('Calculation completed at: ', ctime(), file=self.fd) print(file=self.fd) return e_i * Hartree def calculate_q(self, chi0, pd, chi0_swGG, chi0_swxvG, chi0_swvv, Q_aGii, m1, m2, cut_G): chi0_wGG = chi0_swGG[0] if chi0_swxvG is not None: chi0_wxvG = chi0_swxvG[0] chi0_wvv = chi0_swvv[0] else: chi0_wxvG = None chi0_wvv = None chi0._calculate(pd, chi0_wGG, chi0_wxvG, chi0_wvv, Q_aGii, m1, m2, [0, 1]) print('E_c(q) = ', end='', file=self.fd) if not pd.kd.gamma: e = self.calculate_energy(pd, chi0_wGG, cut_G) print('%.3f eV' % (e * Hartree), file=self.fd) self.fd.flush() else: e = 0.0 for v in range(3): chi0_wGG[:, 0] 
= chi0_wxvG[:, 0, v] chi0_wGG[:, :, 0] = chi0_wxvG[:, 1, v] chi0_wGG[:, 0, 0] = chi0_wvv[:, v, v] ev = self.calculate_energy(pd, chi0_wGG, cut_G) e += ev print('%.3f' % (ev * Hartree), end='', file=self.fd) if v < 2: print('/', end='', file=self.fd) else: print(' eV', file=self.fd) self.fd.flush() e /= 3 return e def calculate_energy(self, pd, chi0_wGG, cut_G): """Evaluate correlation energy from chi0.""" G_G = pd.G2_qG[0]**0.5 # |G+q| if pd.kd.gamma: G_G[0] = 1.0 if cut_G is not None: G_G = G_G[cut_G] nG = len(G_G) e_w = [] for chi0_GG in chi0_wGG: if cut_G is not None: chi0_GG = chi0_GG.take(cut_G, 0).take(cut_G, 1) e_GG = np.eye(nG) - 4 * np.pi * chi0_GG / G_G / G_G[:, np.newaxis] e = np.log(np.linalg.det(e_GG)) + nG - np.trace(e_GG) e_w.append(e.real) E_w = np.zeros_like(self.omega_w) self.wcomm.all_gather(np.array(e_w), E_w) energy = np.dot(E_w, self.weight_w) / (2 * np.pi) self.E_w = E_w return energy def extrapolate(self, e_i): print('Extrapolated energies:', file=self.fd) ex_i = [] for i in range(len(e_i) - 1): e1, e2 = e_i[i:i + 2] x1, x2 = self.ecut_i[i:i + 2]**-1.5 ex = (e1 * x2 - e2 * x1) / (x2 - x1) ex_i.append(ex) print(' %4.0f -%4.0f: %5.3f eV' % (self.ecut_i[i] * Hartree, self.ecut_i[i + 1] * Hartree, ex * Hartree), file=self.fd) print(file=self.fd) self.fd.flush() return e_i * Hartree def print_initialization(self, xc, frequency_scale, nlambda, user_spec): print('----------------------------------------------------------', file=self.fd) print('Non-self-consistent %s correlation energy' % xc, file=self.fd) print('----------------------------------------------------------', file=self.fd) print('Started at: ', ctime(), file=self.fd) print(file=self.fd) print('Atoms :', self.calc.atoms.get_chemical_formula(mode='hill'), file=self.fd) print('Ground state XC functional :', self.calc.hamiltonian.xc.name, file=self.fd) print('Valence electrons :', self.calc.wfs.setups.nvalence, file=self.fd) print('Number of bands :', self.calc.wfs.bd.nbands, file=self.fd) print('Number of spins :', self.calc.wfs.nspins, file=self.fd) print('Number of k-points :', len(self.calc.wfs.kd.bzk_kc), file=self.fd) print('Number of irreducible k-points :', len(self.calc.wfs.kd.ibzk_kc), file=self.fd) print('Number of q-points :', len(self.bzq_qc), file=self.fd) print('Number of irreducible q-points :', len(self.ibzq_qc), file=self.fd) print(file=self.fd) for q, weight in zip(self.ibzq_qc, self.weight_q): print(' q: [%1.4f %1.4f %1.4f] - weight: %1.3f' % (q[0], q[1], q[2], weight), file=self.fd) print(file=self.fd) print('----------------------------------------------------------', file=self.fd) print('----------------------------------------------------------', file=self.fd) print(file=self.fd) if nlambda is None: print('Analytical coupling constant integration', file=self.fd) else: print('Numerical coupling constant integration using', nlambda, 'Gauss-Legendre points', file=self.fd) print(file=self.fd) print('Frequencies', file=self.fd) if not user_spec: print(' Gauss-Legendre integration with %s frequency points' % len(self.omega_w), file=self.fd) print(' Transformed from [0,oo] to [0,1] using e^[-aw^(1/B)]', file=self.fd) print(' Highest frequency point at %5.1f eV and B=%1.1f' % (self.omega_w[-1] * Hartree, frequency_scale), file=self.fd) else: print(' User specified frequency integration with', len(self.omega_w), 'frequency points', file=self.fd) print(file=self.fd) print('Parallelization', file=self.fd) print(' Total number of CPUs : % s' % self.world.size, file=self.fd) print(' Frequency 
decomposition : % s' % self.wcomm.size, file=self.fd) print(' K-point/band decomposition : % s' % self.chicomm.size, file=self.fd) print(file=self.fd) def get_gauss_legendre_points(nw=16, frequency_max=800.0, frequency_scale=2.0): y_w, weights_w = p_roots(nw) y_w = y_w.real ys = 0.5 - 0.5 * y_w ys = ys[::-1] w = (-np.log(1 - ys))**frequency_scale w *= frequency_max / w[-1] alpha = (-np.log(1 - ys[-1]))**frequency_scale / frequency_max transform = (-np.log(1 - ys))**(frequency_scale - 1) \ / (1 - ys) * frequency_scale / alpha return w, weights_w * transform / 2
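A minimal usage sketch for the RPACorrelation class defined above. The ground-state file name is an assumption: you would first need a converged GPAW calculation saved to 'si.gpw' for this to run.

# Scalar or list cutoffs both work; calculate() returns energies in eV
# and prints an extrapolation table when more than one cutoff is given.
rpa = RPACorrelation('si.gpw', txt='rpa_si.txt')
E_i = rpa.calculate(ecut=[200.0, 250.0, 300.0])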
The Third Annual Charlotte’s Dance for a Chance, held at the Ranney School on Friday, April 13, 2018, raised almost $5,800. The event was created and run by Charlotte Kaye, her mom Jenny, her friend Sarah, and a dedicated committee of Ranney students. For the past three years it has offered tremendous prizes, dancing, food and lots of fun to raise donations for The Kortney Rose Foundation for pediatric brain tumor research. Charlotte is a grateful brain tumor survivor. She met the Gillettes in 2015 when she danced with the Dance Plus Performance Team at the annual Kortney’s Challenge event, and she wanted to give back to other children with brain tumors in gratitude for her own recovery after tumor removal at the age of three. And give back she did! Charlotte will graduate in May 2018 knowing that her efforts helped raise over $22,000 for pediatric brain tumor research. The Kortney Rose Foundation is extremely grateful to Charlotte and her family, as well as the Ranney School community, for wholeheartedly supporting this effort over the past three years! Prizes included: suite tickets to a 5 Seconds of Summer concert; courtside seats to a Nets game; suite tickets to a Knicks game; field-level box seats to a Yankees game; suite tickets to a Billy Joel concert; $1,000 of merchandise from Bluemercury; and products from Vineyard Vines, Rook Coffee, Nordstrom, Lululemon, South Moon Under, Northshore, and many more!
import os
os.environ["CUDA_VISIBLE_DEVICES"] = ""
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import sys, getopt

import tensorflow as tf

usage_str = 'python tensorflow_rename_variables.py --checkpoint_dir=path/to/dir/ ' \
            '--replace_from=substr --replace_to=substr --add_prefix=abc --dry_run'


def rename(checkpoint_dir, replace_from, replace_to, add_prefix, dry_run):
    checkpoint = tf.train.get_checkpoint_state(checkpoint_dir)

    with tf.Session() as sess:
        for var_name, _ in tf.contrib.framework.list_variables(checkpoint_dir):
            # Load the variable
            var = tf.contrib.framework.load_variable(checkpoint_dir, var_name)
            print(var_name)

            # Set the new name
            new_name = var_name
            if None not in [replace_from, replace_to]:
                new_name = new_name.replace(replace_from, replace_to)
            if add_prefix:
                new_name = add_prefix + new_name

            #if new_name == var_name:
            #    continue

            if dry_run:
                if var_name != new_name:
                    print('%s would be renamed to %s.' % (var_name, new_name))
            else:
                if var_name != new_name:
                    print('Renaming %s to %s.' % (var_name, new_name))
                # Rename the variable
                var = tf.Variable(var, name=new_name)

        if not dry_run:
            # Save the variables
            saver = tf.train.Saver()
            sess.run(tf.global_variables_initializer())
            saver.save(sess, checkpoint.model_checkpoint_path)


def main(argv):
    checkpoint_dir = None
    replace_from = None
    replace_to = None
    add_prefix = None
    dry_run = False

    try:
        opts, args = getopt.getopt(argv, 'h', ['help=', 'checkpoint_dir=',
                                               'replace_from=', 'replace_to=',
                                               'add_prefix=', 'dry_run'])
    except getopt.GetoptError:
        print(usage_str)
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            print(usage_str)
            sys.exit()
        elif opt == '--checkpoint_dir':
            checkpoint_dir = arg
        elif opt == '--replace_from':
            replace_from = arg
        elif opt == '--replace_to':
            replace_to = arg
        elif opt == '--add_prefix':
            add_prefix = arg
        elif opt == '--dry_run':
            dry_run = True

    if not checkpoint_dir:
        print('Please specify a checkpoint_dir. Usage:')
        print(usage_str)
        sys.exit(2)

    rename(checkpoint_dir, replace_from, replace_to, add_prefix, dry_run)


if __name__ == '__main__':
    main(sys.argv[1:])
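A hypothetical dry run, calling rename() directly instead of going through the CLI. The checkpoint path and scope names here are made up for illustration:

# Preview the renames without writing anything back:
rename('checkpoints/model/', replace_from='old_scope', replace_to='new_scope',
       add_prefix=None, dry_run=True)

# Then apply them for real (this rewrites the checkpoint in place):
rename('checkpoints/model/', replace_from='old_scope', replace_to='new_scope',
       add_prefix=None, dry_run=False)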
The stable isotopic composition of oxygen and hydrogen (δ18O and δ2H) and the tritium activity (A) were monitored in precipitation at the synoptic station Portorož (Slovenia) during the period 2007–2010. Monthly and yearly isotope variations are discussed and compared with those observed over the period 2001–2006 and with the basic meteorological parameters. The precipitation-weighted mean values of δ18O and δ2H are –6.28 ‰ and –41.6 ‰; these are 0.35 ‰ and 1.6 ‰ higher than for the period 2001–2006, but not significantly different. The reduced major axis (RMA) local meteoric water line (LMWLRMA) for 2007–2010 is δ2H = (8.14 ± 0.25)×δ18O + (8.28 ± 1.64), while precipitation-weighted least-squares regression (PWLSR) gives LMWLPWLSR: δ2H = (7.87 ± 0.28)×δ18O + (7.97 ± 1.87). The weighted mean deuterium excess (d) is 8.6 ‰, which is 1.2 ‰ lower than in 2001–2006, while the temperature coefficient of δ18O is 0.21 ‰/°C, 0.02 ‰/°C higher than for the previous period. The mean Mediterranean precipitation index (MI) for 2007–2010 is 2.3. The lower values of MI and deuterium excess relative to the preceding period indicate a stronger continental climatic character during the observation period; however, the differences are not statistically significant. The weighted mean tritium activity is 6.4 TU, which is 0.5 TU lower than in 2001–2006 but not significantly different.
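For readers who want to reproduce the two regression lines, here is a minimal numpy sketch of both fits. The five sample points are invented placeholders, not the Portorož data, and the PWLSR weighting follows the usual convention of weighting each month by its precipitation amount:

import numpy as np

# Placeholder monthly values (NOT the Portoroz data): d18O, d2H, precip (mm).
d18O = np.array([-9.1, -7.4, -6.0, -4.8, -3.9])
d2H = np.array([-65.0, -52.0, -41.0, -31.0, -24.0])
P = np.array([120.0, 95.0, 60.0, 40.0, 30.0])

# Reduced major axis (RMA): slope = sign(r) * s_y / s_x.
r = np.corrcoef(d18O, d2H)[0, 1]
slope_rma = np.sign(r) * d2H.std(ddof=1) / d18O.std(ddof=1)
intercept_rma = d2H.mean() - slope_rma * d18O.mean()

# Precipitation-weighted least squares (PWLSR): polyfit squares its
# weights internally, so pass sqrt(P) to weight residuals by P.
slope_w, intercept_w = np.polyfit(d18O, d2H, 1, w=np.sqrt(P))

print('LMWL_RMA   : d2H = %.2f * d18O + %.2f' % (slope_rma, intercept_rma))
print('LMWL_PWLSR : d2H = %.2f * d18O + %.2f' % (slope_w, intercept_w))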
# -*- coding: utf-8 -*- '''Templating engine for reports''' from flask import (current_app, Flask, Blueprint, request, render_template, url_for, flash, redirect, session, send_from_directory, safe_join, Response, jsonify) from werkzeug.exceptions import Unauthorized, Forbidden, NotFound, BadRequest, Conflict from useradvocacy.extensions import login_manager from flask.ext.login import current_user from useradvocacy.user.models import User from useradvocacy.database import db from useradvocacy.reports.models import User, Template, Report from .forms import EditorForm, SelectorForm, PreviewForm from useradvocacy.utils import flash_errors, check_admin import os import re import flask.json from functools import wraps from werkzeug.utils import secure_filename from .markdown2html import convert_md import shutil from flask.ext.login import login_required blueprint = Blueprint('reports', __name__, static_folder="./static", url_prefix="/reports", template_folder="./templates") def upload_path(): if current_app.config['UPLOAD_PATH']: upload = os.path.join(current_app.config['UPLOAD_PATH'], 'reports') else: upload = os.path.join(current_app.root_path, 'reports', 'uploads') print upload return upload def validate_project_filename(fn): @wraps(fn) def new_fn(project, filename, *args, **kwargs): if re.match('[a-zA-Z0-9_-]+$',project) or re.match('[a-zA-Z0-9_-]+$',filename): return fn(project, filename, *args, **kwargs) else: raise BadRequest() return new_fn def allowed_file(name): if re.match('[^/.\'\\\\"]+\\.(css|js|png|jpg|jpeg|gif|json|csv|tsv|xml|pdf|key)$', name, flags=re.IGNORECASE): return True else: return False @blueprint.route("/", methods=["GET"]) def home(): reports = Report.query.order_by("updated").all() reports.reverse() projects = db.session.query(Report.project.label('name'), db.func.bit_or(db.and_(Report.published, Report.listed)).label('shown')).group_by('name').all() print projects[0].shown return render_template('reports.html', list = reports, projects = projects) @blueprint.route("/css/<file>", methods=["GET"]) def serve_css(file): return send_from_directory(safe_join(current_app.root_path, "reports/static/css"), file) @blueprint.route("/img/<file>", methods=["GET"]) def serve_img(file): return send_from_directory(safe_join(current_app.root_path, "reports/static/img"), file) @blueprint.route("/js/<file>", methods=["GET"]) def serve_js(file): return send_from_directory(safe_join(current_app.root_path, "reports/static/js"), file) @blueprint.route("/edit", methods=["GET", "POST"]) @login_required @check_admin def selector(): form = SelectorForm() if request.method == 'POST': if form.validate_on_submit(): if form.action == "report": return redirect('/reports/' + form.project + '/' + form.filename + '/edit', 302) if form.action == "rerender": reports = Report.query.all() for report in reports: report.save_render_html() else: flash_errors(form) return render_template('selector.html',form=form) # TODO: rewrite API.json to take parameters instead of just returning. # TODO: return metadata along with individual reports. 
# (see also: home_project_api()) @blueprint.route("/api.json", methods=["GET"]) def home_api(): reports = Report.query.order_by("updated").limit(100).all() result = [] for report in reports: if (not (current_user and current_user.is_authenticated() and current_user.is_admin) and not report.published): continue item = { 'path' : url_for(".home") + str(report), 'filename' : report.filename, 'project' : report.project, 'created' : report.created, 'updated' : report.updated, 'title' : report.title } result.append(item) return jsonify(results = result) @blueprint.route("/<project>/", methods=["GET"]) def home_project(project): return redirect(url_for(".home") + "#" + project) @blueprint.route("/<project>/api.json", methods=["GET"]) def home_project_api(project): reports = Report.query.filter_by(project = project).order_by("updated").all() result = [] for report in reports: if (not (current_user and current_user.is_authenticated() and current_user.is_admin) and not report.published): continue item = { 'path' : url_for(".home") + str(report), 'filename' : report.filename, 'project' : report.project, 'created' : report.created, 'updated' : report.updated, 'title' : report.title } result.append(item) return jsonify(results = result) @blueprint.route("/<project>/<filename>/", methods=["GET", "POST"]) @validate_project_filename def display(project, filename): report = Report.query.filter_by(filename = filename, project = project).first() if report: if report.published or (not current_user.is_anonymous() and current_user.is_admin): return report.html_content else: raise NotFound() else: raise NotFound() @blueprint.route("/<project>/<filename>/edit", methods=["GET", "POST"]) @validate_project_filename @login_required @check_admin def edit(project, filename): template_name = request.args.get('template', 'default') report = Report.query.filter_by(project = project, filename = filename).first() default_template = Template.query.filter_by(name = template_name).first() form = None if report: form = EditorForm(project_field=project, filename_field=filename, template_field = report.template, markdown_field=report.md_content, published_field = report.published, listed_field = report.listed) else: form = EditorForm(project_field=project, filename_field=filename, template_field = default_template, markdown_field=default_template.md_content) if request.method == 'POST': if form.validate_on_submit(): save_user = None if current_user.is_anonymous(): save_user = User.query.filter_by(username="<blank>").first() else: save_user = current_user if form.savemode is 'save': report = Report.query.filter_by(filename = form.filename, project = form.project).first() if report: report.update(template_id = form.template.id, md_content = form.md_content, listed = form.listed, published = form.published) flash("Report saved.", 'success') else: report = Report.create(filename = form.filename, project = form.project, template = form.template, md_content = form.md_content, author = save_user, listed = form.listed, published = form.published) flash("New report created and saved!", 'success') report.save_render_html() form = EditorForm(project_field=form.project, filename_field=form.filename, template_field=form.template, markdown_field = form.md_content, listed_field = form.listed, published_field = form.published) elif form.savemode is "saveas": report = Report.create(filename = form.filename, project = form.project, template = form.template, md_content = form.md_content, author = save_user, listed = form.listed, published = 
form.published) report.save_render_html() old_path = os.path.join(upload_path(), form.old_project, form.old_filename) new_path = os.path.join(upload_path(), form.project, form.filename) # Move files along with copying data try: files = os.listdir(old_path) except OSError: pass else: if os.path.exists(new_path): flash("Files not copied!", 'error') else: shutil.copytree(old_path, new_path) flash("Files copied!", 'success') flash("New report created and saved!", 'success') return redirect("/reports/" + form.project + "/" + form.filename + "/edit", 303) else: assert False else: flash_errors(form) preview_form = PreviewForm(markdown_preview_field = '', template_preview_field = '') return render_template('editor.html',form=form, project=project, filename=filename, preview_form=preview_form) @blueprint.route("/<project>/<filename>/upload", methods=["POST"]) @validate_project_filename @login_required @check_admin def upload_file(project, filename): file = request.files['file'] if file and allowed_file(file.filename): file_save = secure_filename(file.filename) try: os.makedirs(os.path.join(upload_path(), project, filename)) except OSError: pass try: file.save(os.path.join(upload_path(), project, filename, file_save)) except (IOError, OSError) as err: raise Conflict("Can't save file: " + err.strerror) flash("Can't save file: " + err.strerror, 'error') else: return "File uploaded", 200 else: raise Conflict("File upload failed: File not allowed") raise Conflict("Bad file upload!") @blueprint.route("/<project>/<filename>/preview", methods=["POST"]) @validate_project_filename def preview_file(project, filename): preview_form = PreviewForm() if preview_form.validate_on_submit(): print preview_form.template_id template_id = Template.query.get(preview_form.template_id) md_content = preview_form.md_content env = current_app.create_jinja_environment() template = env.get_template(template_id.filename) print template return convert_md(md_content, template) else: return NotFound() @blueprint.route("/<project>/<filename>/listfiles", methods=["GET"]) @validate_project_filename @login_required @check_admin def list_files(project, filename): try: files = os.listdir(os.path.join(upload_path(), project, filename)) except OSError: return Response(flask.json.dumps([]), status=200, mimetype='application/json') out_list = [] for file_name in files: file_item = { "name": file_name, "size": os.path.getsize(os.path.join(upload_path(), project, filename)) } out_list.append(file_item) return Response(flask.json.dumps(out_list), status=200, mimetype='application/json') @blueprint.route("/<project>/<filename>/md", methods=["GET"]) @validate_project_filename def display_md(project, filename): report = Report.query.filter_by(filename = filename, project = project).first() if report: return Response(report.md_content, mimetype = 'text/plain', status = 200) else: raise NotFound() # Keep this function last as it sucks up everything else in /reports/ @blueprint.route("/<project>/<filename>/<file>", methods=["GET"]) @validate_project_filename def file_server(project, filename, file): return send_from_directory(os.path.join(upload_path(), project, filename), file) @blueprint.route("/<project>/<filename>/<file>/delete", methods=["DELETE"]) @validate_project_filename @login_required @check_admin def delete_file(project, filename, file): if not allowed_file(file): raise BadRequest() if os.path.exists(os.path.join(upload_path(), project, filename,file)): os.remove(os.path.join(upload_path(), project, filename,file)) return "File removed", 
200 else: raise NotFound()
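One thing worth flagging in the module above: as written, validate_project_filename accepts a request when either path component matches the whitelist, so a malicious filename passes as long as the project name is clean. A corrected sketch that requires both to match (presumably what was intended; imports repeated here so the snippet stands alone):

from functools import wraps
import re

from werkzeug.exceptions import BadRequest


def validate_project_filename(fn):
    @wraps(fn)
    def new_fn(project, filename, *args, **kwargs):
        # Require BOTH components to be alphanumeric/dash/underscore only.
        if re.match('[a-zA-Z0-9_-]+$', project) and re.match('[a-zA-Z0-9_-]+$', filename):
            return fn(project, filename, *args, **kwargs)
        raise BadRequest()
    return new_fn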
"Trousers by Brooklyn's Own, Wool-mix fabric, Stretch waist, Functional pockets, Wide-cut leg, Cropped length, Regular fit - true to size, Dry clean, 67% Polyester, 23% Wool, 5% Acrylic, 3% Polyamide, 2% Viscose, Our model wears a W 32" Regular and is 188cm/6'2" tall. Brooklyn's Own is the new label by Rocawear. Combining minimal street style with strong urban feels, its printed tees, casual sweats and distressed finishes give you that New York edge."
#!/usr/bin/python3
import argparse
import re
import subprocess
import sys

parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+')
parser.add_argument('--head', action='store_true')
parser.add_argument('--shuf', action='store_true')
parser.add_argument('-n', type=int)
parser.add_argument('-d', '--delimiter', default='^',
                    choices=['&', '^', '@', '~', '|', '/', '#', '$'])
parser.add_argument('--space', action='store_true')
args = parser.parse_args()

commands = []

paste = ['paste', '-d', args.delimiter] + list(args.files)
commands.append(paste)

if args.shuf:
    shuf = ['shuf']
    if args.n:
        shuf += ['-n', str(args.n)]
    commands.append(shuf)

if args.head:
    head = ['head', '-n', str(args.n or 10)]
    commands.append(head)

if args.space:
    space = ['sed', 'G']
    commands.append(space)

delimiter = re.escape(args.delimiter) if args.delimiter in ('/', '^', '$') else args.delimiter
sed = ['sed', 's/{}/\\n/g'.format(delimiter)]
commands.append(sed)

ps = None
for i, cmd in enumerate(commands):
    stdout = sys.stdout if i == len(commands) - 1 else subprocess.PIPE
    stdin = None if i == 0 else ps.stdout
    ps = subprocess.Popen(cmd, stdin=stdin, stdout=stdout,
                          stderr=open('/dev/null', 'w'))
ps.wait()
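A hypothetical invocation; the script and file names are made up. This interleaves two line-aligned files, samples five random pairs, and prints a blank line between pairs:

import subprocess

# Equivalent shell pipeline:
#   paste -d '^' src.txt tgt.txt | shuf -n 5 | sed G | sed 's/\^/\n/g'
subprocess.call(['python3', 'interleave.py', 'src.txt', 'tgt.txt',
                 '--shuf', '-n', '5', '--space'])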
You've agreed to enter into a joint venture (JV). That's great! If you think it through carefully and take the time to treat it like a brand-new business, your JV could help your company grow exponentially. The most important thing to remember when entering a JV, as with any new business venture, is to think through and practically lay out every part of the deal. The No. 1 most important point, and this is absolutely essential, is to make sure that every single detail is in writing. Keep in mind that if it's not in writing, you will not have a road map for getting started on the right foot, keeping to the straight and narrow, reaching your goals for success and, in time, dissolving the venture if necessary. These are the 3 central documents every joint venture must create: 1) a joint venture agreement; 2) a business plan; and 3) an exit strategy. The first document, the agreement, is really a contract. You and your partner will create a legal document that outlines and defines the entity you are forming. It will list the goals of the venture, each side's responsibilities, how long the JV is expected to last, and the circumstances that would lead to its dissolution. It also will cover how revenues and profits will be split, and everyone will want to know that up front. Because of its legal nature, independent legal counsel is advisable for both parties. You and your partner may be able to draft the agreement together; however, it's a good idea for both partners to have independent legal counsel review the document before it's signed. This will help protect the interests of both partners. If you decide to draft the agreement with your partner, look for a good template or checklist to help you: there's so much to cover that some key items could easily be missed. Templates and checklists may be available through your attorney or local business organizations, or you can search for them on the Internet. Then there's the business plan. This document definitely requires the presence and input of all parties to the agreement. Writing it can also be fun, because it lays out all your intended plans, such as goals, revenue benchmarks and what each party is bringing to the JV. The business plan will also describe how you intend to fund the venture, and how you plan to get loans or other outside funding if necessary. Even if you are flush with cash and don't need external funding, it is absolutely essential that you write a business plan. You and your partner will refer back to this document time and again when you are reviewing your progress and planning your future. You can also look to the business plan to guide your daily operations, such as management, human resources and communications strategies. When they're done right, business plans can be long, and often intricate. If this is your first time creating a business plan, it is advisable that you do plenty of research or hire a professional writer. There are writers who do nothing but write business plans for people just like you, and they are easy to find on the Web. Plus, a professional-sounding business plan has a better chance of getting funded, if that's what you're after. Sadly but truly, you will also need an exit strategy. Don't worry: you aren't condemning yourself to failure by thinking about how it might end. The average joint venture lasts roughly seven years, and they end for a myriad of reasons. Your JV might have an end date written into your initial agreement, or someone's circumstances may change; you might win the lottery! You just never know. A proper exit strategy will protect both partners' assets and trademarks. If you brought a trademarked item into the partnership, you want to make sure you leave with that trademark intact. Even better, if you decide to sell the JV for a profit, you want to make sure the profit-sharing arrangements are agreed from the start. Your exit strategy must clearly state who gets what when the JV ends. It also needs to include a list of events that might trigger the end of the JV, such as reaching specific goals, significant changes in the market, or selling the company. Again, this is a document with a lot of legal ramifications, so it's best to have your lawyer review it. When you go about it the right way and put all these aspects of your JV in writing, it will ensure that you walk out of the arrangement with everything you had when you walked in. Creating these documents also demonstrates your competence and commitment to success. Most important, they will keep you and your business partner from fighting a nasty legal battle if and when the venture is terminated.
def safe_pawns(pawns):
    coordinates, candidates = getCoord(pawns)
    return checkCoord(coordinates, candidates)


def getCoord(pawns):
    """Translate chess cells like "b4" into numeric (column, row) pairs and
    collect every square that an existing pawn defends."""
    alpha = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']  # 'a' -> 0
    num = ['1', '2', '3', '4', '5', '6', '7', '8']    # '1' -> 0

    coordinateList = []
    saveCandidates = []
    for cell in pawns:
        col = alpha.index(cell[0])
        row = num.index(cell[1])
        coordinateList.append([col, row])
        # A pawn on (col, row) defends the two squares diagonally ahead.
        for defended in ([col - 1, row + 1], [col + 1, row + 1]):
            if defended not in saveCandidates:
                saveCandidates.append(defended)
    return coordinateList, saveCandidates


def checkCoord(coordinates, candidates):
    # A pawn is safe if it stands on a square defended by another pawn.
    return sum(1 for cell in coordinates if cell in candidates)


if __name__ == '__main__':
    # These "asserts" are used only for self-checking and are not necessary
    # for auto-testing.
    assert safe_pawns({"b4", "d4", "f4", "c3", "e3", "g5", "d2"}) == 6
    assert safe_pawns({"b4", "c4", "d4", "e4", "f4", "g4", "e5"}) == 1
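A quick worked example of the rule the checker implements, using the first assert's position:

# d4 is defended by c3 and e3; c3 and e3 by d2; b4 by c3; f4 by e3;
# g5 by f4. Only d2 is undefended, so 6 of the 7 pawns are safe.
print(safe_pawns({"b4", "d4", "f4", "c3", "e3", "g5", "d2"}))  # -> 6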
Nguyen, D. N.; Scheick, L. Z. We report on TID and SEE tests of multi-level and higher-density flash memories. Stand-by currents and functionality tests were used to characterize the response of radiation-induced failures.
import bpy import os import sys import subprocess import ast import random script_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) sys.path.append(script_dir) # Get system's python path proc = subprocess.Popen('python3 -c "import sys; print(sys.path)"', stdout=subprocess.PIPE, shell=True) out, err = proc.communicate() paths = ast.literal_eval(out.decode("utf-8")) sys.path += (paths) from shapely.geometry import Polygon from shapely.affinity import translate from src.blender.blend_environment import BlendEnvironment from src.environment import Environment from src.landscape import Mountain, MountainImg, Vegetation from src.model import AbstractModel from src.feature import ImageFeature # how to deal with this ? pencil.layers[0] = GP_Layer.001, ..., pencil.layers[n-1] = GP_Layer.00n, pencil.layers[n] = GP_Layer... (but GP Layer first one) # nb, can change gen_name(i) in id ? maybe not... feature_list = [] model_number_list = [] image_path_list = [] # if we put feature_list into scn, it is transformed into a read-only IDPropertyArray :'( benv = BlendEnvironment((0, 0), (0, 0)) # could probably put this in context def upd_enum(self, context): print(self['MyEnum']) def update_scale(self, context): for (s, models) in benv.models: for model in models: model.scale[0] = s * context.scene.model_scaling model.scale[1] = s * context.scene.model_scaling model.scale[2] = s * context.scene.model_scaling def initSceneProperties(scn): bpy.types.Scene.model_number = bpy.props.IntProperty(name="Number of models", default=20, min=1, max=400) scn["model_number"] = 100 bpy.types.Scene.model_scaling = bpy.props.FloatProperty(name="Scaling of models", default=0.3, min=0, update=update_scale) scn["model_scaling"] = 0.3 bpy.types.Scene.model_path = bpy.props.StringProperty( name="Patht to models", description="Path to models", default="../../models/vegetation/Pine_4m.obj", maxlen=1024, subtype='FILE_PATH') scn["model_path"] = "../../models/vegetation/Pine_4m.obj" bpy.types.Scene.image_path = bpy.props.StringProperty( name="Patht to models", description="Path to models", default="../../hm.png", maxlen=1024, subtype='FILE_PATH') scn["image_path"] = "../../hm.png" myItems = [('MountainImg', 'MountainImg', 'MountainImg'), # ('Mountain', 'Mountain', 'Mountain'), ('Vegetation', 'Vegetation', 'Vegetation'), ('Image', 'Image', 'Image'), ('Urban', 'Urban', 'Urban'), ('Water', 'Water', 'Water')] bpy.types.Scene.MyEnum = bpy.props.EnumProperty( items=myItems, name="Feature choice", update=upd_enum) scn["myItems"] = myItems scn['MyEnum'] = 0 scn["i"] = 0 scn["models_scale"] = 1 return def print_points(): # print points defined using pencil for i, pencil in enumerate(bpy.data.grease_pencil[0].layers): print("step " + str(i)) try: for stroke in enumerate(pencil.active_frame.strokes): stroke_points = pencil.active_frame.strokes[0].points for point in stroke_points: print("\t(" + str(point.co.x) + ", " + str(point.co.y) + ", " + str(point.co.z) + ")") except AttributeError: print("\tempty") def gen_name(i): if(i == 0): name = "GP_Layer" else: name = "GP_Layer." 
+ "{0:0=3d}".format(i) return name def change_color(i): name = gen_name(i) bpy.data.grease_pencil["GPencil"].layers[name].fill_color = (random.random(), random.random(), random.random()) print("colors " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[0]) + " " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[1]) + " " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[2])) bpy.data.grease_pencil["GPencil"].layers[name].fill_alpha = 1 def dist(a, b): return (a[0] - b[0])**2 + (a[1] - b[1])**2 def gen_feature(feature_name, model_number, image_path, shape, transl, scaling, scn): print("Called gen_feature @ %s" % feature_name) # let's first translate our feature. ip = Polygon(list(shape)) # map(lambda x: (x[0], 4x[1]), shape))) p = translate(ip, xoff=transl[0], yoff=transl[1]) if(feature_name == "Mountain"): center_z = 0 center_pos = p.centroid.coords[0] rd = int((max([dist(x, center_pos) for x in p.exterior.coords]) / 2) ** 0.5) print("Radius = %d" % rd) print("Center = %d, %d" % (center_pos[0], center_pos[1])) return Mountain(rd, center_z, center_pos) elif(feature_name == "MountainImg"): center_z = 0 center_pos = p.bounds[0:2] print("Center = %d, %d" % (center_pos[0], center_pos[1])) return MountainImg(p, center=center_pos) elif(feature_name == "Roads"): pass elif(feature_name == "Image"): f = ImageFeature(image_path) f.shape = p return f elif(feature_name == "Vegetation"): for a in p.exterior.coords: print(a) return Vegetation(p, model=AbstractModel(scn["model_path"], 0.02, (0, 0)), tree_number=model_number) elif(feature_name == "Urban"): pass elif(feature_name == "WaterArea"): pass elif(feature_name == "River"): pass class OBJECT_OT_ToolsButton(bpy.types.Operator): bl_idname = "drawenv.execute" bl_label = "Draw something" def execute(self, context): self.report({'INFO'}, "starting drawing") bpy.ops.view3d.viewnumpad(type='TOP', align_active=False) bpy.ops.gpencil.draw('INVOKE_REGION_WIN', mode="DRAW_POLY") change_color(context.scene["i"]) return {'FINISHED'} class OBJECT_OT2_ToolsButton(bpy.types.Operator): bl_idname = "drawenv.stop" bl_label = "Done" def execute(self, context): scn = context.scene self.report({'INFO'}, "stopping drawing") # We add this new feature # We should translate everything, here or when exporting the env # Idea : find bounding box, and translate 2 times... 
# shape_2d = [(p.co.x, p.co.y) for p in bpy.data.grease_pencil[0].layers[scn["i"]].active_frame.strokes[0].points] # feature_list.append(gen_feature(scn["myItems"][scn["MyEnum"]][0], shape_2d)) feature_list.append(scn["myItems"][scn["MyEnum"]][0]) model_number_list.append(scn["model_number"]) image_path_list.append(scn["image_path"]) scn["i"] += 1 bpy.ops.gpencil.layer_add() return {'FINISHED'} class OBJECT_OT3_ToolsButton(bpy.types.Operator): bl_idname = "drawenv.print" bl_label = "Print points" def execute(self, context): print_points() return {'FINISHED'} class OBJECT_OT4_ToolsButton(bpy.types.Operator): bl_idname = "drawenv.hide" bl_label = "Hide/unhide gpencil" def execute(self, context): scn = context.scene for i in range(scn["i"]): bpy.data.grease_pencil[0].layers[i].hide = not bpy.data.grease_pencil["GPencil"].layers[i].hide return {'FINISHED'} class OBJECT_OT4_ToolsButton(bpy.types.Operator): bl_idname = "drawenv.gen" bl_label = "Generate environment" def execute(self, context): global benv scn = context.scene # bpy.ops.view3d.viewnumpad(type='CAMERA', align_active=False) # scaling = max(bb[2] - bb[0], max(bb[2] - bb[0], bb[3] - bb[1])bb[3] - bb[1]) / 28 scaling = 1 shapes = [[] for i in range(scn["i"])] for i in range(scn["i"]): try: for p in bpy.data.grease_pencil[0].layers[i].active_frame.strokes[0].points: shapes[i].append((scaling * p.co.x, - scaling * p.co.y)) except AttributeError: pass bb = bounds(shapes[0]) for shape in shapes[1:]: if(shape != []): s = bounds(shape) bb = (min(bb[0], s[0]), min(bb[1], s[1]), max(bb[2], s[2]), max(bb[3], s[3])) res_x = int(bb[2] - bb[0]) res_y = int(bb[3] - bb[1]) print("Res x %d; res y %d" % (res_x, res_y)) my_features = [gen_feature(feature_list[i], model_number_list[i], image_path_list[i], shapes[i], (-bb[0], -bb[1]), scaling, scn) for i in range(len(shapes)) if shapes[i] != []] env = Environment(my_features, x=res_x, y=res_y) benv = BlendEnvironment((-bb[0], -bb[1]), (res_x, res_y)) # scn["models_scale"] = 1 / (max(bb[2] - bb[0], bb[3] - bb[1]) // (2*scaling)) benv.export_img(env, 2) for i in range(scn["i"]): bpy.data.grease_pencil[0].layers[i].hide = not bpy.data.grease_pencil["GPencil"].layers[i].hide return {'FINISHED'} def bounds(point_list): min_x, min_y = point_list[0] max_x, max_y = point_list[0] for p in point_list[1:]: min_x = min(min_x, p[0]) max_x = max(max_x, p[0]) min_y = min(min_y, p[1]) max_y = max(max_y, p[1]) return (min_x, min_y, max_x, max_y) class FeaturePanel(bpy.types.Panel): bl_category = "Environment" bl_label = "Feature choice" bl_space_type = "VIEW_3D" bl_region_type = "TOOLS" def draw(self, context): layout = self.layout scn = context.scene layout.prop(scn, 'MyEnum') class ToolsPanel(bpy.types.Panel): bl_category = "Environment" bl_label = "Environment panel" bl_space_type = "VIEW_3D" bl_region_type = "TOOLS" def draw(self, context): layout = self.layout scn = context.scene layout.operator("drawenv.execute") layout.operator("drawenv.stop") layout.operator("drawenv.gen") layout.operator("drawenv.print") layout.operator("drawenv.hide") class EnvParamPanel(bpy.types.Panel): bl_category = "Environment" bl_label = "Model parameters" bl_space_type = "VIEW_3D" bl_region_type = "TOOLS" def draw(self, context): layout = self.layout scn = context.scene layout.prop(scn, 'model_number') layout.prop(scn, 'model_scaling') layout.prop(scn, 'model_path') class ImgParamPanel(bpy.types.Panel): bl_category = "Environment" bl_label = "Image parameters" bl_space_type = "VIEW_3D" bl_region_type = "TOOLS" def draw(self, context): 
layout = self.layout scn = context.scene layout.prop(scn, 'image_path') if __name__ == "__main__": initSceneProperties(bpy.context.scene) bpy.utils.register_module(__name__)
Betty started, inwardly, and was seized with an unreasoning fear lest the question might next be put to herself. Quietly, as soon as she could, she moved away from the coin cabinet, and seemed to be examining something else; but she was listening all the while. 'Nothing whatever,' Mrs. Dallas had answered. 'They have not come back to England. I have made out so much. I looked up the family after I came home last fall; their headquarters are at a nice old place down in Devonshire. I introduced myself and got acquainted with them. They are pleasant people. 'When are you going to show Miss Frere and me London?' asked Mrs. Dallas. She was as willing to lead off from the other subject as Betty herself. 'What first, Betty?' said Mrs. Dallas. Betty turned and slowly came back to the others. 'Do you think I have never seen a lion, Mr. Dallas?' said the young lady. 'So should I,' said the younger lady.
from math import floor import Queue import SocketServer import datetime import random import re import sqlite3 import sys import threading import time import traceback HOST = '0.0.0.0' PORT = 4080 CHUNK_SIZE = 32 BUFFER_SIZE = 1024 SPAWN_POINT = (0, 0, 0, 0, 0) DB_PATH = 'craft.db' COMMIT_INTERVAL = 5 YOU = 'U' BLOCK = 'B' CHUNK = 'C' POSITION = 'P' DISCONNECT = 'D' TALK = 'T' KEY = 'K' def log(*args): now = datetime.datetime.utcnow() print now, ' '.join(map(str, args)) def chunked(x): return int(floor(round(x) / CHUNK_SIZE)) class Server(SocketServer.ThreadingMixIn, SocketServer.TCPServer): allow_reuse_address = True daemon_threads = True class Handler(SocketServer.BaseRequestHandler): def setup(self): self.queue = Queue.Queue() self.running = True self.start() def handle(self): model = self.server.model model.enqueue(model.on_connect, self) try: buf = [] while True: data = self.request.recv(BUFFER_SIZE) if not data: break buf.extend(data.replace('\r', '')) while '\n' in buf: index = buf.index('\n') line = ''.join(buf[:index]) buf = buf[index + 1:] model.enqueue(model.on_data, self, line) finally: model.enqueue(model.on_disconnect, self) def finish(self): self.running = False def start(self): thread = threading.Thread(target=self.run) thread.setDaemon(True) thread.start() def run(self): while self.running: try: buf = [] try: buf.append(self.queue.get(timeout=5)) try: while True: buf.append(self.queue.get(False)) except Queue.Empty: pass except Queue.Empty: continue data = ''.join(buf) self.request.sendall(data) except Exception: self.request.close() raise def send_raw(self, data): if data: self.queue.put(data) def send(self, *args): data = '%s\n' % ','.join(map(str, args)) #log('SEND', self.client_id, data[:-1]) self.send_raw(data) class Model(object): def __init__(self): self.clients = [] self.queue = Queue.Queue() self.commands = { CHUNK: self.on_chunk, BLOCK: self.on_block, POSITION: self.on_position, TALK: self.on_talk, } self.patterns = [ (re.compile(r'^/nick(?:\s+(\S+))?$'), self.on_nick), (re.compile(r'^/spawn$'), self.on_spawn), (re.compile(r'^/goto(?:\s+(\S+))?$'), self.on_goto), (re.compile(r'^/pq\s+(-?[0-9]+)\s*,?\s*(-?[0-9]+)$'), self.on_pq), (re.compile(r'^/help$'), self.on_help), (re.compile(r'^/players$'), self.on_players), ] def start(self): thread = threading.Thread(target=self.run) thread.setDaemon(True) thread.start() def run(self): self.connection = sqlite3.connect(DB_PATH) self.create_tables() self.commit() while True: try: if time.time() - self.last_commit > COMMIT_INTERVAL: self.commit() self.dequeue() except Exception: traceback.print_exc() def enqueue(self, func, *args, **kwargs): self.queue.put((func, args, kwargs)) def dequeue(self): try: func, args, kwargs = self.queue.get(timeout=5) func(*args, **kwargs) except Queue.Empty: pass def execute(self, *args, **kwargs): return self.connection.execute(*args, **kwargs) def commit(self): self.last_commit = time.time() self.connection.commit() def create_tables(self): queries = [ 'create table if not exists block (' ' p int not null,' ' q int not null,' ' x int not null,' ' y int not null,' ' z int not null,' ' w int not null' ');', 'create index if not exists block_xyz_idx on block (x, y, z);', 'create unique index if not exists block_pqxyz_idx on ' ' block (p, q, x, y, z);', ] for query in queries: self.execute(query) def next_client_id(self): result = 1 client_ids = set(x.client_id for x in self.clients) while result in client_ids: result += 1 return result def on_connect(self, client): client.client_id = 
self.next_client_id() client.nick = 'player%d' % client.client_id log('CONN', client.client_id, *client.client_address) client.position = SPAWN_POINT self.clients.append(client) client.send(YOU, client.client_id, *client.position) client.send(TALK, 'Welcome to Craft!') client.send(TALK, 'Type "/help" for chat commands.') self.send_position(client) self.send_positions(client) self.send_talk('%s has joined the game.' % client.nick) def on_data(self, client, data): #log('RECV', client.client_id, data) args = data.split(',') command, args = args[0], args[1:] if command in self.commands: func = self.commands[command] func(client, *args) def on_disconnect(self, client): log('DISC', client.client_id, *client.client_address) self.clients.remove(client) self.send_disconnect(client) self.send_talk('%s has disconnected from the server.' % client.nick) def on_chunk(self, client, p, q, key=0): p, q, key = map(int, (p, q, key)) query = ( 'select rowid, x, y, z, w from block where ' 'p = :p and q = :q and rowid > :key;' ) rows = self.execute(query, dict(p=p, q=q, key=key)) max_rowid = 0 for rowid, x, y, z, w in rows: client.send(BLOCK, p, q, x, y, z, w) max_rowid = max(max_rowid, rowid) if max_rowid: client.send(KEY, p, q, max_rowid) def on_block(self, client, x, y, z, w): x, y, z, w = map(int, (x, y, z, w)) if y <= 0 or y > 255 or w < 0 or w > 11: return p, q = chunked(x), chunked(z) query = ( 'insert or replace into block (p, q, x, y, z, w) ' 'values (:p, :q, :x, :y, :z, :w);' ) self.execute(query, dict(p=p, q=q, x=x, y=y, z=z, w=w)) self.send_block(client, p, q, x, y, z, w) if chunked(x - 1) != p: self.execute(query, dict(p=p - 1, q=q, x=x, y=y, z=z, w=-w)) self.send_block(client, p - 1, q, x, y, z, -w) if chunked(x + 1) != p: self.execute(query, dict(p=p + 1, q=q, x=x, y=y, z=z, w=-w)) self.send_block(client, p + 1, q, x, y, z, -w) if chunked(z - 1) != q: self.execute(query, dict(p=p, q=q - 1, x=x, y=y, z=z, w=-w)) self.send_block(client, p, q - 1, x, y, z, -w) if chunked(z + 1) != q: self.execute(query, dict(p=p, q=q + 1, x=x, y=y, z=z, w=-w)) self.send_block(client, p, q + 1, x, y, z, -w) def on_position(self, client, x, y, z, rx, ry): x, y, z, rx, ry = map(float, (x, y, z, rx, ry)) client.position = (x, y, z, rx, ry) self.send_position(client) def on_talk(self, client, *args): text = ','.join(args) if text.startswith('/'): for pattern, func in self.patterns: match = pattern.match(text) if match: func(client, *match.groups()) break else: client.send(TALK, 'Unrecognized command: "%s"' % text) else: self.send_talk('%s> %s' % (client.nick, text)) def on_nick(self, client, nick=None): if nick is None: client.send(TALK, 'Your nickname is %s' % client.nick) else: self.send_talk('%s is now known as %s' % (client.nick, nick)) client.nick = nick def on_spawn(self, client): client.position = SPAWN_POINT client.send(YOU, client.client_id, *client.position) self.send_position(client) def on_goto(self, client, nick=None): if nick is None: clients = [x for x in self.clients if x != client] other = random.choice(self.clients) if clients else None else: nicks = dict((client.nick, client) for client in self.clients) other = nicks.get(nick) if other: client.position = other.position client.send(YOU, client.client_id, *client.position) self.send_position(client) def on_pq(self, client, p, q): p, q = map(int, (p, q)) if abs(p) > 1000 or abs(q) > 1000: return client.position = (p * CHUNK_SIZE, 0, q * CHUNK_SIZE, 0, 0) client.send(YOU, client.client_id, *client.position) self.send_position(client) def on_help(self, 
client): client.send(TALK, 'Type "t" to chat with other players.') client.send(TALK, 'Type "/" to start typing a command.') client.send(TALK, 'Commands: /goto [NAME], /help, /nick [NAME], /players, /spawn') def on_players(self, client): client.send(TALK, 'Players: %s' % ', '.join(x.nick for x in self.clients)) def send_positions(self, client): for other in self.clients: if other == client: continue client.send(POSITION, other.client_id, *other.position) def send_position(self, client): for other in self.clients: if other == client: continue other.send(POSITION, client.client_id, *client.position) def send_disconnect(self, client): for other in self.clients: if other == client: continue other.send(DISCONNECT, client.client_id) def send_block(self, client, p, q, x, y, z, w): for other in self.clients: if other == client: continue other.send(BLOCK, p, q, x, y, z, w) def send_talk(self, text): for client in self.clients: client.send(TALK, text) def main(): host, port = HOST, PORT if len(sys.argv) > 1: host = sys.argv[1] if len(sys.argv) > 2: port = int(sys.argv[2]) log('SERV', host, port) model = Model() model.start() server = Server((host, port), Handler) server.model = model server.serve_forever() if __name__ == '__main__': main()
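The wire protocol above is plain text: comma-separated fields, one newline-terminated message per line, with single-letter command codes (U, B, C, P, D, T, K). As a minimal sketch of talking to it (the host, port, and chat text below are assumptions, not part of the server code), a client can be as simple as:

import socket

sock = socket.create_connection(('127.0.0.1', 4080))   # assumed host/port
stream = sock.makefile()
print 'server says:', stream.readline().strip()        # "U,<id>,<x>,<y>,<z>,<rx>,<ry>"
sock.sendall('C,0,0\n')                                 # request blocks for chunk (0, 0)
sock.sendall('T,hello from a script\n')                 # chat line, echoed to everyone
for _ in range(3):                                      # two welcome lines plus the chat echo
    print stream.readline().strip()
sock.close()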
Low oil prices seem to be pushing some of the big players to team up, as the perception is that there are bargain acquisitions to be had while prices are low. The assumption, of course, is that prices will rise again and value will return to this space, thus giving all of us suppliers cause to stop holding our breath. The best and most recent example is Schlumberger’s acquisition of Cameron. As with all acquisitions, I’m sure Schlumberger will identify some redundancies in the combined workforce, and there will be layoffs. The hope is that once the two giant firms are integrated, there will be new projects to engage on and new opportunities for steel castings and iron castings sales. That is a purely selfish concern, of course. For everyone’s sake, let’s hope the oil market stabilizes sooner rather than later.
""" SMDS authentication module for web2py. Parts borrowed from /usr/share/python-support/python-gluon/gluon/tools.py """ import sys sys.path.append( "/usr/share/syndicate_md" ) sys.path.append( "/usr/share/python-support/python-gluon/" ) import base64 import cPickle import datetime import thread import logging import sys import os import re import time import copy import smtplib import urllib import urllib2 import Cookie import cStringIO from email import MIMEBase, MIMEMultipart, MIMEText, Encoders, Header, message_from_string from gluon.contenttype import contenttype from gluon.storage import Storage, StorageList, Settings, Messages from gluon.utils import web2py_uuid from gluon import * from gluon.fileutils import read_file from gluon.html import * import gluon.serializers import gluon.contrib.simplejson as simplejson from SMDS.mdapi import MDAPI from SMDS.auth import auth_user_from_email, auth_password_check from SMDS.user import * from SMDS.web2py.extras import SMDS_validators import SMDS.logger as logger from gluon.tools import Auth as GluonAuth logger.init( "/tmp/SMDS_Auth.log" ) DEFAULT = lambda: None def callback(actions,form,tablename=None): if actions: if tablename and isinstance(actions,dict): actions = actions.get(tablename, []) if not isinstance(actions,(list, tuple)): actions = [actions] [action(form) for action in actions] def validators(*a): b = [] for item in a: if isinstance(item, (list, tuple)): b = b + list(item) else: b.append(item) return b def call_or_redirect(f,*args): if callable(f): redirect(f(*args)) else: redirect(f) def dict_to_Rows( my_dict ): extra_dict = {'_extra': my_dict} row = Row( extra_dict ) rows = Rows( records=[row], colnames=list(my_dict.keys()), compact=False ) return rows class SMDS_Auth( GluonAuth ): """ web2py Authentication module for SMDS """ def __init__(self, api): """ auth=Auth(globals(), db) - environment is there for legacy but unused (awful) - db has to be the database where to create tables for authentication """ controller = 'default' cas_provider = None self.db = None self.environment = current request = current.request session = current.session auth = session.auth if auth and auth.last_visit and auth.last_visit + \ datetime.timedelta(days=0, seconds=auth.expiration) > request.now: self.user = auth.user # this is a trick to speed up sessions if (request.now - auth.last_visit).seconds > (auth.expiration/10): auth.last_visit = request.now else: self.user = None session.auth = None settings = self.settings = Settings() # ## what happens after login? # ## what happens after registration? 
settings.hideerror = False settings.cas_domains = [request.env.http_host] settings.cas_provider = cas_provider settings.extra_fields = {} settings.actions_disabled = [] settings.reset_password_requires_verification = False settings.registration_requires_verification = False settings.registration_requires_approval = True settings.alternate_requires_registration = False settings.create_user_groups = False settings.controller = controller settings.login_url = self.url('user', args='login') settings.logged_url = self.url('user', args='profile') settings.download_url = self.url('download') settings.mailer = None settings.login_captcha = None settings.register_captcha = None settings.retrieve_username_captcha = None settings.retrieve_password_captcha = None settings.captcha = None settings.expiration = 3600 # one hour settings.long_expiration = 3600*30*24 # one month settings.remember_me_form = False settings.allow_basic_login = False settings.allow_basic_login_only = False settings.on_failed_authorization = \ self.url('user',args='not_authorized') settings.on_failed_authentication = lambda x: redirect(x) settings.formstyle = 'table3cols' settings.label_separator = ': ' # ## table names to be used settings.password_field = 'password' settings.table_user_name = 'auth_user' settings.table_group_name = 'auth_group' settings.table_membership_name = 'auth_membership' settings.table_permission_name = 'auth_permission' settings.table_event_name = 'auth_event' settings.table_cas_name = 'auth_cas' # ## if none, they will be created settings.table_user = None settings.table_group = None settings.table_membership = None settings.table_permission = None settings.table_event = None settings.table_cas = None # ## settings.showid = False # ## these should be functions or lambdas settings.login_next = self.url('index') settings.login_onvalidation = [] settings.login_onaccept = [] settings.login_methods = [self] settings.login_form = self settings.login_email_validate = True settings.login_userfield = "username" settings.logout_next = self.url('index') settings.logout_onlogout = lambda x: None settings.register_next = self.url('index') settings.register_onvalidation = [] settings.register_onaccept = [] settings.register_fields = None settings.verify_email_next = self.url('user', args='login') settings.verify_email_onaccept = [] settings.profile_next = self.url('index') settings.profile_onvalidation = [] settings.profile_onaccept = [] settings.profile_fields = None settings.retrieve_username_next = self.url('index') settings.retrieve_password_next = self.url('index') settings.request_reset_password_next = self.url('user', args='login') settings.reset_password_next = self.url('user', args='login') settings.change_password_next = self.url('index') settings.change_password_onvalidation = [] settings.change_password_onaccept = [] settings.retrieve_password_onvalidation = [] settings.reset_password_onvalidation = [] settings.hmac_key = None settings.lock_keys = True # ## these are messages that can be customized messages = self.messages = Messages(current.T) messages.login_button = 'Login' messages.register_button = 'Register' messages.password_reset_button = 'Request reset password' messages.password_change_button = 'Change password' messages.profile_save_button = 'Save profile' messages.submit_button = 'Submit' messages.verify_password = 'Verify Password' messages.delete_label = 'Check to delete:' messages.function_disabled = 'Function disabled' messages.access_denied = 'Insufficient privileges' 
messages.registration_verifying = 'Registration needs verification' messages.registration_pending = 'Registration is pending approval' messages.login_disabled = 'Login disabled by administrator' messages.logged_in = 'Logged in' messages.email_sent = 'Email sent' messages.unable_to_send_email = 'Unable to send email' messages.email_verified = 'Email verified' messages.logged_out = 'Logged out' messages.registration_successful = 'Registration successful' messages.invalid_email = 'Invalid email' messages.unable_send_email = 'Unable to send email' messages.invalid_login = 'Invalid login' messages.invalid_user = 'Invalid user' messages.invalid_password = 'Invalid password' messages.is_empty = "Cannot be empty" messages.mismatched_password = "Password fields don't match" messages.verify_email = 'A user wishes to join Syndicate.\nDetails:\n Username: %(username)s\n Email: %(email)s' messages.verify_email_subject = 'Email verification' messages.username_sent = 'Your username was emailed to you' messages.new_password_sent = 'A new password was emailed to you' messages.password_changed = 'Password changed' messages.retrieve_username = 'Your username is: %(username)s' messages.retrieve_username_subject = 'Username retrieve' messages.retrieve_password = 'Your password is: %(password)s' messages.retrieve_password_subject = 'Password retrieve' messages.reset_password = \ 'Click on the link http://...reset_password/%(key)s to reset your password' messages.reset_password_subject = 'Password reset' messages.invalid_reset_password = 'Invalid reset password' messages.profile_updated = 'Profile updated' messages.new_password = 'New password' messages.old_password = 'Old password' messages.group_description = \ 'Group uniquely assigned to user %(id)s' messages.register_log = 'User %(id)s Registered' messages.login_log = 'User %(id)s Logged-in' messages.login_failed_log = None messages.logout_log = 'User %(id)s Logged-out' messages.profile_log = 'User %(id)s Profile updated' messages.verify_email_log = 'User %(id)s Verification email sent' messages.retrieve_username_log = 'User %(id)s Username retrieved' messages.retrieve_password_log = 'User %(id)s Password retrieved' messages.reset_password_log = 'User %(id)s Password reset' messages.change_password_log = 'User %(id)s Password changed' messages.add_group_log = 'Group %(group_id)s created' messages.del_group_log = 'Group %(group_id)s deleted' messages.add_membership_log = None messages.del_membership_log = None messages.has_membership_log = None messages.add_permission_log = None messages.del_permission_log = None messages.has_permission_log = None messages.impersonate_log = 'User %(id)s is impersonating %(other_id)s' messages.label_first_name = 'First name' messages.label_last_name = 'Last name' messages.label_username = 'Username' messages.label_email = 'E-mail' messages.label_password = 'Password' messages.label_registration_key = 'Registration key' messages.label_reset_password_key = 'Reset Password key' messages.label_registration_id = 'Registration identifier' messages.label_role = 'Role' messages.label_description = 'Description' messages.label_user_id = 'User ID' messages.label_group_id = 'Group ID' messages.label_name = 'Name' messages.label_table_name = 'Table name' messages.label_record_id = 'Record ID' messages.label_time_stamp = 'Timestamp' messages.label_client_ip = 'Client IP' messages.label_origin = 'Origin' messages.label_remember_me = "Remember me (for 30 days)" messages['T'] = current.T messages.verify_password_comment = 'please input your 
password again' messages.lock_keys = True self.user = None self.api = api self.maint_email = api.config.MD_MAIL_SUPPORT_ADDRESS # disable stuff for now settings.actions_disabled.append('retrieve_username') settings.actions_disabled.append('retrieve_password') settings.actions_disabled.append('request_reset_password') settings.actions_disabled.append('profile') settings.actions_disabled.append('change_password') def _get_user_id(self): "accessor for auth.user_id" return (self.user and self.user.get('user_id')) or None user_id = property(_get_user_id, doc="user.id or None") def _HTTP(self, *a, **b): """ only used in lambda: self._HTTP(404) """ raise HTTP(*a, **b) def __call__(self): """ usage: def authentication(): return dict(form=auth()) """ request = current.request args = request.args if not args: redirect(self.url(args='login',vars=request.vars)) elif args[0] in self.settings.actions_disabled: raise HTTP(404) """ if args[0] in ('login','logout','register','verify_email', 'retrieve_username','retrieve_password', 'reset_password','request_reset_password', 'change_password','profile','groups', 'impersonate','not_authorized'): """ if args[0] in ('login','logout','register','not_authorized'): return getattr(self,args[0])() else: raise HTTP(404) def navbar(self,prefix='Welcome',action=None): """ Create a pretty navigation bar """ try: user = None session = current.session if session.auth: user = session.auth['user'] request = current.request T = current.T if isinstance(prefix,str): prefix = T(prefix) if not action: action=URL(request.application,request.controller,'user') if prefix: prefix = prefix.strip()+' ' if user: logout=A(T('logout'),_href=action+'/logout') profile=A(T('profile'),_href=action+'/profile') password=A(T('password'),_href=action+'/change_password') bar = SPAN(prefix, user['username'],' [ ', logout, ']',_class='auth_navbar') if not 'profile' in self.settings.actions_disabled: bar.insert(4, ' | ') bar.insert(5, profile) if not 'change_password' in self.settings.actions_disabled: bar.insert(-1, ' | ') bar.insert(-1, password) else: login=A(T('login'),_href=action+'/login') register=A(T('register'),_href=action+'/register') retrieve_username=A(T('forgot username?'), _href=action+'/retrieve_username') lost_password=A(T('lost password?'), _href=action+'/request_reset_password') bar = SPAN('[ ',login,' ]',_class='auth_navbar') if not 'register' in self.settings.actions_disabled: bar.insert(2, ' | ') bar.insert(3, register) if 'username' in User.public_fieldnames and not 'retrieve_username' in self.settings.actions_disabled: bar.insert(-1, ' | ') bar.insert(-1, retrieve_username) if not 'request_reset_password' in self.settings.actions_disabled: bar.insert(-1, ' | ') bar.insert(-1, lost_password) return bar except Exception, e: logger.exception(e, "Navbar error") logger.flush() def define_tables(self, username=None, migrate=None, fake_migrate=None): """ Do NOT define tables """ pass def register(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT): """ Register a new user """ request = current.request response = current.response session = current.session if self.is_logged_in(): # don't allow registration if we're already logged in redirect(self.settings.logged_url) # fill in defaults if next == DEFAULT: next = request.get_vars._next \ or request.post_vars._next \ or self.settings.register_next if onvalidation == DEFAULT: onvalidation = self.settings.register_onvalidation if onaccept == DEFAULT: onaccept = self.settings.register_onaccept if log == DEFAULT: log = 
self.messages.register_log # create a form... userfield = self.settings.login_userfield passfield = self.settings.password_field formstyle = self.settings.formstyle form =FORM( \ TABLE( \ TR(TD('Username:'), TD(INPUT(_name="username",_type="text",requires=IS_SLUG(error_message="Invalid username")))), \ TR(TD('Email:'), TD(INPUT(_name="email", _type="text",requires=IS_EMAIL(error_message=self.messages.invalid_email)))), \ TR(TD('Password:'), TD(INPUT(_name="password", _type="password"))), \ TR(TD('Re-type Password:'), TD(INPUT(_name="password2", _type="password", \ requires=IS_EXPR("value==%s" % repr(request.vars.get('password',None))), \ error_message=self.settings.mismatched_password))) \ ), \ INPUT(_type="Submit",_value="Register"), \ _name="register" ) if form.accepts(request, session, formname='register', onvalidation=onvalidation,hideerror=self.settings.hideerror): # verify that the password forms are the same if form.vars['password'] != form.vars['password2']: response.flash = messages.mismatched_password # inform the admin """ if not self.settings.mailer or \ not self.settings.mailer.send( to=self.maint_email, subject=self.messages.verify_email_subject, message=self.messages.verify_email % dict(username=form.vars['username'], email=form.vars['email'])): response.flash = self.messages.unable_send_email return form session.flash = self.messages.email_sent """ # make sure this user does not exist rc = 0 msg = "" try: user = Users(self.api, {'username': form.vars['username']})[0] rc = -1 # already exists msg = "User already exists" except: pass # create the user if rc == 0: try: user_fields = {'username': form.vars['username'], 'password': form.vars['password'], 'email': form.vars['email']} rc = self.api.call( ("127.0.0.1", "localhost"), "AddUser", self.api.maint_auth, user_fields ) except Exception, e: logger.exception(e, "register: exception") logger.flush() msg = "User could not be registered" rc = -1 if rc < 0: response.flash = msg logger.error("Failed to add user '%s' (email '%s')" % (user_fields['username'], user_fields['email']) ) return form session.flash = self.messages.registration_pending if log: logger.info("Added user '%s' (email '%s')" % (user_fields['username'], user_fields['email']) ) callback(onaccept,form) if not next: next = self.url(args = request.args) elif isinstance(next, (list, tuple)): ### fix issue with 2.6 next = next[0] elif next and not next[0] == '/' and next[:4] != 'http': next = self.url(next.replace('[id]', str(form.vars.id))) redirect(next) return form def login_bare( self, username, password ): """ Bare essentials login. 
""" api = MDAPI() user = None try: user = auth_user_from_email( api, username ) except Exception, e: logger.error( "User '%s' could not be authenticated (exception = %s)" % (username, e) ) return False rc = False auth_struct = {'AuthMethod': 'password', 'Username': user['username'], 'AuthString': password} try: rc = auth_password_check( api, auth_struct, user, None ) except Exception, e: logger.error( "User '%s' failed to authenticate" % username) if rc and user: user_public = user.public() user_stored = Storage(user_public) if log: logger.info("SMDS_Auth: User '%s' logged in" % user_public['username']) # process authenticated users # user wants to be logged in for longer session.auth = Storage( user = user_stored, last_visit = request.now, expiration = self.settings.expiration, hmac_key = web2py_uuid() ) self.user = user_public logger.info("SMDS_Auth: user_id = %s" % self.user_id) logger.flush() return user return rc def login(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT): """ Handle a login request, and redirect. """ request = current.request response = current.response session = current.session username_field = self.settings.login_userfield password_field = self.settings.password_field if next == DEFAULT: next = request.get_vars._next \ or request.post_vars._next \ or self.settings.login_next if onvalidation == DEFAULT: onvalidation = self.settings.login_onvalidation if onaccept == DEFAULT: onaccept = self.settings.login_onaccept if log == DEFAULT: log = self.messages.login_log user = None accepted_form = False if self.settings.login_form == self: # this object was responsible for logging in form =FORM( \ TABLE( \ TR(TD('Username:'), TD(INPUT(_name="username",_type="text",requires=IS_SLUG(error_message="Invalid Username")))), \ TR(TD('Password:'), TD(INPUT(_name="password", _type="password"))) \ ), \ INPUT(_type="Submit",_value="Login"), \ _name="login" ) if form.accepts(request.vars, session, formname='login', onvalidation=onvalidation, hideerror=self.settings.hideerror): # sanitize inputs accepted_form = True # check for username in db username = form.vars[username_field] user = None try: user = Users( self.api, {'username': username} )[0] except: pass if user: # user in db, check if registration pending or disabled temp_user = user if temp_user['enabled'] == False: # user is not yet enabled response.flash = self.messages.login_disabled return form # check password try: rc = auth_password_check( self.api, {'Username':user['username'], 'AuthMethod':'password', 'AuthString':form.vars[password_field]}, user, None ) except: if log: logger.error("SMDS_Auth: User '%s' authentication failed (invalid credentials)" % user['username'] ) logger.flush() user = None # invalid credentials if not user: if log: logger.error("SMDS_Auth: User could not be looked up" ) logger.flush() # invalid login session.flash = self.messages.invalid_login redirect(self.url(args=request.args,vars=request.get_vars)) if user: user_public = user.public() user_stored = Storage(user_public) if log: logger.info("SMDS_Auth: User '%s' logged in" % user_public['username']) # process authenticated users # user wants to be logged in for longer session.auth = Storage( user = user_stored, last_visit = request.now, expiration = self.settings.long_expiration, remember = request.vars.has_key("remember"), hmac_key = web2py_uuid() ) self.user = user_public logger.info("SMDS_Auth: user_id = %s" % self.user_id) logger.flush() session.flash = self.messages.logged_in # how to continue if self.settings.login_form 
== self: if accepted_form: callback(onaccept,form) if isinstance(next, (list, tuple)): # fix issue with 2.6 next = next[0] if next and not next[0] == '/' and next[:4] != 'http': next = self.url(next.replace('[id]', str(form.vars.id))) redirect(next) return form elif user: callback(onaccept,None) redirect(next) def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT): """ Handle a logout """ session = current.session user = None if session.auth: user = session.auth['user'] self.user = user if log: if user: logger.info("SMDS_Auth: User '%s' logged out" % user['username']) logger.flush() next = self.settings.logout_next #super(SMDS_Auth, self).logout( lambda x: redirect(self.url('index')), lambda x, log ) if next == DEFAULT: next = self.settings.logout_next """ if onlogout == DEFAULT: onlogout = self.settings.logout_onlogout if onlogout: onlogout(self.user) if log == DEFAULT: log = self.messages.logout_log if log and self.user: self.log_event(log % self.user) if self.settings.login_form != self: cas = self.settings.login_form cas_user = cas.get_user() if cas_user: next = cas.logout_url(next) """ current.session.auth = None current.session.flash = self.messages.logged_out if next: redirect(next) def requires_login(self): """ decorator that prevents access to action if not logged in """ def decorator(action): def f(*a, **b): if self.settings.allow_basic_login_only and not self.basic(): if current.request.is_restful: raise HTTP(403,"Not authorized") return call_or_redirect(self.settings.on_failed_authorization) if not self.basic() and not current.session.auth: #self.is_logged_in(): if current.request.is_restful: raise HTTP(403,"Not authorized") request = current.request next = URL(r=request,args=request.args, vars=request.get_vars) current.session.flash = current.response.flash return call_or_redirect( self.settings.on_failed_authentication, self.settings.login_url + '?_next='+urllib.quote(next) ) return action(*a, **b) f.__doc__ = action.__doc__ f.__name__ = action.__name__ f.__dict__.update(action.__dict__) return f return decorator def profile(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT): pass def change_password(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT): pass def verify_email(self, next=DEFAULT, onaccept=DEFAULT, log=DEFAULT ): pass def retrieve_username(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ): pass def request_reset_password( self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ): pass def reset_password( self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ): pass def impersonate( self, user_id=DEFAULT ): pass def groups( self ): pass def not_authorized( self ): """ YOU SHALL NOT PASS """ return 'ACCESS DENIED' def SMDS_authentication( logfile="/tmp/SMDS_login.log" ): """ Authenticate with the Syndicate metadata service """ logger.init( open(logfile, "a") ) def SMDS_auth_aux(username, password): api = MDAPI() user = None try: user = auth_user_from_email( api, username ) except Exception, e: logger.error( "User '%s' could not be authenticated (exception = %s)" % (username, e) ) return False rc = False auth_struct = {'AuthMethod': 'password', 'Username': user['username'], 'AuthString': password} try: rc = auth_password_check( api, auth_struct, user, None ) except Exception, e: logger.error( "User '%s' failed to authenticate" % username) return rc return SMDS_auth_aux
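A minimal sketch of how SMDS_Auth might be wired into a web2py application; the controller layout, the bare MDAPI() construction, and the action names below are assumptions rather than anything specified by the module above (only the dict(form=auth()) pattern comes from its docstring):

# hypothetical controllers/default.py
from SMDS.mdapi import MDAPI

auth = SMDS_Auth(MDAPI())

def user():
    # dispatches /default/user/login, /user/logout, /user/register, /user/not_authorized
    return dict(form=auth())

def index():
    # renders login/register links, or the logout link once signed in
    return dict(navbar=auth.navbar(prefix='Welcome'))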
I wasn’t paying attention, so I asked him to repeat it: “Say that again …but backwards!”, I added, because I like to seem whimsical and interesting. After a short while I noticed he still hadn’t replied. I looked over to find he’d opened a text editor and was busy figuring out how to pronounce the sentence phonetically reversed. To which I would reply: shush, I’m having fun. There followed a great evening: I hacked away in an IPython Notebook while Benjamin explained diphthongs, IPA, and pronunciation. The results were surprisingly understandable, though there’s definitely a trick to making it sound accurate: you have to put the emphasis in different places. You need to lower the pitch of the first word, not the last. It’s a weird effect, though! Foreign yet familiar. Apparently there’s a scene in a horror movie (I don’t like horror movies!) that actually uses this technique: the actors learnt their lines backwards, and the film was then reversed. You can also see the IPython notebook we ended up with.
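For flavour, here is roughly the kind of thing that notebook did: a minimal sketch, assuming a WAV recording called speech.wav (the filenames are made up), that reverses the audio frame by frame with the standard-library wave module.

import wave

src = wave.open('speech.wav', 'rb')
params = src.getparams()               # (nchannels, sampwidth, framerate, nframes, ...)
data = src.readframes(params[3])
src.close()

frame_size = params[0] * params[1]     # bytes per frame = channels * sample width
frames = [data[i:i + frame_size] for i in xrange(0, len(data), frame_size)]

dst = wave.open('speech_reversed.wav', 'wb')
dst.setparams(params)
dst.writeframes(''.join(reversed(frames)))
dst.close()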
from collections import defaultdict import numpy as np class UtilityScraper(object): def __init__(self, num_grid_x, num_grid_y): self.num_grid_x = num_grid_x self.num_grid_y = num_grid_y self.utilities = defaultdict(list) self.factors = defaultdict(list) self.ra_values = defaultdict(list) def add(self, f_ma_me, f_ma_you, f_mv_me, f_mv_you, f_mw_me, f_mw_you, f_ra, f_rd, f_rv, f_o_me, f_o_you, f_s_me, f_s_you, o_me_dis, o_you_dis, s_me_theta, s_you_theta, rv_vec, rd_dis, ra_theta, mv_me_vec, mv_you_vec, ma_me_gal, ma_you_gal, mw_me_rad, mw_you_rad): self.utilities["f_rv"].append(f_rv) self.utilities["f_rd"].append(f_rd) self.utilities["f_ra"].append(f_ra) self.utilities["f_mv_me"].append(f_mv_me) self.utilities["f_mv_you"].append(f_mv_you) self.utilities["f_ma_me"].append(f_ma_me) self.utilities["f_ma_you"].append(f_ma_you) self.utilities["f_mw_me"].append(f_mw_me) self.utilities["f_mw_you"].append(f_mw_you) self.utilities["f_o_me"].append(f_o_me) self.utilities["f_o_you"].append(f_o_you) self.utilities["f_s_me"].append(f_s_me) self.utilities["f_s_you"].append(f_s_you) self.factors["rv_vec"].append(rv_vec) self.factors["rd_dis"].append(rd_dis) self.factors["ra_theta"].append(ra_theta) self.factors["mv_me_vec"].append(mv_me_vec) self.factors["mv_you_vec"].append(mv_you_vec) self.factors["ma_me_gal"].append(ma_me_gal) self.factors["ma_you_gal"].append(ma_you_gal) self.factors["mw_me_rad"].append(mw_me_rad) self.factors["mw_you_rad"].append(mw_you_rad) self.factors["o_me_dis"].append(o_me_dis) self.factors["o_you_dis"].append(o_you_dis) self.factors["s_me_theta"].append(s_me_theta) self.factors["s_you_theta"].append(s_you_theta) def add_ra_values(self, p_me, p_you, d_you, v_yoko, theta_mae, theta_yoko, theta, r_a): self.ra_values["p_me"].append(p_me) self.ra_values["p_you"].append(p_you) self.ra_values["v_mae"].append(d_you) self.ra_values["v_yoko"].append(v_yoko) self.ra_values["theta_mae"].append(theta_mae) self.ra_values["theta_yoko"].append(theta_yoko) self.ra_values["theta"].append(theta) self.ra_values["r_a"].append(r_a) def get_utility_maps(self): maps = {} for name, lst in self.utilities.items(): assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst) lst = np.array(lst) num_step = len(lst) // (self.num_grid_y * self.num_grid_x * self.num_grid_y * self.num_grid_x) maps[name] = lst.reshape( num_step, self.num_grid_y, self.num_grid_x, self.num_grid_y, self.num_grid_x) return maps def get_factors_maps(self): theta_maps = {} for name, lst in self.factors.items(): assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst) lst = np.array(lst) num_step = len(lst) // (self.num_grid_y * self.num_grid_x * self.num_grid_y * self.num_grid_x) theta_maps[name] = lst.reshape( num_step, self.num_grid_y, self.num_grid_x, self.num_grid_y, self.num_grid_x) return theta_maps def get_values_maps(self): values_maps = {} for name, lst in self.ra_values.items(): assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst) lst = np.array(lst) num_step = len(lst) // (self.num_grid_y * self.num_grid_x * self.num_grid_y * self.num_grid_x) size = lst.shape[-1] if lst.ndim > 1: values_maps[name] = lst.reshape( num_step, self.num_grid_y, self.num_grid_x, self.num_grid_y, self.num_grid_x, size) else: values_maps[name] = lst.reshape( num_step, self.num_grid_y, self.num_grid_x, self.num_grid_y, self.num_grid_x, 1) return values_maps
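A quick smoke test of the reshape logic above, as a sketch: the grid sizes and the synthetic f_rv samples are made up, and the utilities dict is filled directly instead of going through the 26-argument add() call.

import numpy as np

scraper = UtilityScraper(num_grid_x=2, num_grid_y=2)
for step in range(3):                  # three simulated steps
    for _ in range(2 * 2 * 2 * 2):     # one sample per ("me" cell, "you" cell) pair
        scraper.utilities["f_rv"].append(np.random.rand())

maps = scraper.get_utility_maps()
assert maps["f_rv"].shape == (3, 2, 2, 2, 2)   # (num_step, gy, gx, gy, gx)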
InTerraChem’s demulsifiers are used worldwide with superior results. We offer a full line of chemistries to treat a broad range of oils: from #1 to #6 oil, we have chemistries to break any emulsion you may have. We also offer a full line of surfactants to de-ash oil, removing all of the “non-burning” elements in crankcase oils, as well as water treatment products used in the pre-treatment of oily wastewaters. Our products include coagulants, flocculants, and metal precipitants.
#!/usr/bin/env python import rospy from flexbe_msgs.msg import BehaviorLog class Logger(object): ''' Realizes behavior-specific logging. ''' REPORT_INFO = BehaviorLog.INFO REPORT_WARN = BehaviorLog.WARN REPORT_HINT = BehaviorLog.HINT REPORT_ERROR = BehaviorLog.ERROR REPORT_DEBUG = BehaviorLog.DEBUG LOGGING_TOPIC = 'flexbe/log' _pub = None @staticmethod def initialize(): Logger._pub = rospy.Publisher(Logger.LOGGING_TOPIC, BehaviorLog, queue_size=100) @staticmethod def log(text, severity): if Logger._pub is None: Logger.initialize() # send message with logged text msg = BehaviorLog() msg.text = str(text) msg.status_code = severity Logger._pub.publish(msg) # also log locally Logger.local(text, severity) @staticmethod def local(text, severity): if severity == Logger.REPORT_INFO: rospy.loginfo(text) elif severity == Logger.REPORT_WARN: rospy.logwarn(text) elif severity == Logger.REPORT_HINT: rospy.loginfo('\033[94mBehavior Hint: %s\033[0m', text) elif severity == Logger.REPORT_ERROR: rospy.logerr(text) elif severity == Logger.REPORT_DEBUG: rospy.logdebug(text) else: rospy.logdebug(text + ' (unknown log level %s)' % str(severity)) @staticmethod def logdebug(text, *args): Logger.log(text % args, Logger.REPORT_DEBUG) @staticmethod def loginfo(text, *args): Logger.log(text % args, Logger.REPORT_INFO) @staticmethod def logwarn(text, *args): Logger.log(text % args, Logger.REPORT_WARN) @staticmethod def loghint(text, *args): Logger.log(text % args, Logger.REPORT_HINT) @staticmethod def logerr(text, *args): Logger.log(text % args, Logger.REPORT_ERROR) @staticmethod def localdebug(text, *args): Logger.local(text % args, Logger.REPORT_DEBUG) @staticmethod def localinfo(text, *args): Logger.local(text % args, Logger.REPORT_INFO)
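A minimal usage sketch for the Logger above, assuming a running roscore and that the node name below is free (neither is implied by the module itself):

import rospy

rospy.init_node('logger_demo')
Logger.loginfo('Battery at %d%%', 42)          # published on flexbe/log and echoed via rospy.loginfo
Logger.logwarn('Obstacle within %.2f m', 0.5)
Logger.loghint('Try teleoperation')            # HINT renders in blue in the local log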
Excellent customer service, very friendly and professional. If you are buying, selling or renting, I cannot recommend Paul and his team enough. They made selling our house and co-ordinating a short-term tenancy prior to our move abroad much easier than we ever hoped it could be. Paul was in regular contact with us, our buyers and our solicitors from offer right through to exchange and completion to keep the process running smoothly, and Natalie made sure we had a rental ready and waiting when we moved. Thank you to all at Paxtons for your help and hard work.
# -*- coding: utf-8 -*- """Tests checking for link access from outside.""" import fauxfactory import pytest from widgetastic.utils import partial_match from cfme import test_requirements from cfme.infrastructure.provider import InfraProvider from cfme.infrastructure.virtual_machines import Vm from cfme.fixtures import pytest_selenium as sel from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.browser import browser from cfme.utils.wait import wait_for from fixtures.provider import setup_one_by_class_or_skip from fixtures.pytest_store import store pytestmark = [ pytest.mark.meta(server_roles="-automate"), # To prevent the provisioning itself. test_requirements.service ] @pytest.fixture(scope='module') def a_provider(request): return setup_one_by_class_or_skip(request, InfraProvider) @pytest.fixture(scope="module") def provider_data(a_provider): return a_provider.get_yaml_data() @pytest.fixture(scope="module") def provisioning(provider_data): return provider_data.get("provisioning", {}) @pytest.fixture(scope="module") def template_name(provisioning): return provisioning.get("template") @pytest.fixture(scope="module") def vm_name(): return fauxfactory.gen_alphanumeric(length=16) @pytest.yield_fixture(scope="module") def generated_request(appliance, a_provider, provider_data, provisioning, template_name, vm_name): """Creates a provision request, that is not automatically approved, and returns the search data. After finishing the test, request should be automatically deleted. Slightly modified code from :py:module:`cfme.tests.infrastructure.test_provisioning` """ first_name = fauxfactory.gen_alphanumeric() last_name = fauxfactory.gen_alphanumeric() notes = fauxfactory.gen_alphanumeric() e_mail = "{}@{}.test".format(first_name, last_name) host, datastore = map(provisioning.get, ('host', 'datastore')) vm = Vm(name=vm_name, provider=a_provider, template_name=template_name) view = navigate_to(vm, 'Provision') provisioning_data = { 'request': { 'email': e_mail, 'first_name': first_name, 'last_name': last_name, 'notes': notes}, 'catalog': { 'vm_name': vm_name, 'num_vms': '10'}, 'environment': {'host_name': {'name': host}, 'datastore_name': {'name': datastore}}, } # Same thing, different names. :\ if provider_data["type"] == 'rhevm': provisioning_data['catalog']['provision_type'] = 'Native Clone' elif provider_data["type"] == 'virtualcenter': provisioning_data['catalog']['provision_type'] = 'VMware' try: provisioning_data['network'] = {'vlan': partial_match(provisioning['vlan'])} except KeyError: # provisioning['vlan'] is required for rhevm provisioning if provider_data["type"] == 'rhevm': raise pytest.fail('rhevm requires a vlan value in provisioning info') view.form.fill_with(provisioning_data, on_change=view.form.submit_button) request_cells = { "Description": "Provision from [{}] to [{}###]".format(template_name, vm_name), } provision_request = appliance.collections.requests.instantiate(cells=request_cells) yield provision_request browser().get(store.base_url) appliance.server.login_admin() provision_request.remove_request() @pytest.mark.tier(3) def test_services_request_direct_url(generated_request): """Go to the request page, save the url and try to access it directly.""" assert navigate_to(generated_request, 'Details'), "could not find the request!" request_url = sel.current_url() sel.get(sel.base_url()) # I need to flip it with something different here sel.get(request_url) # Ok, direct access now. 
    wait_for(
        lambda: sel.is_displayed("//body[contains(@onload, 'miqOnLoad')]"),
        num_sec=20, message="wait for a CFME page to appear", delay=0.5
    )


@pytest.mark.tier(3)
def test_copy_request(request, generated_request, vm_name, template_name):
    """Check if the request gets properly copied."""
    modifications = {'catalog': {'vm_name': fauxfactory.gen_alphanumeric(length=16)}}
    new_request = generated_request.copy_request(values=modifications)
    request.addfinalizer(new_request.remove_request)
    assert navigate_to(new_request, 'Details')
It is now very easy to register for a My Health Record: just ask one of our helpful receptionists. To create a My Health Record, we will collect information about you and your children from Medicare and some other government bodies, including your name, date of birth, and Medicare records. Registered healthcare provider organisations, such as general practices and hospitals, will be able to access your My Health Record when providing healthcare to you.
#!/usr/bin/env <PATH_HELPDESK>/env/bin/python # -*- coding: utf-8 -*- ## # Copyright 2017 FIWARE Foundation, e.V. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ## from HelpDesk.desks.helpdeskImporter import HelpDeskImporter from HelpDesk.desks.helpdesk import HelpDesk from HelpDesk.platforms.servers import AskBot from logging import error, exception, info, debug from logging import _nameToLevel as nameToLevel from argparse import ArgumentParser from sys import exc_info from Common.logging_conf import LoggingConf from Config.settings import JIRA_URL __author__ = 'Fernando López' class AskbotSync(LoggingConf): def __init__(self, loglevel): super(AskbotSync, self).__init__(loglevel=loglevel, log_file='askbot.log') info('\n\n---- Askbot Synchronization----\n') try: self.helpdesk = HelpDeskImporter() self.helpdesk.get_monitors() except Exception as e: error(e) error('No connection to JIRA https://{}'.format(JIRA_URL)) error("Unexpected error: {}".format(exc_info()[0])) exit() self.askbot = AskBot() def process(self): def get_status(q): if q.monitor: result = q.monitor.fields.status else: result = 'None' return result try: self.askbot.get_questions() except Exception as e: error(e) error('Failed to get questions from server') finally: self.askbot.match(self.helpdesk.monitors) for question in self.askbot.questions: debug('{}, monitor={}, monitor status={}, question url={}' .format(question, question.monitor, get_status(q=question), question.url)) self.helpdesk.update_with(self.askbot.questions) info('helpdesk: # issues created = {}'.format(self.helpdesk.n_monitors)) info('helpdesk: # issues transitions = {}'.format(self.helpdesk.n_transitions)) info('askbot questions = {}'.format(len(self.askbot.questions))) self.close() class HelpDeskCaretaker(LoggingConf): def __init__(self, loglevel, mailer): super(HelpDeskCaretaker, self).__init__(loglevel=loglevel, log_file='mainhelpdesk.log') info('\n\n---- HELP-DESK Caretakers----\n') try: self.helpdesk = HelpDesk(loglevel=loglevel, mailer=mailer) except Exception as e: error(e) exception("Unexpected error: {}".format(exc_info()[0])) exit() def process(self): self.helpdesk.channel_requests() self.helpdesk.assign_requests() self.helpdesk.remove_spam() self.helpdesk.naming() info('main helpdesk: # issues assigned = {}'.format(self.helpdesk.n_assignments)) info('main helpdesk: # issues channeled = {}'.format(self.helpdesk.n_channeled)) info('main helpdesk: # issues deleted = {}'.format(self.helpdesk.n_removed)) info('main helpdesk: # issues renamed = {}'.format(self.helpdesk.n_renamed)) self.close() if __name__ == "__main__": parser = ArgumentParser(prog='Askbot', description='Synchronising scripts') parser.add_argument('-l', '--log', default='INFO', help='The logging level to be used.') args = parser.parse_args() loglevel = None try: loglevel = nameToLevel[args.log.upper()] except Exception as e1: print('Invalid log level: {}'.format(args.log)) print('Please use one of the following values:') print(' * CRITICAL') print(' * ERROR') print(' * 
WARNING') print(' * INFO') print(' * DEBUG') print(' * NOTSET') exit() askbotSync = AskbotSync(loglevel=loglevel) askbotSync.process() helpdeskCaretaker = HelpDeskCaretaker(loglevel=loglevel) helpdeskCaretaker.process()
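A hypothetical invocation, assuming the script above is saved as sync.py inside the <PATH_HELPDESK> virtualenv referenced by its shebang:

<PATH_HELPDESK>/env/bin/python sync.py --log DEBUG

This runs the Askbot synchronization first and the help-desk caretaker pass second, each writing to its own log file (askbot.log and mainhelpdesk.log).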
Fresh Stylish Computer Desk: this image collection about the Fresh Stylish Computer Desk is available to save. We gathered these photos online and chose the best for you; the pictures posted here were carefully picked and published by the admin after selecting the best among them, as part of Kochi-good.Com's exclusive updates collection. So take your time, find the Fresh Stylish Computer Desk photos posted here that suit your needs, and use them for your own collection and personal use. The images were added by the admin and tagged in the Computer Desk category. You are welcome to leave feedback on the quality of our blog. For updates and recent information about the Fresh Stylish Computer Desk, please follow us on social media or bookmark this page; we aim to bring you fresh and new updates regularly. Enjoy your stay, and we hope you find the perfect desk for you.
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2012, Nachi Ueno, NTT MCL, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from contextlib import nested import mock from mock import call import unittest2 as unittest import mox from quantum.agent import firewall as firewall_base from quantum.agent.linux import iptables_manager from quantum.agent import rpc as agent_rpc from quantum.agent import securitygroups_rpc as sg_rpc from quantum import context from quantum.db import securitygroups_rpc_base as sg_db_rpc from quantum.extensions import securitygroup as ext_sg from quantum.openstack.common import cfg from quantum.openstack.common.rpc import proxy from quantum.tests.unit import test_extension_security_group as test_sg from quantum.tests.unit import test_iptables_firewall as test_fw class FakeSGCallback(sg_db_rpc.SecurityGroupServerRpcCallbackMixin): def get_port_from_device(self, device): device = self.devices.get(device) if device: device['security_group_rules'] = [] device['security_group_source_groups'] = [] device['fixed_ips'] = [ip['ip_address'] for ip in device['fixed_ips']] return device class SGServerRpcCallBackMixinTestCase(test_sg.SecurityGroupDBTestCase): def setUp(self): super(SGServerRpcCallBackMixinTestCase, self).setUp() self.rpc = FakeSGCallback() def test_security_group_rules_for_devices_ipv4_ingress(self): fake_prefix = test_fw.FAKE_PREFIX['IPv4'] with self.network() as n: with nested(self.subnet(n), self.security_group()) as (subnet_v4, sg1): sg1_id = sg1['security_group']['id'] rule1 = self._build_security_group_rule( sg1_id, 'ingress', 'tcp', '22', '22') rule2 = self._build_security_group_rule( sg1_id, 'ingress', 'tcp', '23', '23', fake_prefix) rules = { 'security_group_rules': [rule1['security_group_rule'], rule2['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], security_groups=[sg1_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': 'ingress', 'protocol': 'tcp', 'ethertype': 'IPv4', 'port_range_max': 22, 'security_group_id': sg1_id, 'port_range_min': 22}, {'direction': 'ingress', 'protocol': 'tcp', 'ethertype': 'IPv4', 'port_range_max': 23, 'security_group_id': sg1_id, 'port_range_min': 23, 'source_ip_prefix': fake_prefix}, {'ethertype': 'IPv4', 'direction': 'egress'}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) def test_security_group_rules_for_devices_ipv4_egress(self): fake_prefix = test_fw.FAKE_PREFIX['IPv4'] with self.network() as n: with nested(self.subnet(n), self.security_group()) as (subnet_v4, sg1): sg1_id = sg1['security_group']['id'] 
rule1 = self._build_security_group_rule( sg1_id, 'egress', 'tcp', '22', '22') rule2 = self._build_security_group_rule( sg1_id, 'egress', 'udp', '23', '23', fake_prefix) rules = { 'security_group_rules': [rule1['security_group_rule'], rule2['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], security_groups=[sg1_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': 'egress', 'protocol': 'tcp', 'ethertype': 'IPv4', 'port_range_max': 22, 'security_group_id': sg1_id, 'port_range_min': 22}, {'direction': 'egress', 'protocol': 'udp', 'ethertype': 'IPv4', 'port_range_max': 23, 'security_group_id': sg1_id, 'port_range_min': 23, 'dest_ip_prefix': fake_prefix}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) def test_security_group_rules_for_devices_ipv4_source_group(self): with self.network() as n: with nested(self.subnet(n), self.security_group(), self.security_group()) as (subnet_v4, sg1, sg2): sg1_id = sg1['security_group']['id'] sg2_id = sg2['security_group']['id'] rule1 = self._build_security_group_rule( sg1_id, 'ingress', 'tcp', '24', '25', source_group_id=sg2['security_group']['id']) rules = { 'security_group_rules': [rule1['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], security_groups=[sg1_id, sg2_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] res2 = self._create_port( self.fmt, n['network']['id'], security_groups=[sg2_id]) ports_rest2 = self.deserialize(self.fmt, res2) port_id2 = ports_rest2['port']['id'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': u'ingress', 'source_ip_prefix': u'10.0.0.3/32', 'protocol': u'tcp', 'ethertype': u'IPv4', 'port_range_max': 25, 'port_range_min': 24, 'source_group_id': sg2_id, 'security_group_id': sg1_id}, {'ethertype': 'IPv4', 'direction': 'egress'}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) self._delete('ports', port_id2) def test_security_group_rules_for_devices_ipv6_ingress(self): fake_prefix = test_fw.FAKE_PREFIX['IPv6'] with self.network() as n: with nested(self.subnet(n, cidr=fake_prefix, ip_version=6), self.security_group()) as (subnet_v6, sg1): sg1_id = sg1['security_group']['id'] rule1 = self._build_security_group_rule( sg1_id, 'ingress', 'tcp', '22', '22', ethertype='IPv6') rule2 = self._build_security_group_rule( sg1_id, 'ingress', 'udp', '23', '23', fake_prefix, ethertype='IPv6') rules = { 'security_group_rules': [rule1['security_group_rule'], rule2['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], fixed_ips=[{'subnet_id': 
subnet_v6['subnet']['id']}], security_groups=[sg1_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': 'ingress', 'protocol': 'tcp', 'ethertype': 'IPv6', 'port_range_max': 22, 'security_group_id': sg1_id, 'port_range_min': 22}, {'direction': 'ingress', 'protocol': 'udp', 'ethertype': 'IPv6', 'port_range_max': 23, 'security_group_id': sg1_id, 'port_range_min': 23, 'source_ip_prefix': fake_prefix}, {'ethertype': 'IPv6', 'direction': 'egress'}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) def test_security_group_rules_for_devices_ipv6_egress(self): fake_prefix = test_fw.FAKE_PREFIX['IPv6'] with self.network() as n: with nested(self.subnet(n, cidr=fake_prefix, ip_version=6), self.security_group()) as (subnet_v6, sg1): sg1_id = sg1['security_group']['id'] rule1 = self._build_security_group_rule( sg1_id, 'egress', 'tcp', '22', '22', ethertype='IPv6') rule2 = self._build_security_group_rule( sg1_id, 'egress', 'udp', '23', '23', fake_prefix, ethertype='IPv6') rules = { 'security_group_rules': [rule1['security_group_rule'], rule2['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}], security_groups=[sg1_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': 'egress', 'protocol': 'tcp', 'ethertype': 'IPv6', 'port_range_max': 22, 'security_group_id': sg1_id, 'port_range_min': 22}, {'direction': 'egress', 'protocol': 'udp', 'ethertype': 'IPv6', 'port_range_max': 23, 'security_group_id': sg1_id, 'port_range_min': 23, 'dest_ip_prefix': fake_prefix}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) def test_security_group_rules_for_devices_ipv6_source_group(self): fake_prefix = test_fw.FAKE_PREFIX['IPv6'] with self.network() as n: with nested(self.subnet(n, cidr=fake_prefix, ip_version=6), self.security_group(), self.security_group()) as (subnet_v6, sg1, sg2): sg1_id = sg1['security_group']['id'] sg2_id = sg2['security_group']['id'] rule1 = self._build_security_group_rule( sg1_id, 'ingress', 'tcp', '24', '25', ethertype='IPv6', source_group_id=sg2['security_group']['id']) rules = { 'security_group_rules': [rule1['security_group_rule']]} res = self._create_security_group_rule(self.fmt, rules) self.deserialize(self.fmt, res) self.assertEquals(res.status_int, 201) res1 = self._create_port( self.fmt, n['network']['id'], fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}], security_groups=[sg1_id, sg2_id]) ports_rest1 = self.deserialize(self.fmt, res1) port_id1 = ports_rest1['port']['id'] self.rpc.devices = {port_id1: ports_rest1['port']} devices = [port_id1, 'no_exist_device'] res2 = self._create_port( self.fmt, n['network']['id'], fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}], security_groups=[sg2_id]) ports_rest2 = 
self.deserialize(self.fmt, res2) port_id2 = ports_rest2['port']['id'] ctx = context.get_admin_context() ports_rpc = self.rpc.security_group_rules_for_devices( ctx, devices=devices) port_rpc = ports_rpc[port_id1] expected = [{'direction': 'ingress', 'source_ip_prefix': 'fe80::3/128', 'protocol': 'tcp', 'ethertype': 'IPv6', 'port_range_max': 25, 'port_range_min': 24, 'source_group_id': sg2_id, 'security_group_id': sg1_id}, {'ethertype': 'IPv6', 'direction': 'egress'}, ] self.assertEquals(port_rpc['security_group_rules'], expected) self._delete('ports', port_id1) self._delete('ports', port_id2) class SGServerRpcCallBackMixinTestCaseXML(SGServerRpcCallBackMixinTestCase): fmt = 'xml' class SGAgentRpcCallBackMixinTestCase(unittest.TestCase): def setUp(self): self.rpc = sg_rpc.SecurityGroupAgentRpcCallbackMixin() self.rpc.sg_agent = mock.Mock() def test_security_groups_rule_updated(self): self.rpc.security_groups_rule_updated(None, security_groups=['fake_sgid']) self.rpc.sg_agent.assert_has_calls( [call.security_groups_rule_updated(['fake_sgid'])]) def test_security_groups_member_updated(self): self.rpc.security_groups_member_updated(None, security_groups=['fake_sgid']) self.rpc.sg_agent.assert_has_calls( [call.security_groups_member_updated(['fake_sgid'])]) def test_security_groups_provider_updated(self): self.rpc.security_groups_provider_updated(None) self.rpc.sg_agent.assert_has_calls( [call.security_groups_provider_updated()]) class SecurityGroupAgentRpcTestCase(unittest.TestCase): def setUp(self): self.agent = sg_rpc.SecurityGroupAgentRpcMixin() self.agent.context = None self.addCleanup(mock.patch.stopall) mock.patch('quantum.agent.linux.iptables_manager').start() self.agent.root_helper = 'sudo' self.agent.init_firewall() self.firewall = mock.Mock() firewall_object = firewall_base.FirewallDriver() self.firewall.defer_apply.side_effect = firewall_object.defer_apply self.agent.firewall = self.firewall rpc = mock.Mock() self.agent.plugin_rpc = rpc self.fake_device = {'device': 'fake_device', 'security_groups': ['fake_sgid1', 'fake_sgid2'], 'security_group_source_groups': ['fake_sgid2'], 'security_group_rules': [{'security_group_id': 'fake_sgid1', 'source_group_id': 'fake_sgid2'}]} fake_devices = {'fake_device': self.fake_device} self.firewall.ports = fake_devices rpc.security_group_rules_for_devices.return_value = fake_devices def test_prepare_and_remove_devices_filter(self): self.agent.prepare_devices_filter(['fake_device']) self.agent.remove_devices_filter(['fake_device']) # ignore device which is not filtered self.firewall.assert_has_calls([call.defer_apply(), call.prepare_port_filter( self.fake_device), call.defer_apply(), call.remove_port_filter( self.fake_device), ]) def test_security_groups_rule_updated(self): self.agent.refresh_firewall = mock.Mock() self.agent.prepare_devices_filter(['fake_port_id']) self.agent.security_groups_rule_updated(['fake_sgid1', 'fake_sgid3']) self.agent.refresh_firewall.assert_has_calls( [call.refresh_firewall()]) def test_security_groups_rule_not_updated(self): self.agent.refresh_firewall = mock.Mock() self.agent.prepare_devices_filter(['fake_port_id']) self.agent.security_groups_rule_updated(['fake_sgid3', 'fake_sgid4']) self.agent.refresh_firewall.assert_has_calls([]) def test_security_groups_member_updated(self): self.agent.refresh_firewall = mock.Mock() self.agent.prepare_devices_filter(['fake_port_id']) self.agent.security_groups_member_updated(['fake_sgid2', 'fake_sgid3']) self.agent.refresh_firewall.assert_has_calls( [call.refresh_firewall()]) def 
test_security_groups_member_not_updated(self): self.agent.refresh_firewall = mock.Mock() self.agent.prepare_devices_filter(['fake_port_id']) self.agent.security_groups_member_updated(['fake_sgid3', 'fake_sgid4']) self.agent.refresh_firewall.assert_has_calls([]) def test_security_groups_provider_updated(self): self.agent.refresh_firewall = mock.Mock() self.agent.security_groups_provider_updated() self.agent.refresh_firewall.assert_has_calls( [call.refresh_firewall()]) def test_refresh_firewall(self): self.agent.prepare_devices_filter(['fake_port_id']) self.agent.refresh_firewall() calls = [call.defer_apply(), call.prepare_port_filter(self.fake_device), call.defer_apply(), call.update_port_filter(self.fake_device)] self.firewall.assert_has_calls(calls) class FakeSGRpcApi(agent_rpc.PluginApi, sg_rpc.SecurityGroupServerRpcApiMixin): pass class SecurityGroupServerRpcApiTestCase(unittest.TestCase): def setUp(self): self.rpc = FakeSGRpcApi('fake_topic') self.rpc.call = mock.Mock() def test_security_group_rules_for_devices(self): self.rpc.security_group_rules_for_devices(None, ['fake_device']) self.rpc.call.assert_has_calls( [call(None, {'args': {'devices': ['fake_device']}, 'method': 'security_group_rules_for_devices'}, version=sg_rpc.SG_RPC_VERSION, topic='fake_topic')]) class FakeSGNotifierAPI(proxy.RpcProxy, sg_rpc.SecurityGroupAgentRpcApiMixin): pass class SecurityGroupAgentRpcApiTestCase(unittest.TestCase): def setUp(self): self.notifier = FakeSGNotifierAPI(topic='fake', default_version='1.0') self.notifier.fanout_cast = mock.Mock() def test_security_groups_rule_updated(self): self.notifier.security_groups_rule_updated( None, security_groups=['fake_sgid']) self.notifier.fanout_cast.assert_has_calls( [call(None, {'args': {'security_groups': ['fake_sgid']}, 'method': 'security_groups_rule_updated'}, version=sg_rpc.SG_RPC_VERSION, topic='fake-security_group-update')]) def test_security_groups_member_updated(self): self.notifier.security_groups_member_updated( None, security_groups=['fake_sgid']) self.notifier.fanout_cast.assert_has_calls( [call(None, {'args': {'security_groups': ['fake_sgid']}, 'method': 'security_groups_member_updated'}, version=sg_rpc.SG_RPC_VERSION, topic='fake-security_group-update')]) def test_security_groups_rule_not_updated(self): self.notifier.security_groups_rule_updated( None, security_groups=[]) self.assertEquals(False, self.notifier.fanout_cast.called) def test_security_groups_member_not_updated(self): self.notifier.security_groups_member_updated( None, security_groups=[]) self.assertEquals(False, self.notifier.fanout_cast.called) #Note(nati) bn -> binary_name # id -> device_id PHYSDEV_RULE = '-m physdev --physdev-is-bridged' IPTABLES_ARG = {'bn': iptables_manager.binary_name, 'physdev': PHYSDEV_RULE} CHAINS_NAT = 'OUTPUT|POSTROUTING|PREROUTING|float-snat|snat' IPTABLES_ARG['chains'] = CHAINS_NAT IPTABLES_NAT = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-postrouting-bottom - [0:0] -A PREROUTING -j %(bn)s-PREROUTING -A OUTPUT -j %(bn)s-OUTPUT -A POSTROUTING -j %(bn)s-POSTROUTING -A POSTROUTING -j quantum-postrouting-bottom -A quantum-postrouting-bottom -j %(bn)s-snat -A %(bn)s-snat -j %(bn)s-float-snat """ % IPTABLES_ARG CHAINS_EMPTY = 'FORWARD|INPUT|OUTPUT|local|sg-chain|sg-fallback' CHAINS_1 = CHAINS_EMPTY + '|i_port1|o_port1' CHAINS_2 = CHAINS_1 + '|i_port2|o_port2' IPTABLES_ARG['chains'] = CHAINS_1 IPTABLES_FILTER_1 = """:%(bn)s-(%(chains)s) - [0:0] 
:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port1 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP -A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_FILTER_1_2 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port1 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port1 -j RETURN -s 10.0.0.4 -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port1 ! 
-s 10.0.0.3 -j DROP -A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_ARG['chains'] = CHAINS_2 IPTABLES_FILTER_2 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port1 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port1 -j RETURN -s 10.0.0.4 -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP -A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2 -A %(bn)s-i_port2 -m state --state INVALID -j DROP -A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port2 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port2 -j RETURN -s 10.0.0.3 -A %(bn)s-i_port2 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP -A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port2 ! 
-s 10.0.0.4 -j DROP -A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port2 -m state --state INVALID -j DROP -A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port2 -j RETURN -A %(bn)s-o_port2 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_FILTER_2_2 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port1 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP -A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2 -A %(bn)s-i_port2 -m state --state INVALID -j DROP -A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port2 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port2 -j RETURN -s 10.0.0.3 -A %(bn)s-i_port2 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP -A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port2 ! 
-s 10.0.0.4 -j DROP -A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port2 -m state --state INVALID -j DROP -A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port2 -j RETURN -A %(bn)s-o_port2 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_FILTER_2_3 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port1 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port1 -j RETURN -s 10.0.0.4 -A %(bn)s-i_port1 -j RETURN -p icmp -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP -A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2 -A %(bn)s-i_port2 -m state --state INVALID -j DROP -A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2 -A %(bn)s-i_port2 -j RETURN -p tcp --dport 22 -A %(bn)s-i_port2 -j RETURN -s 10.0.0.3 -A %(bn)s-i_port2 -j RETURN -p icmp -A %(bn)s-i_port2 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP -A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN -A %(bn)s-o_port2 ! 
-s 10.0.0.4 -j DROP -A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP -A %(bn)s-o_port2 -m state --state INVALID -j DROP -A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port2 -j RETURN -A %(bn)s-o_port2 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_ARG['chains'] = CHAINS_EMPTY IPTABLES_FILTER_EMPTY = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP """ % IPTABLES_ARG IPTABLES_ARG['chains'] = CHAINS_1 IPTABLES_FILTER_V6_1 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p icmpv6 -j RETURN -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_ARG['chains'] = CHAINS_2 IPTABLES_FILTER_V6_2 = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1 -A %(bn)s-i_port1 -m state --state INVALID -j DROP -A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1 -A %(bn)s-o_port1 -m mac ! 
--mac-source 12:34:56:78:9a:bc -j DROP -A %(bn)s-o_port1 -p icmpv6 -j RETURN -A %(bn)s-o_port1 -m state --state INVALID -j DROP -A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port1 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2 -A %(bn)s-i_port2 -m state --state INVALID -j DROP -A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-i_port2 -j %(bn)s-sg-fallback -A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain -A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2 -A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP -A %(bn)s-o_port2 -p icmpv6 -j RETURN -A %(bn)s-o_port2 -m state --state INVALID -j DROP -A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN -A %(bn)s-o_port2 -j %(bn)s-sg-fallback -A %(bn)s-sg-chain -j ACCEPT """ % IPTABLES_ARG IPTABLES_ARG['chains'] = CHAINS_EMPTY IPTABLES_FILTER_V6_EMPTY = """:%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :%(bn)s-(%(chains)s) - [0:0] :quantum-filter-top - [0:0] -A FORWARD -j quantum-filter-top -A OUTPUT -j quantum-filter-top -A quantum-filter-top -j %(bn)s-local -A INPUT -j %(bn)s-INPUT -A OUTPUT -j %(bn)s-OUTPUT -A FORWARD -j %(bn)s-FORWARD -A %(bn)s-sg-fallback -j DROP """ % IPTABLES_ARG FIREWALL_BASE_PACKAGE = 'quantum.agent.linux.iptables_firewall.' class TestSecurityGroupAgentWithIptables(unittest.TestCase): FIREWALL_DRIVER = FIREWALL_BASE_PACKAGE + 'IptablesFirewallDriver' PHYSDEV_INGRESS = 'physdev-out' PHYSDEV_EGRESS = 'physdev-in' def setUp(self): self.mox = mox.Mox() agent_opts = [ cfg.StrOpt('root_helper', default='sudo'), ] cfg.CONF.register_opts(agent_opts, "AGENT") cfg.CONF.set_override( 'firewall_driver', self.FIREWALL_DRIVER, group='SECURITYGROUP') self.addCleanup(mock.patch.stopall) self.addCleanup(self.mox.UnsetStubs) self.agent = sg_rpc.SecurityGroupAgentRpcMixin() self.agent.context = None self.root_helper = 'sudo' self.agent.root_helper = 'sudo' self.agent.init_firewall() self.iptables = self.agent.firewall.iptables self.mox.StubOutWithMock(self.iptables, "execute") self.rpc = mock.Mock() self.agent.plugin_rpc = self.rpc rule1 = [{'direction': 'ingress', 'protocol': 'udp', 'ethertype': 'IPv4', 'source_ip_prefix': '10.0.0.2', 'source_port_range_min': 67, 'source_port_range_max': 67, 'port_range_min': 68, 'port_range_max': 68}, {'direction': 'ingress', 'protocol': 'tcp', 'ethertype': 'IPv4', 'port_range_min': 22, 'port_range_max': 22}, {'direction': 'egress', 'ethertype': 'IPv4'}] rule2 = rule1[:] rule2 += [{'direction': 'ingress', 'source_ip_prefix': '10.0.0.4', 'ethertype': 'IPv4'}] rule3 = rule2[:] rule3 += [{'direction': 'ingress', 'protocol': 'icmp', 'ethertype': 'IPv4'}] rule4 = rule1[:] rule4 += [{'direction': 'ingress', 'source_ip_prefix': '10.0.0.3', 'ethertype': 'IPv4'}] rule5 = rule4[:] rule5 += [{'direction': 'ingress', 'protocol': 'icmp', 'ethertype': 'IPv4'}] self.devices1 = {'tap_port1': self._device('tap_port1', '10.0.0.3', '12:34:56:78:9a:bc', rule1)} self.devices2 = {'tap_port1': self._device('tap_port1', '10.0.0.3', '12:34:56:78:9a:bc', rule2), 'tap_port2': self._device('tap_port2', '10.0.0.4', '12:34:56:78:9a:bd', rule4)} self.devices3 = {'tap_port1': self._device('tap_port1', '10.0.0.3', 
'12:34:56:78:9a:bc', rule3), 'tap_port2': self._device('tap_port2', '10.0.0.4', '12:34:56:78:9a:bd', rule5)} def _device(self, device, ip, mac_address, rule): return {'device': device, 'fixed_ips': [ip], 'mac_address': mac_address, 'security_groups': ['security_group1'], 'security_group_rules': rule, 'security_group_source_groups': [ 'security_group1']} def _regex(self, value): value = value.replace('physdev-INGRESS', self.PHYSDEV_INGRESS) value = value.replace('physdev-EGRESS', self.PHYSDEV_EGRESS) value = value.replace('\n', '\\n') value = value.replace('[', '\[') value = value.replace(']', '\]') return mox.Regex(value) def _replay_iptables(self, v4_filter, v6_filter): self.iptables.execute( ['iptables-save', '-t', 'filter'], root_helper=self.root_helper).AndReturn('') self.iptables.execute( ['iptables-restore'], process_input=self._regex(v4_filter), root_helper=self.root_helper).AndReturn('') self.iptables.execute( ['iptables-save', '-t', 'nat'], root_helper=self.root_helper).AndReturn('') self.iptables.execute( ['iptables-restore'], process_input=self._regex(IPTABLES_NAT), root_helper=self.root_helper).AndReturn('') self.iptables.execute( ['ip6tables-save', '-t', 'filter'], root_helper=self.root_helper).AndReturn('') self.iptables.execute( ['ip6tables-restore'], process_input=self._regex(v6_filter), root_helper=self.root_helper).AndReturn('') def test_prepare_remove_port(self): self.rpc.security_group_rules_for_devices.return_value = self.devices1 self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1) self._replay_iptables(IPTABLES_FILTER_EMPTY, IPTABLES_FILTER_V6_EMPTY) self.mox.ReplayAll() self.agent.prepare_devices_filter(['tap_port1']) self.agent.remove_devices_filter(['tap_port1']) self.mox.VerifyAll() def test_security_group_member_updated(self): self.rpc.security_group_rules_for_devices.return_value = self.devices1 self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1) self._replay_iptables(IPTABLES_FILTER_1_2, IPTABLES_FILTER_V6_1) self._replay_iptables(IPTABLES_FILTER_2, IPTABLES_FILTER_V6_2) self._replay_iptables(IPTABLES_FILTER_2_2, IPTABLES_FILTER_V6_2) self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1) self._replay_iptables(IPTABLES_FILTER_EMPTY, IPTABLES_FILTER_V6_EMPTY) self.mox.ReplayAll() self.agent.prepare_devices_filter(['tap_port1']) self.rpc.security_group_rules_for_devices.return_value = self.devices2 self.agent.security_groups_member_updated(['security_group1']) self.agent.prepare_devices_filter(['tap_port2']) self.rpc.security_group_rules_for_devices.return_value = self.devices1 self.agent.security_groups_member_updated(['security_group1']) self.agent.remove_devices_filter(['tap_port2']) self.agent.remove_devices_filter(['tap_port1']) self.mox.VerifyAll() def test_security_group_rule_udpated(self): self.rpc.security_group_rules_for_devices.return_value = self.devices2 self._replay_iptables(IPTABLES_FILTER_2, IPTABLES_FILTER_V6_2) self._replay_iptables(IPTABLES_FILTER_2_3, IPTABLES_FILTER_V6_2) self.mox.ReplayAll() self.agent.prepare_devices_filter(['tap_port1', 'tap_port3']) self.rpc.security_group_rules_for_devices.return_value = self.devices3 self.agent.security_groups_rule_updated(['security_group1']) self.mox.VerifyAll() class SGNotificationTestMixin(): def test_security_group_rule_updated(self): name = 'webservers' description = 'my webservers' with self.security_group(name, description) as sg: with self.security_group(name, description) as sg2: security_group_id = sg['security_group']['id'] direction = "ingress" source_group_id 
= sg2['security_group']['id'] protocol = 'tcp' port_range_min = 88 port_range_max = 88 with self.security_group_rule(security_group_id, direction, protocol, port_range_min, port_range_max, source_group_id=source_group_id ): pass self.notifier.assert_has_calls( [call.security_groups_rule_updated(mock.ANY, [security_group_id]), call.security_groups_rule_updated(mock.ANY, [security_group_id])]) def test_security_group_member_updated(self): with self.network() as n: with self.subnet(n): with self.security_group() as sg: security_group_id = sg['security_group']['id'] res = self._create_port(self.fmt, n['network']['id']) port = self.deserialize(self.fmt, res) data = {'port': {'fixed_ips': port['port']['fixed_ips'], 'name': port['port']['name'], ext_sg.SECURITYGROUPS: [security_group_id]}} req = self.new_update_request('ports', data, port['port']['id']) res = self.deserialize(self.fmt, req.get_response(self.api)) self.assertEquals(res['port'][ext_sg.SECURITYGROUPS][0], security_group_id) self._delete('ports', port['port']['id']) self.notifier.assert_has_calls( [call.security_groups_member_updated( mock.ANY, [mock.ANY]), call.security_groups_member_updated( mock.ANY, [security_group_id])]) class TestSecurityGroupAgentWithOVSIptables( TestSecurityGroupAgentWithIptables): FIREWALL_DRIVER = FIREWALL_BASE_PACKAGE + 'OVSHybridIptablesFirewallDriver' def _regex(self, value): #Note(nati): tap is prefixed on the device # in the OVSHybridIptablesFirewallDriver value = value.replace('tap_port', 'taptap_port') value = value.replace('o_port', 'otap_port') value = value.replace('i_port', 'itap_port') return super( TestSecurityGroupAgentWithOVSIptables, self)._regex(value)
Boomer Songs = Retirement Advice?

Music has defined our generation - still does, in fact. Like the hidden messages we tried to find by playing old 45’s backwards, buried within the top hits of the 60’s and 70’s is a clear message for us, from the past, into our future. I Heard It Through The Grapevine that there’s a Bad Moon Rising on our future. Apparently all of us Pinball Wizards have been on a Magic Carpet Ride for too long. We haven’t saved enough Mony, Mony for retirement, so now we have to get off Cloud Nine and face the music. If we continue our Evil Ways, our visions of Nights In White Satin and Strawberry Fields Forever will quickly become time in a padded White Room where we’ll be rocking back and forth repeating It’s All Right. No one knows where the Chain Of Fools begins or ends, but here's hoping You Ain’t Too Proud To Beg. Your life savings are at the Dark End Of The Street sporting a ghastly Whiter Shade of Pale. Quite frankly, you could end up Like A Rolling Stone, with No Particular Place To Go.

I know we’re not feeling a Whole Lotta Love for the stock market and that You’ve Lost That Loving Feeling for real estate, especially since your home turned out to be the Devil With A Blue Dress On. But Hang On Sloopy, We Can Work It Out. Here’s The Twist - it’s not too late to take control of your financial destiny, but beware of Suspicious Minds who think the only way out is to work Eight Days A Week, All Day And All Of The Night, and well past The Midnight Hour. For What It’s Worth, I’m A Believer. From time to time I Say A Little Prayer, and I Want To Hold Your Hand through all of this, but people, you’re going to have to Come Together on this one. The Times They Are A Changing and it’s time for a retirement Revolution. We Gotta Get Out Of This Place because future generations can’t afford for us to linger in a Purple Haze or feel Dazed And Confused about our future any longer. After all, Crying and Running Scared are not effective planning strategies.

Stop your life savings from Blowin' In The Wind before your golden years just Walk On By. You need a Double Shot of savings and know-how. It’s Your Thing, so find a way to push your savings up Higher And Higher. Shop Around for educational books, classes, and financial professionals who can Help. Eleanor Rigby, Penny Lane, The Duke Of Earl and his Crazy brother Louie, Louie are all counting on you to retire with them. The Weight of all this can be lifted, and you can Get Back to where you need to be. Mark my words, soon enough you’ll be saying, Please Mr. Postman, bring me my investment statements because I’m no longer a retirement Runaway. Hush! Do you hear that? It’s not the Sounds Of Silence, it’s personal and financial Respect! So say goodbye to Yesterday and Remember - One Fine Day, your retirement will be here! Now’s the time to turn your Unchained Melody into Good Vibrations. Wouldn’t It Be Nice?

Long before this election season, lots of conflicting ideas about Social Security have been floating around - it's going bankrupt...wait...no it's not; it adds to the deficit!...or does it?! Well, my fellow Boomers, take the quiz to see if you know the facts of the matter; not just for the upcoming election, but for the upcoming decades - after all, facts might be pesky things, but they're just what we need right now. For the real scoop, we turned to The Motley Fool - go there now to learn the not-so-sordid, in fact downright hopeful, details.

Ah...from the pens of babes...
As Art Linkletter put it so well, "children say the darndest things..."

"Drink eight glasses of water a day!" "Eat nine servings of fruits and veggies!" "Stay away from red meat!" From the FDA to our mothers with fingers a-wagging, for decades we've heard these "rules for healthy eating". Well...as it turns out, taking care of yourself isn't quite so black-and-white, according to Harvard Medical School psychologist Dr. Alice Domar, coauthor of Live a Little! Breaking the Rules Won't Break Your Health. "Research is revealing that whoever wrote the old guidelines didn't have the whole picture, and that there are more paths to optimal health than we previously thought," says the doc.

Old Rule: Drink eight glasses of water a day. New Rule: Eat your water.

Old Rule: Eat nine servings of fruits and vegetables. New Rule: Fill half your plate with produce. "It's not surprising that people get confused over what, exactly, a serving is," says Washington, D.C., dietitian Rebecca Scritchfield, given so much variation and so many difficult-to-remember amounts: a serving of broccoli is about five florets; a serving of raw spinach, one cup; a serving of mango, roughly the size of a fist; and so on. Her advice: stop counting and instead make half of every meal produce. "You don't need a big mound on your plate. Six asparagus spears at dinner, a spinach salad at lunch, and a sliced banana and some berries at breakfast should do it."

Old Rule: Avoid red meat. New Rule: Beef in moderation can be healthy. Red meat was long considered a heart attack on a plate because it's high in saturated fat. But a 2010 study from the Harvard School of Public Health found that the cardiovascular risk comes from processed varieties, such as sausage, hot dogs, and cold cuts, not from steak, hamburgers, and other nonprocessed cuts. (The real culprits may be salt and preservatives.) Red meat is a good source of iron and immunity-boosting zinc, two nutrients some women don't get enough of.

Old Rule: Keep your BMI between 18.5 and 24.9. New Rule: Eat healthy, exercise, and let your weight settle naturally. Physicians use BMI (body mass index), a ratio of your weight to your height, as a tool to diagnose obesity. But critics say BMI ignores muscle mass, and a 2011 Obesity study notes that it also ignores a person's hip circumference. "People come in different sizes and shapes," says Joanne Ikeda, nutritionist emeritus at the University of California, Berkeley. "The idea that everyone should fall under 25 is ludicrous." A person can have a high BMI and still be healthy, and research supports the theory: a Journal of the American Medical Association study found that fit women, even if they were overweight according to their BMI, were less likely to suffer a heart attack than those who were out of shape.
from snippets.models import Snippet
from snippets.serializers import SnippetSerializer, UserSerializer
from django.contrib.auth.models import User
from rest_framework import permissions, renderers
from snippets.permissions import IsOwnerOrReadOnly
from rest_framework.decorators import api_view, detail_route
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import viewsets


@api_view(['GET'])
def api_root(request, format=None):
    # Single entry point listing the two top-level collections.
    return Response({
        'users': reverse('user-list', request=request, format=format),
        'snippets': reverse('snippet-list', request=request, format=format),
    })


class UserViewSet(viewsets.ReadOnlyModelViewSet):
    # Read-only: exposes 'list' and 'retrieve' actions only.
    queryset = User.objects.all()
    serializer_class = UserSerializer


class SnippetViewSet(viewsets.ModelViewSet):
    # Full CRUD; writes are restricted to authenticated owners.
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly,)

    @detail_route(renderer_classes=[renderers.StaticHTMLRenderer])
    def highlight(self, request, *args, **kwargs):
        # Extra detail endpoint that renders the pre-highlighted HTML.
        snippet = self.get_object()
        return Response(snippet.highlighted)

    def perform_create(self, serializer):
        # Stamp the requesting user as the snippet's owner on create.
        serializer.save(owner=self.request.user)
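For context, here is a minimal sketch of how viewsets like these are typically wired up with a router. The urls.py layout and the import of snippets.views are assumptions, not code from this project; the route names 'snippet-list' and 'user-list' used by api_root are the defaults a DefaultRouter generates.

# urls.py -- a minimal routing sketch (assumed module layout).
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from snippets import views

# The router generates list/detail routes for each registered viewset,
# plus a route for the extra 'highlight' detail action.
router = DefaultRouter()
router.register(r'snippets', views.SnippetViewSet)
router.register(r'users', views.UserViewSet)

urlpatterns = [
    url(r'^', include(router.urls)),
]

Note that DefaultRouter also exposes its own API root view, so the hand-written api_root above is only needed when the router is not in play.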
Have you guessed? It's an Epigram course module from the University of Nottingham. On the linked page you will find a set of exercises consisting of downloadable Epigram files for your enjoyment.
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Deleting field 'Project.repo_url' db.delete_column(u'unicoremc_project', 'repo_url') # Deleting field 'Project.base_repo_url' db.delete_column(u'unicoremc_project', 'base_repo_url') # Deleting field 'Project.repo_git_url' db.delete_column(u'unicoremc_project', 'repo_git_url') def backwards(self, orm): # Adding field 'Project.repo_url' db.add_column(u'unicoremc_project', 'repo_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True), keep_default=False) # Adding field 'Project.base_repo_url' db.add_column(u'unicoremc_project', 'base_repo_url', self.gf('django.db.models.fields.URLField')(default='', max_length=200), keep_default=False) # Adding field 'Project.repo_git_url' db.add_column(u'unicoremc_project', 'repo_git_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True), keep_default=False) models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': 
"'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'unicoremc.apptype': { 'Meta': {'ordering': "('title',)", 'object_name': 'AppType'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}), 'project_type': ('django.db.models.fields.CharField', [], {'default': "'unicore-cms'", 'max_length': '256'}), 'title': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) }, u'unicoremc.localisation': { 'Meta': {'ordering': "('language_code',)", 'object_name': 'Localisation'}, 'country_code': ('django.db.models.fields.CharField', [], {'max_length': '2'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language_code': ('django.db.models.fields.CharField', [], {'max_length': '3'}) }, u'unicoremc.project': { 'Meta': {'ordering': "('application_type__title', 'country')", 'object_name': 'Project'}, 'application_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['unicoremc.AppType']", 'null': 'True', 'blank': 'True'}), 'available_languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['unicoremc.Localisation']", 'null': 'True', 'blank': 'True'}), 'cms_custom_domain': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '256'}), 'default_language': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_language'", 'null': 'True', 'to': u"orm['unicoremc.Localisation']"}), 'external_repos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'external_projects'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['unicoremc.ProjectRepo']"}), 'frontend_custom_domain': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'ga_account_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'ga_profile_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'hub_app_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'project_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'state': ('django.db.models.fields.CharField', [], {'default': "'initial'", 'max_length': '50'}), 'team_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) }, u'unicoremc.projectrepo': { 'Meta': {'object_name': 'ProjectRepo'}, 'base_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'git_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'project': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'repo'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['unicoremc.Project']"}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 
'True'}) } } complete_apps = ['unicoremc']
I’m not going to lie: I head up sales for Isograph in North America, and I often get tarred with the tactics salespeople in my industry are known for. The best advice I can give a prospective client is to try the models using your own data and information to see how they turn out. Also, check the calculations and software options to ensure the product will do what you want it to do, not only today but next year, when your model has matured. Although I am a salesperson and my lips are moving, I’m not lying here: batch append is one of those points one should consider in a mature software product. How are you going to combine the work of many individual engineers into a final model you can use for certification or present to management? If your tool cannot do this, run for the hills!

Sometimes, when working on a large system model, you need to share the load and split the fault tree development among different people. But then the time comes to combine everyone’s work. How do we do that? And how do we make sure that our master fault tree contains the most up-to-date information from each engineer’s fault trees? This excerpt from our in-development online training course gives a quick insight into using the Batch Append feature to automate the linking of fault trees from different users’ projects, and into keeping the linked file up-to-date with the latest changes; a purely illustrative sketch of the idea follows.
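To make the concept concrete, here is a purely hypothetical Python sketch of what a batch append amounts to: each engineer's file contributes a subtree hung from a named gate, and appending re-links every subtree into the master on each run, so the master always reflects the latest files. None of these names come from Isograph's product; Batch Append there is a built-in tool feature, not a scripting API.

# Hypothetical sketch only -- not Isograph's API.
# A fault tree is modeled as a dict: gate id -> list of child events/gates.
def batch_append(master_gates, engineer_subtree):
    """Re-link an engineer's subtree under its gate in the master.

    Overwriting (rather than merging) means a re-run after an engineer
    edits a file refreshes the link instead of leaving stale children.
    """
    for gate_id, children in engineer_subtree.items():
        master_gates[gate_id] = list(children)
    return master_gates

master = {"TOP": ["G1", "G2"]}
alice = {"G1": ["pump_fails", "valve_stuck"]}   # one engineer's file
bob = {"G2": ["sensor_drift"]}                  # another engineer's file
for subtree in (alice, bob):
    batch_append(master, subtree)
# master now carries both engineers' latest work under TOP.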
""" This module contains the server-side only code for interfacing with message queues. Code shared between client and server can be found in submodules of ``lwr.lwr_client``. :mod:`lwr.messaging.bind_amqp` Module ------------------------------ .. automodule:: lwr.messaging.bind_amqp :members: :undoc-members: :show-inheritance: """ from ..messaging import bind_amqp from six import itervalues def bind_app(app, queue_id, connect_ssl=None): connection_string = __id_to_connection_string(app, queue_id) queue_state = QueueState() for manager in itervalues(app.managers): bind_amqp.bind_manager_to_queue(manager, queue_state, connection_string, connect_ssl) return queue_state class QueueState(object): """ Passed through to event loops, should be "non-zero" while queues should be active. """ def __init__(self): self.active = True def deactivate(self): self.active = False def __nonzero__(self): return self.active def __id_to_connection_string(app, queue_id): return queue_id
Well, it seems that our wait is finally over: she has been officially announced and has been available for pre-order since last week. From the popular game ‘Fate/EXTRA CCC’ comes a Nendoroid of Saber Bride. She comes with three expressions, including a smiling expression, a sideways-glancing expression and even a special expression illustrated by wadarco, the game’s character designer, for a more chibi-looking Saber! Her sword, ‘Aestus Estus: The Embryonic Flame’, is also included, allowing for various poses for fans to enjoy! Nendoroid Saber Bride will be released in April 2014, bearing a price tag of 4,000 Yen.
""" Copyright (c) 2016, 2017 - o2r project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ __all__ = ['ParseErcConfig'] import os import yaml from helpers.helpers import * ID = 'o2r erc configuration file (erc.yml) parser' FORMATS = ['.yml'] class ParseErcConfig: @staticmethod def get_id(): return str(ID) @staticmethod def get_formats(): return FORMATS @staticmethod def parse(**kwargs): is_debug = False try: path_file = kwargs.get('p', None) MASTER_MD_DICT = kwargs.get('md', None) is_debug = kwargs.get('is_debug', None) global erc_id erc_id = None global erc_spec_version erc_spec_version = None global basedir basedir = kwargs.get('bd', None) erc_config = yaml.load(open(path_file), Loader=yaml.FullLoader) if erc_config is not None: # id and spec_version: if 'id' in erc_config: if erc_config['id'] is not None: MASTER_MD_DICT['id'] = erc_config['id'] erc_id = erc_config['id'] if 'spec_version' in erc_config: if erc_config['spec_version'] is not None: erc_spec_version = erc_config['spec_version'] status_note(['parsing ', path_file, ' for compendium ', erc_id, ' with version ', erc_spec_version, ' and basedir ', basedir, ' :\n', str(erc_config)], d=is_debug) # main and display file if 'main' in erc_config: if erc_config['main'] is not None: if basedir: # relative path handling happens outside of parser for main # erc.yml paths are by definition relative to erc.yml abs_path = os.path.abspath(os.path.join(os.path.dirname(path_file), erc_config['main'])) MASTER_MD_DICT['mainfile'] = abs_path else: MASTER_MD_DICT['mainfile'] = erc_config['main'] else: status_note('warning: no main file in erc.yml', d=is_debug) if 'display' in erc_config: if erc_config['display'] is not None: if basedir: # relative path handling for displayfile abs_path = os.path.abspath(os.path.join(os.path.dirname(path_file), erc_config['display'])) MASTER_MD_DICT['displayfile'] = os.path.relpath(abs_path, basedir) else: MASTER_MD_DICT['displayfile'] = erc_config['display'] else: status_note('warning: no display file in erc.yml', d=is_debug) # licenses: if 'licenses' in erc_config: if erc_config['licenses'] is not None: MASTER_MD_DICT['license'] = erc_config['licenses'] # convention: if 'convention' in erc_config: if erc_config['convention'] is not None: MASTER_MD_DICT['convention'] = erc_config['convention'] else: status_note(['error parsing erc.yml from', str(path_file)], d=is_debug) return MASTER_MD_DICT except yaml.YAMLError as yexc: if hasattr(yexc, 'problem_mark'): if yexc.context is not None: status_note(['yaml error\n\t', str(yexc.problem_mark), '\n\t', str(yexc.problem), ' ', str(yexc.context)], d=True) return 'error' else: status_note(['yaml error\n\t', str(yexc.problem_mark), '\n\t', str(yexc.problem)], d=is_debug) return 'error' else: status_note(['! error: unable to parse yaml \n\t', str(yexc)], d=is_debug) return 'error' except Exception as exc: status_note(str(exc), d=is_debug) return 'error'
What started out as a passion for animals turned into an obsession with cruelty-free living when I moved to Portland a year ago. I threw out all of my old beauty products and vowed to only support brands that align with my values. The more I learned about cruelty-free products, the more I wanted to share what I had learned and help educate others. From there I expanded to a plant-based diet, and the rest is history. Beauty is my biggest passion, but you can also find all things cruelty-free living here.
#! /usr/bin/env python # -*- coding: utf-8 -*- import BaseHTTPServer as bts import subprocess import urlparse import os import json import urllib2 import re import codecs HOME_DIR = os.environ["HOME"] try: SETTING=json.load(open(HOME_DIR+'/.pymamemose.json')) except IOError,(errno,strerrno): print "Don't exist ~/.pymamemose.json" SETTING={"DOCUMENT_ROOT":"~/Dropbox/memo","RECENT_NUM":5,"PORT":8000,"REST_PATTERN":".rst","IGNORE_FILE":""} DOCUMENT_ROOT =os.path.expanduser(SETTING["DOCUMENT_ROOT"]) if SETTING.has_key("DOCUMENT_ROOT") else "~/Dropbox/memo" RECENT_NUM =SETTING["RECENT_NUM"] if SETTING.has_key("RECENT_NUM") else 5 PORT = SETTING["PORT"] if SETTING.has_key("PORT") else 8000 REST_PATTERN =SETTING["REST_PATTERN"] if SETTING.has_key("REST_PATTERN") else ".rst" IGNORE_FILE =SETTING["IGNORE_FILE"] if SETTING.has_key("IGNORE_FILE") else "" class GetHandler(bts.BaseHTTPRequestHandler): def do_GET(self): if self.path == '/favicon.ico': self.send_error(404) else: parsed_path = urlparse.urlparse(self.path) self.send_response(200) self.send_header("Content-type","text/html") res = Pymamemose(parsed_path) self.end_headers() self.wfile.write(res.make_html()) return class Pymamemose(): def __init__(self,parsed_path): self.parsed_path=parsed_path self.restpatobj =re.compile(REST_PATTERN) self.ignoreobj = re.compile(IGNORE_FILE) def make_html(self): path = DOCUMENT_ROOT+self.parsed_path.path query=urllib2.unquote(self.parsed_path.query) if path == DOCUMENT_ROOT + "/search": res = self.req_search(path,query) elif os.path.isdir(path): res = self.req_index(path,query) elif os.path.isfile(path): res = self.req_file(path,query) else: print "failture" return res def header_html(self,title,path,q=""): html = """<!DOCTYPE HTML> <html> <head> <meta charset="UTF-8"> <title> %s </title> """%(title) html+=""" <style type="text/css"> <!-- body { margin: auto; padding: 0 2em; max-width: 80%; border-left: 1px solid black; border-right: 1px solid black; font-size: 100%; line-height: 140%; } pre { border: 1px solid #090909; background-color: #f8f8f8; padding: 0.5em; margin: 0.5em 1em; } code { border: 1px solid #cccccc; background-color: #f8f8f8; padding: 2px 0.5em; margin: 0 0.5em; } a { text-decoration: none; } a:link, a:visited, a:hover { color: #4444cc; } a:hover { text-decoration: underline; } h1, h2, h3 { font-weight: bold; color: #2f4f4f; } h1 { font-size: 200%; line-height: 100%; margin: 1em 0; border-bottom: 1px solid #2f4f4f; } h2 { font-size: 175%; line-height: 100%; margin: 1em 0; padding-left: 0.5em; border-left: 0.5em solid #2f4f4f; } h3 { font-size: 150%; line-height: 100%; margin: 1em 0; } h4, h5 { font-weight: bold; color: #000000; margin: 1em 0 0.5em; } h4 { font-size: 125% } h5 { font-size: 100% } p { margin: 0.7em 1em; text-indent: 1em; } div.footnotes { padding-top: 1em; color: #090909; } div#header { margin-top: 1em; padding-bottom: 1em; border-bottom: 1px dotted black; } div#header > form { display: float; float: right; text-align: right; } a.filename { color: #666666; a} footer { border-top: 1px dotted black; padding: 0.5em; font-size: 80%; text-align: right; margin: 5em 0 1em; } blockquote { margin: 1em 3em; border: 2px solid #999; padding: 0.3em 0; background-color: #f3fff3; } hr { height: 1px; border: none; border-top: 1px solid black; } table { padding: 0; margin: 1em 2em; border-spacing: 0; border-collapse: collapse; } table tr { border-top: 1px solid #cccccc; background-color: white; margin: 0; padding: 0; } table tr:nth-child(2n) { background-color: #f8f8f8; } table tr th { 
font-weight: bold; border: 1px solid #cccccc; text-align: left; margin: 0; padding: 6px 13px; } table tr td { border: 1px solid #cccccc; text-align: left; margin: 0; padding: 6px 13px; } table tr th :first-child, table tr td :first-child { margin-top: 0; } table tr th :last-child, table tr td :last-child { margin-bottom: 0; } --> </style> <script> function copy(text) { prompt("Copy filepath below:", text); } </script> </head> <body> """ link_str="" uri = "" fp =path.replace(DOCUMENT_ROOT,'').split('/') for i in fp: if i ==u'': continue uri +='/'+i if os.path.isfile(DOCUMENT_ROOT+uri) or os.path.isdir(DOCUMENT_ROOT+uri): link_str += '/' + "<a href='%s'>%s</a>"%(uri,i) link_str += "<a class='filename' href=\"javascript:copy('%s');\">[copy]</a>"%(path) link_str = "<a href='/'>%s</a>"%(DOCUMENT_ROOT) + link_str search=""" <form action="/search" method="get",accept-charset="UTF-8"> <input name="path" type="hidden" value="" /> <input name="q" type="text" value="" size="24" /> <input type="submit" value="search" /> </form> """ return html+"<div id=\"header\">%s %s</div>"%(link_str,search) def footer_html(self): html =""" <footer> <a href="https://github.com/saiias/pymamemose">pymamemose: ReStructuredText memo server</a> </footer> </body> </html> """ return html def req_search(self,path,query): query=query.split("&q=")[1] found = self.find(path,query) html_title = "Serch in "+path title = "</div><h1>Seach in %s </h1>"%(path) body ="" if query == "": body+="<h2>No Keyword </h2>" elif len(found)==0: body+="<h2>Not Found</h2>" elif len(found)>0: body +='<ul>' for k,v in found.items(): # size = float(getFileSize(v))/1024 size = 1.0 v=v.replace(DOCUMENT_ROOT+'/','') body +=''' <li><a href="%s">%s</a> <a class='filename' href="javascript:copy('%s');\">[%s , %.1f KB]</a></li> '''%(v,k,v,k,size) body +='<ul>' body = title+body header_html = self.header_html(html_title,path,query) footer_html = self.footer_html() return header_html+body+footer_html def req_file(self,req,res): """ アクセスされた先がファイルのとき呼び出される。 そのファイルがRest記法であれば、rst2htmlで変換してhtmlを出力する。 req:アクセスされたパス res:クエリ """ header_html = self.header_html(req,req) footer_html = self.footer_html() body = "" if isMatchedFile(self.restpatobj,os.path.splitext(req)[1]): body +=subprocess.check_output(['rst2html.py',req]) body=body.split('<body>')[1] body=body.split('</body>')[0] else: """ ReSTファイル以外の挙動 """ f=open(req,'r') body += f.read() f.close() return header_html.encode('ascii')+body+footer_html.encode('ascii') def req_index(self,req,res): """ アクセスされた先がフォルダのとき呼び出される。 フォルダ内を、ディレクトリ、ReSTファイル、その他のファイルと分類して、hmtlを出力する。 req:アクセスされたパス res:クエリ """ global RECENT_NUM dirs,rest,others=self.directory_files(req) body = "</div><h1>Index of %s </h1>"%(req) if RECENT_NUM > 0: body += "<h2>Recent:</h2>" recent = self.recent_files() if len(recent) < RECENT_NUM: RECENT_NUM = len(recent) body +='<ul>' index = 0 for k,v in sorted(recent.items(),key=lambda x:x[1][1]): size = float(getFileSize(v[0]))/1024 if index == RECENT_NUM: break v[0] =v[0].replace(DOCUMENT_ROOT+'/','') body +=''' <li><a href=" %s "> %s </a> <a class='filename' href="javascript:copy(' %s ');\">[%s , %.1f KB]</a></li> '''%(v[0],k,v[0],k,size) index +=1 body +='</ul>' body += "<h2>Directories:</h2>" if len(dirs)>0: body +='<ul>' for k,v in dirs.items(): body +=''' <li><a href=" %s "> %s </a> <a class='filename' href="javascript:copy(' %s ');\">[%s,dir]</a></li> '''%(k,k,v,k) body +='</ul>' body += "<h2>ReST documents:</h2>" if len(rest)>0: body +='<ul>' for k,v in rest.items(): size = float(getFileSize(v))/1024 
v=v.replace(DOCUMENT_ROOT+'/','') body +=''' <li><a href=" %s "> %s </a> <a class='filename' href="javascript:copy(' %s ');\">[ %s , %.1f KB]</a></li> '''%(v,k,v,k,size) body +='</ul>' body += "<h2>Other files:</h2>" if len(others)>0: body +='<ul>' for k,v in others.items(): size = float(getFileSize(v))/1024 v=v.replace(DOCUMENT_ROOT,'') body +=''' <li><a href=" %s "> %s </a> <a class='filename' href="javascript:copy(' %s ');\">[%s , %.1f KB]</a></li> '''%(k,k,v,k,size) body +='</ul>' header_html = self.header_html(req,req) footer_html = self.footer_html() return header_html+body+footer_html def find(self,path,query): """ クエリに指定された文字列をサイト内で検索する。 もし見つかればそのファイル名をキー、そのファイルまでのパスをバリューとして辞書に入れる。 検索が終わると辞書を返す。 """ found = dict() for root,dirs,files in os.walk(DOCUMENT_ROOT): for fl in files: for line in codecs.open(root+'/'+fl,'r','utf-8'): print type(line) if line.find(query.decode('utf-8')) != -1: found[fl]=root+'/'+fl return found def recent_files(self): recent ={} for root,dirs,files in os.walk(DOCUMENT_ROOT): for file in files: if not isMatchedFile(self.ignoreobj,file): mtime = os.stat(root+'/'+file) recent[file]=[root+'/'+file,mtime] return recent def directory_files(self,path): dirs =dict() rest = dict() others = dict() df = os.listdir(path) for item in df: if os.path.isdir(path+"/"+item): dirs[item]=path+"/"+item if os.path.isfile(path+"/"+item): if isMatchedFile(self.restpatobj,item): rest[item]=path+'/'+item else: if not isMatchedFile(self.ignoreobj,item): others[item]= path+'/'+item return dirs,rest,others def abspath(path): return os.path.abspath(path) def getFileSize(file): return os.path.getsize(file) def isMatchedFile(patobj,filename): """ 拡張子がpatobjだったらTrue そうでないならFalse >>> pat = re.compile(".(rst|rest|txt)$") >>> isMatchedFile(pat,"test.rst") True >>> isMatchedFile(pat,"test.doc") False >>> pattern = re.compile("TAGS") >>> isMatchedFile(pattern,"DS_STORE") False >>> isMatchedFile(pattern,"TAGS") True """ match = patobj.search(filename) return False if (match == None) else True def command(): server = bts.HTTPServer(('localhost', PORT), GetHandler) print 'Starting server' server.serve_forever() if __name__ == '__main__': command()
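Since the server's behaviour is driven entirely by ~/.pymamemose.json, a sample configuration helps; the values below are simply the fallback defaults the code uses when the file is missing.

{
  "DOCUMENT_ROOT": "~/Dropbox/memo",
  "RECENT_NUM": 5,
  "PORT": 8000,
  "REST_PATTERN": ".rst",
  "IGNORE_FILE": ""
}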
Getting a good night’s sleep is so important. Not only does it help prepare you for the day ahead, leaving you feeling refreshed and alert, but it also has a major impact on your overall health and wellbeing. In fact, lack of sleep is linked to an increased risk of disease, such as heart disease, type 2 diabetes, and depression. Whether you’re one of the 33 percent of people not getting enough sleep regularly, one of the many who develop insomnia every year, or you’re just looking to improve the quality of your sleep, there’s good news. Following a few simple tips and tricks for how to sleep better can help you get more much-needed rest so you can live a healthier, happier life.

Increasing the amount of bright light you’re exposed to during the day helps keep your body’s natural sleep clock in good working order, leading to improved sleep quality and longer sleep times. Additionally, research suggests that exposure to bright natural or artificial light during the day can reduce the time it takes to fall asleep by around 83 percent.

Did you know that your television, computer, tablet, and cell phone all emit sleep-disrupting blue light? It actually tricks your brain into thinking that it’s daytime. Powering down your electronics at least one to two hours before bed, or switching to night mode, which strips your screen of blue light, can help you fall asleep more easily.

Just as your exposure to light can affect your body’s sleep clock, so can the regularity of your sleep schedule. Another important step for anyone interested in how to sleep better is to start going to bed and waking up at the same time every day - even on weekends. This can go a long way toward helping you fall asleep faster and stay asleep for longer periods.

Having your regular cup of coffee in the morning is fine, but indulging in caffeinated beverages later in the day could be robbing you of your beauty sleep. Research shows that caffeine can stay in your bloodstream for up to six hours. For improved sleep, it’s best to skip late-day caffeinated beverages and foods like chocolate, which is also a source of caffeine.

When you’re creating your bedroom sanctuary, think cool, quiet, and dark. Your bedroom should be free of noise and other distractions that could keep you awake. Using sleep shades or blackout curtains can help prevent street lights from disrupting you. Additionally, it helps to turn down the thermostat to the optimal sleeping range of 60 to 67 degrees Fahrenheit, as noted by the National Sleep Foundation.

A sagging, tired, old mattress is sure to leave you feeling the same. Most mattresses only last nine to ten years. Look for a new mattress that provides good support for better sleep quality, pain relief, stress relief, and less tossing and turning. Likewise, a supportive pillow is a must.

Although the majority of people already know that a good mattress and comfortable bedding are important for getting quality sleep, budget concerns can be an obstacle. But there’s good news: you can try a new mattress, bed frame, and even a beautiful upholstered headboard before making the commitment to buy. CORT Furniture Rental is a convenient source of bedroom furniture that can help you in your quest to sleep better. Not only will it leave your space looking serene, it will also increase your sleeping comfort for the Z’s you deserve.
# --- Day 1: Not Quite Lisp --- # # Santa was hoping for a white Christmas, but his weather machine's "snow" function is powered by stars, and he's fresh # out! To save Christmas, he needs you to collect fifty stars by December 25th. # # Collect stars by helping Santa solve puzzles. Two puzzles will be made available on each day in the advent calendar; # the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck! # # Here's an easy puzzle to warm you up. # # Santa is trying to deliver presents in a large apartment building, but he can't find the right floor - the directions # he got are a little confusing. He starts on the ground floor (floor 0) and then follows the instructions one # character at a time. # # An opening parenthesis, (, means he should go up one floor, and a closing parenthesis, ), means he should go down # one floor. # # The apartment building is very tall, and the basement is very deep; he will never find the top or bottom floors. # # For example: # # (()) and ()() both result in floor 0. # ((( and (()(()( both result in floor 3. # ))((((( also results in floor 3. # ()) and ))( both result in floor -1 (the first basement level). # ))) and )())()) both result in floor -3. # To what floor do the instructions take Santa? # # # # --- Part Two --- # # Now, given the same instructions, find the position of the first character that causes him to enter the basement # (floor -1). The first character in the instructions has position 1, the second character has position 2, and so on. # # For example: # # ) causes him to enter the basement at character position 1. # ()()) causes him to enter the basement at character position 5. # What is the position of the character that causes Santa to first enter the basement? instructions = open("day01_input").read() # Part 1 floor_no = instructions.count('(') - instructions.count(')') print("Santa ends up on floor {0}".format(floor_no)) # Part 2 floor_no = 0 for n, i in enumerate(instructions): floor_no += 1 if i == '(' else -1 if floor_no == -1: print("Santa enters basement at instruction {0}".format(n + 1)) # Add 1 because the instructions are 1-based break
Yellow leather blend mini Kalifornia shoulder bag from Kenzo featuring a detachable shoulder strap, a main internal compartment, top handles, a top zip fastening and front zipped detail. Baby pink leather and cotton-blend tiny Kalifornia shoulder bag from Kenzo featuring a silver-tone chain shoulder strap, a foldover top with push-lock closure, a zip fastening and silver-tone zip details. KENZO Kalifornia Mini Shoulder Bag - Kenzo Kalifornia Mini Shoulder Bag.
#!/usr/bin/env python # encoding: utf-8 # vim: tabstop=4:softtabstop=4:shiftwidth=4:expandtab from __future__ import division, print_function import numpy as np import math import scipy.linalg as sl, scipy.special as ss from functools import partial from transformations import * from stingrays import * from fullstingray import * def hmcLikelihood(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the full stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=fullStingrayLikelihood, **kwargs) def hpHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=hpStingrayLikelihood, **kwargs) def tmHmcLikelihood1(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=tmStingrayLikelihood, **kwargs) def tmHmcLikelihood2(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=tmStingrayLikelihood2, **kwargs) def muHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=muStingrayLikelihood, **kwargs) def msHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs): """Wrapper for the compound of the stingray transformation and the interval transformation """ if 'wrapperclass' in kwargs: raise ValueError("hmcLikelihood already pre-sets wrapperclass") return intervalLikelihood(h5filename=h5filename, jsonfilename=jsonfilename, wrapperclass=msStingrayLikelihood, **kwargs)
That’s how Airblaster co-founder Jesse Grandoski introduces the official edit for last week’s Board Games event at Timberline. We’ve already waxed lyrical on how fun this contest looked, so we’ll spare the internet some added ‘isn’t snowboarding awesome’ speil and just get on with going riding. With some friends. Because yeah, that is what it’s all about. Airblaster know’s what’s up, we just hope we get an invite to this next year, it looks awesome!
#!/usr/bin/python # # fig04_07.py # Roll a six-sided die 6000 times. # # Author: Billy Wilson Arante # Created: 2016/08/06 PHT # # Attribution: Python How to Program, 1st Ed. by Deitel & Deitel # import random def main(): """Main""" # Initialize frequency1 = 0 frequency2 = 0 frequency3 = 0 frequency4 = 0 frequency5 = 0 frequency6 = 0 for roll in range(1, 6001): # Rolls a die 6000 times face = random.randrange(1, 7) # Random number from 1 to 7 # Count frequencies if face == 1: frequency1 += 1 elif face == 2: frequency2 += 1 elif face == 3: frequency3 += 1 elif face == 4: frequency4 += 1 elif face == 5: frequency5 += 1 elif face == 6: frequency6 += 1 else: print "It should never get here!" print "%4s %13s" % ("Face", "Frequency") print "%4s %13s" % (1, frequency1) print "%4s %13s" % (2, frequency2) print "%4s %13s" % (3, frequency3) print "%4s %13s" % (4, frequency4) print "%4s %13s" % (5, frequency5) print "%4s %13s" % (6, frequency6) if __name__ == "__main__": main()
Many owners were upset that there was no HID option for the Scion FR-S. This can easily be resolved by installing an HID kit. This kit includes all you need to get up and running with HIDs in a 15-30 minutes. This is the kit we run in our vehicle. Ships with AC slim ballasts. These ballasts fit in the tightest of locations and are far superior to DC ballasts. They reduce the chance of HID flickering. NOTE: Please consult local laws regarding aftermarket HID kits. This product comes with 2 bulbs for the Scion FR-S headlights. Cant go back to oem bulbs! The 6k is just enough! These lights are great, the ballasts don't take up much space in the engine bay and the install was a breeze. My product arrived earlier then what I expected. No problems shopping in the website or checking out. They exceeded my expectations.
#!/usr/bin/env python3 # PYTHON_ARGCOMPLETE_OK # Copyright 2013 Alexey Kardapoltsev # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import json, sys, os from vkdownloader import VkDownloader def process_music(args): if args.action == "load": vk.load(args.user, args.dest, args.clean) elif args.action == "list": vk.show(args.user) elif args.action == "play": vk.play(args.user) else: print("unknown action") def process_friends(args): if args.action == "list": vk.show_friends(args.user) else: print("unknown action") topParser = argparse.ArgumentParser() topParser.add_argument("-u", "--user", help = "user id") subParsers = topParser.add_subparsers(title = "Command categories") music = subParsers.add_parser("music", description = "working with music") friends = subParsers.add_parser("friends", description = "working with friends") friends.add_argument("action", help = "friends actions", choices=["list"]) friends.set_defaults(func = process_friends) music.add_argument("action", help = "music actions", choices=["list", "load", "play"]) music.add_argument("-d", "--dest", help = "destination directory for music download, default is current dir") music.add_argument("-c", "--clean", dest='clean', action='store_true', help = "with this options destination directory will be cleaned") music.set_defaults(clean = False) music.set_defaults(func = process_music) try: import argcomplete argcomplete.autocomplete(topParser) except ImportError: pass args = topParser.parse_args() vk = VkDownloader() args.func(args)
> > > get the clk and process for enable/disable. > > > This patch add support for it. > > clock behind the scenes when regmap access needs it enabled. > > Did you have any issues without this patch? Well that is to be expected given how mmio with clk regmap works. Did you encounter any other problems besides this?
# yadt-config-rpm-maker # Copyright (C) 2011-2013 Immobilien Scout GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ This module contains functions which were created for performance tweaking. The test coverage of this module is low since it's main purpose is to add logging information. """ from functools import wraps from logging import getLogger from time import time from os import walk from os.path import join, getsize from config_rpm_maker.configuration import get_thread_count LOGGER = getLogger(__name__) LOG_EACH_MEASUREMENT = False _execution_time_summary = {} def measure_execution_time(original_function): def process_measurement(elapsed_time_in_seconds, args, kwargs): arguments = ', '.join([str(arg) for arg in args[1:]]) key_word_arguments = "" if kwargs: key_word_arguments = ", " + str(kwargs) if len(args) > 0: function_name = "%s.%s" % (args[0].__class__.__name__, original_function.__name__) else: function_name = original_function.__name__ if function_name not in _execution_time_summary.keys(): _execution_time_summary[function_name] = [elapsed_time_in_seconds, 1] else: _execution_time_summary[function_name][0] += elapsed_time_in_seconds _execution_time_summary[function_name][1] += 1 if LOG_EACH_MEASUREMENT: function_call = '%s(%s%s)' % (function_name, arguments, key_word_arguments) LOGGER.debug('Took %.2fs to perform %s', elapsed_time_in_seconds, function_call) @wraps(original_function) def wrapped_function(*args, **kwargs): start_time = time() return_value_from_function = original_function(*args, **kwargs) end_time = time() elapsed_time_in_seconds = end_time - start_time process_measurement(elapsed_time_in_seconds, args, kwargs) return return_value_from_function return wrapped_function def log_execution_time_summaries(logging_function): logging_function('Execution times summary (keep in mind thread_count was set to %s):', get_thread_count()) for function_name in sorted(_execution_time_summary.keys()): summary_of_function = _execution_time_summary[function_name] elapsed_time = summary_of_function[0] average_time = summary_of_function[0] / summary_of_function[1] logging_function(' %5s times with average %5.2fs = sum %7.2fs : %s', summary_of_function[1], average_time, elapsed_time, function_name) def log_directories_summary(logging_function, start_path): directories_summary = {} directories = walk(start_path).next()[1] absolute_count_of_files = 0 absolute_total_size = 0 for file_name in walk(start_path).next()[2]: file_path = join(start_path, file_name) file_size = getsize(file_path) absolute_total_size += file_size absolute_count_of_files += 1 directories_summary[start_path] = (absolute_count_of_files, absolute_total_size) for directory in directories: total_size = 0 count_of_files = 0 directory_path = join(start_path, directory) for dirpath, dirnames, filenames in walk(directory_path): for file_name in filenames: file_path = join(dirpath, file_name) file_size = getsize(file_path) total_size += 
file_size absolute_total_size += file_size count_of_files += 1 absolute_count_of_files += 1 directories_summary[directory_path] = (count_of_files, total_size) logging_function('Found %d files in directory "%s" with a total size of %d bytes', absolute_count_of_files, start_path, absolute_total_size) for directory in sorted(directories_summary.keys()): count_of_files = directories_summary[directory][0] total_size = directories_summary[directory][1] logging_function(' %5d files with total size of %10d bytes in directory "%s"', count_of_files, total_size, directory)
As I’m writing this, it’s early in the morning and the mercury is already past 70˚ F. It’s raining here, like it has been on and off this entire week, much to the chagrin of many Coloradans, I’m sure. The rain is probably keeping the temperature down a bit, but once the clouds finish their job, we’re headed to a steamy high above 90˚ F. I’m not alone. A heat wave is currently baking much of the country, though unlike where I live, many regions haven’t been soaked with rain. In southeastern Wisconsin, my parents say it hasn’t really rained there since Memorial Day. The prairie plants that dot their front yard—normally verdant even in dry weather—are wilting from lack of water. One local farmer said he would need at least 3 inches of rain to save his crops. Then there’s Florida. In in some parts of the state, Tropical Storm Debby has dumped over 26 inches of rain. Sinkholes have swallowed roads, and 50 miles of Interstate 10 had to be closed due to flooding. Back in Colorado, my sister says she has two reasons to be grateful for air conditioning—it prevents both heat and smoke from suffusing her apartment. Heat isn’t the only culprit behind the wildfires. This year’s early snowmelt in the mountains—early by two weeks—is also to blame, scientists say. The bark beetles that have ravaged the state’s pine and spruce forests may also increase the odds of fire, but the interactions between bug and flame aren’t entirely sorted out yet. Meanwhile halfway around the world in Siberia, wildfires have been raging uncontrolled for six months.
from chords.models import Artist, Song, User def create_artist(name='Some Artist'): artist = Artist(name=name) artist.save() return artist def create_song(title='Random Song', artist=None, sender=None, published=True, tabs=False, genre=None): song = Song(title=title, artist=artist, sender=sender, tabs=tabs) if published: song.publish() if genre is not None: song.genre = genre song.save() return song def create_user(username='username', password='password'): user = User.objects.create_user(username=username, password=password) user.save() return user def valid_song_data(title='Title', artist_txt='artist_txt', user_txt='user_txt', genre=Song.POP, video='http://www.example.com', tabs=True, content='content'): return { 'title' : title, 'artist_txt' : artist_txt, 'user_txt' : user_txt, 'genre' : genre, 'video' : video, 'tabs' : tabs, 'content' : content } def valid_contact_data(name='Name', email='[email protected]', subject='Subject', body='Message'): return { 'name' : name, 'email' : email, 'subject': subject, 'body' : body }
Tuesday morning finds the little town of Murdo surprisingly serene. The pace is usually fast in the summertime, due to all of the tourists seeking accommodations as they make their way to the beautiful Black Hills. Add to that, the excitment of July 4th and a quiet Monday evening had not been an option. The fireworks display out at the baseball field was spectacular as usual. The whole place was a sea of red white and blue. All the families brought picnics and their children went home full of fried chicken, potato salad , corn on the cob, and watermelon. A fun time was had by all. The Murdo Girl for Next Pres Headquarters, 9:00 a.m. The ever growing team is busy preparing for the meeting that will begin as soon as Murdo Girl arrives. Here’s an update of the positions that have been filled and the respective person’s duties. THESE THREE SEASONED STAFF MEMEBERS HAVE BEEN WITH THE CAMPAIGN SINCE THE BEGINNING, which was last week sometime. THE ONE AND ONLY AND NEXT PRES. Murdo Girl saunters over to her place at the head of the pretty crowded table, and motions her three flunkies to find a seat wherever they can. Then… she speaks. Murdo Girl: Thank you all for being here so promptly. I myself have been for a vigorous early morning swim at the North Dam. To allay any rumors or as my severely lacking opponent would say, nip them in the bud, I want to say, “no worries.” You see, my trusty bodyguards are my son, my son-in-law, and some other guy that lost his sun glasses and can’t close his eyes. Their names are Bart, Smart, and Braveheart. Now on to more important matters. Since we have 2 new members of our team, and one on the way out, why don’t we go around the table and make introductions. Please state your name, your position, and explain your duties. (at which point Braveheart gets up and proceeds to go around the table.) Braveheart sit down and try not to make eye contact with anyone. It’s very annoying. Who would like to start? Well Yram, I guess that means you. at least people won’t be hearing those blasted bugs everywhere you go. Yram: Morning staff and bodyguards. I’m the crack up reporter from Gun Barrel, TX . I just don’t feel like I fit in here anymore with your platform being the way it is. I prefer spike heels, I drink Pepsi instead of ice water, and that neon paint you used to scrawl your name on the water tower is downright distracting. Therefore….I’m here to tell you..I’m defecting to Barney Fife’s Lone Wolf campaign. I have a whole lot of fun riding around with Thelma Lou and Louise, and Barney won’t make me go to the High School all the time and take guff off those snooty teachers… I might be convinced to change my mind. Are those bodyguards going to be allowed to hang out with the staff some? I think dry eyes over there is kind of cute. Murdo Girl: Jerry, does that sound like a publicity stunt to you, or does it sound like the only thing she cares about is getting free stuff? She’s as bad as the Queen carrying around an empty purse. At least the purse gives the Queen something to do with her hands. Jerry: Well, I’m the kind of guy that will count money til the til shuts on my fingers, but it ain’t my job to bring it in the door. I highly resent being asked to go through the trash at the fair grounds looking for cans to sell and loose change under the bleachers. I did find an unopened package of chips though, which I wouldn’t get any money for, so I enjoyed them myself. There was a little kid that tried to take them away from me, but you know how little kids are. 
At least he didn’t cry. I hate it when they cry. 1)He took my chips….2) You dropped your chips…3) Dropped them right into Jerry’s fingers that were clutched around the chips. I saw the whole thing. If he had something besides chips, I’d blackmail him. Sheila: I’m Sheila, and I don’t want to be a secret anymore! This right here is a bunch of cow-tickey. Murdo Girl, do you really think you can be Next Pres with people gettin over-ripe lemons and stealing a little kids chips? Being an informant has been a lifelong dream of mine. Now, you better listen up or I’ll go right out on Main St. and thumb a ride with Thelma Lou and Louise, and that lame Sicnarf, who can’t even dribble a basketball. At that moment, Murdo Girl sits up straight in her chair, takes her hat off and begins to look a little more Presidential.Teresa’s glasses start to sparkle. You can feel it in the room. A new day is dawning at Murdo Girl Coyote Headquarters. All because when an aggressive Draper girl talks, everybody listens. I thought I remembered you from somewhere. Sherri: Can I just add to that brilliant oration? So far, all I have gotten to draw is a photograh of Barney Fife holding a bullet. It has been well known throughout the community that Barney lost his gun. Didn’t you about fall out when he told that whopper about George Washington? What in blazes are we doing photographic drawings of little kids and dogs for when we’ve got a real crime on our hands? Do you feel safe with the likes of Barney Fife protecting us? Murdo Girl: Teresa, get yourself right on over to the the Murdo Coyote and talk to Janie. We need to sneak a little bombshell in between all that discourse about the Finks. The voters will sit up and take notice when they hear about Barney being without a gun. Sherri, you go with her and draw a picture! Now, where do you suppose we can find another crack up reporter? be on the lookout. Will Barney share a cell with his soulmate? Will Otis hang his jail keys up for good? Will other Murdo residents complain about the neon lettering on the Murdo Girl Tower? Will Jerry get his hands on some honest money or will he take his chips and go home? It’s the dawn of a new day in the lives of the voters in Murdo, SD. You can feel the excitement in the air. Now where do you suppose Barney is? *All photographs and drawings are without the express consent of family members and others. Well done and funny as usual Murdo Girl. Made me chuckle out loud again. Thanks 07, People send me funny pictures of the Queen, and like in the case of Sheila, themselves. I have fun trying to find ways to use them. It makes it a little more special. Man, you don’t waste any time going to press early in the morning after the big 4th party! Lab er….Lav….told me she has a great idea to give away free rocks as the next publicity stunt and we could call them “Pet Rocks”! Wait…has that already been done? Also, whatever happened to that lady who used to introduce Murdo Girl’s writing? I like the pet rock idea.That’s what Lav is there for. The lady flew the coop. She’s not into politics. Should I try to get her back? She was a good excuse for the little Murdo Girl’s misspellcheck and poor grammar. This just keeps me laughing out loud. Can’t wait for the next episode. Way better than any soap opera!!!! Yeah. I understand that lady wasn’t into politics. Gotta run. Have fun, cuz! See ya soon. Love it all lady or no lady! See you soon!
"""Demonstrates how to generate loop invariants from a single PME In this example, we define the conjugent gradient PME and use it to generate invariant operators. These operators can then be passed to the flame generator functions to generate worksheets or algorithms. """ from itertools import chain, combinations import numpy as np from ignition.dsl.flame import CONSTANTS, iterative_arg, PObj, T, TensorExpr class InvariantGenerator( object ): """Abstract class for generating invariants from PME's. To use this class, subclass it and create an args tuple and PME. """ def _get_tuple_args(self, obj): ret = obj if isinstance(obj, (PObj, list)): ret = [] obj_list = list(obj) for item in obj_list: ret.append(self._get_tuple_args(item)) ret = tuple(ret) elif isinstance(obj, TensorExpr): if obj in CONSTANTS: # We throw away constants since they don't need a new name ret = "_" else: ret = obj else: raise ValueError("Unable to handle obj %s of type %s" % \ (str(obj), type(obj))) return ret def _get_signature(self, fname): return "def %(fname)s(%(fargs)s):" % \ {'fname': fname, 'fargs': ", ".join(map(lambda o: str(self._get_tuple_args(o)), self.args)).replace("'", ""), } def _get_body(self, inv): return " return " + str(inv) def __iter__(self): size = len(self.PME) for n, comb in enumerate(chain(*[combinations(range(size), i) \ for i in xrange(1, size+1)])): invs = [self.PME[idx] for idx in comb] code = self._get_signature(self.name+"_"+str(n)) code += '\n' code += self._get_body( invs ) yield code class CGInvariantGenerator( InvariantGenerator ): A = iterative_arg("A", rank=2, part_suffix="1x1") X = iterative_arg("X", rank=2, part_suffix="1x3", arg_src="Overwrite") P = iterative_arg("P", rank=2, part_suffix="1x3", arg_src="Computed") I = iterative_arg("I", rank=2, part_suffix="I_3x3", arg_src="Computed") U = iterative_arg("U", rank=2, part_suffix="Upper_Bidiag_3x3", arg_src="Computed") J = iterative_arg("J", rank=2, part_suffix="J_3x3", arg_src="Computed") D = iterative_arg("D", rank=2, part_suffix="Diag_3x3", arg_src="Computed") R = iterative_arg("R", rank=2, part_suffix="1x3", arg_src="Computed") O = iterative_arg("O", rank=2, part_suffix="1x3", arg_src="Computed") args = [A, X, P, I, U, J, D, R, O] #Putting this here until I get PObjs combining better for arg in args: exec('%(name)s = np.matrix(%(name)s.part)' % {'name': arg.obj.name}) PME = [X * (I + U) - P * D, A * P * D - R * (I - U), P * (I - J) - R, T(R) * R - O] name = 'cg_inv' if __name__ == "__main__": with open("cg_inv.py", 'w') as fp: for code in CGInvariantGenerator(): fp.write(code+"\n\n")
The US Navy is buying a new breed of armed unmanned surface combat ships that will add more sensors and weapons to the current fleet. These will be 200-foot to 300-foot ships that displace 2,000 tons. The Navy plans to buy two large drone ships a year from FY2020-2024 for a total of about $2.7 billion. The corvette-sized USVs are being developed to field different types of sensors and, eventually, vertical launch system (VLS) cells for a variety of guided missiles.
import msgs blkmsg = msgs.Blockmsg.fromjson({"type": "block", "txs": [{"inputs": [{"script": "04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73", "outpoint": {"index": 4294967295, "tx": "0000000000000000000000000000000000000000000000000000000000000000"}, "sequence": 4294967295}], "locktime": 0, "version": 1, "outputs": [{"amount": 5000000000, "script": "4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac"}]}], "block": {"nonce": 2083236893, "version": 1, "time": 1231006505, "merkle": "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b", "bits": 0x1d00ffff, "prev": "0000000000000000000000000000000000000000000000000000000000000000"}}) hash = blkmsg.block.hash blkdata = "".join([ "010000000000000000000000000000000000000000000000000000000000000000000000", "3ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49", "ffff001d1dac2b7c01010000000100000000000000000000000000000000000000000000", "00000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f", "4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f", "6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104", "678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f", "4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"]) blkmsg = msgs.Blockmsg.frombinary(blkdata.decode("hex"))[0] hash = blkmsg.block.hash
This week we cover story #129, The Five Doctors! When all of the Doctor's incarnations to date are abducted and taken to the Death Zone by an unknown foe, the Time Lords turn to an unlikely hero to go rescue him: surprise, it's the Master! QotW: If could remake The Five Doctors with any five Doctors of your choice, which ones would you choose? Join us next week for our review of Doctor Who story #130, Warriors of the Deep! You can buy a digital copy on iTunes, rent the DVD from Netflix, or buy the DVD from Amazon.com, BarnesAndNoble.com, or many other fine retailers. This week we cover story #128, The King's Demons! The Doctor and his companions are mistaken for demons by King John in medieval England. But something is amiss with the monarch -- who could be behind this, and can the Doctor save the Magna Carta and preserve history? QotW: The Master has targeted YOU with his or her latest evil scheme! Which Doctor would you like to come to your rescue and why? Join us next week for our review of Doctor Who story #129, The Five Doctors! You can rent the DVD from Netflix or buy the DVD from BarnesAndNoble.com, Amazon.com, the BBC Doctor Who Shop, or many other fine retailers. This week we cover story #127, Enlightenment. The Doctor and his companions get caught up in a spaceship race between a number of Eternals. But the White Guardian tasks them with stopping the Eternals from winning! What cosmic prize awaits the winner? QotW: If you had the power of total Enlightenment, what would you do with it? Join us next week for our review of Doctor Who story #128, The King’s Demons! You can rent the DVD from Netflix, or buy the DVD from Amazon.com, BarnesAndNoble.com, or other fine retailers. This week we cover story #126, Terminus. The Doctor finds himself on a space station at the center of the universe where diseased people are "treated" by an animalistic monster and then never seen again. What dark secret does this place hide, and what will it cost the Doctor to put things right? QotW: Terminus was Nyssa’s last adventure with the TARDIS crew. What was your favorite Nyssa story and why? Join us next week for our review of Doctor Who story #127, Enlightenment! You can rent the DVD from Netflix, or buy the DVD as part of the Black Guardian boxed set from BarnesAndNoble.com, Amazon.com, the BBC Doctor Who Shop, or many other fine retailers.
#!/usr/bin/env python3 from matrix_client.client import MatrixClient from matrix_client.api import MatrixRequestError from requests.exceptions import MissingSchema from .bus import MessageBus, MsgDirection from .base import BaseBotInstance, EmptyBot from .models import Message, ChannelType, MessageType from .helpers import get_now_date_time, get_logger from .config import config import sys import re logger = get_logger("Matrix") class MatrixHandle(BaseBotInstance): ChanTag = ChannelType.Matrix SupportMultiline = True def __init__(self, server, username, password, rooms, nick=None): client = MatrixClient(server) self.viewer_url = server.strip('/') + "/_matrix/media/v1/download/" try: client.login_with_password(username, password) except MatrixRequestError as e: if e.code == 403: logger.error("403 Bad username or password.") sys.exit(4) else: logger.error("{} Check your server details are correct.".format(e)) sys.exit(2) except MissingSchema as e: logger.error("{} Bad URL format.".format(e)) sys.exit(3) self.username = client.user_id logger.info("logged in as: {}".format(self.username)) if nick is not None: u = client.get_user(client.user_id) logger.info("Setting display name to {}".format(nick)) try: u.set_display_name(nick) except MatrixRequestError as e: logger.error("Fail to set display name: error = {}".format(e)) self.joined_rooms = {} self.room_id_to_alias = {} self.displaynames = {} for room_id_alias in rooms: try: room = client.join_room(room_id_alias) except MatrixRequestError as e: if e.code == 400: logger.error("400 Room ID/Alias in the wrong format") sys.exit(11) else: logger.error("{} Couldn't find room {}".format(e, room_id_alias)) sys.exit(12) logger.info("Joined room {}".format(room_id_alias)) self.joined_rooms[room_id_alias] = room self.room_id_to_alias[room.room_id] = room_id_alias room.add_listener(self.on_message) self.client = client self.bot_msg_pattern = config['matrix'].get('bot_msg_pattern', None) def on_message(self, room, event): if event['sender'] == self.username: return logger.info("event received, type: {}".format(event['type'])) if event['type'] == "m.room.member": if event['content']['membership'] == "join": logger.info("{0} joined".format(event['content']['displayname'])) elif event['type'] == "m.room.message": sender = event['sender'] opt = {'matrix': sender} if sender not in self.displaynames.keys(): u_send = self.client.get_user(sender) self.displaynames[sender] = u_send.get_display_name() sender = self.displaynames[sender] msgtype = event['content']['msgtype'] room_alias = self.room_id_to_alias[room.room_id] date, time = get_now_date_time() mtype = None media_url = None typedict = { "m.image": MessageType.Photo, "m.audio": MessageType.Audio, "m.video": MessageType.Video, "m.file": MessageType.File } if msgtype == "m.text" or msgtype == "m.notice": mtype = MessageType.Text msg_content = event['content']['body'] elif msgtype == "m.emote": mtype = MessageType.Text msg_content = "*{}* {}".format(sender, event['content']['body']) elif msgtype in ["m.image", "m.audio", "m.video", "m.file"]: new_url = event['content']['url'].replace("mxc://", self.viewer_url) mtype = typedict[msgtype] msg_content = "{} ({})\n{}".format(new_url, mtype, event['content']['body']) media_url = new_url else: pass logger.info("[{}] {}: {}".format(room_alias, sender, event['content']['body'])) if mtype is not None: msg = Message( ChannelType.Matrix, sender, room_alias, msg_content, mtype=mtype, date=date, time=time, media_url=media_url, opt=opt) self.send_to_bus(self, msg) def 
send_to_bus(self, msg): raise NotImplementedError() def listen_message_stream(self): self.client.start_listener_thread() def send_msg(self, target, content, sender=None, first=False, **kwargs): target_room = self.joined_rooms[target] if self.bot_msg_pattern is not None and re.match(self.bot_msg_pattern, content) is not None: target_room.send_text("{} sent the following message:".format(sender)) target_room.send_text(content) else: target_room.send_text("[{}] {}".format(sender, content)) def Matrix2FishroomThread(mx: MatrixHandle, bus: MessageBus): if mx is None or isinstance(mx, EmptyBot): return def send_to_bus(self, msg): bus.publish(msg) mx.send_to_bus = send_to_bus mx.listen_message_stream() def Fishroom2MatrixThread(mx: MatrixHandle, bus: MessageBus): if mx is None or isinstance(mx, EmptyBot): return for msg in bus.message_stream(): mx.forward_msg_from_fishroom(msg) def init(): from .db import get_redis redis_client = get_redis() im2fish_bus = MessageBus(redis_client, MsgDirection.im2fish) fish2im_bus = MessageBus(redis_client, MsgDirection.fish2im) rooms = [b["matrix"] for _, b in config['bindings'].items() if "matrix" in b] server = config['matrix']['server'] user = config['matrix']['user'] password = config['matrix']['password'] nick = config['matrix'].get('nick', None) return ( MatrixHandle(server, user, password, rooms, nick), im2fish_bus, fish2im_bus, ) def main(): if "matrix" not in config: return from .runner import run_threads bot, im2fish_bus, fish2im_bus = init() run_threads([ (Matrix2FishroomThread, (bot, im2fish_bus, ), ), (Fishroom2MatrixThread, (bot, fish2im_bus, ), ), ]) def test(): rooms = [b["matrix"] for _, b in config['bindings'].items()] server = config['matrix']['server'] user = config['matrix']['user'] password = config['matrix']['password'] matrix_handle = MatrixHandle(server, user, password, rooms) def send_to_bus(self, msg): logger.info(msg.dumps()) matrix_handle.send_to_bus = send_to_bus matrix_handle.process(block=True) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("--test", default=False, action="store_true") args = parser.parse_args() if args.test: test() else: main() # vim: ts=4 sw=4 sts=4 expandtab
Almost one third of those without a will said they felt they had nothing valuable to bequeath. Source: Getty. When it comes to planning what will happen to your estate after you’re gone, the task can bring up some unpleasant thoughts or cause strains between family members who may be reluctant to discuss the topic of death. However it is important to plan ahead and make sure that you have put the necessary motions in place should anything happen. However shocking new research has revealed that more than half of Australians (53 per cent) have not put together a will, with the main reason for failing to compile one being pure procrastination, as 40 per cent of those without one said they planned to do so but had not got around to it yet. Almost one third of those without a will said they felt they had nothing valuable to bequeath, while 28 per cent admitted to feeling “too young” to write a will and 16 per cent of respondents simply didn’t want to think about dying. The research, which was commissioned by Maurice Blackburn Lawyers, also revealed that almost half of those without a will admittedly have no idea what will happen to their assets if they pass away without a Last Will and Testament in place. Andrew Simpson, national head of Wills and Estates Law at Maurice Blackburn, described the results as “concerning”, saying they reveal a lack of awareness among Australians about the importance of having a will and what happens if you die without one. “This research reflects what we hear each day,” he said. “That a significant proportion of the Australian adult population don’t have a will, that those who don’t have a will are highly misinformed about why a will is important and that many people are in the dark about what actually happens if you pass away without a will. A total of 1,287 people, aged 18 and over, took part in the study, which also revealed that around 75 per cent of those who do have a will believed it was up to date and in line with their wishes, while one quarter said their will required updating or weren’t sure. The research, which was carried out by OmniPoll, also found that, among those over the age of 50, just over one third were aware of an advance care directive – EXPLAIN – however just 11 per cent actually had one in place. Read more: Would you write your kids an ’emotional will’? Simpson went on to explain that, while many people believe they own nothing of value, this is a common misconception as people regularly overlook the likes of pets, digital assets and even superannuation funds when they think about what assets they have. However Simpson said that the most concerning factor to emerge from the findings is that almost 40 per cent of people surveyed who do not have a will, have no idea what happens if they pass away without one and almost a quarter wrongly assume that their next of kin can choose what happens in that case. He added: “It is understandable that people assume their family can step in to deal with their affairs if they pass away unexpectedly without a will but in most cases that’s not what happens. While many people associate wills with those in the later stages of life, Simpson also stressed the importance of having a will despite your age, particularly if you are married, have children or make a large purchase or investment, such as property. The findings revealed that the most common catalyst for people deciding to write a will was being diagnosed with a serious illness. 
Simpson added: “It’s not surprising that with statistics like these, the number of disputes we see in relation to the administration of deceased estates, and superannuation death benefits, are increasing. “Many people also incorrectly assume that because they are young, not married, don’t have kids or don’t own a house that they don’t need a will, [but] unfortunately a will may be needed at any time in an adult’s life and often without warning. Do you have a will in place? Is it up to date?
# # Copyright (c) 2008--2016 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # import sys import string # pylint: disable=W0402 from spacewalk.common import usix from spacewalk.server.importlib import channelImport, packageImport, errataImport, \ kickstartImport from spacewalk.common.usix import raise_with_tb import diskImportLib import xmlSource import syncCache import syncLib DEFAULT_ORG = 1 # Singleton-like class BaseCollection: _shared_state = {} def __init__(self): self.__dict__ = self._shared_state if not list(self._shared_state.keys()): self._items = [] self._cache = None self._items_hash = {} self._init_fields() self._init_cache() def add_item(self, item): item_id = self._get_item_id(item) timestamp = self._get_item_timestamp(item) self._cache.cache_set(item_id, item, timestamp=timestamp) return self def get_item_timestamp(self, item_id): "Returns this item's timestamp" if item_id not in self._items_hash: raise KeyError("Item %s not found in collection" % item_id) return self._items_hash[item_id] def get_item(self, item_id, timestamp): "Retrieve an item from the collection" return self._cache.cache_get(item_id, timestamp=timestamp) def has_item(self, item_id, timestamp): """Return true if the item exists in the collection (with the specified timestamp""" return self._cache.cache_has_key(item_id, timestamp=timestamp) def _init_fields(self): return self def _init_cache(self): return self def _get_item_id(self, item): "Get the item ID out of an item. Override in subclasses" raise NotImplementedError def _get_item_timestamp(self, item): "Get the item timestamp out of an item. 
Override in subclasses" raise NotImplementedError def reset(self): """Reset the collection""" self._shared_state.clear() self.__init__() # Singleton-like class ChannelCollection: _shared_state = {} def __init__(self): self.__dict__ = self._shared_state if not list(self._shared_state.keys()): self._channels = [] self._parent_channels = {} self._channels_hash = {} self._cache = syncCache.ChannelCache() def add_item(self, channel_object): """Stores a channel in the collection""" channel_label = channel_object['label'] channel_last_modified = channel_object['last_modified'] last_modified = _to_timestamp(channel_last_modified) self._cache.cache_set(channel_label, channel_object, timestamp=last_modified) t = (channel_label, last_modified) self._channels.append(t) channel_parent = channel_object.get('parent_channel') if channel_parent is not None: # Add this channel to the parent's list l = self._get_list_from_dict(self._parent_channels, channel_parent) l.append(t) else: # Create an empty list self._get_list_from_dict(self._parent_channels, channel_label) self._channels_hash[channel_label] = last_modified return self @staticmethod def _get_list_from_dict(diction, key): # Returns the dictionary's key if present (assumed to be a list), or # sets the value to an empty list and returns it if key in diction: l = diction[key] else: l = diction[key] = [] return l def get_channel_labels(self): """Return the channel labels from this collection""" return [x[0] for x in self._channels] def get_channels(self): """Return a list of (channel label, channel timestamp) from this collection""" return self._channels[:] def get_channel(self, channel_label, timestamp): """Return the channel with the specified label and timestamp from the collection""" return self._cache.cache_get(channel_label, timestamp=timestamp) def get_channel_timestamp(self, channel_label): """Returns the channel's timestamp""" if channel_label not in self._channels_hash: raise KeyError("Channel %s could not be found" % channel_label) return self._channels_hash[channel_label] def get_parent_channel_labels(self): """Return a list of channel labels for parent channels""" l = list(self._parent_channels.keys()) l.sort() return l def get_child_channels(self, channel_label): """Return a list of (channel label, channel timestamp) for this parent channel""" if channel_label not in self._parent_channels: raise Exception("Channel %s is not a parent" % channel_label) return self._parent_channels[channel_label] def reset(self): """Reset the collection""" self._shared_state.clear() self.__init__() # pylint: disable=W0232 class SyncHandlerContainer: collection = object # this class has no __init__ for the purpose # it's used in multiple inheritance mode and inherited classes should # use __init__ from the other base class def endItemCallback(self): # reference to xmlSource superclass we redefines xml_superclass = self.__class__.__bases__[1] xml_superclass.endItemCallback(self) # pylint: disable=E1101 if not self.batch: return c = self.collection() c.add_item(self.batch[-1]) del self.batch[:] def endContainerCallback(self): # Not much to do here... 
pass def get_sync_handler(container): handler = xmlSource.SatelliteDispatchHandler() handler.set_container(container) return handler class ChannelContainer(SyncHandlerContainer, xmlSource.ChannelContainer): collection = ChannelCollection def get_channel_handler(): return get_sync_handler(ChannelContainer()) def import_channels(channels, orgid=None, master=None): collection = ChannelCollection() batch = [] org_map = None my_backend = diskImportLib.get_backend() if master: org_map = my_backend.lookupOrgMap(master)['master-id-to-local-id'] for c in channels: try: timestamp = collection.get_channel_timestamp(c) except KeyError: raise_with_tb(Exception("Could not find channel %s" % c), sys.exc_info()[2]) c_obj = collection.get_channel(c, timestamp) if c_obj is None: raise Exception("Channel not found in cache: %s" % c) # Check to see if we're asked to sync to an orgid, # make sure the org from the export is not null org, # finally if the orgs differ so we might wanna use # requested org's channel-family. # TODO: Move these checks somewhere more appropriate if not orgid and c_obj['org_id'] is not None: # If the src org is not present default to org 1 orgid = DEFAULT_ORG if orgid is not None and c_obj['org_id'] is not None and \ c_obj['org_id'] != orgid: # If we know the master this is coming from and the master org # has been mapped to a local org, transform org_id to the local # org_id. Otherwise just put it in the default org. if (org_map and c_obj['org_id'] in list(org_map.keys()) and org_map[c_obj['org_id']]): c_obj['org_id'] = org_map[c_obj['org_id']] else: c_obj['org_id'] = orgid if c_obj.has_key('trust_list'): del(c_obj['trust_list']) for family in c_obj['families']: family['label'] = 'private-channel-family-' + \ str(c_obj['org_id']) # If there's a trust list on the channel, transform the org ids to # the local ones if c_obj.has_key('trust_list') and c_obj['trust_list']: trusts = [] for trust in c_obj['trust_list']: if trust['org_trust_id'] in org_map: trust['org_trust_id'] = org_map[trust['org_trust_id']] trusts.append(trust) c_obj['trust_list'] = trusts syncLib.log(6, "Syncing Channel %s to Org %s " % (c_obj['label'], c_obj['org_id'])) batch.append(c_obj) importer = channelImport.ChannelImport(batch, my_backend) # Don't commit just yet importer.will_commit = 0 importer.run() return importer # Singleton-like class ShortPackageCollection: _shared_state = {} def __init__(self): self.__dict__ = self._shared_state if not list(self._shared_state.keys()): self._cache = None self._init_cache() def _init_cache(self): self._cache = syncCache.ShortPackageCache() def add_item(self, package): """Stores a package in the collection""" self._cache.cache_set(package['package_id'], package) def get_package(self, package_id): """Return the package with the specified id from the collection""" return self._cache.cache_get(package_id) def has_package(self, package_id): """Returns true if the package exists in the collection""" return self._cache.cache_has_key(package_id) def reset(self): """Reset the collection""" self._shared_state.clear() self.__init__() class ShortPackageContainer(SyncHandlerContainer, xmlSource.IncompletePackageContainer): collection = ShortPackageCollection def get_short_package_handler(): return get_sync_handler(ShortPackageContainer()) class PackageCollection(ShortPackageCollection): _shared_state = {} def _init_cache(self): self._cache = syncCache.PackageCache() def get_package_timestamp(self, package_id): raise NotImplementedError class PackageContainer(SyncHandlerContainer, 
xmlSource.PackageContainer): collection = PackageCollection def get_package_handler(): return get_sync_handler(PackageContainer()) # Singleton-like class SourcePackageCollection(ShortPackageCollection): _shared_state = {} def _init_cache(self): self._cache = syncCache.SourcePackageCache() class SourcePackageContainer(SyncHandlerContainer, xmlSource.SourcePackageContainer): collection = SourcePackageCollection def get_source_package_handler(): return get_sync_handler(SourcePackageContainer()) # Singleton-like class ErrataCollection: _shared_state = {} def __init__(self): self.__dict__ = self._shared_state if not list(self._shared_state.keys()): self._errata_hash = {} self._cache = None self._init_cache() def _init_cache(self): self._cache = syncCache.ErratumCache() def add_item(self, erratum): """Stores an erratum in the collection""" erratum_id = erratum['erratum_id'] timestamp = _to_timestamp(erratum['last_modified']) self._errata_hash[erratum_id] = timestamp self._cache.cache_set(erratum_id, erratum, timestamp=timestamp) def get_erratum_timestamp(self, erratum_id): """Returns the erratum's timestamp""" if erratum_id not in self._errata_hash: raise KeyError("Erratum %s could not be found" % erratum_id) return self._errata_hash[erratum_id] def get_erratum(self, erratum_id, timestamp): """Return the erratum with the specified id and timestamp from the collection. Note that timestamp can be None, in which case no timetamp matching is performed""" return self._cache.cache_get(erratum_id, timestamp=timestamp) def has_erratum(self, erratum_id, timestamp): """Returns true if the erratum exists in the collection""" return self._cache.cache_has_key(erratum_id, timestamp=timestamp) def reset(self): """Reset the collection""" self._shared_state.clear() self.__init__() class ErrataContainer(SyncHandlerContainer, xmlSource.ErrataContainer): collection = ErrataCollection def get_errata_handler(): return get_sync_handler(ErrataContainer()) class KickstartableTreesCollection(BaseCollection): _shared_state = {} def _init_cache(self): self._cache = syncCache.KickstartableTreesCache() def _get_item_id(self, item): return item['label'] def _get_item_timestamp(self, item): return None class KickstartableTreesContainer(SyncHandlerContainer, xmlSource.KickstartableTreesContainer): collection = KickstartableTreesCollection def get_kickstarts_handler(): return get_sync_handler(KickstartableTreesContainer()) def import_packages(batch, sources=0): importer = packageImport.PackageImport(batch, diskImportLib.get_backend(), sources) importer.setUploadForce(4) importer.run() importer.status() return importer def link_channel_packages(batch, strict=1): importer = packageImport.ChannelPackageSubscription(batch, diskImportLib.get_backend(), caller="satsync.linkPackagesToChannels", strict=strict) importer.run() importer.status() return importer def import_errata(batch): importer = errataImport.ErrataImport(batch, diskImportLib.get_backend()) importer.ignoreMissing = 1 importer.run() importer.status() return importer def import_kickstarts(batch): importer = kickstartImport.KickstartableTreeImport(batch, diskImportLib.get_backend()) importer.run() importer.status() return importer def _to_timestamp(t): if isinstance(t, usix.IntType): # Already an int return t # last_modified is YYYY-MM-DD HH24:MI:SS # The cache expects YYYYMMDDHH24MISS as format; so just drop the # spaces, dashes and columns # python 2.4 can't handle t.translate(None, ' -:') last_modified = t.translate(string.maketrans("", ""), ' -:') return last_modified # 
Generic container handler class ContainerHandler: """generate and set container XML handlers""" def __init__(self, master_label, create_orgs=False): self.handler = xmlSource.SatelliteDispatchHandler() # arch containers self.setServerArchContainer() self.setPackageArchContainer() self.setChannelArchContainer() self.setCPUArchContainer() self.setServerPackageArchContainer() self.setServerChannelArchContainer() self.setServerGroupServerArchContainer() self.setChannelPackageArchContainer() # all other containers self.setChannelFamilyContainer() self.setProductNamesContainer() self.setOrgContainer(master_label, create_orgs) def __del__(self): self.handler.close() # kill the circular reference. def close(self): self.handler.close() # kill the circular reference. def clear(self): self.handler.clear() # clear the batch # basic functionality: def process(self, stream): self.handler.process(stream) def reset(self): self.handler.reset() def getHandler(self): return self.handler # set arch containers: def setServerArchContainer(self): self.handler.set_container(diskImportLib.ServerArchContainer()) def setPackageArchContainer(self): self.handler.set_container(diskImportLib.PackageArchContainer()) def setChannelArchContainer(self): self.handler.set_container(diskImportLib.ChannelArchContainer()) def setCPUArchContainer(self): self.handler.set_container(diskImportLib.CPUArchContainer()) def setServerPackageArchContainer(self): self.handler.set_container(diskImportLib.ServerPackageArchCompatContainer()) def setServerChannelArchContainer(self): self.handler.set_container(diskImportLib.ServerChannelArchCompatContainer()) def setServerGroupServerArchContainer(self): self.handler.set_container(diskImportLib.ServerGroupServerArchCompatContainer()) def setChannelPackageArchContainer(self): self.handler.set_container(ChannelPackageArchCompatContainer()) # set all other containers: def setChannelFamilyContainer(self): self.handler.set_container(ChannelFamilyContainer()) def setProductNamesContainer(self): self.handler.set_container(diskImportLib.ProductNamesContainer()) def setOrgContainer(self, master_label, create_orgs): # pylint: disable=E1101,E1103 self.handler.set_container(diskImportLib.OrgContainer()) self.handler.get_container('rhn-orgs').set_master_and_create_org_args( master_label, create_orgs) # # more containers # # NOTE: we use *most* the Arch Containers from diskImportLib.py # this one is used simply to print out the arches. class ChannelPackageArchCompatContainer(diskImportLib.ChannelPackageArchCompatContainer): arches = {} def endItemCallback(self): diskImportLib.ChannelPackageArchCompatContainer.endItemCallback(self) if not self.batch: return self.arches[self.batch[-1]['package-arch']] = 1 def endContainerCallback(self): arches = list(self.arches.keys()) arches.sort() if arches: for arch in arches: syncLib.log(6, ' parsed arch: %s' % (arch)) diskImportLib.ChannelPackageArchCompatContainer.endContainerCallback(self) class ChannelFamilyContainer(xmlSource.ChannelFamilyContainer): def endItemCallback(self): xmlSource.ChannelFamilyContainer.endItemCallback(self) if not self.batch: return syncLib.log(2, ' parsing family: %s' % (self.batch[-1]['name'])) def endContainerCallback(self): batch = self.batch # use the copy only; don't want a persistent self.batch self.batch = [] importer = channelImport.ChannelFamilyImport(batch, diskImportLib.get_backend()) importer.run()
FETTER, KENNY "KENNY DID IT" What's in it for those riders who go the distance? Bonus Round Just for Participating! Two random drawings will be held where the dealership will award the following to one Male and one Female Mileage Challenge participant.
def run_threaded(): from ws4py.client.threadedclient import WebSocketClient class EchoClient(WebSocketClient): def opened(self): self.send("hello") def closed(self, code, reason=None): print(("Closed down", code, reason)) def received_message(self, m): print(m) self.close() try: ws = EchoClient('wss://localhost:9000/ws') ws.connect() ws.run_forever() except KeyboardInterrupt: ws.close() def run_tornado(): from tornado import ioloop from ws4py.client.tornadoclient import TornadoWebSocketClient class MyClient(TornadoWebSocketClient): def opened(self): self.send("hello") def closed(self, code, reason=None): print(("Closed down", code, reason)) ioloop.IOLoop.instance().stop() def received_message(self, m): print(m) self.close() ws = MyClient('wss://localhost:9000/ws') ws.connect() ioloop.IOLoop.instance().start() def run_gevent(): from gevent import monkey; monkey.patch_all() import gevent from ws4py.client.geventclient import WebSocketClient ws = WebSocketClient('wss://localhost:9000/ws') ws.connect() ws.send("hello") def incoming(): while True: m = ws.receive() if m is not None: print(m) else: break ws.close() gevent.joinall([gevent.spawn(incoming)]) #run_gevent() run_threaded() run_tornado()
And when the sun has set, no candle can replace it. Decorate your bedroom walls with this perfect, mind-soothing ‘Pacific Sunset Wallpaper’. The beautiful scene encompasses a blaze of color: oranges, pearly pinks and vibrant purples. The wallpaper provides the perfect backdrop, enfolding the beauty of a sky turned light, dusky purple and littered with tiny silver stars. Enjoy the Pacific sunset, complete with detailed instructions and rich hues of color.
# encoding: utf8
from __future__ import unicode_literals

from django.db import models, migrations


def initial_data(apps, schema_editor):
    Symbol = apps.get_model('portfolio', 'Symbol')
    return


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Symbol',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=32)),
                ('slug', models.SlugField(default="")),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Quote',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('symbol', models.ForeignKey(to='portfolio.Symbol', to_field='id')),
                ('date', models.DateField()),
                ('adj_close', models.DecimalField(max_digits=32, decimal_places=30)),
                ('closed', models.DecimalField(max_digits=32, decimal_places=30)),
                ('high', models.DecimalField(max_digits=32, decimal_places=30)),
                ('low', models.DecimalField(max_digits=32, decimal_places=30)),
                ('opened', models.DecimalField(max_digits=32, decimal_places=30)),
                ('volume', models.DecimalField(max_digits=32, decimal_places=30)),
            ],
            options={
                'unique_together': set([(b'symbol', b'date')]),
            },
            bases=(models.Model,),
        ),
        migrations.RunPython(initial_data),
    ]
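As written, initial_data is a no-op: it fetches the historical Symbol model and returns without creating any rows. If seed rows were wanted, the hook could look like the sketch below; the ticker names are placeholders chosen only for illustration, not part of the original migration.

# Hypothetical seeding variant -- the symbols are placeholders.
def initial_data(apps, schema_editor):
    Symbol = apps.get_model('portfolio', 'Symbol')
    for name in ('AAPL', 'GOOG'):
        Symbol.objects.get_or_create(name=name, slug=name.lower())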
1. Try to keep the things you like away.
2. Try to keep away the things you like from you.

I would like to know which one is correct. The sentence I am trying to translate is: "Trata de mantenerte alejado de las cosas que te gustan."

Grammatically, both are correct, but I would change it to "Try to stay away from the things you like." That sounds more natural to me.
#!/bin/env python

# Copyright 2019 Arm Limited.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import sys
import os
import platform

parser = argparse.ArgumentParser(description='Test generator.')
parser.add_argument('--out')
parser.add_argument('--expected')
group = parser.add_mutually_exclusive_group()
group.add_argument('--shared', help='use .so or .dylib extension', action='store_true')
group.add_argument('--static', help='use .a extension', action='store_true')

args = parser.parse_args()

if args.shared:
    if platform.system() == 'Darwin':
        extension = '.dylib'
    else:
        extension = '.so'
elif args.static:
    extension = '.a'
else:
    extension = ''

expected = args.expected + extension

if os.path.basename(args.out) != expected:
    print("Output from generation: {} but expected: {}".format(args.out, expected))
    sys.exit(1)
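To make the pass/fail contract concrete, here are hypothetical invocations; the script name and paths are placeholders, not from the original source.

# python check_output.py --out build/libfoo.so --expected libfoo --shared
#     -> exits 0 on Linux (.so matches); on macOS it would expect libfoo.dylib
# python check_output.py --out build/libfoo.a --expected libfoo --static
#     -> exits 0
# python check_output.py --out build/libbar.so --expected libfoo --shared
#     -> prints the mismatch and exits 1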
Automotive logistics is a complex and challenging blend of just-in-time shipping, established processes, technology-enabled solutions and supplier compliance. At Transplace, we understand the demands and requirements of just-in-time (JIT) shipping in the automotive industry and the continual need to drive down inventory costs. With robust processes in place to accelerate supplier on-boarding and streamline inbound material flows, our logistics solutions drive down costs and eliminate excess inventory while enhancing end-to-end shipment visibility and collaboration. We provide our automotive customers with powerful analysis that uncovers opportunities for supply chain improvement and business growth, improved demand planning, and reduced inventory-management overhead, including vendor-owned and vendor-managed inventory solutions.
##########################################################################
# Author:               Jane Curry, [email protected]
# Date:                 February 3rd, 2011
# Revised:
#
# info.py for ApcPdu ZenPack
#
# This program can be used under the GNU General Public License version 2
# You can find full information here:  http://www.zenoss.com/oss
#
################################################################################

__doc__ = """info.py

Representation of ApcPdu components.

$Id: info.py,v 1.2 2010/12/14 20:45:46 jc Exp $"""

__version__ = "$Revision: 1.4 $"[11:-2]

from zope.interface import implements
from Products.Zuul.infos import ProxyProperty
from Products.Zuul.infos.component import ComponentInfo
from Products.Zuul.decorators import info
from ZenPacks.ZenSystems.ApcPdu import interfaces


class ApcPduOutletInfo(ComponentInfo):
    implements(interfaces.IApcPduOutletInfo)

    outNumber = ProxyProperty("outNumber")
    outName = ProxyProperty("outName")
    outState = ProxyProperty("outState")
    outBank = ProxyProperty("outBank")


class ApcPduBankInfo(ComponentInfo):
    implements(interfaces.IApcPduBankInfo)

    bankNumber = ProxyProperty("bankNumber")
    bankState = ProxyProperty("bankState")
    bankStateText = ProxyProperty("bankStateText")


class ApcPduPSInfo(ComponentInfo):
    implements(interfaces.IApcPduPSInfo)

    supply1Status = ProxyProperty("supply1Status")
    supply2Status = ProxyProperty("supply2Status")
The Damara Dik-dik (Madoqua kirkii damarensis), occurring in northern and central Namibia, is a subspecies of Kirk’s Dik-dik, which is native to East Africa. With a shoulder height of up to 40 cm, these animals are among the smallest antelopes in Africa. I saw and photographed these graceful creatures in the Etosha National Park, Namibia, in February 2007. Dik-diks favour comparatively dense woodland with shrubs but little grass. In these habitats they feed mainly on the shoots, leaves, fruits and flowers of woody plants and forbs. The antelopes live in monogamous pairs, raising their offspring together. The characteristic long snout is probably used as a means to cool the blood and reduce water loss through evaporation.

haha – thank you! Yes, I love the nose, too – it looks really cute!
We’ve seen one in Kenya. Looks like a pocket-sized antelope 🙂 Nice photos.
haha – yes, they are really cute! Thanks for your comment!
Dik-diks are one of my favourite animals! I have a bit of a soft spot for little hoofed creatures. The photo of the mid-yawn is fantastic!
They are so lovely and cute, aww! 🙂
Thank you, I think so, too!
from datetime import datetime, timedelta

from django.db import models
from django.utils import timezone
from django.conf import settings
from django.core.urlresolvers import reverse

# Create your models here.


class Author(models.Model):
    first_name = models.CharField(max_length=60)
    last_name = models.CharField(max_length=60)
    email = models.EmailField(blank=True)

    def __str__(self):
        return self.first_name + ' ' + self.last_name


class Publisher(models.Model):
    """docstring for Publisher"""
    name = models.CharField(max_length=200)
    address = models.TextField(blank=True)
    website = models.URLField(blank=True)

    def __str__(self):
        return self.name


class Book(models.Model):
    name = models.CharField(max_length=200)
    edition = models.SmallIntegerField(default=1)
    authors = models.ManyToManyField(Author, blank=True)
    publisher = models.ManyToManyField(Publisher, blank=True)
    published = models.PositiveSmallIntegerField(blank=True)
    pages = models.IntegerField(default=0)
    # BigIntegerField: 10- and 13-digit ISBNs overflow a 32-bit IntegerField.
    isbn_10 = models.BigIntegerField(default=0, help_text="Do not include dashes")
    isbn_13 = models.BigIntegerField(default=0, help_text="Do not include dashes")
    description = models.TextField()
    cover_image = models.ImageField('cover Image', upload_to='cover_pics/%Y-%m-%d/',
                                    null=True, blank=True)
    date_added = models.DateTimeField(default=datetime.now)
    available = models.BooleanField(default=True)

    # this method causes a button labelled "View on site" to
    # appear in the top right-hand side of the book admin page.
    def get_absolute_url(self):
        return reverse('books:book_detail', args=[self.id])

    def __str__(self):
        if self.edition == 1:
            nth = "st"
        elif self.edition == 2:
            nth = "nd"
        elif self.edition == 3:
            nth = "rd"
        else:
            nth = "th"
        return self.name + ", " + str(self.edition) + nth + " Edition"

    def was_added_recently(self):
        # `datetime` is the imported class, not the module, so
        # datetime.timedelta would raise; use the timedelta imported above.
        return self.date_added >= timezone.now() - timedelta(days=30)


class Loaned(models.Model):
    loaned_by = models.ForeignKey(settings.AUTH_USER_MODEL)
    book = models.ForeignKey(Book)
    timestamp = models.DateTimeField(auto_now_add=True)
    returned = models.BooleanField(default=False)

    def __str__(self):
        return self.book.name

    class Meta:
        verbose_name = "Loaned Book"
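A short usage sketch of these models follows. The app label 'books' matches the URL name used in get_absolute_url above; the author and title are placeholders, not from the original source.

# Hypothetical shell-session sketch; author and title are placeholders.
from books.models import Author, Book

ada = Author.objects.create(first_name='Ada', last_name='Lovelace')
book = Book.objects.create(name='Notes', edition=1, published=1843, pages=64,
                           description='Annotated translation.')
book.authors.add(ada)
print(book)                       # -> "Notes, 1st Edition"
print(book.was_added_recently())  # -> True, since it was just created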
The artificial fern teardrop is an ideal décor piece if you want to decorate a room with limited floor space, like an apartment or office. At a height of 28 inches, the soft green foliage starts off thick at the top and thins as you move down its length, rather like a teardrop. Settle this piece in a hanging basket and let it accent any shelf space on the wall.