text
stringlengths 29
850k
|
---|
#!/usr/bin/env python3
# NOTE: Before running this script, you may want to either get my cache,
# or run 'fetch.py' from the branch 'crawler-fetch'.
import json
import nice
import os
import subprocess
# If True:
# - only export images where CHOICES_PRIORITY is still relevant
#   (i.e., more than one image and no entry in CHOICES_OVERRIDE)
# - no pols.json written
# - non-standard filenames
# - no thumbnails
# If False:
# - export "all" images, defaulting to the order in CHOICES_PRIORITY.
# - pols.json written
# - standard filenames, identical in scheme to the old ones.
CHOICE_MODE = False
# Short aliases for the image-source slugs, to keep CHOICES_OVERRIDE compact.
w, l, c, s, g = 'wiki', 'die linke', 'cxu', 'spd', 'gruene'
# Hand-picked image-source slug per politician id ('pid' -> slug).
# Recommended: open a new editor and just write down entries like '52g',
# and let regexes do the rest.
CHOICES_OVERRIDE = {
    '0': l,
    '4': c,
    '5': w,
    '6': w,
    '7': s,
    '9': s,
    '12': g,
    '14': w,
    '16': c,
    '22': s,
    '23': s,
    '24': l,
    '25': w,
    '28': g,
    '29': g,
    '32': c,
    '33': l,
    '34': w,
    '40': c,
    '41': c,
    '42': l,
    '43': s,
    '45': l,
    '56': g,
    '59': w,
    '60': w,
    '61': c,
    '62': w,
    '64': w,
    '67': s,
    '68': s,
    '70': s,
    '74': l,
    '76': l,
    '77': g,
    '78': s,
    '85': w,
    '88': g,
    '89': w,
    '91': g,
    '95': s,
    '97': l,
    '98': s,
    '99': s,
    '104': w,
    '105': w,
    '111': c,
    '114': s,
    '117': s,
    '118': s,
    '124': c,
    '125': w,
    '127': s,
    '130': w,
    '132': w,
    '133': l,
    '134': w,
    '142': l,
    '145': w,
    '147': s,
    '150': w,
    '153': w,
    '156': l,
    '159': w,
    '162': c,
    '165': c,
    '166': l,
    '172': w,
    '173': s,
    '175': l,
    '176': w,
    '177': w,
    '178': s,
    '179': s,
    '181': g,
    '182': w,
    '183': c,
    '184': c,
    '186': w,
    '188': s,
    '189': c,
    '190': w,
    '196': s,
    '204': s,
    '209': w,
    '211': s,
    '214': w,
    '215': g,
    '217': w,
    '218': g,
    '224': c,
    '226': l,
    '229': s,
    '231': g,
    '233': w,
    '234': l,
    '238': c,
    '239': w,
    '240': s,
    '243': w,
    '244': s,
    '245': s,
    '252': l,
    '254': w,
    '257': w,
    '259': w,
    '260': w,
    '261': s,
    '264': c,
    '265': w,
    '267': w,
    '268': s,
    '270': c,
    '271': w,
    '272': c,
    '273': s,
    '275': g,
    '276': c,
    '278': w,
    '282': l,
    '283': w,
    '284': g,
    '287': l,
    '288': w,
    '290': w,
    '291': g,
    '293': c,
    '294': w,
    '295': g,
    '298': c,
    '299': w,
    '301': g,
    '309': s,
    '313': s,
    '314': l,
    '315': w,
    '317': l,
    '319': g,
    '320': s,
    '321': c,
    '325': l,
    '326': w,
    '328': l,
    '329': c,
    '332': g,
    '335': s,
    '339': l,
    '341': w,
    '344': l,
    '346': w,
    '348': g,
    '350': s,
    '351': w,
    '356': w,
    '357': s,
    '360': w,
    '361': w,
    '369': g,
    '373': l,
    '375': w,
    '379': w,
    '385': w,
    '386': w,
    '389': g,
    '392': w,
    '393': c,
    '395': s,
    '397': l,
    '398': g,
    '399': g,
}
# Image-source preference order used when no human override exists:
# earlier entries win.
CHOICES_PRIORITY = [
    'twitter',  # Just in case we ever do that
    'spd',
    'die linke',
    'gruene',
    'wiki',  # Not the best source of images
    'cxu',  # Often enough worse than Wikipedia's images
]
DIR_PREFIX = 'preview'
# If this fails: you should always start from scratch here!
os.mkdir(DIR_PREFIX)
def convert(*args):
    """Invoke ImageMagick's `convert` with the given arguments.

    Output is captured; on a non-zero exit status, both captured
    streams are printed for debugging and the error is re-raised.
    """
    cmd = ['convert']
    cmd.extend(args)
    try:
        subprocess.run(cmd,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       check=True)
    except subprocess.CalledProcessError as err:
        print('stdout:\n{}\nstderr:\n{}\n'.format(err.stdout, err.stderr))
        raise
def checkout(pid, fields):
    """Download and export the image described by `fields` for politician
    `pid` into DIR_PREFIX, and return the metadata entry for pols.json.

    :param pid: politician id string; in CHOICE_MODE a slug is appended
    :param fields: dict with at least 'url' and 'license'; optionally
                   'is_compressed' and 'copyright'
    :return: dict with 'pathToImage', 'pathToThumb', 'license'
             and (if present) 'copyright'
    """
    img_prefix = os.path.join(DIR_PREFIX, pid)
    # nice.get() fetches the URL (or serves it from the local cache) and
    # returns a local file path.
    dl_path = nice.get(fields['url'])
    freshest_path = dl_path
    # Provide '_raw' for intermediate processing
    raw_dst_path = img_prefix + '_raw.jpg'
    if fields.get('is_compressed'):
        # Unpack the (single-file) archive to stdout, straight into _raw.
        with open(raw_dst_path, 'wb') as raw_fp:
            subprocess.run(['unzip', '-p', dl_path],
                           stdout=raw_fp, stderr=subprocess.PIPE, check=True)
        freshest_path = raw_dst_path
    else:
        # Need '../' to get out of 'preview/'
        os.symlink('../' + dl_path, raw_dst_path)
    # Something about digitally rotated images (Michael Grosse-Brömer, 154)
    # doesn't work as it should.
    # NOTE(review): substring match, so pids like '154wiki' (pid + slug in
    # CHOICE_MODE) are also caught — presumably intended; confirm.
    inject = []
    if '154' in pid:
        inject = ['-rotate', '-90']
    # Provide ready-to-use image
    convert(freshest_path,
            '-resize', '330x330^',
            '-gravity', 'north',
            '-extent', '330x330',
            *inject,
            '-strip',
            img_prefix + '.jpg')
    if not CHOICE_MODE:
        # Provide thumbnail
        convert(freshest_path,
                '-thumbnail', '75x75^',
                '-gravity', 'north',
                '-extent', '75x75',
                *inject,
                img_prefix + '_t.jpg')
        # TODO: Use '-strip'.
        # Don't do it right now in order to
        # avoid blowing up 'heavy' even more.
    # Retract '_raw'
    os.remove(raw_dst_path)
    entry = {
        'pathToImage': pid + '.jpg',
        'pathToThumb': pid + '_t.jpg',
        'license': fields['license'],
    }
    if 'copyright' in fields:
        entry['copyright'] = fields['copyright']
    return entry
def choose_img(pid, imgs):
    """Pick which image entry to use for politician `pid`.

    Resolution order: explicit human override from CHOICES_OVERRIDE,
    then the only available image, then the first matching slug from
    CHOICES_PRIORITY (with a warning, since no human picked it).
    """
    if pid in CHOICES_OVERRIDE:
        slug = CHOICES_OVERRIDE[pid]
    elif len(imgs) == 1:
        slug = next(iter(imgs.keys()))
    else:
        print('[WARN] No human selection for ' + pid)
        candidates = [src for src in CHOICES_PRIORITY if src in imgs]
        assert candidates, (imgs.keys(), CHOICES_PRIORITY)
        slug = candidates[0]
    return imgs[slug]
# Hand-crafted extra accounts merged into pols.json on top of the real
# politicians (keyed by pid, same schema as the converted entries).
SPOOF_USERS = {
    'hot': {
        "twittering": {
            "twitterId": "4718199753",
            "twitterUserName": "HouseOfTweetsSB"
        },
        "self_bird": "amsel",
        "party": "Gr\u00fcn",
        "name": "House Of Tweets",
        "pid": "hot",
        "cv": {
            "en": "A good bird person. Very reliable. But also killed. In bird culture, that was considered a 'dick move'.",
            "de": "Uhh, keine Ahnung, ich kenn das Zitat nur auf Englisch."
        },
        "images": {
            "pathToThumb": "tgroup_greengr\u00fcn.jpg",
            "pathToImage": "group_greengr\u00fcn.jpg"
        },
        "citizen_bird": "amsel"
    },
    '523': {
        "twittering": {
            "twitterId": "237115617",
            "twitterUserName": "sc_ontour"
        },
        "self_bird": "girlitz",
        "party": "SPD",
        "name": "Stephan Schweitzer",
        "pid": "523",
        "cv": {
            "en": "Schweitzer, born in Saarland, used to work in the Willy-Brandt-Haus in Berlin for four years. He started out as head of Astrid Klug's office, then became the head of the department for communication. Lastly, he was technical director for the election campaign. Before transferring to Berlin, the certified public administration specialist directed the affairs of the Saar-SPD. His career began as publicist for the Saarlouis county in 1993.",
            "de": "Schweitzer, ein gebürtiger Saarländer, arbeitete zuvor vier Jahre im Willy-Brandt-Haus in Berlin, zunächst als Büroleiter der damaligen SPD-Bundesgeschäftsführerin Astrid Klug, dann als Abteilungsleiter für Kommunikation und zuletzt als technischer Wahlkampfleiter im Bundestagswahlkampf. Vor seinem Wechsel nach Berlin hatte der Diplom-Verwaltungswirt, der seine Laufbahn 1993 als Pressesprecher des Landkreises Saarlouis begann, die Geschäfte der Saar-SPD geführt."
        },
        "images": {
            "pathToThumb": "523_t.jpg",
            "pathToImage": "523.jpg"
        },
        "citizen_bird": "zaunkoenig"
    }
}
def prune_convert(pols):
    """Convert the politician list into the final pols.json mapping.

    Keeps only twittering politicians, keys them by pid, drops the raw
    'imgs' data, and merges in the hand-crafted SPOOF_USERS entries.
    """
    by_pid = {entry['pid']: entry for entry in pols if 'twittering' in entry}
    for entry in by_pid.values():
        del entry['imgs']
    for pid, spoof in SPOOF_USERS.items():
        assert pid == spoof['pid']
        by_pid[pid] = spoof
    return by_pid
def run():
    """Main entry point: read converge_each.json, export images for every
    twittering politician, and (unless CHOICE_MODE) write pols.json."""
    with open('converge_each.json', 'r') as fp:
        pols = json.load(fp)
    for e in pols:
        if 'twittering' not in e:
            print('[INFO] Skipping (not twittering) ' + e['full_name'])
            continue
        if len(e['imgs']) == 0:
            print('[WARN] No images at all for ' + e['full_name'])
            continue
        print('[INFO] Checking out files for ' + e['full_name'])
        if not CHOICE_MODE:
            # Normal export: one chosen image per politician.
            fields = choose_img(e['pid'], e['imgs'])
            e['images'] = checkout(e['pid'], fields)
        elif len(e['imgs']) >= 2 and e['pid'] not in CHOICES_OVERRIDE:
            # Choice mode: export every candidate (pid + slug filenames)
            # so a human can pick one for CHOICES_OVERRIDE.
            for slug, fields in e['imgs'].items():
                checkout(e['pid'] + slug, fields)
    if not CHOICE_MODE:
        print('[INFO] CHOICE_MODE = False, so I\'ll write out pols.json')
        pols = prune_convert(pols)
        with open('pols.json', 'w') as fp:
            json.dump(pols, fp, sort_keys=True, indent=2)
    else:
        print('[INFO] CHOICE_MODE = True, so not writing anything')
# Script entry point.
if __name__ == '__main__':
    if not CHOICE_MODE:
        print('[INFO] If there\'s many complaints about missing human choices, re-run with CHOICE_MODE = True')
    run()
    print('Done.')
|
Radio Jockeys have to talk a lot. In fact they make one wonder if they ever kept quiet! Atul Kulkarni has decided to go ahead and check it out for himself. He intends to invite one of the RJs for a 'silent' dinner.
There he stands in the corner, arms folded, his fingers playing on imaginary piano keys on his arms. There's a certain nervous energy about him. He's waiting for the verdict.
For years after reading about a foreigner who settled down in Coorg and created a forest nearby, Atul Kulkarni nursed the same dream.
The latest from the world's largest film industry — the Indian film industry. It makes almost three films a day, spends half a billion dollars in doing so, earns a billion in return, and is growing at a rate of 15 percent a year.
Two-time National Award winner, actor Atul Kulkarni's thoughts on politics and patriotism are refreshing — as when he comments that complacency has become the key word in the current situation.
Ready for a Pav-Wow ?
Mumbai doesn't think on an empty stomach. And here's some food for thought. Think of the satisfaction you felt when you took that first bite of a piping hot, butter-fried pav bhaji, the soft, warm mouthful offset by the crunchy, freshly-chopped kanda sprinkled on top, doused with lemon juice.
The movement around the Jan Lokpal bill may be subdued now, but the events of April-August 2011 will haunt India. Therefore, it is important to understand what actually occurred and what will be the consequences.
Whether it was Shriram Abhyankar in Heyy Ram, Potya Sawant in Chandani Bar, Laxman Pandey in Rang De Basanti, Atul Kulkarni's performance is stupendous in every film.
... पहिले ते राजकारण !
‘हिंदीत नोकराची भुमिका करणार नाही’ असं अभिनेता सिद्धार्थ जाधवने गेल्याच आठवड्यात ‘मुंटा’शी गप्पा मारताना सांगितल. सिद्धार्थने घेतलेल्या या भूमिकेचं मराठी इंडस्ट्रीत खूप कौतुक झालं. या निमित्ताने, बॉलिवुडमध्ये मराठी कलाकारांचं काय स्थान आहे? अशा मुद्यांवर पुन्हा चर्चा होऊ लागलीय. हिंदी सिनेसृष्टीत अनेक वर्षं वावरणारा अष्टपैलू अभिनेता अतुल कुलकर्णी याने याबाबतची आपली मतं ‘मुंटा’शी शेअर केली.
Copyright © 2013 Atul Kulkarni. All Rights Reserved.
|
# NOTE(review): the description below was mangled during extraction — the
# mathematical symbols and variable names were stripped out. See the problem
# URL for the intact statement. Also, '__problem_url___' carries three
# trailing underscores; likely a typo, kept for compatibility.
__problem_title__ = "Integer-valued polynomials"
__problem_url___ = "https://projecteuler.net/problem=402"
__problem_description__ = "It can be shown that the polynomial + 4 + 2 + 5 is a multiple of 6 " \
                          "for every integer . It can also be shown that 6 is the largest " \
                          "integer satisfying this property. Define M( , , ) as the maximum such " \
                          "that + + + is a multiple of for all integers . For example, M(4, 2, " \
                          "5) = 6. Also, define S( ) as the sum of M( , , ) for all 0 < , , ≤ . " \
                          "We can verify that S(10) = 1972 and S(10000) = 2024258331114. Let F " \
                          "be the Fibonacci sequence: F = 0, F = 1 and F = F + F for ≥ 2. Find " \
                          "the last 9 digits of Σ S(F ) for 2 ≤ ≤ 1234567890123."
import timeit
class Solution():
    """Placeholder solver for Project Euler problem 402."""

    @staticmethod
    def solution1():
        """Solve the problem. Not implemented yet; returns None."""
        return None

    @staticmethod
    def time_solutions():
        """Time each solution attempt once and print its duration."""
        timer = timeit.Timer('Solution.solution1()',
                             setup='from __main__ import Solution')
        print('Solution 1:', timer.timeit(number=1))
# Script entry point: print the (unimplemented) answer, then timings.
if __name__ == '__main__':
    s = Solution()
    print(s.solution1())
    s.time_solutions()
|
ThanksSearch.com is a browser hijacker which promises to provide you with a search engine that gives improved results. Improved results mean saving time, time is money so it certainly seems like you’re offered an excellent deal. But, as is often the case, what’s promised and what’s actually given are two different things. Browser hijackers are created with one single purpose – displaying advertisements inside the browser of a computer which has been affected. Those advertisements generate traffic and traffic brings revenue. So improved search results are nowhere to be found here – only ads that slow the computer down and lead to system crashes.
Wait, system crashes? How’s this possible if your computer has always been stable as a rock before and never crashed? Well, because it never had to deal with the stream of advertisements that’s literally endless. It creates a tremendous amount of pressure on CPU and RAM and they’re not able to process everything that’s getting thrown their way. Remember how good it felt to create a powerful computer? Browser hijackers render your efforts useless. And, to make matters even worse, they steal your personal data such as credit card information, home address and phone number. This may be used against you and that’s why it’s very important to avoid those kinds of situations.
Hijackers like ThanksSearch.com can wait for you on different dubious sites so you need to only visit them when there’s no other way to get what you need. Unfamiliar browser extensions and plugins should also be ignored by you. Be careful when downloading torrents and also keep in mind that Custom installation is always the way to go, because you’re able to uncheck everything that may cause harm to the computer. Follow those simple steps and everything should be dandy.
You can easily remove ThanksSearch.com browser hijacker from your system using one of the effective anti-malware tools listed below, or you can follow our detailed step-by-step manual removal guide.
If you have an antivirus software running on your computer, please check for its updates and scan your system. If the problem still persists, download and install one of the recommended anti-malware tools to automatically remove ThanksSearch.com and other malware threats from your PC.
Download and install one of the recommended anti-malware tools to automatically remove ThanksSearch.com and other malware threats from your PC.
We’ve created this step-by-step removal guide to help you manually remove ThanksSearch.com and other similar security threats from your computer. Please carefully follow all the steps listed in the instruction below. We’ve attached detailed screenshots and video guides for your convenience. If you have any questions or issues, please contact us via email, create a public forum question or contact us using online contact form. Please add your comments and suggestions to this guide below.
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2014-2015 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_config import cfg
from stevedore import driver
from gnocchi import exceptions
# Configuration options contributed by this module (registered by the
# consuming application under the 'storage' group).
OPTS = [
    cfg.StrOpt('driver',
               default='file',
               help='Storage driver to use'),
]

# A single measured sample: when it was taken and its numeric value.
Measure = collections.namedtuple('Measure', ['timestamp', 'value'])
class Metric(object):
    """A named time series bound to an archive policy."""

    def __init__(self, id, archive_policy,
                 created_by_user_id=None,
                 created_by_project_id=None,
                 name=None,
                 resource_id=None):
        self.id = id
        self.archive_policy = archive_policy
        self.created_by_user_id = created_by_user_id
        self.created_by_project_id = created_by_project_id
        self.name = name
        self.resource_id = resource_id

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, self.id)

    def __hash__(self):
        # NOTE(review): identity-based hash while __eq__ compares by value,
        # so two equal metrics can hash differently. Kept as-is because
        # existing storage code may rely on identity hashing — confirm.
        return id(self)

    def __str__(self):
        return str(self.id)

    def __eq__(self, other):
        # BUG FIX: the original tested isinstance(self, Metric), which is
        # always true, so comparing against a non-Metric object raised
        # AttributeError instead of returning False.
        return (isinstance(other, Metric)
                and self.id == other.id
                and self.archive_policy == other.archive_policy
                and self.created_by_user_id == other.created_by_user_id
                and self.created_by_project_id == other.created_by_project_id
                and self.name == other.name
                and self.resource_id == other.resource_id)
class InvalidQuery(Exception):
    """Error raised for an invalid search query."""
    pass
class MetricDoesNotExist(Exception):
    """Error raised when this metric does not exist."""

    def __init__(self, metric):
        self.metric = metric
        message = "Metric %s does not exist" % metric
        super(MetricDoesNotExist, self).__init__(message)
class AggregationDoesNotExist(Exception):
    """Error raised when the aggregation method doesn't exist for a metric."""

    def __init__(self, metric, method):
        self.metric = metric
        self.method = method
        message = ("Aggregation method '%s' for metric %s does not exist"
                   % (method, metric))
        super(AggregationDoesNotExist, self).__init__(message)
class MetricAlreadyExists(Exception):
    """Error raised when this metric already exists."""

    def __init__(self, metric):
        self.metric = metric
        message = "Metric %s already exists" % metric
        super(MetricAlreadyExists, self).__init__(message)
class NoDeloreanAvailable(Exception):
    """Error raised when trying to insert a value that is too old."""

    def __init__(self, first_timestamp, bad_timestamp):
        self.first_timestamp = first_timestamp
        self.bad_timestamp = bad_timestamp
        message = "%s is before %s" % (bad_timestamp, first_timestamp)
        super(NoDeloreanAvailable, self).__init__(message)
class MetricUnaggregatable(Exception):
    """Error raised when metrics can't be aggregated."""

    def __init__(self, metrics, reason):
        self.metrics = metrics
        self.reason = reason
        # BUG FIX: the join separator was " ," (space before the comma),
        # which rendered the id list as "1 ,2" instead of "1, 2".
        super(MetricUnaggregatable, self).__init__(
            "Metrics %s can't be aggregated: %s"
            % (", ".join(str(m.id) for m in metrics), reason))
def _get_driver(name, conf):
    """Return the driver named name.

    :param name: The name of the driver.
    :param conf: The conf to pass to the driver.
    """
    # Resolve the driver class via the 'gnocchi.storage' entry-point
    # namespace, then instantiate it with the storage configuration.
    d = driver.DriverManager('gnocchi.storage',
                             name).driver
    return d(conf)
def get_driver(conf):
    """Return the configured driver.

    :param conf: configuration object with a 'storage' section whose
        'driver' option names the driver to load.
    """
    return _get_driver(conf.storage.driver,
                       conf.storage)
class StorageDriver(object):
    """Abstract base class for storage drivers.

    Concrete drivers override these static methods; each default
    implementation raises gnocchi's NotImplementedError.
    """

    # NOTE(review): declaring __init__ as a @staticmethod taking conf (no
    # self) is unusual; kept as-is since subclasses apparently follow the
    # same convention — confirm before changing.
    @staticmethod
    def __init__(conf):
        pass

    @staticmethod
    def create_metric(metric):
        """Create a metric.

        :param metric: The metric object.
        """
        raise exceptions.NotImplementedError

    @staticmethod
    def add_measures(metric, measures):
        """Add a measure to a metric.

        :param metric: The metric measured.
        :param measures: The actual measures.
        """
        raise exceptions.NotImplementedError

    @staticmethod
    def get_measures(metric, from_timestamp=None, to_timestamp=None,
                     aggregation='mean'):
        """Get measures of a metric.

        :param metric: The metric measured.
        :param from_timestamp: The timestamp to get the measure from.
        :param to_timestamp: The timestamp to get the measure to.
        :param aggregation: The type of aggregation to retrieve.
        """
        raise exceptions.NotImplementedError

    @staticmethod
    def delete_metric(metric):
        """Delete a metric and its stored measures.

        :param metric: The metric to delete.
        """
        raise exceptions.NotImplementedError

    @staticmethod
    def get_cross_metric_measures(metrics, from_timestamp=None,
                                  to_timestamp=None, aggregation='mean',
                                  needed_overlap=None):
        """Get aggregated measures of multiple metrics.

        :param metrics: The metrics measured to aggregate.
        :param from_timestamp: The timestamp to get the measure from.
        :param to_timestamp: The timestamp to get the measure to.
        :param aggregation: The type of aggregation to retrieve.
        """
        raise exceptions.NotImplementedError

    @staticmethod
    def search_value(metrics, query, from_timestamp=None,
                     to_timestamp=None,
                     aggregation='mean'):
        """Search for an aggregated value that realizes a predicate.

        :param metrics: The list of metrics to look into.
        :param query: The query being sent.
        :param from_timestamp: The timestamp to get the measure from.
        :param to_timestamp: The timestamp to get the measure to.
        :param aggregation: The type of aggregation to retrieve.
        """
        raise exceptions.NotImplementedError
|
Batesville Last Updated: 15 April 19 | Trust, quality and experience you can afford. Exclusive same day service in Batesville.
Check out automotive experts in Batesville.
|
import json
class Messenger:
    """
    Messenger wraps either a slackbot Message or a SlackClient instance
    and presents a single sending interface over both.
    """

    def __init__(self, service, channel=None):
        self.service = service
        self.service_type = type(service).__name__
        self.channel = channel

    def _wraps_message(self):
        # True when the wrapped service is a slackbot Message object;
        # otherwise it is treated as a SlackClient.
        return self.service_type == "Message"

    def reply(self, message):
        if self._wraps_message():
            self.service.reply(message)
        else:
            self.service.send_message(self.channel, message)

    def send(self, message):
        if self._wraps_message():
            self.service.send(message)
        else:
            self.service.send_message(self.channel, message)

    def send_attachments(self, attachments):
        if self._wraps_message():
            self.service.send_webapi('', json.dumps(attachments))
        else:
            self.service.send_message(self.channel, '', json.dumps(attachments))

    def full_name(self):
        if self._wraps_message():
            user = self.service.body['user']
            return self.service.channel._client.users[user][u'real_name']
        return "*Unknown Person*"  # Or should I throw an error?

    def sender_id(self):
        if self._wraps_message():
            user = self.service.body['user']
            return self.service.channel._client.users[user]['id']
        return 0  # Or should I throw an error?
|
TBM Hour 1: What's Trending & Should Matt Ryan Matter In Making THE Decision?
Andy started off the hour by going through our What's Trending topics. He then went into Matt Ryan's comments about Steve Sarkisian.
|
""" The data in airports.json is a subset of US airports with field
elevations > 1500 meters. The query result was taken from
.. code-block:: none
http://services.nationalmap.gov/arcgis/rest/services/GlobalMap/GlobalMapWFS/MapServer/10/query
on October 15, 2015.
"""
from __future__ import absolute_import
from bokeh.util.dependencies import import_required
pd = import_required('pandas',
'airports sample data requires Pandas (http://pandas.pydata.org) to be installed')
import json
import os
from . import _data_dir
with open(os.path.join(_data_dir(), 'airports.json'), 'r') as data_file:
content = data_file.read()
airports = json.loads(content)
schema = [['attributes', 'nam'], ['attributes', 'zv3'], ['geometry', 'x'], ['geometry', 'y']]
data = pd.io.json.json_normalize(airports['features'], meta=schema)
data.rename(columns={'attributes.nam': 'name', 'attributes.zv3': 'elevation'}, inplace=True)
data.rename(columns={'geometry.x': 'x', 'geometry.y': 'y'}, inplace=True)
|
The growth and value of every enterprise depends on its ability to combine talent and technology for value creation. Business innovation is helped by IT as this is rapidly becoming an enabler. But conventional IT now includes a wide world of cloud, mobile devices and apps. This diffusion of information and communications technology (ICT) in society and everyday life has significantly increased the volume, velocity and variety of production of data. But this volume and ready accessibility brings in the question of accountability, governance and security.
This conference bring together thought leaders who discuss various aspects on this theme and offer solutions for the problems raised. The format of the day is a mixture of hour long in-depth presentations from these thought leaders and then supporting case studies. Also the programme has two panel sessions where there is the opportunity to ask further questions to all the speakers, making this a fact finding and interactive day.
Due to the high level of threat there is an urgent need for organizations to truly understand their cyber security risk status so that, where necessary, they can take urgent remedial actions to rectify weaknesses. This presentation will show how to model cyber security threats and controls, measure risk status and react to change.
Cyber Security is often perceived as a technology issue. Although technology is of paramount importance to help protect organisations from cyber attacks and meet company’s security needs, both internally and externally, the evolving regulatory and compliance landscape in relation to cyber security and data protection will force organisations to revisit their operating models and business processes by enhancing security controls across the entire organization. Compliance and data protection regulation will increasingly become a board level matter. We will look into how technology can help companies achieve a higher degree of compliance and monitoring in the context of an enterprise wide risk management framework.
Data protection is undergoing a seismic change. In a judgment last October the European Court of Justice roundly condemned the “fiddle” that had operated for years allowing data to be freely exported to the US by way of the “Safe Harbour” arrangements. Those arrangements are in the process of being replaced by the new Privacy Shield backed up by the new SU Judicial Redress Act. In addition the new European General Data Protection Regulation is about to be enacted paving the way for more fundamental changes in data protection legislation, including “privacy by design” obligations. Dai Davis will give a brief tour of the changes and what companies should be doing to prepare for those changes.
All speakers with questions from the floor.
The social engineering 'problem' and why security awareness training alone will not save you!
The dual-issues of users' perceived weaknesses and lack of information/cyber security awareness are regarded by many as ever-increasing problems within organisations, backed up in a number of recent surveys. This webinar will explore the issue of social engineering and discuss attack motivations, methods and approaches, and why social engineering attacks work. Given that many organisations use security awareness/education programmes to mitigate the perceived problems, a case will be made as to why awareness/education courses on their own will not solve the problem.
All organisations want to be secure, however it is not always clear how to make appropriate decisions. We present an approach to optimising the defences of SMEs based on user capability. The approach is demonstrated using a case study of a company comprising of remote workers and cloud based resources.
This talk will look at the increasing impact of cybercrime on organisations and the consequence of the growing shortage of cybersecurity professionals. A key part of the solution to this shortage depends on the knowledge and skills of university computing graduates, not just those specialising in cybersecurity degrees. Recent curriculum changes at all educational levels, such as a joint initiative between industry and academia to specify cybersecurity degree accreditation requirements, will be discussed. The talk will also look at societal trends, the implications for cybersecurity and the educational needs of the population, both now and in the future.
Close of Conference. Drinks Reception and Networking.
|
# -*- coding: utf-8 -*-
import math
import threading
from collections import namedtuple
from threading import Lock
from copy import deepcopy
from .utils import Vector2D, MobileMixin
from .events import SoccerEvents
from . import settings
from .utils import dict_to_json
import random
import time
import zipfile
import traceback
import logging
logger = logging.getLogger("soccersimulator.mdpsoccer")
###############################################################################
# SoccerAction
###############################################################################
class SoccerAction(object):
    """A player's action: an acceleration vector plus a shoot vector.

    `name` is a free-form label; it is deliberately ignored by equality.
    """

    def __init__(self, acceleration=None, shoot=None, name=None):
        self.acceleration = acceleration or Vector2D()
        self.shoot = shoot or Vector2D()
        self.name = name or ""

    def copy(self):
        """Return a deep copy of this action."""
        return deepcopy(self)

    def set_name(self, name):
        """Set the action's label and return self (fluent style)."""
        self.name = name
        return self

    def __str__(self):
        return "Acc:%s, Shoot:%s, Name:%s" % (str(self.acceleration), str(self.shoot), str(self.name))

    def __repr__(self):
        return "SoccerAction(%s,%s,%s)" % (self.acceleration.__repr__(), self.shoot.__repr__(), self.name)

    def __eq__(self, other):
        # Name is intentionally not part of equality.
        return (other.acceleration == self.acceleration) and (other.shoot == self.shoot)

    def __add__(self, other):
        return SoccerAction(self.acceleration + other.acceleration, self.shoot + other.shoot)

    def __sub__(self, other):
        # BUG FIX: the original returned
        # Vector2D(acc_diff, shoot_diff) — stuffing two vectors into a
        # Vector2D's coordinate slots — unlike __add__ and __isub__ which
        # work on SoccerAction. Return a SoccerAction so subtraction
        # mirrors addition.
        return SoccerAction(self.acceleration - other.acceleration, self.shoot - other.shoot)

    def __iadd__(self, other):
        self.acceleration += other.acceleration
        self.shoot += other.shoot
        return self

    def __isub__(self, other):
        self.acceleration -= other.acceleration
        self.shoot -= other.shoot
        return self

    def to_dict(self):
        """Return a JSON-serializable view of this action."""
        return {"acceleration": self.acceleration, "shoot": self.shoot, "name": self.name}
###############################################################################
# Ball
###############################################################################
class Ball(MobileMixin):
    """The ball: a mobile object (position, velocity) with shoot physics."""

    def __init__(self, position=None, vitesse=None, **kwargs):
        super(Ball, self).__init__(position, vitesse, **kwargs)

    def next(self, sum_of_shoots):
        """Advance the ball one simulation step.

        Applies quadratic + linear braking to the current speed, then,
        if any shoots hit the ball this step, redirects the velocity
        along the summed shoot vector.

        :param sum_of_shoots: Vector2D, sum of all shoots applied this step
        """
        vitesse = self.vitesse.copy()
        # Friction: quadratic and linear braking terms on the speed norm.
        vitesse.norm = self.vitesse.norm - settings.ballBrakeSquare * self.vitesse.norm ** 2 - settings.ballBrakeConstant * self.vitesse.norm
        # Decompose the velocity along the unit vector of the shoot sum.
        snorm = sum_of_shoots.norm
        if snorm > 0:
            u_s = sum_of_shoots.copy()
            u_s.normalize()
            u_t = Vector2D(-u_s.y, u_s.x)  # orthogonal unit vector
            speed_abs = abs(vitesse.dot(u_s))
            speed_ortho = vitesse.dot(u_t)
            # Rebuild the velocity with the parallel component forced to
            # point in the shoot direction, then add the shoot impulse.
            speed_tmp = Vector2D(speed_abs * u_s.x - speed_ortho * u_s.y, speed_abs * u_s.y + speed_ortho * u_s.x)
            speed_tmp += sum_of_shoots
            vitesse = speed_tmp
        # Clamp to the maximum ball acceleration and move.
        self.vitesse = vitesse.norm_max(settings.maxBallAcceleration).copy()
        self.position += self.vitesse

    def inside_goal(self):
        """True when the ball is behind a goal line and within the goal mouth."""
        return (self.position.x < 0 or self.position.x > settings.GAME_WIDTH)\
            and abs(self.position.y - (settings.GAME_HEIGHT / 2.)) < settings.GAME_GOAL_HEIGHT / 2.

    def __repr__(self):
        return "Ball(%s,%s)" % (self.position.__repr__(), self.vitesse.__repr__())

    def __str__(self):
        return "Ball: pos: %s, vit: %s" % (str(self.position), str(self.vitesse))
###############################################################################
# PlayerState
###############################################################################
class PlayerState(MobileMixin):
""" Represente la configuration d'un joueur : un etat mobile (position, vitesse), et une action SoccerAction
"""
def __init__(self, position=None, vitesse=None,**kwargs):
"""
:param position: position du joueur
:param acceleration: acceleration du joueur
:param action: action SoccerAction du joueur
:return:
"""
super(PlayerState,self).__init__(position,vitesse)
self.action = kwargs.pop('action', SoccerAction())
self.last_shoot = kwargs.pop('last_shoot', 0)
self.__dict__.update(kwargs)
def to_dict(self):
return {"position":self.position,"vitesse":self.vitesse,"action":self.action,"last_shoot":self.last_shoot}
def __str__(self):
return "pos: %s, vit: %s, action:%s" %(str(self.position),str(self.acceleration),str(self.action))
def __repr__(self):
return "PlayerState(position=%s,vitesse=%s,action=%s,last_shoot=%d)" % \
(self.position.__repr__(),self.vitesse.__repr__(),self.action.__repr__(),self.last_shoot)
@property
def acceleration(self):
"""
:return: Vector2D Action acceleration du joueur
"""
return self.action.acceleration.norm_max(settings.maxPlayerAcceleration)
@acceleration.setter
def acceleration(self,v):
self.action.acceleration = v
@property
def shoot(self):
""" Vector2D Action shoot du joueur
:return:
"""
return self.action.shoot.norm_max(settings.maxPlayerShoot)
@shoot.setter
def shoot(self,v):
self.action.shoot = v
def next(self, ball, action=None):
""" Calcul le prochain etat en fonction de l'action et de la position de la balle
:param ball:
:param action:
:return: Action shoot effectue
"""
if not (hasattr(action,"acceleration") and hasattr(action,"shoot")):
action = SoccerAction()
self.action = action.copy()
self.vitesse *= (1 - settings.playerBrackConstant)
self.vitesse = (self.vitesse + self.acceleration).norm_max(settings.maxPlayerSpeed)
self.position += self.vitesse
if self.position.x < 0 or self.position.x > settings.GAME_WIDTH \
or self.position.y < 0 or self.position.y > settings.GAME_HEIGHT:
self.position.x = max(0, min(settings.GAME_WIDTH, self.position.x))
self.position.y = max(0, min(settings.GAME_HEIGHT, self.position.y))
self.vitesse = Vector2D()
if self.shoot.norm == 0 or not self.can_shoot():
self._dec_shoot()
return Vector2D()
self._reset_shoot()
if self.position.distance(ball.position) > (settings.PLAYER_RADIUS + settings.BALL_RADIUS):
return Vector2D()
return self._rd_angle(self.shoot,(self.vitesse.angle-self.shoot.angle)*(0 if self.vitesse.norm==0 else 1),\
self.position.distance(ball.position)/(settings.PLAYER_RADIUS+settings.BALL_RADIUS))
@staticmethod
def _rd_angle(shoot,dangle,dist):
    """ Apply a random perturbation to a shoot vector.

    :param shoot: Vector2D, the requested shoot
    :param dangle: angle between the player's velocity and the shoot (rad)
    :param dist: distance to the ball normalised by the contact radius
    :return: Vector2D, the perturbed shoot
    """
    # Smooth 0..1 exponential easing ramp; alpha controls the sharpness.
    eliss = lambda x, alpha: (math.exp(alpha*x)-1)/(math.exp(alpha)-1)
    # Fold the angle difference into [0, pi].
    dangle = abs((dangle+math.pi*2) %(math.pi*2) -math.pi)
    dangle_factor =eliss(1.-max(dangle-math.pi/2,0)/(math.pi/2.),5)
    norm_factor = eliss(shoot.norm/settings.maxPlayerShoot,4)
    dist_factor = eliss(dist,10)
    # The stronger/closer/more misaligned the shoot, the wider the random
    # angular spread; the norm is also slightly damped.
    angle_prc = (1-(1.-dangle_factor)*(1.-norm_factor)*(1.-0.5*dist_factor))*settings.shootRandomAngle*math.pi/2.
    norm_prc = 1-0.3*dist_factor*dangle_factor
    return Vector2D(norm=shoot.norm*norm_prc,
            angle=shoot.angle+2*(random.random()-0.5)*angle_prc)
def can_shoot(self):
    """Whether the shoot cool-down has expired.

    :return: True if the player may shoot this step
    """
    return not self.last_shoot > 0
def _dec_shoot(self):
    """Count down one step of the shoot cool-down timer."""
    self.last_shoot = self.last_shoot - 1
def _reset_shoot(self):
    """Restart the cool-down: number of steps before the next allowed shoot."""
    self.last_shoot = settings.nbWithoutShoot
def copy(self):
    """Deep, independent copy of this player state."""
    duplicate = deepcopy(self)
    return duplicate
###############################################################################
# SoccerState
###############################################################################
class SoccerState(object):
    """ State of one step of the game.  Holds the ball, the state of every
    player, the score and the step number.
    """
    def __init__(self,states=None,ball=None,**kwargs):
        # Mapping (id_team, id_player) -> PlayerState.
        self.states = states or dict()
        self.ball = ball or Ball()
        # Mapping (team, player) -> strategy name, recorded for display/replay.
        self.strategies = kwargs.pop('strategies',dict())
        self.score = kwargs.pop('score', {1: 0, 2: 0})
        self.step = kwargs.pop('step', 0)
        self.max_steps = kwargs.pop('max_steps', settings.MAX_GAME_STEPS)
        # 0 while no goal was scored this step, else the scoring team (1 or 2).
        self.goal = kwargs.pop('goal', 0)
        self.__dict__.update(kwargs)
    def __str__(self):
        return ("Step: %d, %s " %(self.step,str(self.ball)))+\
                " ".join("(%d,%d):%s" %(k[0],k[1],str(p)) for k,p in sorted(self.states.items()))+\
                (" score : %d-%d" %(self.score_team1,self.score_team2))
    def __repr__(self):
        return self.__str__()
    def to_dict(self):
        """JSON-serializable representation of the full state."""
        return dict(states=dict_to_json(self.states),
                strategies=dict_to_json( self.strategies),
                ball=self.ball,score=dict_to_json(self.score),step=self.step,
                max_steps=self.max_steps,goal=self.goal)
    def player_state(self, id_team, id_player):
        """ Return the state of one player.

        :param id_team: team number of the player (1 or 2)
        :param id_player: player number inside the team
        :return: PlayerState
        """
        return self.states[(id_team, id_player)]
    @property
    def players(self):
        """ Sorted list of the player keys (id_team, id_player).

        :return: list of keys
        """
        return sorted(self.states.keys())
    def nb_players(self, team):
        """ Number of players in the given team.

        :param team: 1 or 2
        :return: int
        """
        return len([x for x in self.states.keys() if x[0] == team])
    def get_score_team(self, idx):
        """ Score of team idx.

        :param idx: team number (1 or 2)
        :return: int
        """
        return self.score[idx]
    @property
    def score_team1(self):
        return self.get_score_team(1)
    @property
    def score_team2(self):
        return self.get_score_team(2)
    def copy(self):
        return deepcopy(self)
    def apply_actions(self, actions=None,strategies=None):
        """ Advance the game one step by applying the players' actions.

        :param actions: mapping (id_team, id_player) -> SoccerAction
        :param strategies: optional mapping of strategy names to record
        """
        if strategies: self.strategies.update(strategies)
        sum_of_shoots = Vector2D()
        self.goal = 0
        if actions:
            for k, c in self.states.items():
                if k in actions:
                    # Every player's effective shoot is accumulated into a
                    # single kick applied to the ball.
                    sum_of_shoots += c.next(self.ball, actions[k])
        self.ball.next(sum_of_shoots)
        self.step += 1
        if self.ball.inside_goal():
            # Ball in the left goal (x <= 0) means team 2 scored, else team 1.
            self._do_goal(2 if self.ball.position.x <= 0 else 1)
            return
        # Bounce the ball off the field borders: mirror the position across
        # the border and invert the velocity component.
        if self.ball.position.x < 0:
            self.ball.position.x = -self.ball.position.x
            self.ball.vitesse.x = -self.ball.vitesse.x
        if self.ball.position.y < 0:
            self.ball.position.y = -self.ball.position.y
            self.ball.vitesse.y = -self.ball.vitesse.y
        if self.ball.position.x > settings.GAME_WIDTH:
            self.ball.position.x = 2 * settings.GAME_WIDTH - self.ball.position.x
            self.ball.vitesse.x = -self.ball.vitesse.x
        if self.ball.position.y > settings.GAME_HEIGHT:
            self.ball.position.y = 2 * settings.GAME_HEIGHT - self.ball.position.y
            self.ball.vitesse.y = -self.ball.vitesse.y
    def _do_goal(self, idx):
        # Record the goal and remember which team scored this step.
        self.score[idx]+=1
        self.goal = idx
    @classmethod
    def create_initial_state(cls, nb_players_1=0, nb_players_2=0,max_steps=settings.MAX_GAME_STEPS):
        """ Create an initial state with the given number of players.

        :param nb_players_1: number of players in team 1
        :param nb_players_2: number of players in team 2
        :return: SoccerState
        """
        state = cls()
        state.reset_state(nb_players_1=nb_players_1,nb_players_2= nb_players_2)
        return state
    def reset_state(self, nb_players_1=0, nb_players_2=0):
        """Place the ball at midfield and the players at their kick-off
        positions (1 to 4 players per team are supported)."""
        # Default to the current team sizes when no counts are given.
        if nb_players_1 == 0 and self.nb_players(1) > 0:
            nb_players_1 = self.nb_players(1)
        if nb_players_2 == 0 and self.nb_players(2) > 0:
            nb_players_2 = self.nb_players(2)
        # quarters: 1/4, 1/2 and 3/4 of the field height;
        # rows: two columns per side (10% and 35% from each goal line).
        quarters = [i * settings.GAME_HEIGHT / 4. for i in range(1, 4)]
        rows = [settings.GAME_WIDTH * 0.1, settings.GAME_WIDTH * 0.35, settings.GAME_WIDTH * (1 - 0.35),
                settings.GAME_WIDTH * (1 - 0.1)]
        if nb_players_1 == 1:
            self.states[(1, 0)] = PlayerState(position=Vector2D(rows[0], quarters[1]))
        if nb_players_2 == 1:
            self.states[(2, 0)] = PlayerState(position=Vector2D(rows[3], quarters[1]))
        if nb_players_1 == 2:
            self.states[(1, 0)] = PlayerState(position=Vector2D(rows[0], quarters[0]))
            self.states[(1, 1)] = PlayerState(position=Vector2D(rows[0], quarters[2]))
        if nb_players_2 == 2:
            self.states[(2, 0)] = PlayerState(position=Vector2D(rows[3], quarters[0]))
            self.states[(2, 1)] = PlayerState(position=Vector2D(rows[3], quarters[2]))
        if nb_players_1 == 3:
            self.states[(1, 0)] = PlayerState(position=Vector2D(rows[0], quarters[1]))
            self.states[(1, 1)] = PlayerState(position=Vector2D(rows[0], quarters[0]))
            self.states[(1, 2)] = PlayerState(position=Vector2D(rows[0], quarters[2]))
        if nb_players_2 == 3:
            self.states[(2, 0)] = PlayerState(position=Vector2D(rows[3], quarters[1]))
            self.states[(2, 1)] = PlayerState(position=Vector2D(rows[3], quarters[0]))
            self.states[(2, 2)] = PlayerState(position=Vector2D(rows[3], quarters[2]))
        if nb_players_1 == 4:
            self.states[(1, 0)] = PlayerState(position=Vector2D(rows[0], quarters[0]))
            self.states[(1, 1)] = PlayerState(position=Vector2D(rows[0], quarters[2]))
            self.states[(1, 2)] = PlayerState(position=Vector2D(rows[1], quarters[0]))
            self.states[(1, 3)] = PlayerState(position=Vector2D(rows[1], quarters[2]))
        if nb_players_2 == 4:
            self.states[(2, 0)] = PlayerState(position=Vector2D(rows[3], quarters[0]))
            self.states[(2, 1)] = PlayerState(position=Vector2D(rows[3], quarters[2]))
            self.states[(2, 2)] = PlayerState(position=Vector2D(rows[2], quarters[0]))
            self.states[(2, 3)] = PlayerState(position=Vector2D(rows[2], quarters[2]))
        self.ball = Ball(Vector2D(settings.GAME_WIDTH / 2, settings.GAME_HEIGHT / 2),Vector2D())
        self.goal = 0
###############################################################################
# SoccerTeam
###############################################################################
class Player(object):
    """A named player bound to a strategy object."""
    def __init__(self,name=None,strategy=None):
        """
        :param name: player name (defaults to "")
        :param strategy: strategy object (or its name in replay mode)
        """
        self.name = name or ""
        self.strategy = strategy
    def __str__(self):
        return "%s (%s)" %(self.name,str(self.strategy))
    def __repr__(self):
        return self.__str__()
    def to_dict(self):
        """Serializable view of the player.

        BUG FIX: ``to_dict`` was defined twice; the first definition
        (returning only the name) was dead code silently shadowed by this
        one, so it has been removed.  Behavior is unchanged.
        """
        return {"name":self.name,"strategy":self.strategy.__repr__()}
class SoccerTeam(object):
    """ A football team: an ordered list of Player objects, plus a team
    name and an owner login.
    """
    def __init__(self, name=None, players=None, login=None):
        """
        :param name: team name
        :param players: list of Player(name, strategy)
        :param login: owner login
        """
        self.name = name or ""
        self.players = players or []
        self.login = login or ""
    def to_dict(self):
        return {"name": self.name, "players": self.players, "login": self.login}
    def __iter__(self):
        return iter(self.players)
    def __str__(self):
        roster = " ".join(str(member) for member in self.players)
        return "%s(%s): %s" % (str(self.name), self.login, roster)
    def __repr__(self):
        return str(self)
    def add(self, name, strategy):
        """Append a new Player and return self (fluent style)."""
        self.players.append(Player(name, strategy))
        return self
    @property
    def players_name(self):
        """
        :return: list of the team's player names
        """
        return [member.name for member in self.players]
    def player_name(self, idx):
        """
        :param idx: player number
        :return: that player's name
        """
        return self.players[idx].name
    @property
    def strategies(self):
        """
        :return: list of the players' strategies
        """
        return [member.strategy for member in self.players]
    def strategy(self, idx):
        """
        :param idx: player number
        :return: that player's strategy
        """
        return self.players[idx].strategy
    def compute_strategies(self, state, id_team):
        """ Compute the action of every player that has a strategy.

        :param state: current SoccerState (each strategy gets its own copy)
        :param id_team: team number
        :return: dict {(id_team, idx): action}
        """
        return {(id_team, i): member.strategy.compute_strategy(state.copy(), id_team, i)
                for i, member in enumerate(self.players)
                if hasattr(member.strategy, "compute_strategy")}
    @property
    def nb_players(self):
        """
        :return: number of players
        """
        return len(self.players)
    def copy(self):
        return deepcopy(self)
###############################################################################
# Simulation
###############################################################################
class Simulation(object):
    """ Runs (or replays) a match between two SoccerTeam objects, step by
    step, notifying the registered listeners of match/round/step events.

    NOTE(review): this is Python 2 code -- it uses the ``unicode`` builtin
    and ``Thread.isAlive()``, both removed in Python 3.
    """
    def __init__(self,team1=None,team2=None, max_steps = settings.MAX_GAME_STEPS,initial_state=None,**kwargs):
        """
        :param team1: SoccerTeam for team 1
        :param team2: SoccerTeam for team 2
        :param max_steps: maximum number of steps of the match
        :param initial_state: optional SoccerState used as kick-off state
        """
        self.team1, self.team2 = team1 or SoccerTeam(),team2 or SoccerTeam()
        self.initial_state = initial_state or SoccerState.create_initial_state(self.team1.nb_players,self.team2.nb_players,max_steps)
        self.state = self.initial_state.copy()
        self.max_steps = max_steps
        self.state.max_steps = self.initial_state.max_steps = max_steps
        self.listeners = SoccerEvents()
        self._thread = None
        self._on_going = False
        self._thread = None  # NOTE(review): duplicate of the assignment two lines above
        self._kill = False
        self.states = []  # one SoccerState copy appended per played step
        self.error = False
        # Replay mode: the teams carry strategy *names* (str/unicode, Py2)
        # instead of strategy objects with a compute_strategy() method.
        self.replay = type(self.team1.strategy(0))==str or type(self.team1.strategy(0)) == unicode
        for s in self.team1.strategies + self.team2.strategies:
            self.listeners += s
        self.__dict__.update(kwargs)
    def reset(self):
        """Rearm the simulation for a fresh run (state untouched in replay mode)."""
        self.replay = type(self.team1.strategy(0))==str or type(self.team1.strategy(0)) == unicode
        self._thread = None
        self._kill = False
        self._on_going = False
        if self.replay:
            return
        self.states = []
        self.state = self.get_initial_state()
        self.error = False
    def to_dict(self):
        return dict(team1=self.team1,team2=self.team2,state=self.state,max_steps=self.max_steps,states=self.states,initial_state=self.initial_state)
    def get_initial_state(self):
        """Fresh copy of the kick-off state."""
        return self.initial_state.copy()
    def start_thread(self):
        """Run the match in a background thread unless one is already alive."""
        # NOTE(review): Thread.isAlive() was removed in Python 3.9 (is_alive()).
        if not self._thread or not self._thread.isAlive():
            self._kill = False
            self._thread = threading.Thread(target=self.start)
            self._thread.start()
    def kill(self):
        """Ask the running match loop to stop at the next step."""
        self._kill = True
    def set_state(self,state):
        # Keep the running score: only the field configuration is replaced.
        state.score = self.state.score
        self.state = state
        self.state.max_steps = self.max_steps
        self.state.step = len(self.states)
    def start(self):
        """Play (or replay) the whole match synchronously."""
        if self._on_going:
            return
        if self.replay:
            self.state = self.states[0]
        self.begin_match()
        while not self.stop():
            self.next_step()
        self.end_match()
        self._on_going = False
        return self
    @property
    def step(self):
        # Current step number of the match.
        return self.state.step
    def get_score_team(self,i):
        return self.state.get_score_team(i)
    def next_step(self):
        """Advance the match one step (or move to the next recorded state)."""
        if self.stop():
            return
        if self.replay:
            self.state = self.states[self.state.step+1]
        else:
            actions=dict()
            strategies=dict()
            for i,t in enumerate([self.team1,self.team2]):
                try:
                    actions.update(t.compute_strategies(self.state, i+1))
                    # NOTE(review): these keys are (i, j) while the action keys
                    # above are (i+1, j) -- confirm the 0-based keying is intended.
                    strategies.update(dict([((i,j),s.name) for j,s in enumerate(t.strategies)]))
                except Exception as e:
                    # A crashing strategy forfeits the match: the opponent is
                    # given 100 points and the match is fast-forwarded to the end.
                    time.sleep(0.0001)
                    logger.debug("%s" % (traceback.format_exc(),))
                    logger.warning("%s" %(e,))
                    self.state.step=self.max_steps
                    self.state.score[2-i]=100
                    self.error = True
                    logger.warning("Error for team %d -- loose match" % ((i+1),))
                    self.states.append(self.state.copy())
                    return
            self.state.apply_actions(actions,strategies)
            self.states.append(self.state.copy())
        self.update_round()
    def get_team(self,idx):
        """Return the SoccerTeam for team number idx (1 or 2)."""
        if idx==1:
            return self.team1
        if idx == 2:
            return self.team2
    def stop(self):
        """True when the match is over: killed, step budget spent, or replay exhausted."""
        return self._kill or self.state.step >= self.max_steps or (self.replay and self.step+1>=len(self.states))
    def update_round(self):
        # Notify listeners of the new step; a goal ends the current round.
        self.listeners.update_round(self.team1,self.team2,self.state.copy())
        if self.state.goal > 0:
            self.end_round()
    def begin_round(self):
        if not self.replay:
            score=dict(self.state.score)  # NOTE(review): unused local -- TODO confirm
            self.set_state(self.get_initial_state())
            self.listeners.begin_round(self.team1,self.team2,self.state.copy())
        self.states.append(self.state.copy())
        # NOTE(review): in non-replay mode listeners receive begin_round twice
        # (here and three lines above) -- confirm this is intentional.
        self.listeners.begin_round(self.team1,self.team2,self.state.copy())
    def end_round(self):
        self.listeners.end_round(self.team1, self.team2, self.state.copy())
        if not self.stop():
            self.begin_round()
    def begin_match(self):
        self._on_going = True
        self._kill = False
        self.listeners.begin_match(self.team1,self.team2,self.state.copy())
        self.begin_round()
    def end_match(self):
        self._kill = True
        self.listeners.end_match(self.team1,self.team2,self.state.copy())
        # A finished match can only be replayed afterwards.
        self.replay = True
    def send_strategy(self,key):
        self.listeners.send_strategy(key)
|
Offset Single Post Pyramids are a perfectly economical way to shade smaller areas while keeping the support column out of the way. And Shade Systems is the only manufacturer of heavy-duty outdoor shade structures offering the patented Turn-N-Slide™ easy canopy removal system. On playgrounds, sports fields, poolside and concession areas, a perfectly economical solution for harmful sun exposure is a durable and attractive product from Shade Systems.
Concession areas are a popular attraction in outdoor public spaces. The traditional picnic table may not be the look you want here. This is where you need Pilot Rock’s new Model FCT-4 Food Court Table. It seats four adults and offers wide end openings for wheelchairs or strollers. Steel frames and steel top/seats are designed to withstand public use and outdoor exposure. The frame design allows easy cleanup around the table.
Avoid expensive and time-consuming coping removal by using RAMUC Coping Spray Paint to brighten, restore the appeal and protect the existing coping material. Easy to use, environmentally safe and economical. Gloss finish.
Designed to be ADA-compliant, No Fault Safety Surface for Accessible Ballfields promotes inclusive play for children of all ages and abilities. It is professionally installed by blending the highest-quality TPV granules and polyurethane binder. Both new and existing ballfields are poured at a minimum ½-inch thickness over concrete or the old rubber surfacing. Poured on-site, the surface allows for the blending of color combinations to create custom patterns, lines and designs.
Improve community wellness with high-quality composite fitness structures from GameTime. THRIVE™ fitness systems accommodate multiple users at once, reducing wait times and providing a wide range of training options at every station. THRIVE makes it fun and easy for people of all fitness levels to spend more time outdoor training for sports, races or everyday life.
Bring thrilling height—a soaring 20-feet—plus built-in safety to your playground with multiple climbing opportunities and exhilarating sliding. Even the compact interior will have kids climbing and crawling to explore all the different activities. The Alpha Tower’s distinctive geometric design encourages imaginative play. It also gives kids plenty of unique ways to look out, while perforated and slotted panels provide visibility from outside.
The AquaTRAM® 90 pool access lift rotates 90 degrees left or right for easy pool access. Anchors directly into the deck with the popular Quickset Dual Wedge™ Anchor socket—making it removable without tools and easy to retrofit in existing anchors. Includes dual flip-up armrests, comfortable seat with adjustable lap belt and footrest plus simple-to-operate controls. Improved lift reach clears spa benches and most gutter types. Complete with a rechargeable battery-powered operating system. 300-pound lifting capacity. ADA compliant.
|
# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import six.moves.urllib.parse as urlparse
from tempest.common.utils import data_utils
NUM_ALARM_DEFINITIONS = 2
NUM_MEASUREMENTS = 100
# Sentinel so callers can still pass None explicitly to omit a field while
# the *absence* of an argument yields a fresh default dict per call.
_METRIC_DEFAULT = object()


def create_metric(name='name-1',
                  dimensions=_METRIC_DEFAULT,
                  timestamp=None,
                  value=0.0,
                  value_meta=_METRIC_DEFAULT,
                  ):
    """Build a metric dict for the monasca API.

    Any argument explicitly passed as None is omitted from the result,
    except timestamp, which defaults to "now" in epoch milliseconds.

    BUG FIX: dimensions/value_meta previously used mutable default dict
    arguments shared across all calls, so mutating one returned metric's
    default dimensions silently corrupted every later default.  Fresh
    dicts are now built per call; observable defaults are unchanged.
    """
    if dimensions is _METRIC_DEFAULT:
        dimensions = {'key-1': 'value-1', 'key-2': 'value-2'}
    if value_meta is _METRIC_DEFAULT:
        value_meta = {'key-1': 'value-1', 'key-2': 'value-2'}
    metric = {}
    if name is not None:
        metric['name'] = name
    if dimensions is not None:
        metric['dimensions'] = dimensions
    if timestamp is not None:
        metric['timestamp'] = timestamp
    else:
        metric['timestamp'] = int(time.time() * 1000)
    if value is not None:
        metric['value'] = value
    if value_meta is not None:
        metric['value_meta'] = value_meta
    return metric
# Sentinel distinguishing "no name given" (generate a random one) from an
# explicit name=None (omit the field).
_NOTIFICATION_NAME = object()


def create_notification(name=_NOTIFICATION_NAME,
                        type='EMAIL',
                        address='[email protected]',
                        period=0):
    """Build a notification dict for the monasca API.

    Fields explicitly passed as None are omitted from the result.

    BUG FIX: the default name was ``data_utils.rand_name('notification-')``
    evaluated once at import time, so every call sharing the default got the
    SAME "random" name.  A fresh random name is now generated per call.
    """
    if name is _NOTIFICATION_NAME:
        name = data_utils.rand_name('notification-')
    notification = {}
    if name is not None:
        notification['name'] = name
    if type is not None:
        notification['type'] = type
    if address is not None:
        notification['address'] = address
    if period is not None:
        notification['period'] = period
    return notification
def create_alarm_definition(name=None,
                            description=None,
                            expression=None,
                            match_by=None,
                            severity=None,
                            alarm_actions=None,
                            ok_actions=None,
                            undetermined_actions=None):
    """Build an alarm-definition dict, omitting every field left as None."""
    candidates = (
        ('name', name),
        ('description', description),
        ('expression', expression),
        ('match_by', match_by),
        ('severity', severity),
        ('alarm_actions', alarm_actions),
        ('ok_actions', ok_actions),
        ('undetermined_actions', undetermined_actions),
    )
    return dict((field, value) for field, value in candidates if value is not None)
def delete_alarm_definitions(monasca_client):
    """Delete every alarm definition currently known to the given client."""
    _resp, response_body = monasca_client.list_alarm_definitions()
    for element in response_body['elements'] or []:
        monasca_client.delete_alarm_definition(element['id'])
def timestamp_to_iso(timestamp):
    """Convert epoch milliseconds to an ISO-8601 UTC string (second precision)."""
    moment = datetime.datetime.utcfromtimestamp(timestamp / 1000.0)
    return '%sZ' % moment.strftime("%Y-%m-%dT%H:%M:%S")
def timestamp_to_iso_millis(timestamp):
    """Convert epoch milliseconds to an ISO-8601 UTC string with millisecond
    precision (e.g. '1970-01-01T00:00:01.234Z')."""
    moment = datetime.datetime.utcfromtimestamp(timestamp / 1000.0)
    millis = moment.strftime(".%f")[0:4]  # keep '.mmm' only
    return moment.strftime("%Y-%m-%dT%H:%M:%S") + millis + 'Z'
def get_query_param(uri, query_param_name):
    """Return the value of query_param_name in uri's query string.

    If the name occurs several times the last occurrence wins.  Values are
    returned verbatim (no URL decoding); None when the name is absent.
    """
    match = None
    parsed = urlparse.urlparse(uri)
    for pair in parsed.query.split('&'):
        key, value = pair.split('=', 1)
        if key == query_param_name:
            match = value
    return match
def get_expected_elements_inner_offset_limit(all_elements, offset, limit, inner_key):
    """Compute the expected page when offset/limit pagination is applied to
    the inner value lists (statistics/measurements) of each element.

    :param all_elements: full ordered result list; each element carries an
        'id' and a list under inner_key whose entries start with a timestamp.
    :param offset: None, '<time>' or '<id>_<time>' -- the last value of the
        previous page (exclusive).
    :param limit: maximum number of inner values across the returned page.
    :param inner_key: key of the inner list within each element.
    :return: trimmed (shallow-copied) elements with ids renumbered from '0'.
    """
    expected_elements = []
    total_statistics = 0
    if offset is None:
        offset_id = 0
        offset_time = ""
    else:
        offset_tuple = offset.split('_')
        offset_id = int(offset_tuple[0]) if len(offset_tuple) > 1 else 0
        offset_time = offset_tuple[1] if len(offset_tuple) > 1 else offset_tuple[0]
    for element in all_elements:
        element_id = int(element['id'])
        if offset_id is not None and element_id < offset_id:
            continue
        next_element = None
        for value in element[inner_key]:
            # Keep values strictly after the offset: later timestamp within
            # the offset element, or any value of a later element.
            if (element_id == offset_id and value[0] > offset_time) or \
                    element_id > offset_id:
                if not next_element:
                    # Shallow copy so renumbering ids below cannot mutate
                    # the caller's elements.
                    next_element = element.copy()
                    next_element[inner_key] = [value]
                else:
                    next_element[inner_key].append(value)
                total_statistics += 1
                if total_statistics >= limit:
                    break
        if next_element:
            expected_elements.append(next_element)
        if total_statistics >= limit:
            break
    # BUG FIX: this used xrange(), which only exists on Python 2, while the
    # module otherwise supports Python 3 via six; range() works on both.
    for i in range(len(expected_elements)):
        expected_elements[i]['id'] = str(i)
    return expected_elements
|
Manuscripts should be submitted, in duplicate, to the Editor-in-Chief. Contributions should be in English or Bahasa Malaysia.
Papers in Bahasa Malaysia should be accompanied by an abstract in English.
The Manuscript, including the abstract, references, tables, figures and figures caption should be clearly typewritten, double-spaced and on one side only, on good A-4 size papers with ample margins. Each page of the manuscript should be numbered at the top. If the manuscript is typed with a word processor, please enclose the diskette containing the manuscript.
Tables and figures should be numbered in the order of appearance in the text. Each table and each figure should occupy a separate page in a form ready for reproduction, and they should be large enough for reduction to a minimum of 50% of their original size.
Abstract should not be more than 300 words long for Letters.
References should be numbered in square brackets, e.g. [1], in the order of citation. The following examples show the preferred format for references.
Sakka, S. (1998); Sol-Gel Technology for Thin Films, Fibres Performs, Electronics and Speciality Shapes; Ed. Lisa C. Klein. Noyes, New Jersey. Chap. 7, pp. 140.
All submitted papers will be acknowledged and refereed. The acceptance or rejection of a paper is the sole prerogative of the Editor. Manuscripts will not be returned to the author.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
## @file ostap/math/covtransform.py
# Transformation of covariance matrices
# for \f$ y = y ( x ) \f$, it gets
# \f$ C_y = J C_x J^\mathrm{T} \f$,
# where \f$ J = \left( \frac{\partial y }{\partial x } \right) \f$
# @author Vanya BELYAEV [email protected]
# @date 2020-05-14
# =============================================================================
""" Transformation of covariand matrices
- for y = y ( x ) it gets C(y) = J C(x) J^T,
- where J is Jacobi matrix
"""
# =============================================================================
from __future__ import print_function
# =============================================================================
__author__ = "Vanya BELYAEV [email protected]"
__date__ = "2009-09-12"
__version__ = ""
# =============================================================================
__all__ = (
'transform' , ## transfrom covarinance matrix
)
# =============================================================================
from builtins import range
import ROOT
# =============================================================================
# logging
# =============================================================================
from ostap.logger.logger import getLogger
if '__main__' == __name__ : logger = getLogger ( 'ostap.math.covtransform' )
else : logger = getLogger ( __name__ )
# =============================================================================
from ostap.core.core import Ostap, VE
from ostap.math.derivative import Partial
import ostap.math.linalg
# =============================================================================
# =============================================================================
## Transform the covariance nmatrix C at point X to the variables Y(X)
# for \f$ y = y ( x ) \f$, it gets
# \f[ C(y)= J C(x) J^\mathrm{T} \f],
# where \f$ J = \left( \frac{\partial y }{\partial x } \right) \f$
# @code
# X = 1 , 2
# C = Ostap.SymMatrix(2)()
# C[ 0 , 0 ] = 0.20
# C[ 1 , 1 ] = 0.05
# C[ 1 , 1 ] = 0.30
# r = lambda x , y : (x*x+y*y)**2
# phi = lambda x , y : math.atan2 ( y , x )
# C_polar = transform ( C , X , r , phi )
# @endcode
# @param C "old" covatiance matrix
# @param X "old" varibales (arary iof values)
# @param Y "new" variables (array of callables)
# @return covarinance matrix for variables Y
def transform ( C , X , *Y ) :
    """ Transform the covariance matrix C at point X to the variables Y(X)

    For y = y(x) it computes C(y) = J C(x) J^T, where J = dy/dx is the
    Jacobi matrix.

    :param C: nx-by-nx (symmetric) covariance matrix; when None it is built
              as a diagonal matrix from the uncertainties of the X values
    :param X: the point -- an array of values (possibly with errors)
    :param Y: the new variables -- callables of the X values
    :return:  the covariance matrix of the Y variables

    >>> X = 1 , 2
    >>> C = Ostap.SymMatrix(2)()
    >>> C [ 0 , 0 ] = 0.20
    >>> C [ 0 , 1 ] = 0.05
    >>> C [ 1 , 1 ] = 0.30
    >>> r = lambda x , y : (x*x+y*y)**2
    >>> phi = lambda x , y : math.atan2 ( y , x )
    >>> C_polar = transform ( C , X , r , phi )
    """
    ny = len ( Y )
    assert 1 <= ny , 'Invalid size of Y!'
    nx = len ( X )
    ## no matrix given: build a diagonal covariance from the errors of X
    if C is None and 1 <= nx :
        C = Ostap.SymMatrix ( nx ) ()
        for i , x in enumerate ( X ) :
            xx = VE ( x )
            C [ i, i ] = xx.cov2 ()
    shape = C.shape
    assert shape [ 0 ] == shape[1] and shape[0] == nx , 'Invalid shape of matrix C!'
    ## symmetrise the input:  CC(i,j) = ( C(i,j) + C(j,i) ) / 2
    CC = Ostap.SymMatrix ( nx ) ()
    for i in range ( CC.kRows ) :
        CC [ i , i ] = C ( i , i )
        for j in range ( i + 1 , CC.kCols ) :
            v = 0.5 * ( C ( i , j ) + C ( j , i ) )
            CC [ i , j ] = v
    ## pack the point X into a plain vector of floats
    XX = Ostap.Vector ( nx ) ()
    for i , x in enumerate ( X ) :
        XX [ i ] = float ( x )
    ## get vector-with-errors
    XX = Ostap.VectorE ( nx ) ( XX , CC )
    ## NOTE(review): the actual J * C * J^T propagation is delegated to
    ## Ostap.VectorE.transform -- confirm against ostap.math.linalg docs.
    R = XX.transform ( *Y )
    return R.cov2()
# =============================================================================
if '__main__' == __name__ :
from ostap.utils.docme import docme
docme ( __name__ , logger = logger )
# =============================================================================
## The END
# =============================================================================
|
“Subversive tactics of Yugoslavian amateur film” as Cinematic Lecture-Performance/Performing Film-Essay is a project done by Doplgenger and as a part of Transimage platform.
In communist Yugoslavia, amateur film had the status of a socially approved and highly desired hobby, available to everyone under the slogan "technology to the people". Over the years this practice outgrew its original purpose, writing itself into the history of art and strongly influencing what would later become the most original film movement in the Balkans – the Black Wave.
Doplgenger presented the cinematic lecture-performance/performing film-essay at IMAGES CONTRE NATURE 2012, international festival of experimental video in Marseille.
|
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides a gRPC service for updating remote job info to MLMD."""
from concurrent import futures
from typing import Optional
from absl import logging
import grpc
from tfx.orchestration import metadata
from tfx.proto.orchestration import execution_watcher_pb2
from tfx.proto.orchestration import execution_watcher_pb2_grpc
from ml_metadata.proto import metadata_store_pb2
def generate_service_stub(
    address: str,
    creds: Optional[grpc.ChannelCredentials] = None,
) -> execution_watcher_pb2_grpc.ExecutionWatcherServiceStub:
  """Generates a gRPC service stub for a given server address.

  Args:
    address: Network address of the server, e.g. `host:port`.
    creds: Optional channel credentials; an insecure channel is used if None.

  Returns:
    An ExecutionWatcherService stub bound to a channel for `address`.
  """
  if creds is not None:
    channel = grpc.secure_channel(address, creds)
  else:
    channel = grpc.insecure_channel(address)
  return execution_watcher_pb2_grpc.ExecutionWatcherServiceStub(channel)
class ExecutionWatcher(
    execution_watcher_pb2_grpc.ExecutionWatcherServiceServicer):
  """A gRPC service server for updating remote job info to MLMD.

  The server tracks exactly one MLMD Execution; requests for any other
  execution id are rejected with NOT_FOUND.

  Attributes:
    local_address: Local network address to the server.
    address: Remote network address to the server, same as local_address if not
      configured.
  """

  def __init__(self,
               port: int,
               mlmd_connection: metadata.Metadata,
               execution: metadata_store_pb2.Execution,
               address: Optional[str] = None,
               creds: Optional[grpc.ServerCredentials] = None):
    """Initializes the gRPC server.

    Args:
      port: Which port the service will be using.
      mlmd_connection: ML metadata connection.
      execution: The MLMD Execution to keep track of.
      address: Remote address used to contact the server. Should be formatted as
               an ipv4 or ipv6 address in the format `address:port`. If left as
               None, server will use local address.
      creds: gRPC server credentials. If left as None, server will use an
             insecure port.

    Raises:
      ValueError: If the given execution has no id set.
    """
    super().__init__()
    self._port = port
    self._address = address
    self._creds = creds
    self._mlmd_connection = mlmd_connection
    self._server = self._create_server()
    # An id is required to address the execution in the MLMD store.
    if not execution.HasField('id'):
      raise ValueError(
          'execution id must be set to be tracked by ExecutionWatcher.')
    self._execution = execution

  def UpdateExecutionInfo(
      self, request: execution_watcher_pb2.UpdateExecutionInfoRequest,
      context: grpc.ServicerContext
  ) -> execution_watcher_pb2.UpdateExecutionInfoResponse:
    """Updates the `custom_properties` field of Execution object in MLMD."""
    logging.info('Received request to update execution info: updates %s, '
                 'execution_id %s', request.updates, request.execution_id)
    # Only the single tracked execution may be updated through this server.
    if request.execution_id != self._execution.id:
      context.set_code(grpc.StatusCode.NOT_FOUND)
      context.set_details(
          'Execution with given execution_id not tracked by server: '
          f'{request.execution_id}')
      return execution_watcher_pb2.UpdateExecutionInfoResponse()
    for key, value in request.updates.items():
      self._execution.custom_properties[key].CopyFrom(
          value)
    # Only the execution is needed
    with self._mlmd_connection as m:
      m.store.put_executions((self._execution,))
    return execution_watcher_pb2.UpdateExecutionInfoResponse()

  def _create_server(self):
    """Creates a gRPC server and add `self` on to it."""
    result = grpc.server(futures.ThreadPoolExecutor())
    execution_watcher_pb2_grpc.add_ExecutionWatcherServiceServicer_to_server(
        self, result)
    if self._creds is None:
      result.add_insecure_port(self.local_address)
    else:
      result.add_secure_port(self.local_address, self._creds)
    return result

  @property
  def local_address(self) -> str:
    # Local network address to the server.
    return f'localhost:{self._port}'

  @property
  def address(self) -> str:
    # Falls back to the local address when no remote address was configured.
    return self._address or self.local_address

  def start(self):
    """Starts the server."""
    self._server.start()

  def stop(self):
    """Stops the server."""
    # grace=None requests an immediate (non-graceful) shutdown -- see the
    # grpc.Server.stop documentation.
    self._server.stop(grace=None)
|
Electrical Consumption Logger for industry applications focused on energy saving. The system monitors single and three-phase signals of different frequencies and works in Volt-Ampere (VA) and Watts (W) units. The logged data are transmitted in real-time or on request via Ethernet or USB.
|
## textures/base_door/light_panel1
## {
## qer_editorimage textures/base_door/stedoorframe2.tga
## bumpmap textures/base_door/stedoorframe2_local.tga
## diffusemap textures/base_door/stedoorframe2_d.tga
## specularmap textures/base_door/stedoorframe2_s.tga
## {
## if ( parm7 == 0 )
## blend add
## map textures/base_door/stedoorframered_add.tga
## rgb 5
## }
## {
## if ( parm7 == 1 )
## blend add
## map textures/base_door/stedoorframegrn_add.tga
## rgb 5
## }
## }
import fileinput
import re
##############
## Patterns ##
##############
## Regular-expression building blocks.
## BUG FIX: WHITE_SPACE was r'[\\s]+' -- inside a raw string that is a
## character class matching a literal backslash or the letter 's', NOT
## whitespace.  r'\s+' matches actual runs of whitespace.
WHITE_SPACE = r'\s+'
## Two numbers separated by whitespace (the second possibly signed).
NUM_PATTERN = r'[0-9]' + WHITE_SPACE + r'[-+0-9]'
## An asset path: a known top-level directory followed by '/'- or '\'-separated names.
TEX_PATH = r'(savegames|fonts|textures|guis|ui|guisurfs|particles|lights|models|env)[\\/][a-z_\\/0-9A-Z]*'
## Pre-warm re's internal pattern cache (the results are intentionally discarded).
re.compile (NUM_PATTERN)
re.compile (WHITE_SPACE)
#############
## Globals ##
#############
## Parser state shared by process_line() across calls.
mode = 0              ## apparently unused in the visible code -- TODO confirm
in_material = False   ## True while inside a material definition block
blend_mode = 0        ## pending blend kind: 0=diffuse, 1=specular, 2=normal, -1=other
is_blend = False      ## True right after a recognised 'blend <kind>' line
did_diffuse = False   ## TODO confirm: these three flags look unused here
did_specular = False
did_bumpmap = False
s = []                ## stack tracking '{' / '}' nesting depth
##################
## get_material ##
##################
def get_material (line):
    """Derive a material name from a line: the texture path with its
    separators replaced by underscores, or the raw '_...' identifier
    when no known texture path is present."""
    path_match = re.search (TEX_PATH, line)
    if path_match is None:
        fallback = re.search (r'_[a-z_\\/0-9A-Z]*', line)
        return fallback.group (0)
    return re.sub (r'[\\/]', '_', path_match.group (0))
##################
## process_line ##
##################
def process_line (line, next_line):
    """Translate one line of a .mtr material file to the output format.

    Tracks brace nesting in the module-level stack `s` and the
    in_material / is_blend / blend_mode flags across calls; emits the
    converted material via print.  `next_line` is currently unused --
    presumably reserved for lookahead; confirm before removing.
    """
    global mode
    global in_material
    global is_blend
    global blend_mode
    global s
    ## Strip the EOL
    line = line.strip ()
    ## Drop // comments and table declarations entirely.
    line = re.sub (r'//.*', '', line)
    if re.search (r'^table', line):
        return
    ## Ignore empty lines
    if not line:
        return
    ## Track brace depth; the pushed value is a dummy, only depth matters.
    if re.search (r'{', line):
        s.append ('Crap')
    if re.search (r'}', line):
        if len (s) != 0:
            s.pop ()
        ## Depth back to zero closes the current material.
        if len (s) == 0 and in_material:
            in_material = False
            print ('}')
    ## See if we are at the start of a material
    if not in_material and re.search (r'^' + TEX_PATH, line):
        in_material = True
        print ('Material')
        print ('{')
        print (' Name {string {\"' + get_material (line) + '\"}}')
    elif in_material:
        ## A "blended" texture
        if re.search (r'^blend' + WHITE_SPACE, line):
            ## Handle blend modes: remember which map type the following
            ## 'map' line belongs to.
            if re.search (r'[dD]iffuse[mM]ap', line):
                is_blend = True
                blend_mode = 0
            elif re.search (r'[sS]pecular[mM]ap', line):
                is_blend = True
                blend_mode = 1
            elif re.search (r'[bB]ump[mM]ap', line):
                is_blend = True
                blend_mode = 2
            else:
                blend_mode = -1
        ## Handle a blended texture and ignore other attributes
        elif is_blend and re.search (r'^[mM]ap' + WHITE_SPACE, line):
            is_blend = False
            ## addnormals() composites are not representable -- skip them.
            if re.search (r'addnormals', line):
                return
            elif blend_mode == 0:
                print (' Texture (attrib = "diffuse") {string {\"' + get_material (line) + '\"}}')
            elif blend_mode == 1:
                print (' Texture (attrib = "specular") {string {\"' + get_material (line) + '\"}}')
            elif blend_mode == 2:
                print (' Texture (attrib = "normal") {string {\"' + get_material (line) + '\"}}')
        ## Normal path for diffuse, specular, and normal textures
        elif re.search (r'^[dD]iffuse[mM]ap', line):
            print (' Texture (attrib = "diffuse") {string {\"' + get_material (line) + '\"}}')
        elif re.search (r'^[sS]pecular[mM]ap', line):
            print (' Texture (attrib = "specular") {string {\"' + get_material (line) + '\"}}')
        elif re.search (r'^[bB]ump[mM]ap', line):
            print (' Texture (attrib = "normal") {string {\"' + get_material (line) + '\"}}')
        elif re.search (r'^qer_editorimage', line):
            print (' Texture (attrib = "editor") {string {\"' + get_material (line) + '\"}}')
##########
## Main ##
##########
## Iterate over the file line by line
## (stdin or the files named on the command line, via fileinput).
first_iteration = True
previous_line = ''
for current_line in fileinput.input():
    ## We have to save up 2 lines before processing: each call gets the
    ## current line plus one line of lookahead.
    if not first_iteration:
        process_line (previous_line, current_line)
    else:
        first_iteration = False
    previous_line = current_line
## Handle the remaining line (the last one has no lookahead).
if previous_line:
    process_line (previous_line, '')
#######################
## replace_key_value ##
#######################
# def replace_key_value (line, key, new_key, kind, is_array):
# global found_key_value
# ## No need to waste time
# if found_key_value:
# return line
# ## String key value needs to be wrapped in quotes
# if not re.search (r' ' + key + ' ', line):
# return line
# ## We must have found one
# found_key_value = True
# if kind == "string":
# text = re.sub (key + WHITE_SPACE, " " + new_key + " {string {\"", line)
# if text != line:
# text = text + "}}"
# ## Array types need an extra curly level
# elif not is_array:
# text = re.sub (r"\"" + key + "\" \"", " " + new_key + " {" + kind + " {", line)
# if text != line:
# text = re.sub (r"\"", "}}", text.rstrip ())
# ## Otherwise it is a normal discrete or numeric kind
# else:
# text = re.sub (r"\"" + key + "\" \"", " " + new_key + " {" + kind + " {{", line)
# if text != line:
# text = re.sub (r"\"", "}}}", text.rstrip ())
# ## Give the result
# return text
|
The lychee (Litchi chinensis) (Chinese: 荔枝; pinyin: lìzhī) is the sole member of the genus Litchi in the soapberry family, Sapindaceae. It is a tropical and subtropical fruit tree native to the Guangdong and Fujian provinces of China, and now cultivated in many parts of the world. The fresh fruit has a "delicate, whitish pulp" with a floral smell and a fragrant, sweet flavor. Since this perfume-like flavor is lost in the process of canning, the fruit is usually eaten fresh.
|
from reddit_to_csv import sub2csv
import csv
import sqlite3
'''insert into table sub_categories first'''
'''make sure constraints are met'''
# -- for foreign_keys support (on delete cascade)
# --con.execute("PRAGMA foreign_keys = ON") for python
def csv_2_list(csv_name):
    """Read the CSV file *csv_name* and return its rows as a list of lists."""
    with open(csv_name) as csv_file:
        rows = [row for row in csv.reader(csv_file)]
    return rows
def list_2_database(data_list, category_id=0, sub_category_id=0):
    """Insert (title, url) rows from *data_list* into the all_links table of w2w.db.

    Each element of data_list is a pair of byte strings (Python 2:
    .decode('utf-8') turns them into unicode before insertion).
    category_id / sub_category_id populate the c_fid / s_fid foreign keys.
    """
    conn = sqlite3.connect('w2w.db')
    # SQLite ignores foreign keys unless this pragma is enabled per connection.
    conn.execute("PRAGMA foreign_keys = ON")
    for link in data_list:
        cursor = conn.cursor()
        data_2_insert = [category_id, sub_category_id,
                         link[0].decode('utf-8'), link[1].decode('utf-8')]
        # print data_2_insert
        cursor.execute(
            "insert into all_links (c_fid, s_fid,link_title, link_url)values (?,?,?,?)", data_2_insert)
    conn.commit()
    conn.close()
'''standupcomedy'''
# sub_csv_name = sub2csv('standupcomedy', 450)
# sub_data = csv_2_list(sub_csv_name)
# list_2_database(sub_data, category_id=2, sub_category_id=1)
# '''Music sub categories'''
# sub_csv_name = sub2csv('musicvideos', 200)
# sub_data = csv_2_list(sub_csv_name)
# list_2_database(sub_data, category_id=4, sub_category_id=1)
# sub_csv_name = sub2csv('coversongs', 200)
# sub_data = csv_2_list(sub_csv_name)
# list_2_database(sub_data, category_id=4, sub_category_id=2)
# Fetch up to 200 posts from r/AcousticCovers into a CSV, then load them
# under category 4, sub-category 3.
sub_csv_name = sub2csv('AcousticCovers', 200)
sub_data = csv_2_list(sub_csv_name)
list_2_database(sub_data, category_id=4, sub_category_id=3)
|
On display are over 1400 exhibits. These include 101 tractors from 1912 onwards, including the oldest working tractor in New Zealand and thirty vintage cars from 1905. An interesting collection of classic cars. There are many items of farm equipment including hot air engines, stationary motors and a Threshing Mill. Also on display is a 1929 Spartan Biplane, household items, agricultural equipment and much much more.
This museum is run entirely by volunteers. Guided tours by arrangement preferred.
|
"""Benchmarks the parts of the system."""
import time
from control.command import Command
from control.simple_waypoint_generator import SimpleWaypointGenerator
from control.location_filter import LocationFilter
from control.telemetry import Telemetry
from control.test.dummy_driver import DummyDriver
from control.test.dummy_logger import DummyLogger
# pylint: disable=invalid-name
# pylint: disable=protected-access
# pylint: disable=line-too-long
def benchmark_location_filter_update_gps():
    """Benchmark the location filter GPS update."""
    filt = LocationFilter(0.0, 0.0, 0.0)
    runs = 100
    started = time.time()
    for _ in range(runs):
        filt.update_gps(100.0, 100.0, 1.0, 1.0, 20.0, 4.5)
    elapsed = time.time() - started
    message = '{} iterations of LocationFilter.update_gps, each took {:.5}'
    print(message.format(runs, elapsed / float(runs)))
def benchmark_location_filter_update_compass():
    """Benchmark the location filter compass update."""
    filt = LocationFilter(0.0, 0.0, 0.0)
    runs = 100
    started = time.time()
    for _ in range(runs):
        filt.update_compass(20.0)
    elapsed = time.time() - started
    message = '{} iterations of LocationFilter.update_compass, each took {:.5}'
    print(message.format(runs, elapsed / float(runs)))
def benchmark_location_filter_update_dead_reckoning():
    """Benchmark the location filter with dead reckoning and no other input."""
    filt = LocationFilter(0.0, 0.0, 0.0)
    runs = 1000
    started = time.time()
    for _ in range(runs):
        filt.update_dead_reckoning()
    elapsed = time.time() - started
    message = '{} iterations of LocationFilter.update_dead_reckoning, each took {:.5}'
    print(message.format(runs, elapsed / float(runs)))
def benchmark_command_run_course_iterator():
    """Benchmark the logic for driving the car."""
    logger = DummyLogger()
    telemetry = Telemetry(logger)
    waypoint_generator = SimpleWaypointGenerator(
        SimpleWaypointGenerator.get_waypoints_from_file_name(
            'paths/solid-state-depot.kmz'
        )
    )
    driver = DummyDriver(telemetry, logger)
    command = Command(telemetry, driver, waypoint_generator, logger)
    iterations = 250
    start = time.time()
    iterator = command._run_course_iterator()
    step = None
    # zip stops after `iterations` pairs, driving the iterator that many steps.
    for step in zip(range(iterations), iterator):
        pass
    # Sanity check: the iterator yielded all requested steps.
    assert step is not None
    assert step[0] == iterations - 1
    end = time.time()
    print(
        '{} iterations of Command._run_course_iterator, each took {:.5}'.format(
            iterations,
            (end - start) / float(iterations)
        )
    )
def main():
    """Runs all the benchmarks."""
    benchmark_location_filter_update_gps()
    benchmark_location_filter_update_compass()
    benchmark_location_filter_update_dead_reckoning()
    benchmark_command_run_course_iterator()

if __name__ == '__main__':
    main()
|
I Have A Couple of Sacramento Duplexes for Sale… See Below…!
They Are In Good Areas… Bring Me an Offer…!
Our Office has recently listed some Duplexes for sale in various parts of the Sacramento Region. We do manage one of them, the others are managed by different property management companies.
This Owner is moving out of the area and wants to liquidate these properties. They are all “occupied”. Rents on some of them are low because they have been occupied by long term tenants.
The links below will have some pictures, some will have “cash flow” figures and short videos of the exterior.
1. 955-957 Woodshire, Sacramento – 2 bedrooms, 1 bath, 1 car garage each side – Priced at $289,000 – Located in a Pride of Ownership area consisting of primarily single family homes.
2. 9398 Mary Ellen Wayu, Elk Grove, Ca – 2 bedrooms, 1 bath, 1 car garage each side – Priced at $269,000 – Located in a Pride of Ownership area consisting of primarily single family homes. One unit has been totally updated, the other unit is in good condition but not updated. Click here to get the slide show and short video of the exterior.
3. 7226-7228 Oconee Court, Citrus Heights, Ca – 2 bedrooms, 1 bath, 1 car garage each side – Priced at $259,000 – Located on Quiet Cul-De-Sac Street in a great area near Parks, Schools and Shopping.
4. 4713-4715 Greenholme, Sacramento, Ca – 2 bedrooms, 1 bath, 1 car garage each side – Priced at $209,950. Fully leased with great tenants.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2004-2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""a set of helper functions for filters..."""
import operator
def countmatch(str1, str2, countstr):
    """Checks whether countstr occurs the same number of times in str1 and str2."""
    occurrences_1 = str1.count(countstr)
    occurrences_2 = str2.count(countstr)
    return occurrences_1 == occurrences_2
def funcmatch(str1, str2, func, *args):
    """Returns whether the result of func is the same for str1 and str2."""
    result_1 = func(str1, *args)
    result_2 = func(str2, *args)
    return result_1 == result_2
def countsmatch(str1, str2, countlist):
    """Checks whether each element in countlist occurs the same number of times in str1 and str2.

    Rewritten from reduce(operator.and_, ...) to all(...): identical result,
    but does not depend on the reduce builtin (a NameError on Python 3,
    where reduce lives in functools) and short-circuits on first mismatch.
    """
    return all(countmatch(str1, str2, countstr) for countstr in countlist)
def funcsmatch(str1, str2, funclist):
    """Checks whether the results of each func in funclist match for str1 and str2.

    Rewritten from reduce(operator.and_, ...) to all(...): identical result,
    but does not depend on the reduce builtin (a NameError on Python 3) and
    short-circuits on the first mismatch.
    """
    return all(funcmatch(str1, str2, funcstr) for funcstr in funclist)
def filtercount(str1, func):
    """Returns the number of characters in str1 that pass func.

    Fixed: len(filter(func, str1)) raises TypeError on Python 3, where
    filter returns an iterator without a length; the generator sum below
    gives the same count on both Python 2 and 3.
    """
    return sum(1 for char in str1 if func(char))
def filtertestmethod(testmethod, strfilter):
    """Returns a version of testmethod that compares strings filtered through strfilter."""
    def filteredmethod(first, second):
        return testmethod(strfilter(first), strfilter(second))
    filteredmethod.__doc__ = testmethod.__doc__
    filteredmethod.name = getattr(testmethod, 'name', testmethod.__name__)
    return filteredmethod
def multifilter(str1, strfilters, *args):
    """Passes str1 through each filter in strfilters in order and returns the result."""
    result = str1
    for current_filter in strfilters:
        result = current_filter(result, *args)
    return result
def multifiltertestmethod(testmethod, strfilters):
    """Returns a version of testmethod that compares strings passed through every filter in strfilters."""
    def filteredmethod(first, second):
        return testmethod(multifilter(first, strfilters), multifilter(second, strfilters))
    filteredmethod.__doc__ = testmethod.__doc__
    filteredmethod.name = getattr(testmethod, 'name', testmethod.__name__)
    return filteredmethod
|
Farmhouse design has been trending for some time. Everyone has their own take on it, from coastal to industrial to modern. But what does plain farmhouse design look like? Farmhouse design is based on old traditional farmhouses, just as it sounds. Farmhouses typically used natural materials, neutral color palettes, and an array of furniture. Textures were heavy and distressed from heavy use and the rough natural environment. Materials were natural and often sourced from the surrounding area. Furniture was mismatched, collected over time, and chosen for its durability. Most of the design elements were created for practical purposes. Nowadays, we still follow those main styles for design. To highlight those elements when using tile in the space, warm, neutral, heavily textured wood-look materials are often used, such as our Barrel wood-look porcelain. The other neutral element commonly incorporated is white subway tile, keeping that classic look. You can get the perfect subway tile with our Traditions collection. Textured stone adds a great element, incorporating another natural material in a neutral tone while including heavy textures, such as our Candid Heather Hexagon. Grey is another common element, sometimes included in a more decorative tile to add an interesting accent. One last element that can make a nice touch to farmhouse looks is encaustic-look tile. This traditional patterned tile has been used for centuries, and its rustic edge makes it a perfect finishing element for your farmhouse style.
Porcelain or Non-Porcelain Ceramic Tiles?
|
'''
Created on Feb 28, 2017
@contact: Irving Duran
@author: [email protected]
@summary: Collect and send via SMS your current month data usage.
'''
# TODO: Rewrite to accept one high-level argument (instead of two separate)
# python scripts to be used an an input in crontab
import os
import datetime
import sys
import mdup
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from pathlib import Path
prog_path = os.path.dirname(os.path.realpath(sys.argv[0])) #get python file path
os.chdir(prog_path) #change working directory so 'conf' and 'isp.log' resolve
conf = pd.read_table('conf', sep='=', header=None) #store config file (key=value lines)
# TODO: Better way to extract results and storing it
used, left, daysleft, dataused, datesnap, startday, datacap = mdup.get_data(prog_path, conf)
# One CSV row matching the isp.log header: used,left,daysleft,dataused,datesnap
comb = used + ',' + left + ',' + daysleft + ',' + dataused + ',' + datesnap + '\n'
fp = Path('isp.log')
# file exists append new data, else create headers and dump data
if fp.is_file():
    ###############################################################################
    # Convert strings to date and check if there is a newer snapshot from Mediacom
    # if there if new snapshot, continue, else quit
    ###############################################################################
    dt_datesnap = datetime.datetime.strptime(datesnap, '%m/%d/%Y %H:%M')
    # Timestamp of the newest row already present in isp.log.
    last_dt_datesnap = datetime.datetime.strptime(pd.read_csv('isp.log')['datesnap']
                                                  .tail(1).to_string(header=False, index=False),
                                                  '%m/%d/%Y %H:%M')
    if last_dt_datesnap >= dt_datesnap:
        # Snapshot already logged: skip the append, but still run the prediction.
        print('No need to dump new data since latest version exist on the log file.',
              '\nWill still continue and run prediction.')
        #mdup.kill(dvr, disp) #try to started services
        ###############################################################################
        # Gather date information to align with reporting month
        ###############################################################################
        today = datetime.date.today() #return today's date as a string
        #source http://stackoverflow.com/questions/37396329/finding-first-day-of-the-month-in-python
        if today.day > startday:
            today += datetime.timedelta(1)
            startdate = str(today.replace(day=startday)) #return XXth of the previous month
        else:
            #source http://stackoverflow.com/questions/36155332/how-to-get-the-first-day-and-last-day-of-current-month-in-python
            startdate = str(datetime.date(today.year, today.month - 1, startday)) #return XXth of the previous month
        enddate = mdup.add_months(datetime.datetime(*[int(item) for item in startdate.split('-')]), 1).strftime("%Y-%m-%d")
        ###############################################################################
        # Build prediction model using linear regression
        ###############################################################################
        df = pd.read_csv('isp.log')
        df.replace(r'( \d:\d\d)|( \d\d:\d\d)', '', inplace=True, regex=True) #remove time
        df['datesnap'] = pd.to_datetime(df['datesnap'], format="%m/%d/%Y") #fix date issue
        df = df[df['datesnap'] > startdate] #select records on the current month
        # NOTE(review): DataFrame.as_matrix is deprecated in newer pandas;
        # .to_numpy() is the modern equivalent.
        X = df.as_matrix(columns=['daysleft']) # current days
        y = df.as_matrix(columns=['dataused']) # data usage to predict
        model = LinearRegression()
        model.fit(X, y)
        # create and sort descending order for days left
        # then predict data usage based on days left on the month by excluding
        # day zero from the selection ([:0:-1] reverses and drops element 0)
        X_predict = np.arange(np.min(X)); X_predict = X_predict[:0:-1]
        X_predict = X_predict[:, np.newaxis] #transpose
        y_predict = model.predict(X_predict) #predict data usage
        #fc = np.concatenate((X_predict, y_predict), axis=1) #forecast
        # calculate the over usage based on 50GB blocks at $10 a piece.
        f_msg = str('\n[Mediacom] With ' + str(np.min(X)) + ' days left, ' +
                    'your current ' + dataused + 'GB and projected ' +
                    str(np.max(np.round(y_predict, decimals=1))) + 'GB data usage.')
        b_msg = str(' That is ~' + str(np.round(np.max(y_predict)-datacap, decimals=0).astype(int)) +
                    'GB or ~$' + str(mdup.round10(((np.max(y_predict)-datacap)/50) * 10)) +
                    ' over.')
        # if over usage data prediction is less than zero,
        # don't append prediction over usage
        # (the conditional binds looser than +, so this evaluates as
        # (f_msg + '') if under the cap else (f_msg + b_msg))
        dta_msg = str(f_msg +
                      '' if np.round(np.max(y_predict)-datacap, decimals=0).astype(int) < 0
                      else f_msg + b_msg)
        ###############################################################################
        # Email the prediction results
        ###############################################################################
        username = conf.iloc[2][1]
        password = conf.iloc[3][1]
        to = sys.argv[2].split(sep=',')  # comma-separated recipient list from argv
        mdup.email_msg(username, password, to, dta_msg)
        #mdup.kill(dvr, disp) #try to started services
        print('DONE processing the whole thing.')
        sys.exit(0)
    else:
        # Newer snapshot available: append it to the log, then rebuild the
        # prediction from the updated data (mirrors the branch above).
        f = open('isp.log', mode='a')
        f.write(comb)
        f.close()
        ###############################################################################
        # Gather date information to align with reporting month
        ###############################################################################
        today = datetime.date.today() # return today's date as a string
        #source http://stackoverflow.com/questions/37396329/finding-first-day-of-the-month-in-python
        if today.day > startday:
            today += datetime.timedelta(1)
            startdate = str(today.replace(day=startday)) #return XXth of the previous month
        else:
            #source http://stackoverflow.com/questions/36155332/how-to-get-the-first-day-and-last-day-of-current-month-in-python
            startdate = str(datetime.date(today.year, today.month - 1, startday)) #return XXth of the previous month
        enddate = mdup.add_months(datetime.datetime(*[int(item) for item in startdate.split('-')]), 1).strftime("%Y-%m-%d")
        ###############################################################################
        # Build prediction model using linear regression
        ###############################################################################
        df = pd.read_csv('isp.log')
        df.replace(r'( \d:\d\d)|( \d\d:\d\d)', '', inplace=True, regex=True) #remove time
        df['datesnap'] = pd.to_datetime(df['datesnap'], format="%m/%d/%Y") #fix date issue
        df = df[df['datesnap'] > startdate] #select records on the current month
        # NOTE(review): DataFrame.as_matrix is deprecated in newer pandas;
        # .to_numpy() is the modern equivalent.
        X = df.as_matrix(columns=['daysleft']) # current days
        y = df.as_matrix(columns=['dataused']) # data usage to predict
        model = LinearRegression()
        model.fit(X, y)
        # create and sort descending order for days left
        # then predict data usage based on days left on the month
        # NOTE(review): this branch keeps day zero ([::-1]) while the branch
        # above drops it ([:0:-1]) -- confirm which behaviour is intended.
        X_predict = np.arange(np.min(X)); X_predict = X_predict[::-1]
        X_predict = X_predict[:, np.newaxis] #transpose
        y_predict = model.predict(X_predict) #predict data usage
        #fc = np.concatenate((X_predict, y_predict), axis=1) #forecast
        # calculate the over usage based on 50GB blocks at $10 a piece.
        f_msg = str('\n[Mediacom] With ' + str(np.min(X)) + ' days left, ' +
                    'your current ' + dataused + 'GB and projected ' +
                    str(np.max(np.round(y_predict, decimals=1))) + 'GB data usage.')
        b_msg = str(' That is ~' + str(np.round(np.max(y_predict)-datacap, decimals=0).astype(int)) +
                    'GB or ~$' + str(mdup.round10(((np.max(y_predict)-datacap)/50) * 10)) +
                    ' over.')
        # if over usage data prediction is less than zero,
        # don't append prediction over usage
        dta_msg = str(f_msg +
                      '' if np.round(np.max(y_predict)-datacap, decimals=0).astype(int) < 0
                      else f_msg + b_msg)
        ###############################################################################
        # Email the prediction results
        ###############################################################################
        username = conf.iloc[2][1]
        password = conf.iloc[3][1]
        to = sys.argv[2].split(sep=',')  # comma-separated recipient list from argv
        mdup.email_msg(username, password, to, dta_msg)
        #mdup.kill(dvr, disp) #try to started services
        print('DONE processing the whole thing.')
        sys.exit(0)
else:
    # First run: create isp.log with a CSV header plus the first data row.
    # Prediction needs at least two snapshots, so none is attempted yet.
    f = open('isp.log', 'w')
    f.write('used,left,daysleft,dataused,datesnap\n') #write header
    f.write(comb)
    f.close()
    print('Creating new file since it does not exist. Next run you should get a prediction.')
    #mdup.kill(dvr, disp) #try to started services
    sys.exit(0)
|
Bella Sposa Events is a full-service wedding planning and design business. We take pride in making sure that every bride has the most amazing day and that it is absolutely stress-free. Owner Jennifer Lopez also has a background in interior design and takes pride in making sure that the décor side of the event always looks beautiful.
|
import sys
import os
import json
from resources.lib.config import cConfig
from resources.lib import common, logger
class cPluginHandler:
    """Discovers site plugins in the addon's 'sites' folder, caches their
    metadata in a JSON 'pluginDB' file and mirrors them into settings.xml
    so each plugin can be toggled on/off from the addon settings.

    Note: written for Python 2 (see the 'except Exception, e' syntax below).
    """

    def __init__(self):
        self.addon = common.addon
        self.rootFolder = common.addonPath
        self.settingsFile = os.path.join(self.rootFolder, 'resources', 'settings.xml')
        self.profilePath = common.profilePath
        # JSON cache of plugin metadata, keyed by plugin file name.
        self.pluginDBFile = os.path.join(self.profilePath,'pluginDB')
        logger.info('profile folder: %s' % self.profilePath)
        logger.info('root folder: %s' % self.rootFolder)
        self.defaultFolder = os.path.join(self.rootFolder, 'sites')
        logger.info('default sites folder: %s' % self.defaultFolder)

    def getAvailablePlugins(self):
        """Scan the sites folder, refresh the DB for new or changed plugin
        files, drop obsolete entries, and return the enabled plugins."""
        pluginDB = self.__getPluginDB()
        # default plugins
        update = False
        fileNames = self.__getFileNamesFromFolder(self.defaultFolder)
        for fileName in fileNames:
            plugin = {'name':'', 'icon':'', 'settings':'', 'modified':0}
            if fileName in pluginDB:
                plugin.update(pluginDB[fileName])
            try:
                modTime = os.path.getmtime(os.path.join(self.defaultFolder,fileName+'.py'))
            except OSError:
                modTime = 0
            # (Re)import the plugin when it is new or its file changed on disk.
            if fileName not in pluginDB or modTime > plugin['modified']:
                logger.info('load plugin: ' + str(fileName))
                # try to import plugin
                pluginData = self.__getPluginData(fileName)
                if pluginData:
                    pluginData['modified'] = modTime
                    pluginDB[fileName] = pluginData
                    update = True
        # check pluginDB for obsolete entries
        deletions = []
        for pluginID in pluginDB:
            if pluginID not in fileNames:
                deletions.append(pluginID)
        for id in deletions:
            del pluginDB[id]
        if update or deletions:
            self.__updateSettings(pluginDB)
            self.__updatePluginDB(pluginDB)
        return self.getAvailablePluginsFromDB()

    def getAvailablePluginsFromDB(self):
        """Return the plugins from the DB whose on/off setting is enabled."""
        plugins = []
        oConfig = cConfig()
        iconFolder = os.path.join(self.rootFolder, 'resources','art','sites')
        pluginDB = self.__getPluginDB()
        for pluginID in pluginDB:
            plugin = pluginDB[pluginID]
            pluginSettingsName = 'plugin_%s' % pluginID
            plugin['id'] = pluginID
            if 'icon' in plugin:
                plugin['icon'] = os.path.join(iconFolder, plugin['icon'])
            else:
                plugin['icon'] = ''
            # is the on/off setting for this plugin enabled?
            if oConfig.getSetting(pluginSettingsName) == 'true':
                plugins.append(plugin)
        return plugins

    def __updatePluginDB(self, data):
        """Persist *data* (the plugin dict) as JSON to pluginDBFile."""
        if not os.path.exists(self.profilePath):
            os.makedirs(self.profilePath)
        file = open(self.pluginDBFile, 'w')
        json.dump(data,file)
        file.close()

    def __getPluginDB(self):
        """Load the JSON plugin DB; return an empty dict if missing or corrupt."""
        if not os.path.exists(self.pluginDBFile):
            return dict()
        file = open(self.pluginDBFile, 'r')
        try:
            data = json.load(file)
        except ValueError:
            logger.error("pluginDB seems corrupt, creating new one")
            data = dict()
        file.close()
        return data

    def __updateSettings(self, pluginData):
        '''
        Rewrite the plugin section of settings.xml from pluginData.

        pluginData (dict): plugin information keyed by plugin id.
        '''
        xmlString = '<plugin_settings>%s</plugin_settings>'
        import xml.etree.ElementTree as ET
        tree = ET.parse(self.settingsFile)
        #find Element for plugin Settings (category label '30022')
        pluginElem = False
        for elem in tree.findall('category'):
            if elem.attrib['label']=='30022':
                pluginElem = elem
                break
        if not pluginElem:
            logger.info('could not update settings, pluginElement not found')
            return False
        # Remove all existing plugin settings before re-adding them.
        pluginElements = pluginElem.findall('setting')
        for elem in pluginElements:
            pluginElem.remove(elem)
        # add plugins to settings: a separator plus an on/off bool per plugin.
        for pluginID in sorted(pluginData):
            plugin = pluginData[pluginID]
            subEl = ET.SubElement(pluginElem,'setting', {'type': 'lsep', 'label':plugin['name']})
            subEl.tail = '\n\t'
            attrib = {'default': 'false', 'type': 'bool'}
            attrib['id'] = 'plugin_%s' % pluginID
            attrib['label'] = plugin['name']
            subEl = ET.SubElement(pluginElem, 'setting', attrib)
            subEl.tail = '\n\t'
            # Plugins may ship extra settings as an XML fragment.
            if 'settings' in plugin:
                customSettings = []
                try:
                    customSettings = ET.XML(xmlString % plugin['settings']).findall('setting')
                except:
                    # NOTE(review): the format string is missing a conversion
                    # ('%s'); this logger call can itself raise while handling
                    # the parse error.
                    logger.info('Parsing of custom settings for % failed.' % plugin['name'])
                for setting in customSettings:
                    setting.tail = '\n\t'
                    pluginElem.append(setting)
        # NOTE(review): chained assignment — sets the last setting's tail to
        # '\n' and leaves pluginElements bound to '\n' as well.
        pluginElements = pluginElem.findall('setting')[-1].tail = '\n'
        try:
            ET.dump(pluginElem)
        except:
            logger.info('Settings update failed')
            return
        tree.write(self.settingsFile)

    def __getFileNamesFromFolder(self, sFolder):
        """Return the base names (without '.py') of all Python files in sFolder."""
        aNameList = []
        items = os.listdir(sFolder)
        for sItemName in items:
            if sItemName.endswith('.py'):
                sItemName = os.path.basename(sItemName[:-3])
                aNameList.append(sItemName)
        return aNameList

    def __getPluginData(self, fileName):
        """Import the plugin module and collect its metadata.

        Returns a dict with 'name' (required) plus optional 'icon' and
        'settings', or False when the import fails.
        """
        pluginData = {}
        try:
            plugin = __import__(fileName, globals(), locals())
            pluginData['name'] = plugin.SITE_NAME
        except Exception, e:
            logger.error("Can't import plugin: %s :%s" % (fileName, e))
            return False
        try:
            pluginData['icon'] = plugin.SITE_ICON
        except:
            pass
        try:
            pluginData['settings'] = plugin.SITE_SETTINGS
        except:
            pass
        return pluginData
|
Posted on September 14, 2012 at 4:56:02 am by Lauren C.
Consider it the smorgasbord of the local art world. The annual Fort Wayne Museum of Art Trolley Tour, on Sept. 21 from 6-10 p.m., is an opportunity to take in some good art in an eclectic atmosphere. While the event has been a favorite for some time, new to this year is the timing. It has been traditionally held on a Thursday, while this year it will take place on Friday. Take in the sights of 13 galleries-three new-while eating food from 16 caterers. Patrons can enjoy a new "Southwest Route" that includes: Castle Gallery; Terry Ratliff’s gallery (RatArt); University of Saint Francis; Orchard Gallery and B. Mitchell Fine Jewelry. The Main Library will, for the first time, have a cash bar this year provided by Flanagan’s. A mix of art and good food, it truly is a sensory experience! More trolleys and buses will be available than in years past, which will add to the experience. Tickets (also called passports) are on sale Aug. 28. Insider's Note: The museum sold out the night of the event last year, so you might want to get your tickets early! You can also pick them up at the Paradigm Gallery (Museum Store), the Orchard Gallery, online, or by calling 422-6467.
|
# -*- encoding: utf-8 -*-
import platform
import unittest
from abjad.tools import abjadbooktools
from abjad.tools import systemtools
@unittest.skipIf(
    platform.python_implementation() != 'CPython',
    'Only for CPython.',
    )
class TestLaTeXDocumentHandler_syntax_error(unittest.TestCase):
    """Tests how abjad-book handles Python syntax errors inside <abjad> blocks."""

    def test_syntax_error_1(self):
        # Without allow_exceptions, a syntax error aborts document handling.
        input_file_contents = [
            '<abjad>',
            'foo bar baz',
            '</abjad>',
            ]
        document_handler = abjadbooktools.LaTeXDocumentHandler(
            input_file_contents=input_file_contents,
            )
        self.assertRaises(
            abjadbooktools.AbjadBookError,
            document_handler.__call__,
            )

    def test_syntax_error_2(self):
        # With allow_exceptions=true, the traceback is rendered into the
        # rebuilt source instead of raising.
        input_file_contents = [
            '<abjad>[allow_exceptions=true]',
            'foo bar baz',
            '</abjad>',
            ]
        document_handler = abjadbooktools.LaTeXDocumentHandler(
            input_file_contents=input_file_contents,
            )
        rebuilt_source = document_handler(return_source=True)
        assert rebuilt_source == systemtools.TestManager.clean_string(
            '''
            <abjad>[allow_exceptions=true]
            foo bar baz
            </abjad>
            %%% ABJADBOOK START %%%
            \\begin{lstlisting}
            >>> foo bar baz
            File "<stdin>", line 1
            foo bar baz
            ^
            SyntaxError: invalid syntax
            \\end{lstlisting}
            %%% ABJADBOOK END %%%
            ''',
            )

    def test_syntax_error_3(self):
        # allow_exceptions only applies to its own block: the second block
        # still raises.
        input_file_contents = [
            '<abjad>[allow_exceptions=true]',
            'foo bar baz',
            '</abjad>',
            '',
            '<abjad>',
            'foo bar baz',
            '</abjad>',
            ]
        document_handler = abjadbooktools.LaTeXDocumentHandler(
            input_file_contents=input_file_contents,
            )
        self.assertRaises(
            abjadbooktools.AbjadBookError,
            document_handler.__call__
            )
|
Frank Edward Gilbert Esq M private (living) Arthur Stanley Gilbert .
Harry H Gilbert M 24 Jun 1903 Stanley C Gilbert .
Margaret Gilbert F ca. Apr 1898 Stanley C Gilbert .
Mary Jane Gilbert F 12 Sep 1905 Stanley C Gilbert .
Mary L Gilbert F ca. 1907 Stanley C Gilbert .
Mildred Gilbert F ca. Jan 1899 Stanley C Gilbert .
Roger Gilbert M ca. 1910 Stanley C Gilbert .
Ruth Gilbert F ca. 1909 Stanley C Gilbert .
Stanley C Gilbert Jr M ca. 1902 Stanley C Gilbert .
|
# -*- coding: utf-8 -*-
# Split GroupBy intervals using windows.
import apache_beam as beam
# Basic Dataflow settings:
# job name, project name and the locations for staging/temporary files.
options = beam.options.pipeline_options.PipelineOptions()
gcloud_options = options.view_as(
    beam.options.pipeline_options.GoogleCloudOptions)
gcloud_options.job_name = 'dataflow-tutorial7'
gcloud_options.project = 'PROJECTID'
gcloud_options.staging_location = 'gs://PROJECTID/staging'
gcloud_options.temp_location = 'gs://PROJECTID/temp'
# Dataflow scaling settings:
# maximum number of workers, machine type, etc.
# Worker disk size defaults to 250GB (batch) / 420GB (streaming), which is
# large, so specifying just the size you need here is recommended.
worker_options = options.view_as(beam.options.pipeline_options.WorkerOptions)
worker_options.disk_size_gb = 20
worker_options.max_num_workers = 2
# worker_options.num_workers = 2
# worker_options.machine_type = 'n1-standard-8'
# Switching the execution environment:
# DirectRunner: run on the local machine
# DataflowRunner: run on Dataflow
# options.view_as(beam.options.pipeline_options.StandardOptions).runner = 'DirectRunner'
options.view_as(beam.options.pipeline_options.StandardOptions).runner = 'DataflowRunner'
def assign_timevalue(v):
    """Attach a timestamp to a pcollection element.

    Downstream windowing splits elements based on this timestamp; here a
    random 0-1 second offset from the current time is used, so elements
    scatter across the 1-second fixed windows.
    """
    import apache_beam.transforms.window as window
    import random
    import time
    return window.TimestampedValue(v, int(time.time()) + random.randint(0, 1))
def modify_data3(kvpair):
    """Turn a (key, values) pair produced by GroupByKey into a BigQuery row.

    Because the input is windowed upstream, each values list is small.
    Example input: (u'word only', [4, 4, 6, 6, 7])
    """
    key, counts = kvpair
    return {'count_type': key, 'sum': sum(counts)}
# Build and run the pipeline: read rows from BigQuery, timestamp them,
# split them into fixed 1-second windows, sum word counts per key within
# each window, and write the per-window sums back to BigQuery.
p7 = beam.Pipeline(options=options)
query = 'SELECT * FROM [PROJECTID:testdataset.testtable3] LIMIT 20'
(p7 | 'read' >> beam.io.Read(beam.io.BigQuerySource(project='PROJECTID', use_standard_sql=False, query=query))
    | "assign tv" >> beam.Map(assign_timevalue)
    | 'window' >> beam.WindowInto(beam.window.FixedWindows(1))
    | 'pair' >> beam.Map(lambda x: (x['count_type'], x['word_count']))
    | "groupby" >> beam.GroupByKey()
    | 'modify' >> beam.Map(modify_data3)
    | 'write' >> beam.io.Write(beam.io.BigQuerySink(
        'testdataset.testtable5',
        schema='count_type:STRING, sum:INTEGER',
        create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
        write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))
)
p7.run()  # .wait_until_finish()
|
CARMEL, Ind. — MISO’s 10 sectors are split over whether state regulators should be subjected to a one-year moratorium before they’re eligible to serve on the RTO’s Board of Directors.
The controversy surfaced in early fall with the nomination of Minnesota Public Utilities Commission Chair Nancy Lange. Last month, MISO membership elected Lange to the board, though some stakeholders said she should be subject to the same one-year moratorium that the RTO requires of directors coming from member companies. (See MISO Elects Lange to Board; Keeps 2 Incumbents.) This is the first time MISO has elected a sitting commissioner from one of the states in its footprint.
The board’s Corporate Governance and Strategic Planning Committee has agreed to consider expanding the moratorium in 2019.
Lange has not yet resigned from the Minnesota PUC, though MISO’s new director orientation begins Dec. 11. MISO will hold another two-day orientation session in late January. Lange’s term ends Jan. 7 and overlap between her PUC appointment and MISO training seems inevitable. MISO officials had promised an early resignation in order to avoid overlap. Meanwhile, 14 applicants are vying for Lange’s seat in Minnesota.
Lange did not respond to RTO Insider’s calls to her Minnesota office.
During a Dec. 5 Advisory Committee meeting, MISO Senior Vice President and Board Secretary Stephen Kozey said the cooling off period was introduced in 1996 to prevent conflicts of interest by member companies offering their former executives to serve on the board. While the stay-out period was not required by FERC, the commission accepted MISO’s language.
Mark Volpe, representing the Independent Power Producers sector, pointed out that state regulators in MISO are on equal footing with dues-paying members through sector voting. He said that even though Environmental sector representatives are not dues-paying members, it would nevertheless be inappropriate for an environmental representative at the Advisory Committee to immediately transition to a director position.
Though Volpe said he had no reservations about Lange personally, he said she could have been seated at one of the four regulator seats at the Advisory Committee days before joining the MISO board.
“It’s the spirit of the rules that’s the real concern here,” Volpe said.
Chris Plante, representative of the Transmission-Dependent Utilities sector, said he agreed with Volpe’s observations.
Missouri Public Service Commissioner Daniel Hall, however, said regulators bring valuable experience and do not stand to benefit from MISO decisions. Hall was one of two stakeholders this year on the board’s Nominating Committee, which is charged with selecting board nominees.
“This shouldn’t be an issue at all. I don’t see how a commissioner serving on the board after their tenure is a conflict,” Hall said.
Others said additional rules are unnecessary because many in MISO’s stakeholder community maintain professional licenses that instruct individuals to avoid conflicts of interest and the appearance of impropriety. Advisory Committee Chair Audrey Penner said possible revisions to the Transmission Owners Agreement might include language about board nominees recusing themselves when they face conflicts of interest.
Arkansas Public Service Commission Chairman Ted Thomas said that while commissioners could decide to sit at the Advisory Committee table, Lange has not. Lange’s colleague, Commissioner Matthew Schuerger, currently serves in the Organization of MISO States and is one of four commissioners representing the State Regulatory sector.
Thomas said regulators that have not been involved with MISO’s stakeholder process should be free to accept director appointments.
“If we’re going to draw the line, let’s draw it in the right place,” he said.
Advisory Committee Vice Chair Tia Elliott agreed that regulators that are not involved in the State Regulatory sector through OMS probably have little idea about MISO’s inner workings. But she said the appearance of the situation is something members should consider. She pointed out that MISO transmission projects come before regulatory bodies in those states.
But Eligible End-Use Customers sector representative Kevin Murray said he thought the current situation is rare. “I think the odds of it happening again are extremely slim. I think we’re making a big ado about nothing,” Murray said.
Citigroup Energy’s Barry Trayers, of the Power Marketing sector, said MISO may benefit from a person with a less steep learning curve joining the board.
Clean Grid Alliance’s Beth Soholt asked if it’s difficult for the RTO to attract qualified candidates. “We had a very wide and very deep pool, so it’s not like we had to shake the bushes and rattle the trees to get candidates,” said Madison Gas and Electric’s Megan Wisersky, the other stakeholder who sat on this year’s Nominating Committee.
Director Thomas Rainwater, who chairs the Corporate Governance and Strategic Planning Committee, asked members to come to a consensus on whether they would prefer a one-year sit-out.
“The board very much wants to be viewed as independent,” he told stakeholders.
Former Minnesota Public Utilities Commission Chairman David C. Boyd has joined MISO as vice president of government and regulatory affairs.
Baltimore Gas and Electric will pay $170,530 to MISO members to end a dispute over cross-system congestion costs under a settlement approved by FERC.
|
import datetime
from mock import Mock, patch
import pytest
from elasticsearch_dsl.exceptions import ValidationException
from elasticsearch_dsl.utils import AttrList
from nefertari_es import fields
from .fixtures import (
id_model,
story_model,
tag_model,
person_model,
parent_model,
)
class TestFieldHelpers(object):
    def test_custom_mapping_mixin(self):
        # The mixin's _custom_mapping should override and extend whatever
        # the base class returns from to_dict().
        class Base(object):
            def to_dict(self):
                return {'foo': 1, 'bar': 2}

        class Field(fields.CustomMappingMixin, Base):
            _custom_mapping = {'foo': 3, 'zoo': 4}

        expected = {'foo': 3, 'bar': 2, 'zoo': 4}
        assert Field().to_dict() == expected
class TestFields(object):
    def test_basefieldmixin(self):
        # primary_key=True must imply a required field flagged as PK.
        class FakeBase(object):
            def __init__(self, required=False):
                self.required = required

        class FakeField(fields.BaseFieldMixin, FakeBase):
            pass

        field = FakeField(primary_key=True)
        assert field._primary_key
        assert field.required

    def test_drop_invalid_kwargs(self):
        # Only kwargs listed in _valid_kwargs survive the filter.
        class FakeBase(object):
            pass

        class FakeField(fields.BaseFieldMixin, FakeBase):
            _valid_kwargs = ('foo',)

        field = FakeField()
        assert field.drop_invalid_kwargs({'foo': 1, 'bar': 2}) == {'foo': 1}

    def test_idfield(self):
        field = fields.IdField()
        assert field._primary_key
        assert not field._required

    def test_idfield_empty(self):
        assert fields.IdField()._empty() is None

    def test_intervalfield_to_python(self):
        # Integers are interpreted as a number of seconds.
        field = fields.IntervalField()
        value = field._to_python(600)
        assert isinstance(value, datetime.timedelta)
        assert value.total_seconds() == 600
class TestDateTimeField(object):
    def test_to_python_no_data(self):
        # Every "empty" input normalises to None.
        field = fields.DateTimeField()
        for empty in ({}, [], None, ''):
            assert field._to_python(empty) is None

    def test_to_python_datetime(self):
        # datetime instances pass through unchanged (identical object).
        field = fields.DateTimeField()
        now = datetime.datetime.now()
        assert field._to_python(now) is now

    def test_to_python_string_parse(self):
        field = fields.DateTimeField()
        parsed = field._to_python('2000-11-12')
        assert parsed == datetime.datetime(year=2000, month=11, day=12)

    def test_to_python_parse_failed(self):
        # Unparseable strings raise a ValidationException with a clear message.
        field = fields.DateTimeField()
        with pytest.raises(ValidationException) as ex:
            field._to_python('asd')
        assert 'Could not parse datetime from the value' in str(ex.value)
class TestTimeField(object):
    def test_to_python_no_data(self):
        # Every "empty" input normalises to None.
        field = fields.TimeField()
        for empty in ({}, [], None, ''):
            assert field._to_python(empty) is None

    def test_to_python_time(self):
        # time instances pass through unchanged (identical object).
        field = fields.TimeField()
        now_time = datetime.datetime.now().time()
        assert field._to_python(now_time) is now_time

    def test_to_python_datetime(self):
        # datetime instances are reduced to their time component.
        field = fields.TimeField()
        now = datetime.datetime.now()
        assert field._to_python(now) == now.time()

    def test_to_python_string_parse(self):
        field = fields.TimeField()
        assert field._to_python('2000-11-12 17:40') == datetime.time(17, 40)

    def test_to_python_parse_failed(self):
        # Unparseable strings raise a ValidationException with a clear message.
        field = fields.TimeField()
        with pytest.raises(ValidationException) as ex:
            field._to_python('asd')
        assert 'Could not parse time from the value' in str(ex.value)
class TestRelationshipField(object):
    """How related documents serialise in to_dict().

    NOTE(review): test_to_dict_nested mutates the class-level
    _nested_relationships on the story_model fixture; assumes the fixture
    is function-scoped so the change does not leak -- confirm in fixtures.
    """

    def test_to_dict_nested(self, story_model,
                            person_model, tag_model):
        # Relationships listed in _nested_relationships are embedded as
        # full dicts (with _pk/_type), not just primary keys.
        story_model._nested_relationships = ('author', 'tags')
        req = Mock()
        s = story_model(name='Moby Dick')
        assert s.to_dict(request=req) == {
            'name': 'Moby Dick',
            '_pk': 'Moby Dick',
            '_type': 'Story'
        }
        s.author = person_model(name='Melville')
        assert s.to_dict(request=req)['author'] == {
            '_pk': 'Melville', '_type': 'Person', 'name': 'Melville'}
        s.tags = [tag_model(name='whaling'), tag_model(name='literature')]
        assert s.to_dict(request=req)['tags'] == [
            {'_pk': 'whaling', '_type': 'Tag', 'name': 'whaling'},
            {'_pk': 'literature', '_type': 'Tag', 'name': 'literature'}]

    def test_to_dict_not_nested(self, story_model,
                                person_model, tag_model):
        # Without _nested_relationships only primary keys appear.
        req = Mock()
        s = story_model(name='Moby Dick')
        assert s.to_dict(request=req) == {
            'name': 'Moby Dick',
            '_pk': 'Moby Dick',
            '_type': 'Story'
        }
        s.author = person_model(name='Melville')
        assert s.to_dict(request=req)['author'] == 'Melville'
        t1 = tag_model(name='whaling')
        t2 = tag_model(name='literature')
        s.tags = [t1, t2]
        assert s.to_dict(request=req)['tags'] == ['whaling', 'literature']

    def test_to_dict_es(self, story_model, person_model, tag_model):
        # Without a request (ES-bound serialisation) no _pk/_type metadata
        # is added and relationships collapse to primary keys.
        s = story_model(name='Moby Dick')
        assert s.to_dict() == {'name': 'Moby Dick'}
        a = person_model(name='Melville')
        s.author = a
        assert s.to_dict()['author'] == 'Melville'
        t1 = tag_model(name='whaling')
        t2 = tag_model(name='literature')
        s.tags = [t1, t2]
        assert s.to_dict()['tags'] == ['whaling', 'literature']
class TestReferenceField(object):
    """Tests for ReferenceField (document references with backrefs)."""

    def _get_field(self):
        # Shared helper: single-valued reference to 'Foo' with a backref.
        return fields.ReferenceField(
            'Foo', uselist=False, backref_name='zoo')

    def test_init(self):
        field = self._get_field()
        assert field._doc_class_name == 'Foo'
        assert not field._multi
        assert field._backref_kwargs == {'name': 'zoo'}

    def test_drop_invalid_kwargs(self):
        # 'required' and 'backref_*' kwargs are valid; arbitrary keys are not.
        field = self._get_field()
        kwargs = {'required': True, 'backref_required': True, 'Foo': 1}
        assert field.drop_invalid_kwargs(kwargs) == {
            'required': True, 'backref_required': True}

    @patch('nefertari_es.meta.get_document_cls')
    def test_doc_class(self, mock_get):
        # _doc_class resolves the class name lazily via the document registry.
        field = self._get_field()
        assert field._doc_class_name == 'Foo'
        klass = field._doc_class
        mock_get.assert_called_once_with('Foo')
        assert klass == mock_get()

    def test_empty_not_required(self):
        # Optional multi-valued fields default to an empty AttrList;
        # optional single-valued fields default to None.
        field = self._get_field()
        field._required = False
        field._multi = True
        val = field.empty()
        assert isinstance(val, AttrList)
        assert len(val) == 0
        field._multi = False
        assert field.empty() is None

    @patch('nefertari_es.meta.get_document_cls')
    def test_clean(self, mock_get):
        # clean() currently passes values through unchanged.
        mock_get.return_value = dict
        field = self._get_field()
        field._doc_class
        val = 'asdasdasdasd'
        assert field.clean(val) is val
class TestIdField(object):
    def test_read_only(self, id_model):
        # Direct assignment to the id field must be rejected.
        doc = id_model()
        with pytest.raises(AttributeError) as err:
            doc.id = 'fail'
        assert str(err.value) == 'id is read-only'

    def test_sync_id(self, id_model):
        # After a (simulated) save, _sync_id_field copies _id into id.
        doc = id_model()
        assert doc.id is None
        doc._id = 'ID'
        doc._sync_id_field()
        assert doc.id == doc._id
|
Have you ever wondered if your blog posts are really engaging? Especially if you’ve written a lot already, you have that particular writing style that you’re comfortable with. But that may not be the best writing style.
Well, today’s infographic gives you a writing style to consider. This will surely make your readers enjoy reading your content more.
People are naturally inclined to like stories. Tell your readers one, and they won’t get bored.
Saturday Biz Tip: Get These 5 Distractions Off Your Workspace [Infographic] | Why is Having a Good Name Important?
|
#
# Copyright (C) 2014 Sean Poyser - With acknowledgement to some original code by twinther (Tommy Winther)
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import buggalo
import streaming
import xbmcaddon
import xbmc
import os
import shutil
import urllib
import dixie
import deleteDB
# Marker property so skins/other scripts can verify the guide is loaded.
xbmcgui.Window(10000).setProperty('TVG_TEST_TEXT', 'THIS IS A TEST')
ADDON = xbmcaddon.Addon(id = 'script.tvguidedixie')
HOME = ADDON.getAddonInfo('path')
TITLE = 'OnTapp.TV'
VERSION = '2.3.2'
MASHMODE = (ADDON.getSetting('mashmode') == 'true')
SKIN = ADDON.getSetting('dixie.skin')
GMTOFFSET = dixie.GetGMTOffset()
TRAILERS = ADDON.getSetting('trailers.addon')
USTV = ADDON.getSetting('ustv.addon')
datapath = xbmc.translatePath(ADDON.getAddonInfo('profile'))
extras = os.path.join(datapath, 'extras')
# NOTE: 'extras' is already an absolute path, so os.path.join() discards
# 'datapath' here and this resolves to <extras>/skins. Kept for compatibility.
skinfolder = os.path.join(datapath, extras, 'skins')
mashpath = os.path.join(skinfolder, 'Mash Up')
skinpath = os.path.join(skinfolder, SKIN)
mashfile = os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.movie25/Dixie/mashup.ini'))
if MASHMODE:
    PATH = mashpath
else:
    PATH = skinpath
    dixie.SetSetting('mashmode', 'false')
# Default the external add-on launch commands so later executebuiltin()
# calls cannot raise NameError when the configured name is unrecognised.
trailers = ''
ustv = ''
if TRAILERS == 'HD-Trailers.net':
    trailers = 'XBMC.RunAddon(plugin.video.hdtrailers_net)'
if TRAILERS == 'Apple iTunes Trailers':
    trailers = 'XBMC.RunAddon(plugin.video.itunes_trailers)'
if USTV == 'Hulu':
    ustv = 'XBMC.RunAddon(plugin.video.hulu)'
if USTV == 'Hulu-Beta':
    ustv = 'XBMC.RunAddon(plugin.video.hulu-beta)'
if USTV == 'USTV VoD':
    ustv = 'XBMC.RunAddon(plugin.video.ustvvod)'
# BUG FIX: the original guard `if os.path.join(...)` was always truthy
# (os.path.join returns a non-empty string), so XML was always assigned.
# Keep that effective behaviour but make it explicit; if an existence check
# was intended, it should use os.path.exists() on the real skin XML path.
xml_file = 'script-tvguide-main.xml'
XML = xml_file
DEBUG = False
# Top-level UI modes for the guide window.
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'
# Kodi/XBMC input action ids (mirror the xbmcgui ACTION_* constants).
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15
# Mouse actions.
ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107
# Touch-screen gesture actions.
ACTION_TOUCH_TAP = 401
ACTION_TOUCH_LONGPRESS = 411
ACTION_GESTURE_SWIPE_LEFT = 511
ACTION_GESTURE_SWIPE_RIGHT = 521
ACTION_GESTURE_SWIPE_UP = 531
ACTION_GESTURE_SWIPE_DOWN = 541
ACTION_GESTURE_ZOOM = 502
ACTION_GESTURE_ROTATE = 503
ACTION_GESTURE_PAN = 504
# Keyboard/remote key codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_SUPER_SEARCH = 77
# EPG layout/appearance defaults; may be overridden by the skin's epg.cfg.
CHANNELS_PER_PAGE = 8
TEXT_COLOR = '0xffffffff'
FOCUSED_COLOR = '0xffffffff'
SHADOW_COLOR = 'None'
# Localised strings used by the context menu.
REMOVE_STRM_FILE = strings(REMOVE_STRM_FILE)
CHOOSE_STRM_FILE = strings(CHOOSE_STRM_FILE)
REMIND_PROGRAM = strings(REMIND_PROGRAM)
DONT_REMIND_PROGRAM = strings(DONT_REMIND_PROGRAM)
# Width of one EPG page in time.
HALF_HOUR = datetime.timedelta(minutes = 30)
# Load optional EPG overrides (e.g. CHANNELS_PER_PAGE, colours) from the
# skin's epg.cfg file. Strictly best-effort: a missing or malformed file
# leaves the defaults above untouched.
try:
    with open(os.path.join(PATH, 'epg.cfg')) as f:
        cfg = f.readlines()
    for l in cfg:
        l = l.strip()
        # sanity check: only execute simple 'name=value' assignment lines
        pts = l.split('=')
        if len(pts) == 2:
            # SECURITY NOTE: exec() runs arbitrary code from the cfg file.
            # Acceptable only because the file ships with the trusted skin.
            exec(l)
except Exception:
    pass
def debug(s):
    # Emit to the XBMC debug log only when debugging is switched on.
    if not DEBUG:
        return
    xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2-D coordinate used to track the EPG focus position."""

    def __init__(self):
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Geometry of the on-screen EPG grid (all values in pixels)."""

    def __init__(self):
        self.top = 0
        self.left = 0
        self.right = 0
        self.bottom = 0
        self.width = 0
        self.cellHeight = 0
class ControlAndProgram(object):
    """Pairs a skin GUI control with the programme it displays."""

    def __init__(self, control, program):
        self.control = control
        self.program = program
class TVGuide(xbmcgui.WindowXML):
    # Skin control ids (must match script-tvguide-main.xml).
    C_MAIN_DATE = 4000
    # Focused-programme detail pane.
    C_MAIN_TITLE = 4020
    C_MAIN_TIME = 4021
    C_MAIN_DESCRIPTION = 4022
    C_MAIN_IMAGE = 4023
    C_MAIN_LOGO = 4024
    C_MAIN_TIMEBAR = 4100
    # "Loading" overlay controls.
    C_MAIN_LOADING = 4200
    C_MAIN_LOADING_PROGRESS = 4201
    C_MAIN_LOADING_TIME_LEFT = 4202
    C_MAIN_LOADING_CANCEL = 4203
    # On-screen mouse/touch navigation buttons.
    C_MAIN_MOUSE_CONTROLS = 4300
    C_MAIN_MOUSE_HOME = 4301
    C_MAIN_MOUSE_LEFT = 4302
    C_MAIN_MOUSE_UP = 4303
    C_MAIN_MOUSE_DOWN = 4304
    C_MAIN_MOUSE_RIGHT = 4305
    C_MAIN_MOUSE_EXIT = 4306
    C_MAIN_BACKGROUND = 4600
    # EPG grid and its (invisible) geometry marker.
    C_MAIN_EPG = 5000
    C_MAIN_EPG_VIEW_MARKER = 5001
    # On-screen-display (OSD) controls shown during playback.
    C_MAIN_OSD = 6000
    C_MAIN_OSD_TITLE = 6001
    C_MAIN_OSD_TIME = 6002
    C_MAIN_OSD_DESCRIPTION = 6003
    C_MAIN_OSD_CHANNEL_LOGO = 6004
    C_MAIN_OSD_CHANNEL_TITLE = 6005
    C_MAIN_BLACKOUT = 9999

    def __new__(cls):
        # WindowXML needs the skin XML filename and its search path at
        # construction time; both are module-level globals.
        return super(TVGuide, cls).__new__(cls, XML, PATH)

    def __init__(self):
        """Initialise window state only; GUI work happens in onInit()."""
        super(TVGuide, self).__init__()
        self.initialized = False
        self.refresh = False
        self.notification = None
        self.redrawingEPG = False
        self.timebarVisible = False
        self.isClosing = False
        self.controlAndProgramList = list()    # ControlAndProgram entries currently on screen
        self.ignoreMissingControlIds = list()  # control ids we tolerate missing from the skin
        self.channelIdx = 0                    # index of the first channel visible in the grid
        self.focusPoint = Point()
        self.epgView = EPGView()
        self.streamingService = streaming.StreamsService()
        self.player = xbmc.Player()
        self.database = None
        self.categoriesList = ADDON.getSetting('categories').split('|')
        if self.categoriesList[0] == '':
            self.categoriesList = []
        self.mode = MODE_EPG
        self.currentChannel = None
        # The OSD is only usable when alternative playback is disabled.
        self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting('alternative.playback') != 'true'
        self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
        self.osdChannel = None
        self.osdProgram = None
        self.touch = False
        self.prevCtrl = -1
        if ADDON.getSetting('enable.touch') == 'true':
            self.touch = True
        # find nearest half hour
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes = self.viewStartDate.minute % 30, seconds = self.viewStartDate.second)
    def getControl(self, controlId):
        """Return the skin control with *controlId*, or None.

        Controls on the ignore list may be absent without complaint; any
        other missing control indicates a broken skin, so an error dialog
        is shown and the window closes.
        """
        try:
            return super(TVGuide, self).getControl(controlId)
        except:
            if controlId in self.ignoreMissingControlIds:
                return None
            if not self.isClosing:
                xbmcgui.Dialog().ok(buggalo.getRandomHeading(), strings(SKIN_ERROR_LINE1), strings(SKIN_ERROR_LINE2), strings(SKIN_ERROR_LINE3))
                self.close()
            return None

    def close(self):
        """Shut down: cancel the timebar timer, stop playback and close the
        database before closing the window via final()."""
        try:
            self.timer.cancel()
            del self.timer
        except:
            pass
        if not self.isClosing:
            self.isClosing = True
            if self.player.isPlaying():
                self.player.stop()
            if self.database:
                # database.close() invokes self.final as its completion callback
                self.database.close(self.final)
            else:
                self.final()

    def final(self):
        # Call the base-class close() directly; self.close() above is the
        # overridden public entry point and must not be re-entered.
        xbmcgui.WindowXML.close(self)
    @buggalo.buggalo_try_except({'method' : 'TVGuide.onInit'})
    def onInit(self):
        """Window start-up: show the loading overlay, measure the EPG grid
        from the view-marker control and start database initialisation.

        XBMC calls onInit() again when a video add-on launched from the
        guide exits, so re-entry is detected via self.initialized.
        """
        if self.initialized:
            if self.refresh:
                self.refresh = False
                self.database.resetChannels()
                self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            # onInit(..) is invoked again by XBMC after a video addon exits after being invoked by XBMC.RunPlugin(..)
            return
        self.initialized = True
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
        self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
        self._showControl(self.C_MAIN_BLACKOUT)
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        # Derive the grid geometry from the (invisible) view-marker control.
        control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
        if control:
            left, top = control.getPosition()
            self.focusPoint.x = left
            self.focusPoint.y = top
            self.epgView.left = left
            self.epgView.top = top
            self.epgView.right = left + control.getWidth()
            self.epgView.bottom = top + control.getHeight()
            self.epgView.width = control.getWidth()
            self.epgView.cellHeight = (control.getHeight() / CHANNELS_PER_PAGE)
        try:
            self.database = src.Database(CHANNELS_PER_PAGE)
        except src.SourceNotConfiguredException:
            self.onSourceNotConfigured()
            self.close()
            return
        self.database.initializeS(self.onSourceInitializedS, self.isSourceInitializationCancelled)
        self.updateTimebar()

    @buggalo.buggalo_try_except({'method' : 'TVGuide.onAction'})
    def onAction(self, action):
        """Dispatch an input action to the handler for the current mode."""
        debug('Mode is: %s' % self.mode)
        # NOTE(review): 'controlInFocus' is never defined in this scope, so
        # the try block below always raises NameError and is swallowed by
        # the bare except -- it is effectively dead code.
        try:
            program = self._getProgramFromControl(controlInFocus)
            if program is None:
                return
            # if program is not None:
            #     self._showContextMenu(program)
        except:
            pass
        if self.mode == MODE_TV:
            self.onActionTVMode(action)
        elif self.mode == MODE_OSD:
            self.onActionOSDMode(action)
        elif self.mode == MODE_EPG:
            self.onActionEPGMode(action)
def onActionTVMode(self, action):
if action.getId() == ACTION_PAGE_UP:
self._channelUp()
elif action.getId() == ACTION_PAGE_DOWN:
self._channelDown()
elif not self.osdEnabled:
pass # skip the rest of the actions
elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
self.viewStartDate = datetime.datetime.today()
self.viewStartDate -= datetime.timedelta(minutes = self.viewStartDate.minute % 30, seconds = self.viewStartDate.second)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() == ACTION_SHOW_INFO:
self._showOsd()
    def onActionOSDMode(self, action):
        """Handle input while the on-screen display is visible."""
        if action.getId() == ACTION_SHOW_INFO:
            self._hideOsd()
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self._hideOsd()
            # return to "now" in the EPG grid (nearest half hour)
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes = self.viewStartDate.minute % 30, seconds = self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SELECT_ITEM:
            if self.playChannel(self.osdChannel):
                self._hideOsd()
        elif action.getId() == ACTION_PAGE_UP:
            self._channelUp()
            self._showOsd()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
            self._showOsd()
        elif action.getId() == ACTION_UP:
            # browse channels in the OSD without changing what is playing
            self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_DOWN:
            self.osdChannel = self.database.getNextChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_LEFT:
            # browse programmes on the selected channel
            previousProgram = self.database.getPreviousProgram(self.osdProgram)
            if previousProgram:
                self.osdProgram = previousProgram
                self._showOsd()
        elif action.getId() == ACTION_RIGHT:
            nextProgram = self.database.getNextProgram(self.osdProgram)
            if nextProgram:
                self.osdProgram = nextProgram
                self._showOsd()
    def onActionEPGMode(self, action):
        """Handle input while the EPG grid is the active mode.

        Note: this file is Python 2 (`except Exception, e` syntax below).
        """
        actionId = self.checkTouch(action)
        if actionId == None:
            # touch gesture was fully handled by checkTouch()
            return
        if actionId in [ACTION_PARENT_DIR, KEY_NAV_BACK, ACTION_PREVIOUS_MENU]:
            self.close()
            return
        elif actionId == ACTION_MOUSE_MOVE:
            self._showControl(self.C_MAIN_MOUSE_CONTROLS)
            return
        elif actionId == KEY_CONTEXT_MENU:
            if self.player.isPlaying():
                self._hideEpg()
        # Work out which programme control currently has focus (if any).
        controlInFocus = None
        currentFocus = self.focusPoint
        try:
            controlInFocus = self.getFocus()
            if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
                (left, top) = controlInFocus.getPosition()
                currentFocus = Point()
                currentFocus.x = left + (controlInFocus.getWidth() / 2)
                currentFocus.y = top + (controlInFocus.getHeight() / 2)
        except Exception, e:
            # No focused control: restore focus from the last known point.
            control = self._findControlAt(self.focusPoint)
            if control is None and len(self.controlAndProgramList) > 0:
                control = self.controlAndProgramList[0].control
            if control is not None:
                if not self.touch:
                    self.setFocus(control)
            return
        if actionId == ACTION_LEFT:
            self._left(currentFocus)
        elif actionId == ACTION_RIGHT:
            self._right(currentFocus)
        elif actionId == ACTION_UP:
            self._up(currentFocus)
        elif actionId == ACTION_DOWN:
            self._down(currentFocus)
        elif actionId == ACTION_NEXT_ITEM:
            self._nextDay()
        elif actionId == ACTION_PREV_ITEM:
            self._previousDay()
        elif actionId == ACTION_PAGE_UP:
            self._moveUp(CHANNELS_PER_PAGE)
        elif actionId == ACTION_PAGE_DOWN:
            self._moveDown(CHANNELS_PER_PAGE)
        elif actionId == ACTION_MOUSE_WHEEL_UP:
            self._moveUp(scrollEvent = True)
        elif actionId == ACTION_MOUSE_WHEEL_DOWN:
            self._moveDown(scrollEvent = True)
        elif actionId == KEY_HOME:
            # jump back to "now" (nearest half hour)
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes = self.viewStartDate.minute % 30, seconds = self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif actionId in [KEY_CONTEXT_MENU] and controlInFocus is not None:
            program = self._getProgramFromControl(controlInFocus)
            if program is not None:
                self._showContextMenu(program)
        elif actionId == KEY_SUPER_SEARCH:
            # push the focused programme title into Super Favourites search
            try:
                program = self._getProgramFromControl(controlInFocus)
                xbmc.executebuiltin('ActivateWindow(%d,"plugin://%s/?mode=%d&keyword=%s")' % (10025,'plugin.program.super.favourites', 0, urllib.quote_plus(program.title)))
            except:
                pass
    def checkTouch(self, action):
        """Translate touch gestures into EPG navigation.

        Returns the action id for the caller to process further, or None
        when the gesture has been fully handled here (mapped onto the
        on-screen mouse buttons).
        """
        id = action.getId()
        if id not in [ACTION_GESTURE_ZOOM, ACTION_GESTURE_ROTATE, ACTION_GESTURE_PAN, ACTION_TOUCH_TAP, ACTION_TOUCH_LONGPRESS, ACTION_GESTURE_SWIPE_LEFT, ACTION_GESTURE_SWIPE_RIGHT, ACTION_GESTURE_SWIPE_UP, ACTION_GESTURE_SWIPE_DOWN]:
            # not a touch action: pass through unchanged
            return id
        if id in [ACTION_GESTURE_ZOOM, ACTION_GESTURE_ROTATE]:
            return id
        if id == ACTION_TOUCH_TAP:
            return id
        try: controlInFocus = self.getFocus()
        except: controlInFocus = None
        if controlInFocus:
            # swipes over a programme cell behave like ordinary actions
            if self._getProgramFromControl(controlInFocus) != None:
                return id
        #never triggered due to back action
        #if id == ACTION_TOUCH_LONGPRESS:
        #    return KEY_HOME
        if id == ACTION_GESTURE_SWIPE_LEFT:
            self.onClick(self.C_MAIN_MOUSE_LEFT)
            return None
        if id == ACTION_GESTURE_SWIPE_RIGHT:
            self.onClick(self.C_MAIN_MOUSE_RIGHT)
            return None
        if id == ACTION_GESTURE_SWIPE_UP:
            #return ACTION_MOUSE_WHEEL_UP
            self.onClick(self.C_MAIN_MOUSE_UP)
            return None
        if id == ACTION_GESTURE_SWIPE_DOWN:
            #return ACTION_MOUSE_WHEEL_DOWN
            self.onClick(self.C_MAIN_MOUSE_DOWN)
            return None
        return id
    @buggalo.buggalo_try_except({'method' : 'TVGuide.onClick'})
    def onClick(self, controlId):
        """Handle clicks/taps on window controls (navigation buttons and
        programme cells)."""
        if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
            self.close()
            return
        if self.isClosing:
            return
        if controlId == self.C_MAIN_MOUSE_HOME:
            # jump to "now" (nearest half hour)
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes = self.viewStartDate.minute % 30, seconds = self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_LEFT:
            self.viewStartDate -= datetime.timedelta(hours = 2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_UP:
            self._moveUp(count = CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_DOWN:
            self._moveDown(count = CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_RIGHT:
            # never scroll past the end of the available guide data
            when = self.viewStartDate + datetime.timedelta(hours = 2)
            if when.date() > self.database.updateLimit:
                return
            self.viewStartDate = when
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        # On touch devices a cell must be tapped twice in a row before it
        # activates (the first tap only focuses it).
        prevCtrl = self.prevCtrl
        self.prevCtrl = controlId
        if self.touch:
            if prevCtrl != self.prevCtrl:
                return
        program = self._getProgramFromControl(self.getControl(controlId))
        if program is None:
            return
        if self.touch:
            self._showContextMenu(program)
            return
        self.tryProgram(program)
def tryProgram(self, program):
if self.playChannel(program.channel):
return
result = self.streamingService.detectStream(program.channel)
if not result:
if self.touch:
return
# could not detect stream, show context menu
self._showContextMenu(program)
elif type(result) == str:
# one single stream detected, save it and start streaming
self.database.setCustomStreamUrl(program.channel, result)
self.playChannel(program.channel)
else:
# multiple matches, let user decide
d = ChooseStreamAddonDialog(result)
d.doModal()
if d.stream is not None:
self.database.setCustomStreamUrl(program.channel, d.stream)
self.playChannel(program.channel)
    def _showContextMenu(self, program):
        """Show the programme popup menu and act on the chosen entry."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
        d = PopupMenu(self.database, program, not program.notificationScheduled, self.touch)
        d.doModal()
        buttonClicked = d.buttonClicked
        del d
        if buttonClicked == PopupMenu.C_POPUP_REMIND:
            # toggle the reminder notification for this programme
            if program.notificationScheduled:
                self.notification.removeNotification(program)
            else:
                self.notification.addNotification(program)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
            d = StreamSetupDialog(self.database, program.channel)
            d.doModal()
            del d
            # re-open the menu so further choices can be made
            self._showContextMenu(program)
            return
        elif buttonClicked == PopupMenu.C_POPUP_PLAY:
            if self.touch:
                self.tryProgram(program)
            else:
                self.playChannel(program.channel)
        elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
            d = ChannelsMenu(self.database)
            d.doModal()
            del d
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CATEGORIES:
            d = CategoriesMenu(self.database, self.categoriesList)
            d.doModal()
            self.categoriesList = d.currentCategories
            del d
            dixie.SetSetting('categories', '|'.join(self.categoriesList))
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_SETTINGS:
            # settings must open via AlarmClock after this window has closed
            addonPath = HOME
            script = os.path.join(addonPath, 'openSettings.py')
            args = ''
            cmd = 'AlarmClock(%s,RunScript(%s,%s),%d,True)' % ('launch', script, args, 0)
            xbmc.executebuiltin(cmd)
            self.close()
        elif buttonClicked == PopupMenu.C_POPUP_IPLAYER:
            xbmc.executebuiltin('XBMC.RunAddon(plugin.video.iplayer)')
        elif buttonClicked == PopupMenu.C_POPUP_ITVPLAYER:
            xbmc.executebuiltin('XBMC.RunAddon(plugin.video.itv)')
        elif buttonClicked == PopupMenu.C_POPUP_OTTOOLS:
            # flag a refresh so channels are reloaded when the tools add-on exits
            self.refresh = True
            xbmc.executebuiltin('XBMC.RunAddon(script.tvguidedixie.tools)')
        elif buttonClicked == PopupMenu.C_POPUP_USTV:
            xbmc.executebuiltin(ustv)
        elif buttonClicked == PopupMenu.C_POPUP_SUPERFAVES:
            xbmc.executebuiltin('XBMC.RunAddon(plugin.program.super.favourites)')
            # import sys
            # sfAddon = xbmcaddon.Addon(id = 'plugin.program.super.favourites')
            # sfPath = sfAddon.getAddonInfo('path')
            # sys.path.insert(0, sfPath)
            # import chooser
            # chooser.Main()
        elif buttonClicked == PopupMenu.C_POPUP_VPN:
            xbmc.executebuiltin('XBMC.RunScript(special://home/addons/plugin.program.vpnicity/menu.py,%s)' % self.database.getStreamUrl(program.channel))
        elif buttonClicked == PopupMenu.C_POPUP_SUPER_SEARCH:
            xbmc.executebuiltin('ActivateWindow(%d,"plugin://%s/?mode=%d&keyword=%s",return)' % (10025,'plugin.program.super.favourites', 0, urllib.quote_plus(program.title)))
        elif buttonClicked == PopupMenu.C_POPUP_QUIT:
            self.close()
def setFocusId(self, controlId):
control = self.getControl(controlId)
if control:
self.setFocus(control)
    def setFocus(self, control):
        """Focus *control* and remember its position, so vertical navigation
        keeps roughly the same horizontal spot in the grid."""
        debug('setFocus %d' % control.getId())
        if control in [elem.control for elem in self.controlAndProgramList]:
            debug('Focus before %s' % self.focusPoint)
            (left, top) = control.getPosition()
            # only shift x when the remembered point lies outside the control
            if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
                self.focusPoint.x = left
            self.focusPoint.y = top + (control.getHeight() / 2)
            debug('New focus at %s' % self.focusPoint)
        super(TVGuide, self).setFocus(control)
    @buggalo.buggalo_try_except({'method' : 'TVGuide.onFocus'})
    def onFocus(self, controlId):
        """Update the detail pane (title, time, description, images) for the
        programme cell that just received focus."""
        try:
            controlInFocus = self.getControl(controlId)
        except Exception:
            return
        program = self._getProgramFromControl(controlInFocus)
        if program is None:
            return
        self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
        # programme times are stored in GMT; shift into local time for display
        self.setControlLabel(self.C_MAIN_TIME, '[B]%s - %s[/B]' % (self.formatTime(program.startDate+GMTOFFSET), self.formatTime(program.endDate+GMTOFFSET)))
        if program.description:
            description = program.description
        else:
            description = strings(NO_DESCRIPTION)
        self.setControlText(self.C_MAIN_DESCRIPTION, description)
        if program.channel.logo is not None:
            self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
        if program.imageSmall is not None:
            self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
        if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
            self.setControlImage(self.C_MAIN_BACKGROUND, program.imageLarge)
        # without an OSD, playback cannot continue behind the EPG
        if not self.osdEnabled and self.player.isPlaying():
            self.player.stop()
def _left(self, currentFocus):
    """Move focus one cell left; at the grid edge page back two hours."""
    target = self._findControlOnLeft(currentFocus)
    if target is not None:
        self.setFocus(target)
    else:
        self.viewStartDate -= datetime.timedelta(hours = 2)
        self.focusPoint.x = self.epgView.right
        self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
    """Move focus one cell right; page forward two hours when allowed."""
    target = self._findControlOnRight(currentFocus)
    if target is not None:
        self.setFocus(target)
        return
    when = self.viewStartDate + datetime.timedelta(hours = 2)
    # Never page past the last date covered by the EPG database.
    if when.date() > self.database.updateLimit:
        return
    self.viewStartDate = when
    self.focusPoint.x = self.epgView.left
    self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
    """Move focus one row up; at the top page to the previous channel page."""
    currentFocus.x = self.focusPoint.x
    target = self._findControlAbove(currentFocus)
    if target is None:
        self.focusPoint.y = self.epgView.bottom
        self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate, focusFunction=self._findControlAbove)
    else:
        self.setFocus(target)
def _down(self, currentFocus):
    """Move focus one row down; at the bottom page to the next channel page."""
    currentFocus.x = self.focusPoint.x
    target = self._findControlBelow(currentFocus)
    if target is None:
        self.focusPoint.y = self.epgView.top
        self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate, focusFunction=self._findControlBelow)
    else:
        self.setFocus(target)
def _nextDay(self):
    """Jump the grid one day forward and redraw."""
    self.viewStartDate = self.viewStartDate + datetime.timedelta(days = 1)
    self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _previousDay(self):
    """Jump the grid one day back and redraw."""
    self.viewStartDate = self.viewStartDate - datetime.timedelta(days = 1)
    self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _moveUp(self, count = 1, scrollEvent = False):
    """Scroll the channel page up by count rows (scrollEvent skips refocusing)."""
    if not scrollEvent:
        self.focusPoint.y = self.epgView.bottom
        self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction = self._findControlAbove)
    else:
        self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
def _moveDown(self, count = 1, scrollEvent = False):
    """Scroll the channel page down by count rows (scrollEvent skips refocusing)."""
    if not scrollEvent:
        self.focusPoint.y = self.epgView.top
        self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
    else:
        self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
def _channelUp(self):
    """Zap to the next channel while watching full screen."""
    nextChannel = self.database.getNextChannel(self.currentChannel)
    self.playChannel(nextChannel)
def _channelDown(self):
    """Zap to the previous channel while watching full screen."""
    previousChannel = self.database.getPreviousChannel(self.currentChannel)
    self.playChannel(previousChannel)
def playChannel(self, channel):
    """Start playback of channel via the external player script.

    Returns True when a stream url is known for the channel.
    """
    self.currentChannel = channel
    alreadyPlaying = self.player.isPlaying()
    url = self.database.getStreamUrl(channel)
    if url:
        if not alreadyPlaying:
            self._hideControl(self.C_MAIN_BLACKOUT)
        path = os.path.join(ADDON.getAddonInfo('path'), 'player.py')
        xbmc.executebuiltin('XBMC.RunScript(%s,%s,%d)' % (path, url, self.osdEnabled))
        if not alreadyPlaying:
            self._hideEpg()
        # Poll for playback start/stop in the background.
        threading.Timer(2, self.waitForPlayBackStopped).start()
    self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
    return url is not None
def waitForPlayBackStopped(self):
    """Wait (up to ~10s) for playback to start, block until it stops, then notify."""
    for _attempt in range(0, 100):
        time.sleep(0.1)
        if self.player.isPlaying():
            break
    self._showControl(self.C_MAIN_BLACKOUT)
    while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
        time.sleep(0.5)
    self.onPlayBackStopped()
def _showOsd(self):
    """Populate and display the on-screen display for the current channel."""
    if not self.osdEnabled:
        return
    if self.mode != MODE_OSD:
        self.osdChannel = self.currentChannel
    if self.osdProgram is not None:
        self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
        timeLabel = '[B]%s - %s[/B]' % (self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate))
        self.setControlLabel(self.C_MAIN_OSD_TIME, timeLabel)
        self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
        self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
        logo = self.osdProgram.channel.logo
        if logo is not None:
            self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, logo)
        else:
            self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
    self.mode = MODE_OSD
    self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
    """Dismiss the OSD and return to plain TV mode."""
    self.mode = MODE_TV
    self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
    """Leave EPG mode: hide the grid, switch to TV mode, drop the cell controls."""
    self._hideControl(self.C_MAIN_EPG)
    self.mode = MODE_TV
    self._clearEpg()
def onRedrawEPG(self, channelStart, startTime, focusFunction = None):
    """Rebuild the programme grid for the page starting at channelStart/startTime.

    Shows a loading screen, fetches the EPG view from the database, creates one
    ControlButton per visible programme (plus a full-width placeholder for
    channels without data) and restores focus near the previous focus point.

    @param channelStart: index of the first channel row to show
    @param startTime: datetime of the left edge of the grid (two-hour window)
    @param focusFunction: optional finder used to pick the control to focus
        (defaults to _findControlAt on the remembered focus point)
    """
    # Re-entrancy guard: redraws are skipped while one is already running,
    # while the database is updating, or while the window is closing.
    if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
        debug('onRedrawEPG - already redrawing')
        return # ignore redraw request while redrawing
    debug('onRedrawEPG')
    self.redrawingEPG = True
    self.mode = MODE_EPG
    self._showControl(self.C_MAIN_EPG)
    self.updateTimebar(scheduleTimer = False)
    # show Loading screen
    self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
    self._showControl(self.C_MAIN_LOADING)
    self.setFocusId(self.C_MAIN_LOADING_CANCEL)
    self.hideTimebar()
    # remove existing controls
    self._clearEpg()
    try:
        self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, clearExistingProgramList = False, categories = self.categoriesList, nmrChannels = CHANNELS_PER_PAGE)
        # Category filter produced nothing - retry unfiltered.
        if len(programs) == 0:
            self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, clearExistingProgramList = False, nmrChannels = CHANNELS_PER_PAGE)
    except src.SourceException:
        self.onEPGLoadError()
        return
    channelsWithoutPrograms = list(channels)
    # date and time row
    self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate))
    for col in range(1, 5):
        # Controls 4001-4004 are the four half-hour column headers.
        self.setControlLabel(4000 + col, self.formatTime(startTime))
        startTime += HALF_HOUR
    if programs is None:
        self.onEPGLoadError()
        return
    # set channel logo or text
    for idx in range(0, CHANNELS_PER_PAGE):
        if idx >= len(channels):
            # Blank out unused rows (4110+/4010+ are logo/title controls per row).
            self.setControlImage(4110 + idx, ' ')
            self.setControlLabel(4010 + idx, ' ')
        else:
            channel = channels[idx]
            self.setControlLabel(4010 + idx, channel.title)
            if channel.logo is not None:
                self.setControlImage(4110 + idx, channel.logo)
            else:
                self.setControlImage(4110 + idx, ' ')
    for program in programs:
        idx = channels.index(program.channel)
        if program.channel in channelsWithoutPrograms:
            channelsWithoutPrograms.remove(program.channel)
        startDelta = program.startDate - self.viewStartDate + GMTOFFSET
        stopDelta = program.endDate - self.viewStartDate + GMTOFFSET
        cellStart = self._secondsToXposition(startDelta.seconds)
        # Clamp cells that start before / end after the visible window.
        if startDelta.days < 0:
            cellStart = self.epgView.left
        cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
        if cellStart + cellWidth > self.epgView.right:
            cellWidth = self.epgView.right - cellStart
        if cellWidth > 1:
            # Red texture marks programmes with a scheduled reminder.
            if program.notificationScheduled:
                noFocusTexture = 'tvguide-program-red.png'
                focusTexture = 'tvguide-program-red-focus.png'
            else:
                noFocusTexture = 'tvguide-program-grey.png'
                focusTexture = 'tvguide-program-grey-focus.png'
            if cellWidth < 25:
                title = '' # Text will overflow outside the button if it is too narrow
            else:
                title = program.title
            control = xbmcgui.ControlButton(
                cellStart,
                self.epgView.top + self.epgView.cellHeight * idx,
                cellWidth - 2,
                self.epgView.cellHeight - 2,
                title,
                noFocusTexture = noFocusTexture,
                focusTexture = focusTexture,
                textColor = TEXT_COLOR,
                focusedColor = FOCUSED_COLOR,
                shadowColor = SHADOW_COLOR
            )
            self.controlAndProgramList.append(ControlAndProgram(control, program))
    # Channels with no data at all get one full-width placeholder cell.
    for channel in channelsWithoutPrograms:
        idx = channels.index(channel)
        control = xbmcgui.ControlButton(
            self.epgView.left,
            self.epgView.top + self.epgView.cellHeight * idx,
            (self.epgView.right - self.epgView.left) - 2,
            self.epgView.cellHeight - 2,
            strings(NO_PROGRAM_AVAILABLE),
            noFocusTexture='tvguide-program-grey.png',
            focusTexture='tvguide-program-grey-focus.png',
            textColor = TEXT_COLOR,
            focusedColor = FOCUSED_COLOR,
            shadowColor = SHADOW_COLOR
        )
        now = datetime.datetime.today()
        then = now + datetime.timedelta(minutes = 24*60)
        program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), now, then, "", "")
        self.controlAndProgramList.append(ControlAndProgram(control, program))
    # add program controls
    if focusFunction is None:
        focusFunction = self._findControlAt
    focusControl = focusFunction(self.focusPoint)
    controls = [elem.control for elem in self.controlAndProgramList]
    self.addControls(controls)
    if focusControl is not None:
        debug('onRedrawEPG - setFocus %d' % focusControl.getId())
        self.setFocus(focusControl)
    self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
    # Fall back to the first cell when nothing matched the focus point.
    if focusControl is None and len(self.controlAndProgramList) > 0:
        self.setFocus(self.controlAndProgramList[0].control)
    self._hideControl(self.C_MAIN_LOADING)
    self.showTimebar()
    self.redrawingEPG = False
def _clearEpg(self):
    """Remove every programme cell control from the window and forget them."""
    cellControls = [elem.control for elem in self.controlAndProgramList]
    try:
        self.removeControls(cellControls)
    except RuntimeError:
        # Bulk removal failed; fall back to removing the cells one by one.
        for elem in self.controlAndProgramList:
            try:
                self.removeControl(elem.control)
            except RuntimeError:
                pass # happens if we try to remove a control that doesn't exist
    del self.controlAndProgramList[:]
def onEPGLoadError(self):
    """EPG data could not be loaded: wipe the local DB, inform the user, close."""
    print 'Delete DB OnTapp.TV - onEPGLoadError'
    # Drop the cached guide database so the next start re-fetches it.
    deleteDB.deleteDB()
    self.redrawingEPG = False
    self._hideControl(self.C_MAIN_LOADING)
    xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2), strings(LOAD_ERROR_LINE3))
    print '****** OnTapp.TV. Possible unicode text error. *******'
    self.close()
def onSourceNotConfigured(self):
    """Tell the user the EPG source is not configured and close the guide."""
    self.redrawingEPG = False
    self._hideControl(self.C_MAIN_LOADING)
    dialog = xbmcgui.Dialog()
    dialog.ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
    self.close()
def isSourceInitializationCancelled(self):
    """Return truthy when XBMC is shutting down or the guide window is closing."""
    return xbmc.abortRequested or self.isClosing
def onSourceInitializedS(self, success):
    """First-stage source init callback; starts the second init stage.

    The success flag is not inspected here - the second stage reports
    its own result via onSourceInitializedP.
    """
    self.database.initializeP(self.onSourceInitializedP, self.isSourceInitializationCancelled)
def onSourceInitializedP(self, success):
    """Second-stage init callback: arm notifications and draw the first page."""
    if not success:
        return
    self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
    self.onRedrawEPG(0, self.viewStartDate)
# def onSourceProgressUpdate(self, percentageComplete):
# control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
# if percentageComplete < 1:
# if control:
# control.setPercent(1)
# self.progressStartTime = datetime.datetime.now()
# self.progressPreviousPercentage = percentageComplete
# elif percentageComplete != self.progressPreviousPercentage:
# if control:
# control.setPercent(percentageComplete)
# self.progressPreviousPercentage = percentageComplete
# delta = datetime.datetime.now() - self.progressStartTime
#
# if percentageComplete < 20:
# self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
# else:
# secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
# if secondsLeft > 30:
# secondsLeft -= secondsLeft % 10
# self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
#
# return not xbmc.abortRequested and not self.isClosing
def onPlayBackStopped(self):
    """When playback ends, re-open the grid snapped to the last half hour."""
    if self.player.isPlaying() or self.isClosing:
        return
    self._hideControl(self.C_MAIN_OSD)
    now = datetime.datetime.today()
    # Snap the view back to the most recent half-hour boundary.
    self.viewStartDate = now - datetime.timedelta(minutes = now.minute % 30, seconds = now.second)
    self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _secondsToXposition(self, seconds):
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if(leftEdge <= point.x < rightEdge
and (nearestControl is None or nearestControl.getPosition()[1] > top)):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if(leftEdge <= point.x < rightEdge
and (nearestControl is None or nearestControl.getPosition()[1] < top)):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
def _hideControl(self, *controlIds):
    """Hide the given controls.

    Visibility is inverted in skin, hence setVisible(True) hides.
    """
    for controlId in controlIds:
        target = self.getControl(controlId)
        if target:
            target.setVisible(True)
def _showControl(self, *controlIds):
    """Show the given controls.

    Visibility is inverted in skin, hence setVisible(False) shows.
    """
    for controlId in controlIds:
        target = self.getControl(controlId)
        if target:
            target.setVisible(False)
def formatTime(self, timestamp):
    """Format a datetime with the regional time format, seconds stripped."""
    fmt = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
    return timestamp.strftime(fmt)
def formatDate(self, timestamp):
    """Format a datetime with the regional short date format."""
    fmt = xbmc.getRegion('dateshort')
    return timestamp.strftime(fmt)
def setControlImage(self, controlId, image):
    """Set an image path on a control, if the control exists."""
    target = self.getControl(controlId)
    if target:
        target.setImage(image.encode('utf-8'))
def setControlLabel(self, controlId, label):
    """Set a non-empty label on a control, if the control exists."""
    target = self.getControl(controlId)
    if target and label:
        target.setLabel(label)
def setControlText(self, controlId, text):
    """Set text on a textbox control, if the control exists."""
    target = self.getControl(controlId)
    if target:
        target.setText(text)
def hideTimebar(self):
    """Hide the 'now' marker; tolerate the control being unavailable."""
    try:
        self.timebarVisible = False
        self.getControl(self.C_MAIN_TIMEBAR).setVisible(self.timebarVisible)
    except:
        pass
def showTimebar(self):
    """Show the 'now' marker; tolerate the control being unavailable."""
    try:
        self.timebarVisible = True
        self.getControl(self.C_MAIN_TIMEBAR).setVisible(self.timebarVisible)
    except:
        pass
def updateTimebar(self, scheduleTimer = True):
    """Move the 'now' marker over the grid; optionally re-arm itself each second.

    @param scheduleTimer: when True, schedule another update via a 1s Timer
        (stops once XBMC aborts or the window is closing).
    """
    try:
        # move timebar to current time
        timeDelta = datetime.datetime.today() - self.viewStartDate
        control = self.getControl(self.C_MAIN_TIMEBAR)
        if control:
            (x, y) = control.getPosition()
            try:
                # Only visible while the current time is inside the shown day.
                # Sometimes raises:
                # exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
                control.setVisible(timeDelta.days == 0 and self.timebarVisible)
            except:
                pass
            control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
        if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
            threading.Timer(1, self.updateTimebar).start()
    except Exception:
        buggalo.onExceptionRaised()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """Context menu shown for a programme/channel selected in the EPG grid.

    The chosen button id is exposed to the caller via self.buttonClicked.
    """
    # Control ids defined in script-tvguide-menu.xml.
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    C_POPUP_CATEGORIES = 4005
    C_POPUP_SETTINGS = 4007
    C_POPUP_IPLAYER = 4008
    C_POPUP_ITVPLAYER = 4010
    C_POPUP_OTTOOLS = 4014
    C_POPUP_USTV = 4011
    C_POPUP_SUPER_SEARCH = 4009
    C_POPUP_SUPERFAVES = 4012
    C_POPUP_VPN = 4013
    C_POPUP_HOME = 4006

    def __new__(cls, database, program, showRemind, touch):
        # NOTE(review): this condition is always true - os.path.join() returns
        # a non-empty string; an os.path.exists() check was probably intended.
        xml_file = os.path.join('script-tvguide-menu.xml')
        if os.path.join(SKIN, 'extras', 'skins', 'Default', '720p', xml_file):
            XML = xml_file
        return super(PopupMenu, cls).__new__(cls, XML, PATH)

    def __init__(self, database, program, showRemind, touch):
        """
        @type database: source.Database
        @param program: programme the menu was opened for
        @type program: source.Program
        @param showRemind: True to offer "remind", False to offer "don't remind"
        @param touch: True when running under a touch skin
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        self.buttonClicked = None  # set by onAction/onClick, read by the caller
        self.touch = touch

    @buggalo.buggalo_try_except({'method' : 'PopupMenu.onInit'})
    def onInit(self):
        """Populate labels/logo and preset focus depending on playability."""
        # self.getControl(self.C_POPUP_OTTOOLS).setVisible(False) RD -Temporary hide of the 4oD button until a new use is found for it.
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
        programTitleControl.setLabel(self.program.title)
        playControl = self.getControl(self.C_POPUP_PLAY)
        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        #isPlayable = self.program.channel.isPlayable()
        isPlayable = self.database.isPlayable(self.program.channel)
        if not isPlayable:
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_REMIND)
        # self.getControl(self.C_POPUP_REMIND).setVisible(False)
        # self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        # Touch skins and placeholder rows always allow the play button.
        if self.touch or self.program.title == strings(NO_PROGRAM_AVAILABLE):
            playControl.setEnabled(True)
            self.setFocusId(self.C_POPUP_PLAY)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        if self.program.channel.logo is not None:
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelLogoControl.setVisible(False)
            channelTitleControl.setLabel(self.program.channel.title)
        # Label the stream button depending on whether a custom stream exists;
        # window properties mirror the labels for touch skins.
        if self.database.getCustomStreamUrl(self.program.channel):
            try: self.getControl(self.C_POPUP_CHOOSE_STREAM).setLabel(REMOVE_STRM_FILE)
            except: pass
            xbmcgui.Window(10000).setProperty('TVG_CHOOSE', REMOVE_STRM_FILE)
        else:
            try: self.getControl(self.C_POPUP_CHOOSE_STREAM).setLabel(CHOOSE_STRM_FILE)
            except: pass
            xbmcgui.Window(10000).setProperty('TVG_CHOOSE', CHOOSE_STRM_FILE)
        if self.showRemind:
            try: self.getControl(self.C_POPUP_REMIND).setLabel(REMIND_PROGRAM)
            except: pass
            xbmcgui.Window(10000).setProperty('TVG_REMIND', REMIND_PROGRAM)
        else:
            try: self.getControl(self.C_POPUP_REMIND).setLabel(DONT_REMIND_PROGRAM)
            except: pass
            xbmcgui.Window(10000).setProperty('TVG_REMIND', DONT_REMIND_PROGRAM)
        # Touch skins expose a list control with id 5000; focus it when present.
        try:
            ctrl = self.getControl(5000)
            self.setFocusId(5000)
        except:
            pass
        xbmcgui.Window(10000).clearProperty('TVG_popup_id')

    @buggalo.buggalo_try_except({'method' : 'PopupMenu.onAction'})
    def onAction(self, action):
        """Close on back/menu; touch skins pass the clicked id via a property."""
        try:
            id = int(xbmcgui.Window(10000).getProperty('TVG_popup_id'))
            self.buttonClicked = id
            self.close()
        except:
            pass
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return

    @buggalo.buggalo_try_except({'method' : 'PopupMenu.onClick'})
    def onClick(self, controlId):
        """Record the clicked button; 'choose stream' may remove a custom url."""
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            # Remove the custom stream in-place instead of closing the menu.
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(CHOOSE_STRM_FILE)
            if not self.database.isPlayable(self.program.channel):
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            self.buttonClicked = controlId
            self.close()

    def onFocus(self, controlId):
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
    """Dialog for reordering channels and toggling their visibility."""
    # Control ids defined in script-tvguide-channels.xml.
    C_CHANNELS_LIST = 6000
    C_CHANNELS_SELECTION_VISIBLE = 6001
    C_CHANNELS_SELECTION = 6002
    C_CHANNELS_SAVE = 6003
    C_CHANNELS_CANCEL = 6004

    def __new__(cls, database):
        # NOTE(review): this condition is always true - os.path.join() returns
        # a non-empty string; an os.path.exists() check was probably intended.
        xml_file = os.path.join('script-tvguide-channels.xml')
        if os.path.join(SKIN, 'extras', 'skins', 'Default', '720p', xml_file):
            XML = xml_file
        return super(ChannelsMenu, cls).__new__(cls, XML, PATH)

    def __init__(self, database):
        """
        @type database: source.Database
        """
        super(ChannelsMenu, self).__init__()
        self.database = database
        self.channelList = database.getChannelList(onlyVisible = False)
        self.swapInProgress = False  # guards against re-entrant swaps

    @buggalo.buggalo_try_except({'method' : 'ChannelsMenu.onInit'})
    def onInit(self):
        self.updateChannelList()
        self.setFocusId(self.C_CHANNELS_LIST)

    @buggalo.buggalo_try_except({'method' : 'ChannelsMenu.onAction'})
    def onAction(self, action):
        """Back/menu closes; left 'picks up' a channel, up/down moves it."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() == ACTION_LEFT:
            # Pick the selected channel up into the "selection" button.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
            buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
            self.setFocusId(self.C_CHANNELS_SELECTION)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
            # Drop the channel back into the list.
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx > 0:
                self.swapChannels(idx, idx - 1)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx < listControl.size() - 1:
                self.swapChannels(idx, idx + 1)

    @buggalo.buggalo_try_except({'method' : 'ChannelsMenu.onClick'})
    def onClick(self, controlId):
        """List click toggles channel visibility; save/cancel close the dialog."""
        if controlId == self.C_CHANNELS_LIST:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            item = listControl.getSelectedItem()
            channel = self.channelList[int(item.getProperty('idx'))]
            channel.visible = 0 if channel.visible else 1
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item.setIconImage(iconImage)
        elif controlId == self.C_CHANNELS_SAVE:
            self.database.saveChannelList(self.close, self.channelList)
        elif controlId == self.C_CHANNELS_CANCEL:
            self.close()

    def onFocus(self, controlId):
        pass

    def updateChannelList(self):
        """Rebuild the list control from self.channelList."""
        listControl = self.getControl(self.C_CHANNELS_LIST)
        listControl.reset()
        for idx, channel in enumerate(self.channelList):
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item = xbmcgui.ListItem('%3d. %s' % (idx+1, channel.title), iconImage = iconImage)
            item.setProperty('idx', str(idx))
            listControl.addItem(item)

    def updateListItem(self, idx, item):
        """Refresh one list item so it matches self.channelList[idx]."""
        channel = self.channelList[idx]
        item.setLabel('%3d. %s' % (idx+1, channel.title))
        if channel.visible:
            iconImage = 'tvguide-channel-visible.png'
        else:
            iconImage = 'tvguide-channel-hidden.png'
        item.setIconImage(iconImage)
        item.setProperty('idx', str(idx))

    def swapChannels(self, fromIdx, toIdx):
        """Swap two channels in the list and renumber all channel weights."""
        if self.swapInProgress:
            return
        self.swapInProgress = True
        c = self.channelList[fromIdx]
        self.channelList[fromIdx] = self.channelList[toIdx]
        self.channelList[toIdx] = c
        # recalculate weight
        for idx, channel in enumerate(self.channelList):
            channel.weight = idx
        listControl = self.getControl(self.C_CHANNELS_LIST)
        self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
        self.updateListItem(toIdx, listControl.getListItem(toIdx))
        listControl.selectItem(toIdx)
        xbmc.sleep(50)
        self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """Dialog for assigning a custom stream source (strm file, favourite,
    add-on stream, playlist entry or mashup entry) to a channel."""
    # Tab buttons (script-tvguide-streamsetup.xml).
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    C_STREAM_PLAYLIST_TAB = 104
    C_STREAM_MASHUP_TAB = 105
    # "strm file" tab controls.
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    # "favourites" tab controls.
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    # "add-ons" tab controls.
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    # "mashup" tab controls.
    C_STREAM_MASHUP = 4001
    C_STREAM_MASHUP_STREAMS = 4002
    C_STREAM_MASHUP_NAME = 4003
    C_STREAM_MASHUP_DESCRIPTION = 4004
    C_STREAM_MASHUP_PREVIEW = 4005
    C_STREAM_MASHUP_OK = 4006
    C_STREAM_MASHUP_CANCEL = 4007
    # "playlist" tab controls.
    C_STREAM_PLAYLIST = 5001
    C_STREAM_PLAYLIST_PREVIEW = 5002
    C_STREAM_PLAYLIST_OK = 5003
    C_STREAM_PLAYLIST_CANCEL = 5004
    # Hidden label whose text records which tab is currently visible.
    C_STREAM_VISIBILITY_MARKER = 100
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'
    VISIBLE_MASHUP = 'mashup'
    VISIBLE_PLAYLIST = 'playlist'
def __new__(cls, database, channel):
    """Create the dialog bound to its skin XML file.

    NOTE(review): the original guarded the assignment with
    'if os.path.join(SKIN, ..., xml_file):', which is always true because
    os.path.join() returns a non-empty string - an os.path.exists() check
    was probably intended. The guard never changed behaviour, so it has
    been removed rather than converted (converting would make a missing
    skin file raise instead of proceeding as it always has).
    """
    xml_file = os.path.join('script-tvguide-streamsetup.xml')
    return super(StreamSetupDialog, cls).__new__(cls, xml_file, PATH)
def __init__(self, database, channel):
    """
    @type database: source.Database
    @type channel: source.Channel
    """
    super(StreamSetupDialog, self).__init__()
    self.database = database
    self.channel = channel
    self.player = xbmc.Player()
    self.streamingService = streaming.StreamsService()
    # Caches used to avoid needlessly refreshing the stream lists.
    self.previousAddonId = None
    self.previousProvider = None
    self.strmFile = None
def close(self):
    """Stop any running preview playback before closing the dialog."""
    if self.player.isPlaying():
        self.player.stop()
    super(StreamSetupDialog, self).close()
@buggalo.buggalo_try_except({'method' : 'StreamSetupDialog.onInit'})
def onInit(self):
    """Populate the favourites, add-ons, mashup and playlist tabs."""
    self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
    # Hide the mashup tab when the mashup file is not present.
    if not os.path.exists(mashfile):
        self.getControl(self.C_STREAM_MASHUP_TAB).setVisible(False)
    favourites = self.streamingService.loadFavourites()
    items = list()
    for label, value in favourites:
        item = xbmcgui.ListItem(label)
        item.setProperty('stream', value)
        items.append(item)
    listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
    listControl.addItems(items)
    items = list()
    for id in self.streamingService.getAddons():
        try:
            addon = xbmcaddon.Addon(id) # raises Exception if addon is not installed
            item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
            item.setProperty('addon_id', id)
            items.append(item)
        except Exception:
            pass
    listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
    listControl.addItems(items)
    self.updateAddonInfo()
    items = list()
    for provider in self.streamingService.getMashup():
        try:
            item = xbmcgui.ListItem(provider, iconImage=self.streamingService.getMashupIcon(provider))
            item.setProperty('provider', provider)
            items.append(item)
        except:
            pass
    listControl = self.getControl(StreamSetupDialog.C_STREAM_MASHUP)
    listControl.addItems(items)
    self.updateMashupInfo()
    playlist = self.streamingService.loadPlaylist()
    items = list()
    for label, value in playlist:
        item = xbmcgui.ListItem(label)
        item.setProperty('stream', value)
        items.append(item)
    listControl = self.getControl(StreamSetupDialog.C_STREAM_PLAYLIST)
    listControl.addItems(items)
@buggalo.buggalo_try_except({'method' : 'StreamSetupDialog.onAction'})
def onAction(self, action):
    """Close on back/menu keys; otherwise refresh the focused tab's details."""
    if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
        self.close()
        return
    focusId = self.getFocusId()
    if focusId == self.C_STREAM_ADDONS:
        self.updateAddonInfo()
    elif focusId == self.C_STREAM_MASHUP:
        self.updateMashupInfo()
@buggalo.buggalo_try_except({'method' : 'StreamSetupDialog.onClick'})
def onClick(self, controlId):
    """Handle browse/OK/cancel/preview buttons for all five tabs."""
    if controlId == self.C_STREAM_STRM_BROWSE:
        # Let the user pick a .strm/.xsp file and store it immediately.
        stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', mask='.xsp|.strm')
        if stream:
            self.database.setCustomStreamUrl(self.channel, stream)
            self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
            self.strmFile = stream
    elif controlId == self.C_STREAM_ADDONS_OK:
        listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
        item = listControl.getSelectedItem()
        if item:
            stream = item.getProperty('stream')
            self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
    elif controlId == self.C_STREAM_FAVOURITES_OK:
        listControl = self.getControl(self.C_STREAM_FAVOURITES)
        item = listControl.getSelectedItem()
        if item:
            stream = item.getProperty('stream')
            self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
    elif controlId == self.C_STREAM_PLAYLIST_OK:
        listControl = self.getControl(self.C_STREAM_PLAYLIST)
        item = listControl.getSelectedItem()
        if item:
            stream = item.getProperty('stream')
            self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
    elif controlId == self.C_STREAM_MASHUP_OK:
        listControl = self.getControl(self.C_STREAM_MASHUP_STREAMS)
        item = listControl.getSelectedItem()
        if item:
            stream = item.getProperty('stream')
            self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
    elif controlId == self.C_STREAM_STRM_OK:
        self.database.setCustomStreamUrl(self.channel, self.strmFile)
        self.close()
    elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL, self.C_STREAM_PLAYLIST_CANCEL, self.C_STREAM_MASHUP_CANCEL]:
        self.close()
    elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW, self.C_STREAM_PLAYLIST_PREVIEW, self.C_STREAM_MASHUP_PREVIEW]:
        # Preview toggles: if already previewing, stop and reset all labels.
        if self.player.isPlaying():
            self.player.stop()
            self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
            self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
            self.getControl(self.C_STREAM_PLAYLIST_PREVIEW).setLabel(strings(PREVIEW_STREAM))
            self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
            self.getControl(self.C_STREAM_MASHUP_PREVIEW).setLabel(strings(PREVIEW_STREAM))
            return
        stream = None
        windowed = None  # NOTE(review): assigned but never used
        # Pick the stream from whichever tab is currently visible.
        visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
        if visible == self.VISIBLE_ADDONS:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
        elif visible == self.VISIBLE_FAVOURITES:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
        elif visible == self.VISIBLE_PLAYLIST:
            listControl = self.getControl(self.C_STREAM_PLAYLIST)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
        elif visible == self.VISIBLE_MASHUP:
            listControl = self.getControl(self.C_STREAM_MASHUP_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
        elif visible == self.VISIBLE_STRM:
            stream = self.strmFile
        if stream is not None:
            # Start windowed preview playback and wait (max 10s) for it to begin.
            path = os.path.join(ADDON.getAddonInfo('path'), 'player.py')
            xbmc.executebuiltin('XBMC.RunScript(%s,%s,%d)' % (path, stream, 1))
            retries = 10
            while retries > 0 and not self.player.isPlaying():
                retries -= 1
                xbmc.sleep(1000)
            if self.player.isPlaying():
                self.getControl(self.C_STREAM_MASHUP_PREVIEW).setLabel(strings(STOP_PREVIEW))
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                self.getControl(self.C_STREAM_PLAYLIST_PREVIEW).setLabel(strings(STOP_PREVIEW))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))
@buggalo.buggalo_try_except({'method' : 'StreamSetupDialog.onFocus'})
def onFocus(self, controlId):
    """Record which tab is focused via the hidden visibility-marker label."""
    labelByTab = {
        self.C_STREAM_STRM_TAB: self.VISIBLE_STRM,
        self.C_STREAM_FAVOURITES_TAB: self.VISIBLE_FAVOURITES,
        self.C_STREAM_ADDONS_TAB: self.VISIBLE_ADDONS,
        self.C_STREAM_PLAYLIST_TAB: self.VISIBLE_PLAYLIST,
        self.C_STREAM_MASHUP_TAB: self.VISIBLE_MASHUP,
    }
    label = labelByTab.get(controlId)
    if label is not None:
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(label)
def updateAddonInfo(self):
listControl = self.getControl(self.C_STREAM_ADDONS)
item = listControl.getSelectedItem()
if item is None:
return
if item.getProperty('addon_id') == self.previousAddonId:
return
self.previousAddonId = item.getProperty('addon_id')
addon = xbmcaddon.Addon(id = item.getProperty('addon_id'))
self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
items = list()
for (label, stream) in streams:
item = xbmcgui.ListItem(label)
item.setProperty('stream', stream)
items.append(item)
listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
listControl.reset()
listControl.addItems(items)
def updateMashupInfo(self):
pass
listControl = self.getControl(self.C_STREAM_MASHUP)
item = listControl.getSelectedItem()
if item is None:
return
provider = item.getProperty('provider')
if provider == self.previousProvider:
return
self.previousProvider = provider
self.getControl(self.C_STREAM_MASHUP_NAME).setLabel('[B]%s[/B]' % provider)
self.getControl(self.C_STREAM_MASHUP_DESCRIPTION).setText('')
streams = self.streamingService.getMashupStreams(provider)
items = list()
for (label, stream) in streams:
if label.upper() != 'ICON':
item = xbmcgui.ListItem(label)
item.setProperty('stream', stream)
items.append(item)
listControl = self.getControl(StreamSetupDialog.C_STREAM_MASHUP_STREAMS)
listControl.reset()
listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """Modal list dialog that lets the user pick one stream from a set of add-ons.

    The chosen stream URL is exposed in self.stream after the dialog closes
    (None if the user backed out).
    """
    C_SELECTION_LIST = 1000

    def __new__(cls, addons):
        xml_file = os.path.join('script-tvguide-streamaddon.xml')
        # NOTE(review): os.path.join(...) always yields a truthy string, so this
        # condition never fails, and `skinfolder` is not defined anywhere in
        # this chunk -- presumably an os.path.exists() check on the skin file
        # was intended. Left untouched pending confirmation.
        if os.path.join(SKIN, skinfolder, 'Default', '720p', xml_file):
            XML = xml_file
        return super(ChooseStreamAddonDialog, cls).__new__(cls, XML, PATH)

    def __init__(self, addons):
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        self.stream = None  # set by onClick() when the user picks an entry

    @buggalo.buggalo_try_except({'method' : 'ChooseStreamAddonDialog.onInit'})
    def onInit(self):
        entries = list()
        for addon_id, label, url in self.addons:
            try:
                addon = xbmcaddon.Addon(addon_id)
                entry = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            except:
                # Add-on is not installed; fall back to showing its raw id.
                entry = xbmcgui.ListItem(label, '', addon_id)
            entry.setProperty('stream', url)
            entries.append(entry)
        listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        listControl.addItems(entries)
        self.setFocus(listControl)

    @buggalo.buggalo_try_except({'method' : 'ChooseStreamAddonDialog.onAction'})
    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
            self.close()

    @buggalo.buggalo_try_except({'method' : 'ChooseStreamAddonDialog.onClick'})
    def onClick(self, controlId):
        if controlId != ChooseStreamAddonDialog.C_SELECTION_LIST:
            return
        listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        self.stream = listControl.getSelectedItem().getProperty('stream')
        self.close()

    @buggalo.buggalo_try_except({'method' : 'ChooseStreamAddonDialog.onFocus'})
    def onFocus(self, controlId):
        pass
class CategoriesMenu(xbmcgui.WindowXMLDialog):
    """Dialog that lets the user toggle which programme categories are visible.

    Edits are made on a working copy (self.workingCategories) and only
    committed to self.currentCategories when the user presses Save.

    Fix: the visible/hidden icon choice was duplicated in onClick() and
    updateCategoriesList(); it is now centralized in _iconFor().
    """
    C_CATEGORIES_LIST = 7000
    C_CATEGORIES_SELECTION = 7001
    C_CATEGORIES_SAVE = 7002
    C_CATEGORIES_CANCEL = 7003

    def __new__(cls, database, categoriesList):
        xml_file = os.path.join('script-tvguide-categories.xml')
        # NOTE(review): os.path.join(...) is always a non-empty (truthy)
        # string, so this check never fails; an os.path.exists() check was
        # presumably intended. Left as-is because XML would be unbound if the
        # branch could be skipped.
        if os.path.join(SKIN, 'extras', 'skins', 'Default', '720p', xml_file):
            XML = xml_file
        return super(CategoriesMenu, cls).__new__(cls, XML, PATH)

    def __init__(self, database, categoriesList):
        """
        @type database: source.Database
        @param categoriesList: currently visible categories (or falsy for none)
        """
        super(CategoriesMenu, self).__init__()
        self.database = database
        self.allCategories = database.getCategoriesList()
        if categoriesList:
            self.currentCategories = list(categoriesList)
        else:
            self.currentCategories = list()
        self.workingCategories = list(self.currentCategories)
        self.swapInProgress = False

    def _iconFor(self, category):
        # Single source of truth for the list icon reflecting visibility.
        if category in self.workingCategories:
            return 'tvguide-categories-visible.png'
        return 'tvguide-categories-hidden.png'

    @buggalo.buggalo_try_except({'method' : 'CategoriesMenu.onInit'})
    def onInit(self):
        self.updateCategoriesList()
        self.setFocusId(self.C_CATEGORIES_LIST)

    @buggalo.buggalo_try_except({'method' : 'CategoriesMenu.onAction'})
    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return

    @buggalo.buggalo_try_except({'method' : 'CategoriesMenu.onClick'})
    def onClick(self, controlId):
        if controlId == self.C_CATEGORIES_LIST:
            # Toggle visibility of the selected category and refresh its icon.
            listControl = self.getControl(self.C_CATEGORIES_LIST)
            item = listControl.getSelectedItem()
            category = self.allCategories[int(item.getProperty('idx'))]
            if category in self.workingCategories:
                self.workingCategories.remove(category)
            else:
                self.workingCategories.append(category)
            item.setIconImage(self._iconFor(category))
        elif controlId == self.C_CATEGORIES_SAVE:
            self.currentCategories = self.workingCategories
            self.close()
        elif controlId == self.C_CATEGORIES_CANCEL:
            self.close()

    def onFocus(self, controlId):
        pass

    def updateCategoriesList(self):
        """Rebuild the category list control from self.allCategories."""
        listControl = self.getControl(self.C_CATEGORIES_LIST)
        listControl.reset()
        for idx, category in enumerate(self.allCategories):
            item = xbmcgui.ListItem('%3d. %s' % (idx+1, category), iconImage = self._iconFor(category))
            item.setProperty('idx', str(idx))
            listControl.addItem(item)
|
One last ‘rasslin pod because we felt like it. Sam Franco and Dpalm break down Daniel Bryan’s farewell, and wax philosophic about his contributions to the industry.
We’re not alone, as Deadspin has a great post up on what happened and what this retirement means in today’s world of sports.
Posted on February 9, 2016, in Blog. Bookmark the permalink. Leave a comment.
|
###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import Data
from autobahn.websocket import WebSocketServerFactory, \
WebSocketServerProtocol
from autobahn.resource import WebSocketResource
class Echo1ServerProtocol(WebSocketServerProtocol):
    """Echoes each received message back, prefixed with this server's name."""

    def onMessage(self, msg, binary):
        reply = "Echo 1 - " + msg
        self.sendMessage(reply)
class Echo2ServerProtocol(WebSocketServerProtocol):
    """Echoes each received message back, prefixed with this server's name."""

    def onMessage(self, msg, binary):
        reply = "Echo 2 - " + msg
        self.sendMessage(reply)
if __name__ == '__main__':
    # Pass 'debug' as the first CLI argument to enable verbose Twisted logging.
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        log.startLogging(sys.stdout)
        debug = True
    else:
        debug = False
    # Two independent WebSocket factories share one TCP port; each is wrapped
    # in a WebSocketResource and mounted under its own URL path below.
    factory1 = WebSocketServerFactory("ws://localhost:9000",
                                      debug = debug,
                                      debugCodePaths = debug)
    factory1.protocol = Echo1ServerProtocol
    resource1 = WebSocketResource(factory1)
    factory2 = WebSocketServerFactory("ws://localhost:9000",
                                      debug = debug,
                                      debugCodePaths = debug)
    factory2.protocol = Echo2ServerProtocol
    resource2 = WebSocketResource(factory2)
    ## Establish a dummy (empty) root resource
    root = Data("", "text/plain")
    ## and mount our WebSocket servers under different paths ..
    root.putChild("echo1", resource1)
    root.putChild("echo2", resource2)
    ## both under one Twisted Web Site
    site = Site(root)
    reactor.listenTCP(9000, site)
    reactor.run()
|
I got sympathy pains aching through my legs! Sorry I haven't gotten around to visiting you in person. Hope the mending process is fast and efficient from here.
|
__author__ = 'ViS'
import random
import math
import itertools
# Fitness ceiling: raw fitness values are inverted against M so that lower raw
# values (minimisation) become higher stored fitness (maximisation).
M = 10


class Individual:
    """One chromosome of the genetic algorithm."""

    def __init__(self, num, val, fitness):
        self.num = num              # index within the population
        self.val = val              # integer-encoded gene value
        self.fitness = M - fitness  # inverted fitness; higher is better
class Population:
    """A collection of individuals together with their average fitness."""

    def __init__(self):
        self.individuals = []  # members of this generation
        self.fitness_avg = 0   # refreshed by calculate_fitness_avg()

    def calculate_fitness_avg(self):
        """Recompute the population's mean fitness from its members."""
        total = sum(member.fitness for member in self.individuals)
        self.fitness_avg = total / len(self.individuals)
def calculate(fitness_fn, opt):
    """Run a binary-encoded genetic algorithm and return the best solution.

    Args:
        fitness_fn: A Python expression string evaluated via eval() with the
            local variable ``x`` bound to the decoded candidate value.
            WARNING: eval() executes arbitrary code -- only pass trusted input.
        opt: Options dict with keys 'min', 'max', 'step', 'population_count'
            and 'selection_type'. The key 't' is (re)computed here.

    Returns:
        dict with the 'start_population', the final 'population' and the best
        decoded value 'x'.
    """
    # NOTE(review): precedence makes this min + (max / step); if the intent was
    # (min + max) / step or (max - min) / step this is a bug -- confirm.
    opt['t'] = int(opt['min'] + opt['max'] / opt['step'])
    global gen_max_val, gen_count, M
    gen_max_val = 0
    def calculate_optimal():
        # Find the smallest chromosome length (in bits) whose value range covers
        # opt['t']; sets gen_count (bit count) and gen_max_val (2**bits - 1).
        global gen_max_val, gen_count
        for i in range(1, 20):
            num = 2 ** i
            if (num - 1) >= opt['t']:
                gen_count = len(bin(num - 1)[2:])
                gen_max_val = num - 1
                break
    calculate_optimal()
    def generate_start_population(gen_max_val):
        # Seed the initial population with uniformly random gene values.
        population = Population()
        for i in range(0, opt['population_count']):
            val = random.randint(0, gen_max_val)
            x = val * opt['step']  # decode to problem space; read by eval() below
            fitness = eval(fitness_fn)
            population.individuals.append(Individual(i, val, fitness))
        population.calculate_fitness_avg()
        return population
    def selection(population):
        # Tournament selection: for each slot, pick two random individuals and
        # keep the fitter one.
        # NOTE(review): for any selection_type other than 'TOURNEY' an empty
        # list is returned, which makes cross() loop indefinitely -- confirm.
        individuals_offsprings = []
        if opt['selection_type'] == 'TOURNEY':
            for i in range(0, opt['population_count']):
                source_idx = random.randint(0, opt['population_count'] - 1)
                target_idx = random.randint(0, opt['population_count'] - 1)
                source = population.individuals[source_idx].fitness
                target = population.individuals[target_idx].fitness
                if source > target:
                    individuals_offsprings.insert(i, population.individuals[source_idx])
                else:
                    individuals_offsprings.insert(i, population.individuals[target_idx])
        return individuals_offsprings
    def pair_cross(individ_s, individ_t, cross_point):
        # Single-point crossover: swap the bit tails of both parents at
        # cross_point and evaluate the two resulting children.
        children = []
        first_part_source = bin(individ_s.val)[2:].zfill(gen_count)[0:cross_point]
        first_part_target = bin(individ_t.val)[2:].zfill(gen_count)[0:cross_point]
        second_part_source = bin(individ_s.val)[2:].zfill(gen_count)[cross_point:]
        second_part_target = bin(individ_t.val)[2:].zfill(gen_count)[cross_point:]
        val1 = first_part_source + second_part_target
        val2 = first_part_target + second_part_source
        x = int(val1, 2) * opt['step']  # decode; read by eval()
        fitness1 = eval(fitness_fn)
        x = int(val2, 2) * opt['step']
        fitness2 = eval(fitness_fn)
        child1 = Individual(0, int(val1, 2), fitness1)
        child2 = Individual(0, int(val2, 2), fitness2)
        children.append(child1)
        children.append(child2)
        return children
    def cross(individuals_offsprings, gen_count):
        # Repeatedly pick a pair of parents (each accepted with probability pc)
        # and cross them, until population_count/2 pairs have produced children.
        new_population = Population()
        pair = []
        pair_count = int(opt['population_count'] / 2)
        next_idx = 0
        pc = 0.7 # Chance of crossing
        while pair_count > 0:
            # First parent: first individual that passes the pc coin flip.
            for i in range(0, opt['population_count']):
                if random.random() < pc:
                    pair.append(individuals_offsprings[i])
                    next_idx = i + 1
                    break
            # Second parent: scan the remainder of the population.
            for i in range(next_idx, opt['population_count']):
                if random.random() < pc:
                    if len(pair) > 1:
                        # NOTE(review): this only replaces pair[1] when the candidate
                        # compares equal to the current pair[1], and the `i = 0`
                        # assignment has no effect on the loop -- the pairing logic
                        # looks inverted; confirm intended behaviour.
                        if (pair[1]) == individuals_offsprings[i]:
                            pair.insert(1, individuals_offsprings[i])
                        else:
                            i = 0
                            break
                    else:
                        pair.insert(1, individuals_offsprings[i])
            children = pair_cross(pair[0], pair[1], int(math.floor(random.random() * (gen_count - 1) + 1)))
            new_population.individuals.append(children)
            pair_count -= 1
        # Flatten the list of child pairs and renumber the individuals.
        new_population.individuals = list(itertools.chain.from_iterable(new_population.individuals))
        for i in range(0, opt['population_count']):
            new_population.individuals[i].num = i
        new_population.calculate_fitness_avg()
        return new_population
    def mutation_gen(undividual, mutagen):
        # Flip the bit at position `mutagen` of the binary-string chromosome.
        # NOTE(review): the slice [0:mutagen-1] also drops the character at
        # index mutagen-1, and for mutagen == 0 it becomes [0:-1]; this looks
        # like an off-by-one ([0:mutagen] was probably intended) -- confirm.
        if undividual[mutagen] == '1':
            undividualSrt = undividual[0:mutagen-1] + '0' + undividual[mutagen+1:]
        else:
            undividualSrt = undividual[0:mutagen-1] + '1' + undividual[mutagen+1:]
        return undividualSrt
    def mutation(population):
        # With probability Pm per individual, flip one randomly chosen bit and
        # re-evaluate its fitness; otherwise carry the individual over as-is.
        Pm = 0.3 # Chance of mutation
        new_population = Population()
        for i in range(0, opt['population_count']):
            if random.random() < Pm:
                mutagen = int(math.floor(random.random() * (gen_count - 1)))
                val = int(mutation_gen(bin(population.individuals[i].val)[2:].zfill(gen_count), mutagen), 2)
                x = val * opt['step']  # decode; read by eval()
                fitness = eval(fitness_fn)
                new_population.individuals.insert(i, Individual(i, val, fitness))
            else:
                new_population.individuals.insert(i, population.individuals[i])
        new_population.calculate_fitness_avg()
        return new_population
    def start():
        # Main GA loop: select/cross/mutate until the average fitness has been
        # identical for four consecutive cached generations, or the iteration
        # budget (2 * opt['t']) runs out.
        population = generate_start_population(gen_max_val)
        start_population = population
        selection_population = Population()
        cross_population = Population()
        mutation_population = Population()
        coefZ = 4
        population_chache = []
        stop = False
        for t in range(0, opt['t'] * 2):
            selection_population = selection(population)
            cross_population = cross(selection_population, gen_count)
            population_chache.insert(t % coefZ, cross_population.fitness_avg)
            if len(population_chache) > 3:
                # Convergence test: four equal cached averages in a row.
                if population_chache[0] == population_chache[1] and population_chache[1] == population_chache[2] and \
                    population_chache[2] == population_chache[3]:
                    stop = True
            if stop:
                population = cross_population
                break
            if t != (opt['t'] * 2 - 1):
                mutation_population = mutation(cross_population)
                population = mutation_population
            else:
                # Final iteration: skip mutation.
                population = cross_population
            # NOTE(review): `t % coefZ or 0` equals t % coefZ; the `or 0` is a no-op.
            population_chache[t % coefZ or 0] = population.fitness_avg
        # Pick the fittest individual as the result.
        for i in range(1, opt['population_count']):
            # NOTE(review): result/temp are reset from individuals[0] on every
            # iteration, so effectively only the last comparison matters; the
            # initialisation probably belongs before the loop -- confirm.
            result = population.individuals[0].val
            temp = population.individuals[0].fitness
            if temp < population.individuals[i].fitness:
                temp = population.individuals[i].fitness
                result = population.individuals[i].val
        return {
            "start_population": start_population,
            "population": population,
            "x": result * opt['step']
        }
    return start()
|
Hi, I have a LSN50 working with a DS18b20 temp sensor. I have read that this node also supports a SHT31 (and SHT20) temp/RH sensor. I have a SHT31 that works: I checked with an Arduino but it does not work with the LSN50 node: the readings are 0.00. Does anyone have experience with this or tips how to solve this?
NB: I tried to upgrade the firmware from 1.3 to 1.4 but my USB-TTL stick (as com port) is not seen by the flash loader program Dragino recommends on Windoze 10… Any tips here?
The SHT31 works on the I2C bus — did you connect the sensor to the proper GPIO pins on the LSN50?
I think this is obvious why it does not work. Please see the quote from the manual.
PB6 (SDA) and PB7 (SCK) are the I2C interface. Users can use it to connect to an I2C device and get the sensor data.
We have made an example to show how to use the I2C interface to connect to the SHT20 Temperature and Humidity Sensor. This is not supported in the stock firmware; users need to compile a new firmware with USE_SHT20 defined.
PB6 (SDA) and PB7 (SCK) are the I2C interface. Users can use it to connect to an I2C device and get the sensor data. We have made an example to show how to use the I2C interface to connect to the SHT20 Temperature and Humidity Sensor. This is supported in the stock firmware since v1.4. Below is the connection to SHT20.
Verify if the converter is present in Windows Device Manager.
Maybe you will need to re-assign com port number.
Solved it: seems there are a lot of issues with Windoze and USB_TTL sticks. I found a driver which solved the issue.
I found a driver which solved the issue.
|
import os
import shutil
import util
import glob
import tarfile
import types
class InhibitorAction(object):
    """
    Basic action. Handles running through the action_sequence and catching
    errors that can be passed back up in order to do cleaning first.
    @param name - String representing this action
    @param resume - Allow the action sequence to resume where it left off if
    it was previously interrupted.
    """
    def __init__(self, name='BlankAction', resume=False):
        self.name = name
        self.action_sequence = []
        self.resume = resume
        self.statedir = None    # per-action state directory; set in post_conf()
        self.istate = None      # global inhibitor state; set in post_conf()
    def get_action_sequence(self):
        # Subclasses override this to return their ordered list of util.Step objects.
        return []
    def post_conf(self, inhibitor_state):
        # Late configuration: decide whether a previous run can be resumed
        # based on the contents of this action's state directory.
        self.istate = inhibitor_state
        self.statedir = inhibitor_state.paths.state.pjoin(self.name)
        if os.path.isdir(self.statedir) and not self.resume:
            # Stale state from a previous run and resume not requested: wipe
            # the markers and start with a fresh, empty state directory.
            self.clear_resume()
            os.makedirs(self.statedir)
        elif not os.path.exists(self.statedir):
            # No previous state at all: nothing to resume from.
            os.makedirs(self.statedir)
            self.resume = False
        elif len(os.listdir(self.statedir)) == 0:
            # State directory exists but holds no markers: nothing to resume.
            self.resume = False
    def run(self):
        # Execute the steps in order; when resuming, skip steps whose
        # 'resume-*' marker file shows they already completed (unless the
        # step is flagged always-run).
        for action in self.get_action_sequence():
            resume_path = self.statedir.pjoin('resume-%s-%s' % (self.name, action.name))
            if ( self.resume
                and action.always == False
                and os.path.exists(resume_path) ):
                continue
            # Errors are caught by Inhibitor()
            util.info("Running %s" % action.name)
            action.run()
            open(resume_path, 'w').close()  # mark this step as completed
        self.clear_resume()
    def clear_resume(self):
        # Remove every resume marker for this action, then the (now empty)
        # state directory itself.
        for f in glob.iglob(self.statedir.pjoin('resume-%s-*' % self.name)):
            os.unlink(f)
        os.rmdir(self.statedir)
class InhibitorSnapshot(InhibitorAction):
    """
    Create a snapshot of an InhibitorSource
    @param snapshot_source - Source that we will generate a snapshot from.
    @param name - Unique string to identify the source.
    @param exclude - A string, list or tuple of patterns to not include in
    the snapshot. Passed to rsync --exclude.
    @param include - String, passed to glob, of toplevel paths to include
    in the snapshot.

    Fixes: removed a duplicated `self.dest = None` assignment; replaced the
    `type(x) == types.XType` checks with isinstance(); the exclude/include
    normalization logic (previously duplicated) now lives in one helper.
    """
    def __init__(self, snapshot_source, name, exclude=None, include=None):
        super(InhibitorSnapshot, self).__init__(name='snapshot')
        # Paths are filled in by post_conf() once the inhibitor state is known.
        self.dest = None
        self.builddir = None
        self.tarname = None
        self.name = name
        self.src = snapshot_source
        self.src.keep = True
        self.src.dest = util.Path('/')
        self.exclude = self._normalize_patterns(exclude, 'exclude')
        self.include = self._normalize_patterns(include, 'include')
    @staticmethod
    def _normalize_patterns(patterns, label):
        """Normalize a pattern argument to a list of patterns, or False.

        Accepts a space-separated string, a list or a tuple; raises
        util.InhibitorError for anything else.
        """
        if not patterns:
            return False
        if isinstance(patterns, str):
            return patterns.split(' ')
        if isinstance(patterns, (list, tuple)):
            return patterns
        raise util.InhibitorError("Unrecognized %s pattern." % label)
    def get_action_sequence(self):
        return [
            util.Step(self.sync, always=False),
            util.Step(self.pack, always=False),
        ]
    def post_conf(self, inhibitor_state):
        super(InhibitorSnapshot, self).post_conf(inhibitor_state)
        self.src.post_conf(inhibitor_state)
        self.src.init()
        self.tarname = 'snapshot-' + self.name
        self.dest = inhibitor_state.paths.stages.pjoin(self.tarname+'.tar.bz2')
        self.builddir = inhibitor_state.paths.build.pjoin(self.tarname)
    def sync(self):
        """Copy the source cache into a clean build directory via rsync,
        honoring the include/exclude patterns."""
        if os.path.exists(self.builddir):
            shutil.rmtree(self.builddir)
        elif os.path.islink(self.builddir):
            # Dangling symlink: exists() is False but the link must still go.
            os.unlink(self.builddir)
        os.makedirs(self.builddir)
        exclude_cmd = ''
        if self.exclude:
            for pattern in self.exclude:
                exclude_cmd += " --exclude='%s'" % pattern
        if self.include:
            # Copy only the requested toplevel paths (glob patterns allowed),
            # recreating the directory structure under builddir.
            for pattern in self.include:
                paths = [self.src.cachedir.pjoin(pattern)]
                if '*' in pattern:
                    paths = glob.glob(self.src.cachedir.pjoin(pattern))
                for path in paths:
                    dest = path.replace(self.src.cachedir, self.builddir)
                    if not os.path.lexists( os.path.dirname(dest) ):
                        os.makedirs( os.path.dirname(dest) )
                    util.cmd('rsync -a %s %s/ %s/' % (
                        exclude_cmd,
                        path,
                        dest
                    ))
        else:
            # No include filter: mirror the whole cache directory.
            util.cmd('rsync -a %s %s/ %s/' % (exclude_cmd, self.src.cachedir, self.builddir))
    def pack(self):
        """Compress the synced build directory into the stage tarball."""
        archive = tarfile.open(self.dest, 'w:bz2')
        archive.add(self.builddir,
            arcname = '/',
            recursive = True
        )
        archive.close()
        util.info('%s is ready.' % self.dest)
    def get_snappath(self):
        """Return the snapshot tarball path (only valid after post_conf)."""
        if self.dest:
            return self.dest
        raise util.InhibitorError("Cannot get snappath until post_conf has been called.")
|
Once you have selected and purchased your tiles, you will need adhesive and grout to fix them. At Ceramic Tile Warehouse we stock a comprehensive range from Ardex, Weber, Palace and Universeal.
Our adhesives and grouts are engineered and formulated to suit the many different environments and applications in which tiles are fixed. From standard applications to wet rooms, for ceramic tiles, porcelain tiles, natural products, and fixing to wood, concrete or plasterboard, we have the correct solution in stock.
Our sales staff are trained by our manufacturers and will be happy to advise you on your needs.
|
import abc
import os
import numpy as np
import h5py
import miapy.data.indexexpression as expr
class Writer(metaclass=abc.ABCMeta):
    """Abstract interface for dataset writers.

    Instances are usable as context managers: entering opens the writer and
    exiting closes it again.
    """

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def __del__(self):
        # Best effort: release resources on garbage collection as well.
        self.close()

    @abc.abstractmethod
    def open(self):
        """Open the underlying dataset file."""
        pass

    @abc.abstractmethod
    def close(self):
        """Release the underlying dataset file."""
        pass

    @abc.abstractmethod
    def reserve(self, entry: str, shape: tuple, dtype=None):
        """Pre-allocate a dataset entry for later fill() calls.

        Args:
            entry(str): The dataset entry to be created.
            shape(tuple): The shape to be reserved.
            dtype: The dtype.
        """
        pass

    @abc.abstractmethod
    def fill(self, entry: str, data, index: expr.IndexExpression=None):
        """Write data into (part of) a previously reserved entry.

        Args:
            entry(str): The dataset entry to be filled.
            data: The data to write.
            index(expr.IndexExpression): The slicing expression.
        """
        pass

    @abc.abstractmethod
    def write(self, entry: str, data, dtype=None):
        """Create a dataset entry and write data to it in one step.

        Args:
            entry(str): The dataset entry to be written.
            data: The data to write.
            dtype: The dtype.
        """
        pass
class Hdf5Writer(Writer):
    """Represents the dataset writer for HDF5 files."""
    # Variable-length string dtype so string entries are not length-limited.
    str_type = h5py.special_dtype(vlen=str)
    def __init__(self, file_path: str) -> None:
        """Initializes a new instance.
        Args:
            file_path(str): The path to the dataset file to write.
        """
        self.h5 = None  # type: h5py.File
        self.file_path = file_path
    def close(self):
        # Idempotent: safe to call repeatedly (also reached via __exit__/__del__).
        if self.h5 is not None:
            self.h5.close()
            self.h5 = None
    def open(self):
        # NOTE(review): no explicit file mode is passed; older h5py defaults to
        # append ('a') while newer versions changed/deprecated that default --
        # confirm the pinned h5py version and the intended mode.
        self.h5 = h5py.File(self.file_path, libver='latest')
    def reserve(self, entry: str, shape: tuple, dtype=None):
        # special string handling (in order not to use length limited strings)
        if dtype is str or dtype == 'str' or (isinstance(dtype, np.dtype) and dtype.type == np.str_):
            dtype = self.str_type
        self.h5.create_dataset(entry, shape, dtype=dtype)
    def fill(self, entry: str, data, index: expr.IndexExpression=None):
        # special string handling (in order not to use length limited strings)
        # NOTE(review): identity comparison (`is`) between the dataset's dtype
        # and str_type may fail where an equality check (`==`) would match --
        # confirm against the h5py version in use.
        if self.h5[entry].dtype is self.str_type:
            data = np.asarray(data, dtype=object)
        if index is None:
            index = expr.IndexExpression()  # full-entry assignment by default
        self.h5[entry][index.expression] = data
    def write(self, entry: str, data, dtype=None):
        # special string handling (in order not to use length limited strings)
        if dtype is str or dtype == 'str' or (isinstance(dtype, np.dtype) and dtype.type == np.str_):
            dtype = self.str_type
            data = np.asarray(data, dtype=object)
        # Overwrite any existing entry of the same name.
        if entry in self.h5:
            del self.h5[entry]
        self.h5.create_dataset(entry, dtype=dtype, data=data)
def get_writer(file_path: str) -> Writer:
    """ Get the dataset writer corresponding to the file extension.
    Args:
        file_path(str): The path of the dataset file to be written.
    Returns:
        Writer: Writer corresponding to dataset file extension.
    """
    _, extension = os.path.splitext(file_path)
    try:
        writer_cls = writer_registry[extension]
    except KeyError:
        raise ValueError('unknown dataset file extension "{}"'.format(extension))
    return writer_cls(file_path)
# Maps dataset file extensions to their writer implementations.
writer_registry = {'.h5': Hdf5Writer, '.hdf5': Hdf5Writer}
|
Beaux Square is Malaysia wholesaler and supplier of corporate gift, promotional items, door gift and premium gift with logo imprint service who specialise in metal ball pen and metal roller pen. Ready stock of various design is available. High and Premium Quality. Low MOQ. Warranty provided. Free delivery within KL.
Our promotional metal pens are widely used for many different applications and purposes, such as corporate annual dinner or gala dinner gifts, company anniversary celebration gifts, outdoor sport activity or family day gifts, corporate road show gifts, exhibition gifts, promotional campaign gifts, staff or employee recognition program gifts, company marketing or branding strategy gifts, education graduation gifts, seminar and conference meeting gifts, customer reward or loyalty campaign gifts, product awareness campaigns, festive day gifts, annual general meeting gifts, running or marathon event gifts, occasional gifts, charity program gifts, and marketing gifts for business and clients.
|
import os
import ConfigParser
from bootcamp.settings import ENVIRONMENT
def readconf():
    """Parse and return the shared settings from /etc/netaut.conf."""
    parser = ConfigParser.ConfigParser()
    parser.read('/etc/netaut.conf')  # silently yields an empty config if absent
    return parser
# def project_path(type):
# config = readconf()
# if type == 'project':
# path = config.get('paths', 'project_path')
# elif type == 'play':
# path = config.get('paths', 'project_path')
# elif type == 'resultout':
# path = config.get('paths', 'result_path')
# return os.listdir(path)
def get_vars(type):
    """Look up a path/URL setting for the current ENVIRONMENT section.

    'project' and 'play' both resolve to the project path; other recognized
    keys map one-to-one onto their config options.
    """
    config = readconf()
    if type in ('project', 'play'):
        option = 'project_path'
    elif type == 'resultout':
        option = 'result_path'
    elif type == 'baseurl':
        option = 'baseurl'
    elif type == 'ansibengineemc':
        option = 'ansibengineemc'
    elif type == 'ansibenginemtn':
        option = 'ansibenginemtn'
    return config.get(ENVIRONMENT, option)
|
Residents of Cambridgeshire village Swaffham Prior are one step closer to achieving a low carbon community heat scheme after Councillors gave approval for the project to move to the next stage.
The decision by members of Cambridgeshire County Council’s Commercial & Investment Committee means the scheme can now progress to the development of a detailed business case.
The Swaffham Prior Community Land Trust (SPCLT) first approached the County Council in 2017 to collaborate and support them with a community heat scheme, which will enable the village to stop relying on oil and move to a renewable heat source for heating and hot water needs. After exploring numerous low carbon heating options, a detailed feasibility study suggested that a district heat network with an energy centre, which has a combination of heat being extracted from groundwater and mains gas, would be the most viable option.
The proposed project still requires a lot of work and analysis before any implementation can take place, but it aligns with the vision of the council's Corporate Energy Strategy, and councillors praised the innovative project highly.
Cllr Josh Schumann, Chair of the Commercial & Investment Committee said: “The Committee approved development of the important next stages of this project, which could support the village in moving away from relying on oil and onto a renewable energy through heat extracted from ground water. What excites me is the knowledge that by 2021 the village could own its own heating system and not be vulnerable to fluctuating global prices of oil that the residents are currently tied to.
“Boreholes to test the ground water could be drilled as early as next year, which is a vital next step for the project. We will also need access to homes to identify what is needed in the home to connect to the community heat network.
The proposed project will be retrofitted, one of the first of its kind in the country, and as such will act as a pilot project for both the government and similar villages looking to convert to sustainable heat systems.
The project team are working alongside the government department for Business, Energy & Industrial Strategy (BEIS), who have provided valuable advice and grant funding, as well as a letter of support for the progression of the project. This project exemplifies the principles articulated in the government’s Clean Growth Strategy. The decarbonisation of properties dependent on oil is seen as an important step towards meeting our commitments under the Climate Change Act.
Colleagues at the Cambridgeshire Peterborough Combined Authority are also excited about the prospects of the proposed scheme. James Palmer, Mayor of Cambridgeshire and Peterborough, approved initial match funding on the understanding that the work can be replicated to benefit other communities within Cambridgeshire keen to take on this new energy journey.
Mayor James Palmer said: “What is fantastic about this scheme is that it has come from within the village, via its community land trust, and is potentially creating a replicable model for how our rural communities can thrive sustainably into the future.
“The community has already come together to help meet its housing needs, and it is no surprise to see it now aiming to meet its future energy needs as well. This should absolutely be encouraged and that’s why I was more than happy to agree to match funding to help move the project forward.
The next step of developing a business case for the project will look at detailed financing options, scheme design and procurement strategy. Engagement and support of the Swaffham Prior community will be key to ensuring the project’s success, and the project team have already begun engagement work including a presentation evening with table discussions, as well as a legal focus group with residents in the community. Further community outreach will follow.
Cllr David Greenfield, member of the Community Land Trust and Vice-Chairman of the Swaffham Prior Parish Council said: “It is very exciting to get the Cambridgeshire County Council committee approval and sets us up nicely for the BEIS Heat Networks Delivery Unit grant application at the end of the month.
Further details about the scheme, in the form of a Frequently Asked Questions document, can be found on the Swaffham Prior Parish Council CLT webpage here: http://www.swaffham-prior.org.uk/pc/CLT.html.
Above: Cllr Josh Schumann, CCC Energy Project Director Sheryl French, Mayor James Palmer, Mike Barker (RPS) and Emma Fletcher (CLT) stand outside Shire Hall.
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.views.generic import TemplateView
# URL routing table (legacy Django patterns() style with dotted view paths).
urlpatterns = patterns('',
    # Login/logout views for Django REST framework's browsable API.
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    # REST endpoints of the task burndown app.
    url(r'^api/', include('task_burndown.api')),
    # Admin docs must be registered before the admin URLs themselves.
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # Authentication and registration pages.
    url(r'^login/', 'accounts.views.login_user', name='login'),
    url(r'^logout/', 'accounts.views.logout_user', name='logout'),
    url(r'^register/', 'accounts.views.register', name='register'),
    url(r'^$', 'accounts.views.home', name='home'),
    # Plain-text crawler/credits files rendered from templates.
    url(r'^robots\.txt$',
        TemplateView.as_view(template_name='robots.txt', content_type='text/plain'), name="robots"),
    url(r'^humans\.txt$',
        TemplateView.as_view(template_name='humans.txt', content_type='text/plain'), name="humans")
)
# Serve collected static files from Django itself during development only.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
When Richard Hannay is warned of an assassination plot that has the potential to take Britain into a war, and then a few days later discovers the murdered body of the American that warned him in his flat, he becomes a prime suspect. He flees to the moors of Scotland and a spirited chase begins as he is pursued by the police and the German spies involved with stealing British plans.
Buchan's tale unfolds into one of the seminal and most influential 'chase' books, mimicked by many, yet unrivalled in the tension and mystery created by his writing. Buchan reveres Hannay as an ordinary man who puts his country's good before his own and the classic themes of the novel influenced many films and subsequent 'man-on-the-run' novels.
|
#!/usr/bin/env python3
# By Infected
# 2016
import os
import sys
def check_verbosity():
    """Read the verbosity configuration from the ``verbosity`` file.

    The file lives two directories above this module.  Its first line is the
    numeric verbosity level; every following line names a specific-verbosity
    tag that is unconditionally enabled.

    Returns:
        tuple: ``(VERBOSITY, SVERBOSITY)`` — an int level and a list of tag
        strings.  Defaults to ``(1, [])`` when the file is missing or its
        first line is not an integer.
    """
    base_dir = os.path.dirname(__file__)  # renamed: ``dir`` shadowed the builtin
    abs_path = os.path.join(base_dir, '../../verbosity')
    try:
        with open(abs_path, 'r') as verbosity:
            VERBOSITY = int(verbosity.readline())  # Verbosity level
            # One tag per remaining line, trailing newlines stripped.
            SVERBOSITY = [line.strip('\n') for line in verbosity.readlines()]
    except (OSError, ValueError):
        # BUG FIX: was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; only missing-file and parse errors are expected.
        print('No verbosity file.')
        VERBOSITY = 1
        SVERBOSITY = []
    return VERBOSITY, SVERBOSITY
# ANSI escape-sequence prefix (CSI).
CODE = '\x1b['

# SGR codes: "BOLD"/"D" select the intensity, the rest are foreground colours
# (add 10 for the matching background colour).
colors = {
    "BOLD": 1,
    "D": 0,
    "BLACK": 30,
    "RED": 31,
    "GREEN": 32,
    "YELLOW": 33,
    "BLUE": 34,
    "MAGENTA": 35,
    "CYAN": 36,
    "WHITE": 37
}

# Additional SGR text effects.
effects = {
    "UNDERLINE": 4,
    "BLINK": 5,
    "INVERT": 7,
    "STRIP": 9
}

def color(text='', fg="D", bold=True, bg=None, fx=None) -> str:
    """Wrap *text* in ANSI SGR codes and return the decorated string.

    fg/bg are case-insensitive keys of ``colors``; fx is a key of
    ``effects``.  Non-string selectors fall back to the defaults.
    The sequence always ends with a reset (``\\x1b[0m``).
    """
    fg = fg.upper() if type(fg) == str else "D"
    bg = bg.upper() if type(bg) == str else None
    fx = fx.upper() if type(fx) == str else None

    # Assemble the parameter list: intensity, foreground, optional effect,
    # optional background (+10 turns a foreground code into a background one).
    params = [str(colors["BOLD" if bold else "D"]), str(colors[fg])]
    if fx is not None:
        params.append(str(effects[fx]))
    if bg is not None:
        params.append(str(colors[bg] + 10))

    reset = CODE + str(colors["D"]) + "m"
    return CODE + ";".join(params) + "m" + str(text) + reset
# Pre-rendered priority markers used by msg(): coloured flag glyphs for
# status / warning / error, and an inverted red lightning glyph for fatal.
STATUS = color('⚑', 'GREEN')
WARNING = color('⚑', 'YELLOW')
ERROR = color('⚑', 'RED')
FATAL = color('⌁', 'RED', False, None, 'INVERT')
def msg(message, priority=0, function=None, *data, **verbose):
    """Print a colourised log line whose appearance depends on *priority*.

    Args:
        message: text to display.
        priority: <=0 status (green), 1 warning (yellow), 2 error (red,
            stderr), >=3 fatal (inverted red, stderr).
        function: optional origin label printed in blue before the message.
        *data: extra values appended after the message, in magenta.
        **verbose: optional ``level`` (int — printed only when <= the
            configured verbosity) and ``slevel`` (str — force-printed when
            listed in the specific-verbosity tags).
    """
    VERBOSITY, SVERBOSITY = check_verbosity()
    print_ = True
    if 'level' in verbose:
        if type(verbose['level']) is int:
            # Suppress messages more verbose than the configured level.
            print_ = verbose['level'] <= VERBOSITY
    if 'slevel' in verbose:
        if type(verbose['slevel']) is str:
            if verbose['slevel'] in SVERBOSITY:
                # A matching specific tag overrides the level-based decision.
                print_ = True
    if print_:
        # Marker + message colour per priority band; errors/fatals -> stderr.
        if priority <= 0:
            # status
            mode = STATUS
            message = color(message, 'GREEN')
            print(mode, end=" ")
        if priority == 1:
            # Warning
            mode = WARNING
            message = color(message, 'YELLOW')
            print(mode, end=" ")
        if priority == 2:
            # Error
            mode = ERROR
            message = color(message, 'RED')
            print(mode, end=" ", file=sys.stderr)
        if priority >= 3:
            # Fatal
            mode = FATAL
            message = color(message, 'RED', False, None, 'INVERT')
            print(mode, end=" ", file=sys.stderr)
        if function is not None:
            function_color = 'BLUE'
            function += ": "
            if priority >= 2:
                print(color(function, function_color), end="", file=sys.stderr)
            else:
                print(color(function, function_color), end="")
        if priority >= 2:
            print(message, end="", file=sys.stderr)
        else:
            print(message, end="")
        if data:
            # BUG FIX: was ``data is not ()`` — an identity comparison against
            # a tuple literal (SyntaxWarning on CPython 3.8+); truthiness is
            # the intended test for "any extra data supplied".
            if priority >= 2:
                print("\t" + color("|", 'YELLOW'), end="", file=sys.stderr)
                print(color(" " + str(list(data)), "MAGENTA"), file=sys.stderr)
            else:
                print("\t" + color("|", 'YELLOW'), end="")
                print(color(" " + str(list(data)), "MAGENTA"))
        else:
            if priority >= 2:
                print(file=sys.stderr)
            else:
                print()
|
devconnections day 2: who's line is it anyway?
via jasonz: there is a new white paper posted on msdn outlining .net fx 3.0 install/detection/etc.
trouble with vista and asp.net installs?
|
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.default_behavior import Cancel
from pontus.form import FormView
from pontus.schema import select
from lac.content.processes.admin_process.behaviors import (
EditSmartFolder)
from lac.content.smart_folder import (
SmartFolderSchema, SmartFolder)
from lac import _
@view_config(
    name='editsmartfolder',
    context=SmartFolder,
    renderer='pontus:templates/views_templates/grid.pt',
    )
class EditSmartFolderView(FormView):
    """Pyramid form view for editing an existing SmartFolder."""
    title = _('Edit the smart folder')
    # Restrict the full SmartFolderSchema to the fields editable on this form.
    schema = select(SmartFolderSchema(factory=SmartFolder, editable=True),
                    ['title',
                     'description',
                     'filters',
                     'view_type',
                     'classifications',
                     'icon_data',
                     'style',
                     'add_as_a_block'])
    # Form actions: the edit behaviour plus a cancel button.
    behaviors = [EditSmartFolder, Cancel]
    formid = 'formeditsmartfolder'
    name = 'editsmartfolder'
    # Static assets injected into the page when this form is rendered.
    requirements = {'css_links':[],
                    'js_links':['lac:static/js/smart_folder_management.js',
                                'lac:static/js/contextual_help_smart_folder.js']}

    def before_update(self):
        """Drop the 'add_as_a_block' field when the folder is not a root folder."""
        if self.context.parents:
            self.schema.children.remove(self.schema.get('add_as_a_block'))

    def default_data(self):
        """Pre-populate the form with the folder being edited."""
        return self.context


# Route the EditSmartFolder behaviour to this view.
DEFAULTMAPPING_ACTIONS_VIEWS.update({EditSmartFolder: EditSmartFolderView})
|
Garfield :: Garfield... - I'm going to order a pizza for dinner. - And I can do without the confetti. Oh, how can you deny my unfettered euphoria?
|
# -*- coding: utf-8 -*-
from matplotlib.colors import rgb2hex
import matplotlib.pyplot as plt
import numpy as np
# Colours drawn from material designs colour pallet at https://material.io/guidelines/style/color.html
class Colors(object):
    """Material-design colour palette with name/alias lookup and scaling helpers."""

    def __init__(self):
        # Canonical colour name -> hex code.
        self.color_map = {
            "blue": "#1976D2",
            "lblue": "#4FC3F7",
            "red": "#E53935",
            "green": "#43A047",
            "lgreen": "#8BC34A",
            "purple": "#673AB7",
            "cyan": "#4DD0E1",
            "magenta": "#E91E63",
            "yellow": "#F2D026",
            "black": "#333333",
            "grey": "#9E9E9E",
            "orange": "#FB8C00",
            "amber": "#FFB300",
            "brown": "#795548",
        }
        # Short letter aliases onto color_map keys.
        self.aliases = {
            "b": "blue",
            "r": "red",
            "g": "green",
            "k": "black",
            "m": "magenta",
            "c": "cyan",
            "o": "orange",
            "y": "yellow",
            "a": "amber",
            "p": "purple",
            "e": "grey",
            "lg": "lgreen",
            "lb": "lblue",
        }
        # Order in which colours are handed out by default.
        self.default_colors = ["blue", "lgreen", "red", "purple", "yellow", "grey", "lblue", "magenta", "green", "brown", "black", "orange"]

    def format(self, color):
        """Return *color* as a hex string, resolving names, aliases and RGB arrays."""
        if isinstance(color, np.ndarray):
            color = rgb2hex(color)
        if color[0] == "#":
            return color
        # Resolve a possible alias, then look the canonical name up.
        canonical = self.aliases.get(color, color)
        if canonical in self.color_map:
            return self.color_map[canonical]
        raise ValueError("Color %s is not mapped. Please give a hex code" % color)

    def get_formatted(self, list_colors):
        """Format every entry of *list_colors* to hex."""
        return list(map(self.format, list_colors))

    def get_default(self):
        """Return the default colour cycle as hex strings."""
        return self.get_formatted(self.default_colors)

    def get_colormap(self, num, cmap_name, scale=0.7):  # pragma: no cover
        """Sample *num* colours from colormap *cmap_name*, darkened towards the ends."""
        base = self.get_formatted(plt.get_cmap(cmap_name)(np.linspace(0.05, 0.9, num)))
        factors = scale + (1 - scale) * np.abs(1 - np.linspace(0, 2, num))
        return [self.scale_colour(c, f) for c, f in zip(base, factors)]

    def scale_colour(self, colour, scalefactor):  # pragma: no cover
        """Multiply each RGB channel of *colour* by *scalefactor*, clamped to 0..255."""
        if isinstance(colour, np.ndarray):
            r, g, b = colour[:3] * 255.0
        else:
            stripped = colour.strip("#")
            # Negative factors and malformed hex strings pass through unscaled.
            if scalefactor < 0 or len(stripped) != 6:
                return stripped
            r, g, b = (int(stripped[i:i + 2], 16) for i in (0, 2, 4))
        r, g, b = (self._clamp(int(channel * scalefactor)) for channel in (r, g, b))
        return "#%02x%02x%02x" % (r, g, b)

    def _clamp(self, val, minimum=0, maximum=255):
        """Clamp *val* into [minimum, maximum]."""
        return min(max(val, minimum), maximum)
|
For those who know the city, searching with the binoculars becomes a game or a bet to locate specific places or addresses, and no one escapes the temptation of finding their house in the midst of the intricate urban fabric. Those who visit the scale model for the first time discover the magical universe of its constructive mix: the tropicalized 18th-century Baroque reflected in the beautiful churches and mansions in the Historic Center; the 19th-century neoclassicism with outstanding examples in the area around the former city walls; the grand architectural movements of the 20th century; the omnipresent eclecticism scattered practically all around city; the beautiful and sensual art nouveau; the very popular art deco which can be seen both in magnificent constructions like the Bacardí Building and in modest homes; the modern movement, clearly expressed in the “skyscrapers” of the Vedado district and the sophisticated residences of Miramar; the eastward expansion that began in 1959; the splendid National Art School, and so much more. Summing it up, the “the styleless style” of the metropolis that Alejo Carpentier called “the city of columns”.
|
# -*- coding: utf-8 -*-
# wpcdesk - WordPress Comment Desktop
# Copyright (C) 2012 Eka Putra - [email protected]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PyQt4 import QtGui, QtCore
from gui.comment_window import Ui_CommentWindow
from wpcdesk_threads import EditCommentThread, DeleteCommentThread
class CommentEditor(QtGui.QDialog):
    """Dialog for editing or deleting one WordPress comment.

    Network work is delegated to background Qt threads so the UI stays
    responsive; *data* is a dict describing the comment being edited.
    """

    def __init__(self, parent=None, data=None):
        QtGui.QDialog.__init__(self, parent)
        self.ui = Ui_CommentWindow()
        self.ui.setupUi(self)
        self.ui.progressBar.hide()  # shown only while a worker thread runs
        self.set_validator()
        self.parent = parent
        self.data = data
        self.fill_form(self.data)
        # Old-style PyQt4 signal connections for the two action buttons.
        QtCore.QObject.connect(self.ui.btn_save, QtCore.SIGNAL("clicked()"), self.saveComment)
        QtCore.QObject.connect(self.ui.btn_delete, QtCore.SIGNAL("clicked()"), self.deleteComment)
        # Background workers; their signals drive the progress bar and dialogs.
        self.edit_comment_thread = EditCommentThread()
        self.edit_comment_thread.is_loading.connect(self.loading)
        self.edit_comment_thread.is_success.connect(self.edit_status)
        self.delete_comment_thread = DeleteCommentThread(self.data)
        self.delete_comment_thread.is_loading.connect(self.loading)
        self.delete_comment_thread.is_success.connect(self.delete_status)

    def set_validator(self):
        """Attach an email-format validator to the email input field."""
        # Email Validator
        email_pattern = QtCore.QRegExp( r"^([a-zA-Z0-9_\.\-\+])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$" )
        email_validator = QtGui.QRegExpValidator(email_pattern , self )
        self.ui.edit_email.setValidator(email_validator)

    def fill_form(self, data):
        """Populate the dialog widgets from the comment *data* dict."""
        self.comment_id = data['comment_id']
        self.ui.lbl_post.setText(data['comment_post'])
        self.ui.lbl_date.setText(data['comment_date'])
        self.ui.edit_name.setText(data['comment_author'])
        self.ui.edit_email.setText(data['comment_email'])
        self.ui.edit_comment.setText(data['comment_content'])
        # The checkbox mirrors the 'Approved' moderation status.
        if data['comment_status'] == 'Approved':
            self.ui.cb_status.setChecked(True)
        else:
            self.ui.cb_status.setChecked(False)

    def saveComment(self):
        """Collect the form values and start the edit worker thread."""
        data = {}
        # Checkbox -> WordPress moderation status keyword.
        if self.ui.cb_status.isChecked():
            data['status'] = 'approve'
        else:
            data['status'] = 'hold'
        data['content'] = str(self.ui.edit_comment.toPlainText())
        data['author'] = str(self.ui.edit_name.text())
        data['author_email'] = str(self.ui.edit_email.text())
        self.edit_comment_thread.set_comment_id(int(self.data['comment_id']))
        self.edit_comment_thread.set_data(data)
        self.edit_comment_thread.start()

    def deleteComment(self):
        """Ask for confirmation, then start the delete worker thread."""
        answer = QtGui.QMessageBox.question(self, 'Confirmation','Are you sure want to delete this comment?', QtGui.QMessageBox.Yes|QtGui.QMessageBox.Cancel)
        if answer == QtGui.QMessageBox.Yes:
            self.delete_comment_thread.start()
        else:
            return

    def loading(self, is_loading):
        """Toggle the progress bar while a worker thread is busy."""
        if is_loading:
            self.ui.progressBar.show()
        else:
            self.ui.progressBar.hide()

    def edit_status(self, status):
        """Report the outcome of the edit thread and refresh the parent list."""
        if status:
            self.parent.loadComments()
            QtGui.QMessageBox.information(self, 'Comment updated!','Comment successfuly updated.', QtGui.QMessageBox.Ok)
        else:
            QtGui.QMessageBox.warning(self, 'Failed!','Failed to update comment.', QtGui.QMessageBox.Ok)

    def delete_status(self, status):
        """Report the outcome of the delete thread; close the dialog on success."""
        if status:
            self.parent.loadComments()
            QtGui.QMessageBox.information(self, 'Comment Deleted','Comment successfuly deleted.', QtGui.QMessageBox.Ok)
            self.close()
        else:
            QtGui.QMessageBox.warning(self, 'Failed!','Failed to delete comment.', QtGui.QMessageBox.Ok)
|
Smoke tests for sewer smells are a simple and effective way to track down that lingering stench that just won’t go away. We use a specialized piece of equipment to perform the smoke test, pumping non-toxic white smoke into your sewer system. Any cracks, broken connections, or bad seals on the sewer system are revealed when the smoke makes its presence known.
Sewer smells are often tracked back to dried up traps on infrequently used drains. But when that simple solution won’t cut it, finding the source of a sewer smell can be like finding a needle in a haystack. That’s why we turn to visual proof. Just like with the camera inspection system, seeing is believing. A smoke test for sewer smells can track down a hairline crack in a sewer pipe hidden behind a wall or under a floor. Those types of breaks in a sewer system would be almost impossible to find without the aid of a smoke test machine.
All Clear Plumbing performs more smoke tests than any other plumbing company in the Upstate of South Carolina. We have the equipment and seasoned expertise required to take a frustrating and sometimes embarrassing situation and clear it up with ease. Most of the time, the actual repair is minimal and simple to complete once we have tracked down the sewer smell using the smoke detection equipment.
I was able to use my kitchen sink again. I recommend them.
|
"""
benchmark
~~~~~~~~~
A simple script for running benchmarks on captured process output.
Example run::
$ BENCHMARK=tests/captured/ls.input python benchmark.py
.....................
ls.input: Mean +- std dev: 644 ns +- 23 ns
:copyright: (c) 2016-2021 by pyte authors and contributors,
see AUTHORS for details.
:license: LGPL, see LICENSE for more details.
"""
import io
import os.path
import sys
from functools import partial
try:
    from pyperf import Runner
except ImportError:
    # BUG FIX: the package that provides this import is ``pyperf`` (renamed
    # from ``perf``); the old message told users to install the wrong package.
    sys.exit("``pyperf`` not found. Try installing it via ``pip install pyperf``.")
import pyte
def make_benchmark(path, screen_cls):
    """Return a zero-argument callable feeding the capture at *path* to a fresh screen."""
    with io.open(path, "rt", encoding="utf-8") as source:
        captured = source.read()
    terminal = pyte.Stream(screen_cls(80, 24))
    return partial(terminal.feed, captured)
if __name__ == "__main__":
    # The capture path comes from the BENCHMARK env var; forward it to the
    # pyperf worker processes via --inherit-environ.
    benchmark = os.environ["BENCHMARK"]
    sys.argv.extend(["--inherit-environ", "BENCHMARK"])
    runner = Runner()
    # Benchmark the same input against every screen implementation.
    for screen_cls in [pyte.Screen, pyte.DiffScreen, pyte.HistoryScreen]:
        name = os.path.basename(benchmark) + "->" + screen_cls.__name__
        runner.bench_func(name, make_benchmark(benchmark, screen_cls))
|
21 Best Of Fashion Sketches is a free Complete Home Decoration Ideas Gallery. This 21 Best Of Fashion Sketches was posted in the hope that we can give you an inspiration to remodel your home. This article can be your reference when you are confused about choosing the right decoration for your home. This 21 Best Of Fashion Sketches may be your best option for decorating your home, because having a home with our own design is everyone's dream.
21 Best Of Fashion Sketches - We hope that , by posting this 21 Best Of Fashion Sketches ideas , we can fulfill your needs of inspiration for designing your home. If you need more ideas to Design a Home , you can check at our collection right below this post. Also , don't forget to always visit Forextradinglb.com to find some new and fresh posts about Kitchen Remodeling , Bathroom Remodel , Bedroom Theme Ideas , Living Room Style and other Home Design Inspiration everyday.
|
# -*- coding:utf-8 -*-
from django.db import models
from account.models import User
class Job(models.Model):
    """A survey (questionnaire)."""
    # URL slug uniquely identifying the survey.
    name = models.SlugField(verbose_name="网址", max_length=40, unique=True)
    title = models.CharField(verbose_name="标题", max_length=128)
    questions = models.ManyToManyField(verbose_name="问题", to="Question", blank=True)
    time_start = models.DateTimeField(verbose_name="开始时间", blank=True, null=True)
    # Survey opening / expiry times (both optional).
    time_expired = models.DateTimeField(verbose_name="过期时间", blank=True, null=True)
    description = models.CharField(verbose_name="描述", max_length=512)
    time_added = models.DateTimeField(verbose_name="添加时间", auto_now_add=True, blank=True)
    is_active = models.BooleanField(verbose_name="启用", blank=True, default=True)
    # Some surveys may only be answered by logged-in users.
    is_authenticated = models.BooleanField(verbose_name="需要用户登录", blank=True, default=True)

    def __str__(self):
        return self.title

    class Meta:
        verbose_name = "问卷"
        verbose_name_plural = verbose_name
class Question(models.Model):
    """A single survey question."""
    # Question type: free text, single choice, or multiple choice.
    CATEGORY_CHOICES = (
        ("text", "文本"),
        ("radio", "单选"),
        ("checkbox", "多选")
    )
    title = models.CharField(verbose_name="问题", max_length=128)
    description = models.CharField(verbose_name="描述", max_length=512, blank=True)
    category = models.CharField(verbose_name="类型", choices=CATEGORY_CHOICES, max_length=10,
                                default="text", blank=True)
    # Answer uniqueness is validated at submission time, not by the database.
    is_unique = models.BooleanField(verbose_name="回答需要唯一", blank=True, default=False)

    def __str__(self):
        return self.title

    class Meta:
        verbose_name = "问题"
        verbose_name_plural = verbose_name
class Choice(models.Model):
    """One selectable option of a (radio/checkbox) question."""
    question = models.ForeignKey(to="question", verbose_name="问题", related_name="choices", on_delete=models.CASCADE)
    # Single-character option key (e.g. "A"), plus its display value.
    option = models.CharField(verbose_name="选项", max_length=1)
    value = models.CharField(verbose_name="选项值", max_length=128)

    def __str__(self):
        return "{}:{}".format(self.question, self.value)

    class Meta:
        verbose_name = "问题答案选项"
        verbose_name_plural = verbose_name
class Answer(models.Model):
    """An answer to one question."""
    question = models.ForeignKey(to="question", verbose_name="问题", on_delete=models.CASCADE)
    # Selected option key for choice questions; empty for free-text answers.
    option = models.CharField(verbose_name="回答选项", blank=True, max_length=1, null=True)
    answer = models.CharField(verbose_name="回答", max_length=128)

    def __str__(self):
        return "问题:(ID:{}):Answer:{}".format(self.question_id, self.answer)

    class Meta:
        verbose_name = "问题回答"
        verbose_name_plural = verbose_name
class Report(models.Model):
    """One completed submission of a survey (the set of its answers)."""
    job = models.ForeignKey(to="job", verbose_name="问卷", on_delete=models.CASCADE)
    # Submitter, if logged in; kept (as NULL) when the user is deleted.
    user = models.ForeignKey(to=User, verbose_name="用户", blank=True, null=True, on_delete=models.SET_NULL)
    ip = models.GenericIPAddressField(verbose_name="回答者IP", blank=True, null=True)
    time_added = models.DateTimeField(verbose_name="添加时间", blank=True, auto_now_add=True)
    answers = models.ManyToManyField(verbose_name="问卷回答", to="answer", blank=True)

    def __str__(self):
        return "Report:{}".format(self.pk)

    class Meta:
        verbose_name = "问卷回答"
        verbose_name_plural = verbose_name
|
“So I just hand my score sheet to the Records Officer after every session and they do the rest”?
This section explains the theory behind Handicaps and Classifications, you as an archer only have to submit your score sheet to our records officer after every session and they will do the rest for you. Just ensure your score sheets are completed correctly and counter signed as an honest score.
An archery handicap is a number between 0 and 100, it indicates the ability of an archer, essentially the lower the handicap the better the archer. Every archer has one outdoor handicap and a separate indoor handicap for each bow style they shoot.
Note: Currently Archery GB do not recognise the Horsebow as an accepted bow to shoot with to gain handicaps or any other Archery GB award.
Handicap tables are produced by Archery GB which gives a handicap for every possible score for every round irrespective of the archer’s age, gender or the bow style used. The club Records Officer has a copy of the latest handicap tables and uses these to calculate member’s handicap figure.
2. Enables scores to be compared between different rounds.
3. Enables archers of different abilities to compete on equal terms.
The rest of this section describes how to calculate a handicap.
Calculating a handicap is the same for both indoors and outdoors. This section describes how a handicap is calculated for an archer without a handicap, how it is modified during the season and then what happens at the end of a season. A useful tool for members to use is the Archers Mate website.
For archers without a handicap their initial figure is the average of the handicaps for the first three official rounds recorded. All handicaps are quoted in whole numbers and the average must be rounded up to the nearest whole number.
For example, if an archer shoots 3 outdoor rounds with handicaps of 64, 70 and 69 then the initial handicap is 68 (64+70+69=203, 203÷3=67.6, round up = 68).
When an archer has established a handicap it can be improved after each round shot. If a round is shot with a handicap at least 2 handicap points better than their current figure, then the archer’s new handicap is the average of the current handicap and the handicap for the round just completed. As with the initial handicap this must be rounded up to the nearest whole number.
Handicaps can be used to enable archers of all standards to compete against each other. This is achieved by adding a certain number of points onto an archers score at the end of the round. The lower an archer’s handicap, the fewer points get added to his score. The winner is the archer with the most points after the handicap has been taken into account. The number of points to be added is included in the Archery GB handicap tables held by the clubs Records Officer.
Handicaps can be used to enable archers of all standards to compete against each other. This is achieved by adding a certain number of points onto an archers score at the end of the round. The lower an archer’s handicap, the fewer points get added to his score. The winner is the archer with the most points after the handicap has been taken into account. The number of points to be added is included in the Archery GB handicap tables.
At the end of the summer season annually (31st December), the handicap improvement medal will be awarded to the member who has achieved the greatest reduction in handicap over either; the last summer season or from when the archer gained their initial handicap to the end of season. If there is a tie there will be a shoot off between the archers factoring handicaps.
Archery classifications are similar to handicaps in that you must shoot at least three rounds to gain a classification, and there are separate indoor and outdoor classifications for each bow style an archer shoots. However unlike handicaps, classifications take into account an archer’s, gender, age and bow style. They therefore give an indication to an archer’s ability taking these factors into account.
The outdoor classification is widely recognised by other archers, unlike the indoor classification. As with handicaps, Archery GB produce classification tables which state what score must be obtained for a given round to qualify for a given classification. Unlike the handicap tables, classification tables can be downloaded from here and can easily be found on the internet from Archery GB’s Shooting Administrative Procedures which can be downloaded from the Archery GB website. You can work out your own classification or submit your scores to the club Records Officer and they will work it out for you.
Section 7 of the Archery GB’s Shooting Administrative Procedures provides the definitive description of the classification scheme. Again www.archersmate.co.uk is a very useful tool to track classification scores.
Outdoor classifications are (the easiest to obtained are listed first) 3rd class, 2nd class, 1st class, Bowman (BM), Master Bowman (MB) and Grand Master Bowman (GMB). Juniors can gain Junior Bowman and Junior Master Bowman instead of Bowman and Master Bowman respectively. You can achieve 3rd, 2nd, 1st classes or BM (Bowman) classification at any club shoot or competition.
You can only qualify for MB (Master Bowmen) and GMB (Grand Master Bowman) having submitted 3 scores to Archery GB, from Record Status competitions.
Badges are awarded for all outdoor classifications. To get an idea of the difficulty of getting these classifications they are designed so that the top 1% will get GMB, the top 4% MB and top 15% Bowman.
Indoor classifications are different to the outdoor scheme in that they run from A to H, A being the best. As with outdoor handicaps, classification tables are produced by Archery GB and three qualifying scores are required before a classification can be obtained. No badges are produced for indoor classifications but the handicaps can be used during competitions for example Worcester and Portsmouth shoots.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import contextlib
import errno
import os
import subprocess
import sys
import time
# Local MySQL server socket; polled at startup to wait for the DB to come up.
mysql_socket_path = "/var/run/mysqld/mysqld.sock"
# Credentials and schema of the EMS heating-data database.
mysql_user = "emsdata"
mysql_password = "emsdata"
mysql_db_name = "ems_data"
@contextlib.contextmanager
def flock(path, wait_delay = 1):
    """Lock-file based mutual exclusion (Python 2).

    Spins, sleeping *wait_delay* seconds per attempt, until *path* can be
    created exclusively; removes the lock file again on exit.
    """
    while True:
        try:
            # O_CREAT|O_EXCL fails atomically if the lock file already exists.
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
        except OSError, e:
            if e.errno != errno.EEXIST:
                raise
            # Someone else holds the lock; retry after a short sleep.
            time.sleep(wait_delay)
            continue
        else:
            break
    try:
        yield fd
    finally:
        os.close(fd)
        os.unlink(path)
def check_interval():
    """Translate the global *interval* name into a MySQL INTERVAL clause (or None)."""
    return {
        "day": "1 day",
        "halfweek": "3 day",
        "week": "1 week",
        "month": "1 month"
    }.get(interval)
def get_time_format():
    """Return the gnuplot x-axis time format matching the global *interval*."""
    known_formats = {
        "day": "%H:%M",
        "halfweek": "%H:%M (%a)",
        "week": "%a, %Hh"
    }
    # Longer spans ("month" and anything unknown) are labelled day.month.
    return known_formats.get(interval, "%d.%m")
def do_graphdata(sensor, filename):
    """Export (time, value) samples of *sensor* into *filename*.

    Pipes a query to the ``mysql`` command-line client, which writes
    tab-separated rows straight into the file.  The window is taken from
    the global ``timespan_clause``; each stored interval contributes its
    (clipped) start and end point so gnuplot can draw continuous lines.
    """
    # ``with`` guarantees the data file is closed even if Popen/communicate
    # raises (the original leaked the handle in that case).
    with open(filename, "w") as datafile:
        process = subprocess.Popen(["mysql", "-A", "-u%s" % mysql_user, "-p%s" % mysql_password, mysql_db_name ],
                                   shell = False, stdin = subprocess.PIPE, stdout = datafile)
        process.communicate("""
        set @starttime = subdate(now(), interval %s);
        set @endtime = now();
        select time, value from (
        select adddate(if(starttime < @starttime, @starttime, starttime), interval 1 second) time, value from numeric_data
        where sensor = %d and endtime >= @starttime
        union all
        select if(endtime > @endtime, @endtime, endtime) time, value from numeric_data
        where sensor = %d and endtime >= @starttime)
        t1 order by time;
        """ % (timespan_clause, sensor, sensor))
def do_plot(name, filename, ylabel, definitions):
    """Render one PNG chart via gnuplot.

    *definitions* is a list of [sensor_id, legend_label, gnuplot_style]
    triples; one temporary data file per curve is exported, plotted and
    deleted again.  Output goes to <targetpath>/<filename>-<interval>.png.
    """
    # Export one /tmp/fileN.dat per curve (1-based to match the plot loop).
    i = 1
    for definition in definitions:
        do_graphdata(definition[0], "/tmp/file%d.dat" % i)
        i = i + 1
    filename = filename + "-" + interval + ".png"
    # Drive gnuplot entirely through its stdin.
    process = subprocess.Popen("gnuplot", shell = False, stdin = subprocess.PIPE)
    process.stdin.write("set terminal png font 'arial' 12 size 800, 450\n")
    process.stdin.write("set grid lc rgb '#aaaaaa' lt 1 lw 0,5\n")
    process.stdin.write("set title '%s'\n" % name)
    process.stdin.write("set xdata time\n")
    process.stdin.write("set xlabel 'Datum'\n")
    process.stdin.write("set ylabel '%s'\n" % ylabel)
    # Input timestamps come from MySQL as 'YYYY-MM-DD HH:MM:SS'.
    process.stdin.write("set timefmt '%Y-%m-%d %H:%M:%S'\n")
    process.stdin.write("set format x '%s'\n" % get_time_format())
    process.stdin.write("set xtics autofreq rotate by -45\n")
    process.stdin.write("set ytics autofreq\n")
    process.stdin.write("set output '%s'\n" % os.path.join(targetpath, filename))
    process.stdin.write("plot")
    # One comma-separated plot clause per curve; columns 1:3 are
    # date+time (two fields) and the value.
    for i in range(1, len(definitions) + 1):
        definition = definitions[i - 1]
        process.stdin.write(" '/tmp/file%d.dat' using 1:3 with %s lw 2 title '%s'" %
                            (i, definition[2], definition[1]))
        if i != len(definitions):
            process.stdin.write(",")
    process.stdin.write("\n")
    # Closing stdin lets gnuplot run; wait for it to finish writing the PNG.
    process.stdin.close()
    process.wait()
    # Remove the temporary data files again.
    for i in range(1, len(definitions) + 1) :
        os.remove("/tmp/file%d.dat" % i)
# main starts here
# Usage: <script> <target-dir> <interval>, interval one of
# day/halfweek/week/month (see check_interval()).
if len(sys.argv) != 3:
    sys.exit(1)
interval = sys.argv[2]
timespan_clause = check_interval()
if timespan_clause == None:
    sys.exit(1)
# Give the MySQL server up to 30 seconds to come up (e.g. right after boot).
retries = 30
while not os.path.exists(mysql_socket_path) and retries > 0:
    print "MySQL socket not found, waiting another %d seconds" % retries
    retries = retries - 1
    time.sleep(1)
if retries == 0:
    sys.exit(2)
targetpath = sys.argv[1]
if not os.path.isdir(targetpath):
    os.makedirs(targetpath)
# Only one generation run at a time; the runs share the /tmp data files.
with flock("/tmp/graph-gen.lock"):
    # Each entry: [sensor id, legend label, gnuplot plot style].
    definitions = [ [ 11, "Außentemperatur", "lines smooth bezier" ],
                    [ 12, "Ged. Außentemperatur", "lines" ] ]
    do_plot("Aussentemperatur", "aussentemp", "Temperatur (°C)", definitions)
    definitions = [ [ 13, "Raum-Soll", "lines" ],
                    [ 14, "Raum-Ist", "lines smooth bezier" ] ]
    do_plot("Raumtemperatur", "raumtemp", "Temperatur (°C)", definitions)
    definitions = [ [ 1, "Kessel-Soll", "lines" ],
                    [ 2, "Kessel-Ist", "lines smooth bezier" ],
                    [ 6, "Vorlauf HK1", "lines smooth bezier" ],
                    [ 8, "Vorlauf HK2", "lines smooth bezier" ],
                    [ 10, "Rücklauf", "lines smooth bezier" ] ]
    do_plot("Temperaturen", "kessel", "Temperatur (°C)", definitions)
    definitions = [ [ 3, "Solltemperatur", "lines" ],
                    [ 4, "Isttemperatur", "lines smooth bezier" ] ]
    do_plot("Warmwasser", "ww", "Temperatur (°C)", definitions)
|
5 x M6 (1/4 in) c'sk head screws on 121mm (4 3/4 in) PCD. Lewmar recommend that the rope enters onto the drum at an angle of 5 to 10 to the base axis of the winch. To achieve this angle it may be necessary to use a base wedge when installing the winch. This product is Made to Order.
|
#!/usr/bin/env python
# Author: Zion Orent <[email protected]>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_grovecollision as upmGrovecollision
def main():
    """Poll the Grove Collision sensor and print state transitions forever."""
    # This was tested with the Grove Collision Sensor
    # Instantiate a Grove Collision on digital pin D2
    myGrovecollision = upmGrovecollision.GroveCollision(2)

    ## Exit handlers ##
    # This stops python from printing a stacktrace when you hit control-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # This lets you run code on exit,
    # including functions from myGrovecollision
    def exitHandler():
        print("Exiting")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    # Report only state *changes*, not every poll iteration.
    collisionState = False
    print("No collision")
    while(1):
        if (myGrovecollision.isColliding() and not collisionState):
            print("Collision!")
            collisionState = True
        elif (not myGrovecollision.isColliding() and collisionState):
            print("No collision")
            collisionState = False

if __name__ == '__main__':
    main()
|
PLEASE READ THIS AGREEMENT CAREFULLY BEFORE USING THE WEB SITE. BY ACCESSING THE WEB SITE, YOU AGREE TO BE BOUND BY THE TERMS AND CONDITIONS BELOW. IF YOU DO NOT WISH TO BE BOUND BY THESE TERMS AND CONDITIONS, YOU SHOULD EXIT THIS SITE.
The Suffolk County Bar Association offers the information and resources on the Web site as a service to its members and to other Internet users. The Suffolk County Bar Association and its subsidiaries makes no warranty or guarantee concerning the accuracy or reliability of the contents of this site or of other Web sites to which it links. Any unauthorized downloading and distribution of any copyrighted material from this site or sites to which we link, without the copyright owner's permission, is strictly prohibited.The Suffolk County Bar Association and its subsidiaries assumes no liability for any reliance on the information provided and is not intended to create an attorney-client relationship and shall not be construed as legal advice.
Under no circumstances shall The Suffolk County Bar Association or its subsidiaries be liable for any direct, indirect, incidental, special, punitive or consequential damages that result in any way from your use of or inability to use the Web site or your reliance on or use of information, services or merchandise provided on or accessible through the Web site, or that result from mistakes, omissions, interruptions, deletion of files, errors, defects, delays in operation, or transmission, or any failure of performance.
You agree to defend, indemnify and hold the The Suffolk County Bar Association and its subsidiaries harmless from any and all liabilities, costs and expenses, including reasonable attorney's fees, related to any violation of these terms and conditions by you.
|
import urllib, re
import helper
import json
# Channel identifier embedded in every directory item for routing.
thisChannel = "nickjr"
# Site root; prepended to relative thumbnail paths from the API.
baseLink = "http://www.nickjr.com"
# JSON config endpoint; %s is filled with a page or category urlAlias.
apiLink = "http://www.nickjr.com/common/data/kids/get-kids-config-data.jhtml?fsd=/dynaboss&urlAlias=%s&af=false"
# Matches "<h2 id=...><span>title</span></h2> ... <ul>body</ul>" playlist sections.
# NOTE(review): not referenced anywhere in this file's visible code — possibly dead.
extractPlaylist = re.compile("<h2 id=\".*?\"><span>(.*?)</span></h2>.*?<ul>(.*?)</ul>",re.DOTALL)
def mainPage():
    """List the top-level video categories as directory items."""
    raw = helper.load_page(apiLink % ("kids-video-landing",))
    config = json.loads(raw)
    # The landing page's first promo group holds the category list.
    for category in config['config']['promos'][0]['items']:
        title = helper.removeHtmlSpecialChars(category['title'])
        target = apiLink % (category['urlAlias'],)
        thumb = baseLink + category['thumbnail']
        helper.addDirectoryItem(
            title,
            {"channel": thisChannel, "action": "showCategory", "link": target},
            thumb)
    helper.endOfDirectory()
def showCategory(link):
    """List the videos inside one category feed as playable items."""
    raw = helper.load_page(urllib.unquote(link))
    # Normalise a stray latin-1 i-acute byte to its UTF-8 byte sequence
    # so json.loads does not choke on it.
    raw = raw.replace("\xED", "\xc3\xad")
    config = json.loads(raw)
    for video in config['config']['promos'][0]['items']:
        title = helper.removeHtmlSpecialChars(video['title'])
        helper.addDirectoryItem(
            title,
            {"channel": thisChannel, "action": "playVideo", "link": video['id']},
            video['thumbnail'],
            False)
    helper.endOfDirectory()
def playVideo(link):
    """Resolve a video id to its highest-resolution RTMP stream and play it.

    :param link: mgid item id of the video (appended to the mRSS endpoint).
    """
    playlistLink = "http://www.nickjr.com/dynamo/video/data/mrssGen.jhtml?type=network&loc=sidebar&hub=njParentsHub&mode=playlist&mgid=mgid:cms:item:nickjr.com:"
    playlistLink = playlistLink + link
    page = helper.load_page(playlistLink, True)
    media = helper.extractMrss(page)
    player = media[0]['player']
    link = media[0]['url']
    # Follow redirects so swfUrl points at the real player location.
    response = urllib.urlopen(urllib.unquote(player))
    mediaPlayer = response.geturl()
    page = helper.load_page(urllib.unquote(link))
    extractRtmpUrls = re.compile("<rendition.*?height=[\"\']+([0-9]*)[\"\']+.*?>[\n\ \t]*<src>(.*?)</src>[\n\ \t]*</rendition>")
    streamUrl = ""
    streamHeight = 0
    for rtmpItem in extractRtmpUrls.finditer(page):
        # BUG FIX: group(1) is a string and streamHeight was never updated,
        # so the old comparison always succeeded and the *last* rendition
        # won regardless of resolution. Convert to int and track the
        # maximum so the highest-resolution rendition is selected.
        height = int(rtmpItem.group(1) or 0)
        if height > streamHeight:
            streamHeight = height
            streamUrl = rtmpItem.group(2)
    streamUrl = streamUrl + " swfUrl=" + mediaPlayer + " swfVfy=1"
    helper.setResolvedUrl(streamUrl)
# Route the plugin invocation: no parameters beyond the default means the
# root listing; otherwise dispatch on the requested action.
params = helper.get_params()
if len(params) == 1:
    mainPage()
elif params['action'] == "showCategory":
    showCategory(params['link'])
elif params['action'] == "playVideo":
    playVideo(params['link'])
|
What would I lose if I reported the wrong answer?
What is a designated reporter?
How will the Augur system ensure enough reporters will be present in each reporting window?
How are scalar values reported?
|
#
# Copyright (C) 2019 by Bartlomiej Kocot
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
import sys, os, traceback, time,Ice,math,copy
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
from PySide import *
from genericworker import *
# If RoboComp was compiled with Python bindings you can use InnerModel in Python
# sys.path.append('/opt/robocomp/lib')
# import librobocomp_qmat
# import librobocomp_osgviewer
# import librobocomp_innermodel
class SpecificWorker(GenericWorker):
    """Qt window that simulates a joystick for a differential robot.

    Dragging the on-screen knob maps its vertical displacement to a forward
    speed and its horizontal displacement to a rotation, which are pushed to
    the robot through ``differentialrobot_proxy.setSpeedBase``.
    """

    def __init__(self, proxy_map):
        super(SpecificWorker, self).__init__(proxy_map)
        # Event-driven widget: the periodic compute() timer is not needed.
        self.timer.stop()
        self.differentialrobot_proxy = proxy_map["DifferentialRobotProxy"]
        self.mousePress = False
        # Top-left corner of the 50x50 knob; (225, 225) centres it inside
        # the 200x200 circle drawn at (150, 150).
        self.x=225
        self.y=225
        self.setGeometry(50, 50, 500, 500)
        self.setWindowTitle("Joystick Simulator Controller")
        self.setStyleSheet("QMainWindow {background: 'white';}");
        self.show()
        self.Speed=0.0
        self.Rotation=0.0
        self.addJoystickImage()

    def setParams(self, params):
        # This component takes no configuration parameters.
        return True

    def addJoystickImage(self):
        """Create and place the circle, knob and speed/rotation readouts."""
        self.Circle = QtGui.QLabel(self)
        self.JoyStick = QtGui.QLabel(self)
        self.SpeedText = QtGui.QLabel(self)
        self.SpeedValue = QtGui.QLabel(self)
        self.RotationText = QtGui.QLabel(self)
        self.RotationValue = QtGui.QLabel(self)
        circlePixmap = QtGui.QPixmap('src/img/circle.png')
        joystickPixmap = QtGui.QPixmap('src/img/joystick.png')
        self.Circle.setPixmap(circlePixmap)
        self.Circle.resize(200,200)
        self.Circle.move(150, 150)
        self.Circle.show()
        self.JoyStick.setObjectName("JoyStick")
        self.JoyStick.setPixmap(joystickPixmap)
        self.JoyStick.resize(50,50)
        self.JoyStick.move(225,225)
        self.JoyStick.show()
        self.SpeedText.setText("Speed: ")
        self.SpeedText.move(400,20)
        self.SpeedText.show()
        self.RotationText.setText("Rotation: ")
        self.RotationText.move(400,40)
        self.RotationText.show()
        self.SpeedValue.setText("0")
        self.SpeedValue.move(450,20)
        self.SpeedValue.show()
        self.RotationValue.setText("0")
        self.RotationValue.move(465,40)
        self.RotationValue.show()

    def mousePressEvent(self, event):
        # Start a drag only if the click lands on the knob itself.
        # The -25 offsets convert the cursor position to the knob's
        # top-left coordinate system (the knob is 50x50).
        if self.distance(event.x()-25,event.y()-25,self.x,self.y)<=25:
            self.mousePress=True

    def mouseReleaseEvent(self, event):
        # Releasing the knob recentres it and stops the robot.
        self.mousePress=False
        self.comeBack()
        self.x=225
        self.y=225

    def mouseMoveEvent(self,event):
        if self.mousePress == True:
            if self.distance(event.x()-25,event.y()-25,225,225) < 100:
                # Inside the circle: follow the cursor directly.
                self.x=event.x()-25
                self.y=event.y()-25
                self.setPosition(self.x,self.y)
            else:
                # Outside the circle: clamp the knob onto the rim along
                # the cursor's direction from the centre.
                sin,cos=self.trigAlpha(event.x()-25,event.y()-25)
                self.x,self.y=self.findPoint(cos,sin)
                self.setPosition(self.x,self.y)

    def setPosition(self,x,y):
        """Move the knob and push the derived command to the robot.

        The scale factors map pixel offsets to robot units — presumably
        mm/s and rad/s; confirm against the DifferentialRobot interface.
        """
        self.JoyStick.move(x,y)
        self.Speed=(225-y)*22
        self.Rotation=(x-225)*0.02
        self.SpeedValue.setText(str(self.Speed))
        self.RotationValue.setText(str(self.Rotation))
        self.differentialrobot_proxy.setSpeedBase(self.Speed, self.Rotation)

    def comeBack(self):
        """Recentre the knob and command the robot to stop."""
        self.JoyStick.move(225,225)
        self.Speed = 0
        self.Rotation = 0
        self.SpeedValue.setText(str(self.Speed))
        self.RotationValue.setText(str(self.Rotation))
        self.differentialrobot_proxy.setSpeedBase(self.Speed, self.Rotation)

    def distance(self,x1,y1,x2,y2):
        """Euclidean distance between (x1, y1) and (x2, y2)."""
        result = (x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)
        result = math.sqrt(result)
        return result

    def trigAlpha(self,x,y):
        """Return (sin, cos) of the angle between the +x axis and the
        vector from the circle centre (225, 225) to (x, y), computed via
        normalised dot and cross products."""
        vecA_X=100.0
        vecA_Y=0
        vecB_X=x-225.0
        vecB_Y=y-225.0
        vecA_length=math.sqrt(vecA_X*vecA_X+vecA_Y*vecA_Y)
        vecB_length=math.sqrt(vecB_X*vecB_X+vecB_Y*vecB_Y)
        cosAlpha=(vecA_X*vecB_X+vecA_Y*vecB_Y)/(vecA_length*vecB_length)
        sinAlpha=(vecA_X*vecB_Y-vecA_Y*vecB_X)/(vecA_length*vecB_length)
        return sinAlpha,cosAlpha

    def findPoint(self,cos,sin):
        """Project the angle back onto the circle rim (radius 100)."""
        pointX=225+100*cos
        pointY=225+100*sin
        return pointX,pointY

    @QtCore.Slot()
    def compute(self):
        # Periodic work is unused (the timer is stopped in __init__).
        return True
|
Kia will soon expand the availability of its free Apple CarPlay and Android Auto software update to a handful of older models.
In order to receive the update, customers must register at myuvo.com. When the update becomes available later this month, owners will be sent an e-mail that contains a link to the software update and step-by-step instructions on how to install it.
The update process will likely mirror Hyundai's do-it-yourself installation program where owners needed to download the software update onto a USB memory stick or the SD card from their navigation system. Once downloaded, owners simply inserted the USB stick or SD card into their vehicle and selected the update option from their infotainment system. Hyundai's update process could take between 35 minutes and four hours, so Kia owners should have plenty of patience.
IRVINE, CALIF., July 19, 2016 – Taking the UVO platform to yet another level of enhancement, Kia Motors America (KMA) today announced that free Apple CarPlay® and Android Auto™ software updates will be available for eligible vehicles when equipped with a compatible navigation system4 or UVO35. Customers will be notified by the end of July via email to go to myuvo.com for vehicle eligibility and step-by-step instructions on updating their vehicle with the latest downloadable software. All owners of new and pre-owned vehicles must register at myuvo.com to receive email notifications that the software is available for download.
Android Auto™ connects to the user’s Android device and allows them access to smartphone apps and functions through Kia’s UVO infotainment system, such as voice-guided Google MapsTM navigation, hands-free calls and texts and voice recognition. Android Auto™ also lets users stream music from Google Play Music™. With a simple and intuitive interface, integrated steering-wheel controls, and powerful voice actions, Android Auto™ was designed to minimize distraction so that users stay focused on the road.
A smarter and more convenient way to use the iPhone® in the car, CarPlay® gives drivers the ability to access the device’s seamless user interface through Kia’s UVO infotainment system. By connecting the iPhone® into the USB port, users are able to make phone calls, access music, send and receive messages, get directions optimized for traffic conditions and more while concentrating on driving. CarPlay® also features hands-free support via Siri®1 voice control or the vehicle’s controls and touchscreen.
|
# encoding=utf-8
# Thanks to http://www.djangosnippets.org/snippets/1051/
#
# Authors: Marinho Brandao <marinho at gmail.com>
# Guilherme M. Gondim (semente) <semente at taurinus.org>
from django.contrib.admin.filterspecs import FilterSpec, ChoicesFilterSpec
from django.utils.encoding import smart_unicode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
class ParentFilterSpec(ChoicesFilterSpec):
    """
    Improved list_filter display for parent Pages by nicely indenting hierarchy

    In theory this would work with any mptt model which uses a "title" attribute.

    my_model_field.page_parent_filter = True
    """

    def __init__(self, f, request, params, model, model_admin, field_path=None):
        from feincms.utils import shorten_string
        super(ParentFilterSpec, self).__init__(f, request, params, model, model_admin)
        # Only offer pages that actually are the parent of something.
        parent_ids = (model.objects.exclude(parent=None)
                                   .values_list("parent__id", flat=True)
                                   .order_by("parent__id")
                                   .distinct())
        parents = model.objects.filter(pk__in=parent_ids).values_list("pk", "title", "level")
        self.lookup_choices = [(pk, "%s%s" % (" " * level, shorten_string(title, max_length=25))) for pk, title, level in parents]

    def choices(self, cl):
        # First entry clears the filter entirely.
        yield {
            'selected': self.lookup_val is None,
            'query_string': cl.get_query_string({}, [self.lookup_kwarg]),
            'display': _('All')
        }
        for pk, label in self.lookup_choices:
            yield {
                'selected': pk == int(self.lookup_val or '0'),
                'query_string': cl.get_query_string({self.lookup_kwarg: pk}),
                'display': mark_safe(smart_unicode(label))
            }

    def title(self):
        return _('Parent')
class CategoryFilterSpec(ChoicesFilterSpec):
    """
    Customization of ChoicesFilterSpec which sorts in the user-expected format

    my_model_field.category_filter = True
    """

    def __init__(self, f, request, params, model, model_admin, field_path=None):
        super(CategoryFilterSpec, self).__init__(f, request, params, model, model_admin)
        # Restrict results to categories which are actually in use:
        in_use = f.related.parent_model.objects.exclude(**{
            f.related.var_name: None
        })
        unsorted_choices = [(category.pk, unicode(category)) for category in in_use]
        # Present the categories alphabetically rather than in database order.
        self.lookup_choices = sorted(unsorted_choices, key=lambda choice: choice[1])

    def choices(self, cl):
        # First entry clears the filter entirely.
        yield {
            'selected': self.lookup_val is None,
            'query_string': cl.get_query_string({}, [self.lookup_kwarg]),
            'display': _('All')
        }
        for pk, label in self.lookup_choices:
            yield {
                'selected': pk == int(self.lookup_val or '0'),
                'query_string': cl.get_query_string({self.lookup_kwarg: pk}),
                'display': mark_safe(smart_unicode(label))
            }

    def title(self):
        return _('Category')
# registering the filter
# Fields marked with `parent_filter = True` get the indented-hierarchy UI,
# fields marked with `category_filter = True` get the sorted-category UI.
# Both are inserted at the head of the spec list so they take precedence
# over Django's default FilterSpec matching.
FilterSpec.filter_specs.insert(0,
    (lambda f: getattr(f, 'parent_filter', False), ParentFilterSpec)
)
FilterSpec.filter_specs.insert(1,
    (lambda f: getattr(f, 'category_filter', False), CategoryFilterSpec)
)
|
Cliche song drives me up the wall | Stand Up and Speak Out!
You don’t know me, but Daniel Dougan linked your blog on FB…….
I just have to say that my husband says the same thing about that song and changes the station as soon as it starts, even if it’s just to listen to commercials!
WHATEVER. I’m sorry if you can’t SEE THE WRITING ON THE WALL, but Nickleback is the most amazing band EVER and if you don’t like cliches, then you can just TAKE YOUR BALL and GO HOME! Instead of complaining when you don’t like something, why don’t you just take the lemons life gives you and MAKE LEMONADE! And put that lemonade in a GLASS THAT’S HALF FULL! You just need to appreciate all the little things in life, including Nickleback lyrics. Because yesterday is past, and tomorrow’s the future, but today is a GIFT! That’s why they call it THE PRESENT!
|
## Copyright (c) 2020 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
#coding : utf - 8
import numpy as np
import numpy.linalg as LA
import matplotlib.pyplot as plt
from Util import drawMF, MSE
"""The Base Class of Estimators"""
class MotionEST(object):
    """
    constructor:
      cur_f: current frame
      ref_f: reference frame
      blk_sz: block size
    """

    def __init__(self, cur_f, ref_f, blk_sz):
        self.cur_f = cur_f
        self.ref_f = ref_f
        self.blk_sz = blk_sz
        # convert RGB to YUV — cur_f/ref_f expose .convert and .size,
        # presumably PIL Images; confirm at call sites
        self.cur_yuv = np.array(self.cur_f.convert('YCbCr'), dtype=np.int)
        self.ref_yuv = np.array(self.ref_f.convert('YCbCr'), dtype=np.int)
        # frame size
        self.width = self.cur_f.size[0]
        self.height = self.cur_f.size[1]
        # motion field size: one motion vector per blk_sz x blk_sz block
        # (partial blocks at the right/bottom edges are dropped by //)
        self.num_row = self.height // self.blk_sz
        self.num_col = self.width // self.blk_sz
        # initialize motion field; mf[r, c] = (dy, dx) — block_dist adds
        # mv[0] to y and mv[1] to x
        self.mf = np.zeros((self.num_row, self.num_col, 2))

    """estimation function Override by child classes"""

    def motion_field_estimation(self):
        # Intentionally a no-op: subclasses fill in self.mf.
        pass

    """
    distortion of a block:
      cur_r: current row
      cur_c: current column
      mv: motion vector
      metric: distortion metric
    """

    def block_dist(self, cur_r, cur_c, mv, metric=MSE):
        # Top-left pixel of the current block.
        cur_x = cur_c * self.blk_sz
        cur_y = cur_r * self.blk_sz
        # Clip the block height/width at the bottom/right frame borders.
        h = min(self.blk_sz, self.height - cur_y)
        w = min(self.blk_sz, self.width - cur_x)
        cur_blk = self.cur_yuv[cur_y:cur_y + h, cur_x:cur_x + w, :]
        ref_x = int(cur_x + mv[1])
        ref_y = int(cur_y + mv[0])
        if 0 <= ref_x < self.width - w and 0 <= ref_y < self.height - h:
            ref_blk = self.ref_yuv[ref_y:ref_y + h, ref_x:ref_x + w, :]
        else:
            # Reference block falls outside the frame: compare against zeros.
            ref_blk = np.zeros((h, w, 3))
        return metric(cur_blk, ref_blk)

    """
    distortion of motion field
    """

    def distortion(self, mask=None, metric=MSE):
        # Average block distortion over all unmasked blocks.
        # NOTE: xrange is Python-2-only, consistent with the rest of this file.
        loss = 0
        count = 0
        for i in xrange(self.num_row):
            for j in xrange(self.num_col):
                # A truthy mask entry means "skip this block".
                if mask is not None and mask[i, j]:
                    continue
                loss += self.block_dist(i, j, self.mf[i, j], metric)
                count += 1
        return loss / count

    """evaluation compare the difference with ground truth"""

    def motion_field_evaluation(self, ground_truth):
        # Mean L2 distance between estimated and ground-truth motion
        # vectors, skipping blocks masked out in the ground truth.
        loss = 0
        count = 0
        gt = ground_truth.mf
        mask = ground_truth.mask
        for i in xrange(self.num_row):
            for j in xrange(self.num_col):
                if mask is not None and mask[i][j]:
                    continue
                loss += LA.norm(gt[i, j] - self.mf[i, j])
                count += 1
        return loss / count

    """render the motion field"""

    def show(self, ground_truth=None, size=10):
        cur_mf = drawMF(self.cur_f, self.blk_sz, self.mf)
        if ground_truth is None:
            n_row = 1
        else:
            gt_mf = drawMF(self.cur_f, self.blk_sz, ground_truth)
            n_row = 2
        # Keep the sub-plots at the frame's aspect ratio.
        plt.figure(figsize=(n_row * size, size * self.height / self.width))
        plt.subplot(1, n_row, 1)
        plt.imshow(cur_mf)
        plt.title('Estimated Motion Field')
        if ground_truth is not None:
            plt.subplot(1, n_row, 2)
            plt.imshow(gt_mf)
            plt.title('Ground Truth')
        plt.tight_layout()
        plt.show()
|
Autumn is already here, and you should be preparing the summer clothes that you will keep until next year in a storage room. At Boxit, besides offering you the best system for storing individuals' personal belongings in your area, we want to help you answer the question: how should you store summer clothes?
Before you put on your summer clothes, put all your clothes together in one place and decide what you want to save for next year and what you want to undo.
If the clothing you want to undo is in good condition we recommend that you do not throw it away and give it to a center or person in need.
The first thing is to wash all the clothes you are going to store.
Wash all clothing with little detergent and no bleach or bleach. With this method of washing you will avoid that unpleasant odors develop on your clothes after being stored for a long time. In addition if the clothes have a recent stain but lavas before saving, it will be much more difficult to eliminate in the future.
This is very important. When you are going to save the clothes, you must make sure that it is dry, otherwise it will also produce bad odors. We do not want our clothes to spoil.
You do not have to iron it.
Many people will advise you to iron your clothes before storing them, but be careful, if you iron the clothes you are going to store for a long time this will make the clothes get a yellowish color.
Now you must evaluate the space you need to store and store the clothes.
Do you have much or little storage space? Depending on the space you have, you can decide if you keep all the summer clothes or select it to leave something in your closet. In this dilemma, Boxit can facilitate your task of calculating how much space you need. Being a guard by boxes is not only cheaper than a storage room but the space is flexible.
You can ask that you deliver the boxes you need to store all your summer clothes at home. Once you have it in boxes they pick it up and store it until next summer.
How can you fit more clothes into a box?
In case you did not know, there are special storage bags from which you can remove the air once your clothes are inside. Thanks to these bags and a vacuum cleaner, you can eliminate all the air inside the bag and get very good results when you store your clothes. Put the folded clothes in the special bags and then remove the air with the vacuum cleaner; this way you can save a lot of space.
Now you know how to store summer clothes; the same method also works for clothes from other seasons. Later we will see how shoes are kept.
|
"""
Exceptions for the case_class module
Copyright (c) 2016 Tom Wiesing -- licensed under MIT, see LICENSE
"""
class CaseClassException(Exception):
    """ Base Exception for all exceptions raised by the case_class module. """

    # Root of the module's exception hierarchy: catching this handles any
    # error deliberately raised by case_class code.
    pass
#
# Instantiation of CaseClasses
#
class NotInstantiableClassException(CaseClassException):
    """ Exception that is raised when a class can not be instantiated. """

    def __init__(self, msg, cls):
        """ Creates a new NotInstantiableClassException.

        :param msg: Human-readable description of the failure.
        :type msg: str

        :param cls: The class that could not be instantiated.
        :type cls: type
        """
        # Record the details before delegating the message upwards.
        self.msg = msg
        self.cls = cls
        super(NotInstantiableClassException, self).__init__(msg)
class NotInstantiableAbstractCaseClassException(NotInstantiableClassException):
    """ Exception that is raised when an AbstractCaseClass is instantiated. """

    def __init__(self, cls):
        """ Creates a new NotInstantiableAbstractCaseClassException.

        :param cls: AbstractCaseClass that can not be instantiated
        :type cls: type
        """
        message = "Can not instantiate AbstractCaseClass %s" % (cls.__name__,)
        super(NotInstantiableAbstractCaseClassException, self).__init__(
            message, cls)
class NoCaseToCaseInheritanceException(Exception):
    """ Exception that is raised when the user tries to
    inherit from a CaseClass or AbstractCaseClass subclass. """

    # NOTE(review): unlike the other errors here this derives from Exception,
    # not CaseClassException — presumably deliberate (it fires during class
    # creation, before a case class exists); confirm.

    def __init__(self, name):
        """ Creates a new NoCaseToCaseInheritanceException.

        :param name: Name of the class the user tried to create.
        :type name: str
        """
        message = ("Unable to create class %s: " % (name,) +
                   "Case-to-case inheritance is prohibited. ")
        super(NoCaseToCaseInheritanceException, self).__init__(message)
        self.name = name
#
# Signatures
#
class SignatureException(CaseClassException):
    """ Base class for all exceptions related to signatures. """

    # Subclasses report problems with a signature's declared arguments:
    # missing values, unknown names, absent defaults.
    pass
class MissingArgument(SignatureException):
    """ Exception indicating that the value for a given argument is not
    fully specified. """

    def __init__(self, name):
        """ Creates a new MissingArgument instance.

        :param name: Name of the argument that does not have a value.
        :type name: str
        """
        message = "MissingArgument: Missing value for %s. " % (name,)
        super(MissingArgument, self).__init__(message)
        self.__name = name  #: str

    @property
    def name(self):
        """ The name of the argument that does not have a value.

        :rtype: str
        """
        return self.__name
class NoSuchArgument(SignatureException):
    """ Exception indicating that an argument does not exist. """

    def __init__(self, name):
        """ Creates a new NoSuchArgument instance.

        :param name: Name of the argument that does not exist.
        :type name: str
        """
        message = "NoSuchArgument: No argument %s exists. " % (name,)
        super(NoSuchArgument, self).__init__(message)
        self.__name = name  #: str

    @property
    def name(self):
        """ The name of the argument that does not exist.

        :rtype: str
        """
        return self.__name
class NoDefaultValue(SignatureException):
    """ Exception indicating that an argument has no default value. """

    def __init__(self, name):
        """ Creates a new NoDefaultValue instance.

        :param name: Name of the argument that has no default.
        :type name: str
        """
        message = "NoDefaultValue: Argument %s has no default. " % (name,)
        super(NoDefaultValue, self).__init__(message)
        self.__name = name  #: str

    @property
    def name(self):
        """ The name of the argument that has no associated default value.

        :rtype: str
        """
        return self.__name
class AppliedSignatureException(CaseClassException):
    """ Base class for all exceptions related to applied signatures. """

    # Subclasses report problems matching concrete call arguments against a
    # signature (surplus positionals/keywords, duplicated values).
    pass
class TooManyArguments(AppliedSignatureException):
    """ Exception indicating that too many positional arguments were passed
    to a signature. """

    def __init__(self):
        """ Creates a new TooManyArguments instance. """
        message = ("TooManyArguments: Too many arguments were passed to the"
                   " signature. ")
        super(TooManyArguments, self).__init__(message)
class TooManyKeyWordArguments(AppliedSignatureException):
    """ Exception indicating that too many keyword arguments were passed to a
    signature. """

    def __init__(self):
        """ Creates a new TooManyKeyWordArguments instance. """
        # BUG FIX: the message previously read "Too many arguments ..."
        # (copy-pasted from TooManyArguments); say "keyword arguments" so the
        # error text matches what actually went wrong.
        super(TooManyKeyWordArguments, self).__init__(
            "TooManyKeyWordArguments: Too many keyword " +
            "arguments were passed to the" +
            " signature. ")
class DoubleArgumentValue(AppliedSignatureException):
    """ Exception indicating that an argument was passed more than once. """

    def __init__(self, name):
        """ Creates a new DoubleArgumentValue instance.

        :param name: Name of the argument that was passed more than once.
        :type name: str
        """
        message = ("DoubleArgumentValue: Argument %s was passed more "
                   "than once. " % (name,))
        super(DoubleArgumentValue, self).__init__(message)
        self.__name = name  #: str

    @property
    def name(self):
        """ The name of the argument that was passed more than once.

        :rtype: str
        """
        return self.__name
class ExtractorException(CaseClassException):
    """ Common base class related to all extractors. """

    # Base for errors raised while running extractor (pattern-matching) code.
    pass
class ExtractorDoesNotMatch(ExtractorException):
    """ raised when an extractor does not match a certain pattern. """

    # Signals "no match" to the caller performing the pattern match.
    pass
__all__ = ["CaseClassException", "NotInstantiableClassException",
"NotInstantiableAbstractCaseClassException",
"NoCaseToCaseInheritanceException", "SignatureException",
"MissingArgument", "NoSuchArgument", "NoDefaultValue",
"AppliedSignatureException", "TooManyArguments",
"TooManyKeyWordArguments", "DoubleArgumentValue"]
|
This fixes 'bzr mv' and 'bzr rename' for Git.
Okay, moves seem reasonable to me. The logic is not all correct, but we'll need to do another pass anyway to deal with filesystem mapping issues.
16 # for cicp file-systems.
43 private to external API users.
47 - """Like get_canonical_inventory_path() but works on multiple items.
49 - :param paths: A sequence of paths relative to the root of the tree.
58 - """Returns the first inventory item that case-insensitively matches path.
62 - defined which is returned.
69 - use get_canonical_inventory_paths() to avoid O(N) behaviour.
71 - :param path: A paths relative to the root of the tree.
73 - that match case insensitively.
80 - # First, if the path as specified exists exactly, just use it.
122 - # got to the end of this directory and no entries matched.
123 - # Return what matched so far, plus the rest as specified.
127 - # all done.
134 """If our tree isn't case sensitive, return the canonical path"""
158 + """Look up canonical paths for multiple items.
160 + :param paths: A sequence of paths relative to the root of the tree.
181 """Base class for working trees that live in bzr meta directories."""
200 - # make a tree used by all the 'canonical' tests below.
221 - # note: not committed.
228 - # check it works when asked for just the directory portion.
301 +# (at your option) any later version.
306 +# GNU General Public License for more details.
312 +"""Tests for interface conformance of canonical paths of trees."""
329 + # make a tree used by all the 'canonical' tests below.
358 + # check it works when asked for just the directory portion.
416 + """Find the canonical path of an item, ignoring case.
452 + # got to the end of this directory and no entries matched.
453 + # Return what matched so far, plus the rest as specified.
462 """Return the ShelfManager for this WorkingTree."""
466 + """Like get_canonical_path() but works on multiple items.
468 + :param paths: A sequence of paths relative to the root of the tree.
478 + """Returns the first item in the tree that matches a path.
480 + This is meant to allow case-insensitive path lookups on e.g.
485 + it is implementation defined which is returned.
492 + use get_canonical_paths() to avoid O(N) behaviour.
494 + :param path: A paths relative to the root of the tree.
496 + that match case insensitively.
503 """Registry for working tree formats."""
|
# -*- coding: utf-8 -*-
# Author: Tommy Clausner <[email protected]>
#
# License: BSD (3-clause)
import os.path as op
import pytest
import numpy as np
from numpy.testing import (assert_array_less, assert_allclose,
assert_array_equal)
from scipy.spatial.distance import cdist
from scipy.sparse import csr_matrix
import mne
from mne import (SourceEstimate, VolSourceEstimate, VectorSourceEstimate,
read_evokeds, SourceMorph, compute_source_morph,
read_source_morph, read_source_estimate,
read_forward_solution, grade_to_vertices,
setup_volume_source_space, make_forward_solution,
make_sphere_model, make_ad_hoc_cov, VolVectorSourceEstimate,
read_freesurfer_lut)
from mne.datasets import testing
from mne.fixes import _get_img_fdata
from mne.minimum_norm import (apply_inverse, read_inverse_operator,
make_inverse_operator)
from mne.source_space import (get_volume_labels_from_aseg, _get_mri_info_data,
_get_atlas_values, _add_interpolator,
_grid_interp)
from mne.transforms import quat_to_rot
from mne.utils import (requires_nibabel, check_version, requires_version,
requires_dipy, requires_h5py, catch_logging)
from mne.fixes import _get_args
# Setup paths
# All fixtures come from the mne-testing-data dataset; download=False means
# the tests are skipped (via requires_testing_data) when it is absent.
data_path = testing.data_path(download=False)
sample_dir = op.join(data_path, 'MEG', 'sample')
subjects_dir = op.join(data_path, 'subjects')
fname_evoked = op.join(sample_dir, 'sample_audvis-ave.fif')
fname_trans = op.join(sample_dir, 'sample_audvis_trunc-trans.fif')
fname_inv_vol = op.join(sample_dir,
                        'sample_audvis_trunc-meg-vol-7-meg-inv.fif')
fname_fwd_vol = op.join(sample_dir,
                        'sample_audvis_trunc-meg-vol-7-fwd.fif')
fname_vol_w = op.join(sample_dir,
                      'sample_audvis_trunc-grad-vol-7-fwd-sensmap-vol.w')
fname_inv_surf = op.join(sample_dir,
                         'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_aseg = op.join(subjects_dir, 'sample', 'mri', 'aseg.mgz')
fname_fmorph = op.join(data_path, 'MEG', 'sample',
                       'fsaverage_audvis_trunc-meg')
fname_smorph = op.join(sample_dir, 'sample_audvis_trunc-meg')
fname_t1 = op.join(subjects_dir, 'sample', 'mri', 'T1.mgz')
fname_vol = op.join(subjects_dir, 'sample', 'bem', 'sample-volume-7mm-src.fif')
fname_brain = op.join(subjects_dir, 'sample', 'mri', 'brain.mgz')
# NOTE(review): fname_aseg is re-assigned below with the same value as above —
# duplicate definition, harmless but redundant.
fname_aseg = op.join(subjects_dir, 'sample', 'mri', 'aseg.mgz')
fname_fs_vol = op.join(subjects_dir, 'fsaverage', 'bem',
                       'fsaverage-vol7-nointerp-src.fif.gz')
fname_aseg_fs = op.join(subjects_dir, 'fsaverage', 'mri', 'aseg.mgz')
fname_stc = op.join(sample_dir, 'fsaverage_audvis_trunc-meg')
def _real_vec_stc():
    """Build a small real vector source estimate from the sample data.

    Applies a vector-orientation inverse to the first evoked, cropped to
    10 ms to keep the fixture fast.
    """
    inv = read_inverse_operator(fname_inv_surf)
    evoked = read_evokeds(fname_evoked, baseline=(None, 0))[0].crop(0, 0.01)
    return apply_inverse(evoked, inv, pick_ori='vector')
def test_sourcemorph_consistency():
    """Test SourceMorph class consistency."""
    # The constructor's argument list (minus self) must stay in sync with the
    # attribute list mne.morph uses internally for SourceMorph objects.
    assert _get_args(SourceMorph.__init__)[1:] == \
        mne.morph._SOURCE_MORPH_ATTRIBUTES
@testing.requires_testing_data
def test_sparse_morph():
    """Test sparse morphing."""
    # Fixed seed so the randomly chosen vertex subsets are reproducible.
    rng = np.random.RandomState(0)
    # 4 left- and 6 right-hemisphere vertices -> 10 rows of data below.
    vertices_fs = [np.sort(rng.permutation(np.arange(10242))[:4]),
                   np.sort(rng.permutation(np.arange(10242))[:6])]
    data = rng.randn(10, 1)
    stc_fs = SourceEstimate(data, vertices_fs, 1, 1, 'fsaverage')
    spheres_fs = [mne.read_surface(op.join(
        subjects_dir, 'fsaverage', 'surf', '%s.sphere.reg' % hemi))[0]
        for hemi in ('lh', 'rh')]
    spheres_sample = [mne.read_surface(op.join(
        subjects_dir, 'sample', 'surf', '%s.sphere.reg' % hemi))[0]
        for hemi in ('lh', 'rh')]
    morph_fs_sample = compute_source_morph(
        stc_fs, 'fsaverage', 'sample', sparse=True, spacing=None,
        subjects_dir=subjects_dir)
    stc_sample = morph_fs_sample.apply(stc_fs)
    # Sparse morph is a permutation/assignment: each source vertex should
    # land on a nearby target vertex with its data value unchanged.
    offset = 0
    orders = list()
    for v1, s1, v2, s2 in zip(stc_fs.vertices, spheres_fs,
                              stc_sample.vertices, spheres_sample):
        dists = cdist(s1[v1], s2[v2])
        order = np.argmin(dists, axis=-1)
        assert_array_less(dists[np.arange(len(order)), order], 1.5)  # mm
        orders.append(order + offset)
        offset += len(order)
    assert_allclose(stc_fs.data, stc_sample.data[np.concatenate(orders)])
    # Return
    morph_sample_fs = compute_source_morph(
        stc_sample, 'sample', 'fsaverage', sparse=True, spacing=None,
        subjects_dir=subjects_dir)
    stc_fs_return = morph_sample_fs.apply(stc_sample)
    offset = 0
    orders = list()
    for v1, s, v2 in zip(stc_fs.vertices, spheres_fs, stc_fs_return.vertices):
        dists = cdist(s[v1], s[v2])
        order = np.argmin(dists, axis=-1)
        assert_array_less(dists[np.arange(len(order)), order], 1.5)  # mm
        orders.append(order + offset)
        offset += len(order)
    # Round trip must preserve the data exactly (up to permutation).
    assert_allclose(stc_fs.data, stc_fs_return.data[np.concatenate(orders)])
@testing.requires_testing_data
def test_xhemi_morph():
    """Test cross-hemisphere morphing."""
    stc = read_source_estimate(fname_stc, subject='sample')
    # smooth 1 for speed where possible
    smooth = 4
    spacing = 4
    n_grade_verts = 2562
    # First move the data onto the symmetric template where xhemi is defined.
    stc = compute_source_morph(
        stc, 'sample', 'fsaverage_sym', smooth=smooth, warn=False,
        spacing=spacing, subjects_dir=subjects_dir).apply(stc)
    # Morph LH only (RH spacing is empty), so everything ends up on the left.
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=1, xhemi=True,
        warn=False, spacing=[stc.vertices[0], []],
        subjects_dir=subjects_dir)
    stc_xhemi = morph.apply(stc)
    assert stc_xhemi.data.shape[0] == n_grade_verts
    assert stc_xhemi.rh_data.shape[0] == 0
    assert len(stc_xhemi.vertices[1]) == 0
    assert stc_xhemi.lh_data.shape[0] == n_grade_verts
    assert len(stc_xhemi.vertices[0]) == n_grade_verts
    # complete reversal mapping
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth, xhemi=True,
        warn=False, spacing=stc.vertices, subjects_dir=subjects_dir)
    mm = morph.morph_mat
    # The morph matrix must only contain cross-hemisphere entries: the
    # within-hemisphere quadrants stay empty.
    assert mm.shape == (n_grade_verts * 2,) * 2
    assert mm.size > n_grade_verts * 2
    assert mm[:n_grade_verts, :n_grade_verts].size == 0  # L to L
    assert mm[n_grade_verts:, n_grade_verts:].size == 0  # R to L
    assert mm[n_grade_verts:, :n_grade_verts].size > n_grade_verts  # L to R
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts  # R to L
    # more complicated reversal mapping
    vertices_use = [stc.vertices[0], np.arange(10242)]
    n_src_verts = len(vertices_use[1])
    assert vertices_use[0].shape == (n_grade_verts,)
    assert vertices_use[1].shape == (n_src_verts,)
    # ensure it's sufficiently different to manifest round-trip errors
    assert np.in1d(vertices_use[1], stc.vertices[1]).mean() < 0.3
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth, xhemi=True,
        warn=False, spacing=vertices_use, subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts + n_src_verts, n_grade_verts * 2)
    assert mm[:n_grade_verts, :n_grade_verts].size == 0
    assert mm[n_grade_verts:, n_grade_verts:].size == 0
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts
    assert mm[n_grade_verts:, :n_grade_verts].size > n_src_verts
    # morph forward then back
    stc_xhemi = morph.apply(stc)
    morph = compute_source_morph(
        stc_xhemi, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth,
        xhemi=True, warn=False, spacing=stc.vertices,
        subjects_dir=subjects_dir)
    stc_return = morph.apply(stc_xhemi)
    for hi in range(2):
        assert_array_equal(stc_return.vertices[hi], stc.vertices[hi])
    correlation = np.corrcoef(stc.data.ravel(), stc_return.data.ravel())[0, 1]
    assert correlation > 0.9  # not great b/c of sparse grade + small smooth
@testing.requires_testing_data
@pytest.mark.parametrize('smooth, lower, upper, n_warn, dtype', [
    (None, 0.959, 0.963, 0, float),
    (3, 0.968, 0.971, 2, complex),
    ('nearest', 0.98, 0.99, 0, float),
])
def test_surface_source_morph_round_trip(smooth, lower, upper, n_warn, dtype):
    """Test round-trip morphing yields similar STCs."""
    kwargs = dict(smooth=smooth, warn=True, subjects_dir=subjects_dir)
    stc = mne.read_source_estimate(fname_smorph)
    if dtype is complex:
        # exercise the complex-valued code path with purely imaginary data
        stc.data = 1j * stc.data
        assert_array_equal(stc.data.real, 0.)
    if smooth == 'nearest' and not check_version('scipy', '1.3'):
        # nearest-neighbor smoothing requires SciPy >= 1.3
        with pytest.raises(ValueError, match='required to use nearest'):
            morph = compute_source_morph(stc, 'sample', 'fsaverage', **kwargs)
        return
    # NOTE(review): pytest.warns(None) is deprecated in pytest >= 7 --
    # consider switching to a record-warnings helper when upgrading
    with pytest.warns(None) as w:
        morph = compute_source_morph(stc, 'sample', 'fsaverage', **kwargs)
    # only count the "vertices not included" warnings
    w = [ww for ww in w if 'vertices not included' in str(ww.message)]
    assert len(w) == n_warn
    assert morph.morph_mat.shape == (20484, len(stc.data))
    stc_fs = morph.apply(stc)
    # morph back to the original source space and compare
    morph_back = compute_source_morph(
        stc_fs, 'fsaverage', 'sample', spacing=stc.vertices, **kwargs)
    assert morph_back.morph_mat.shape == (len(stc.data), 20484)
    stc_back = morph_back.apply(stc_fs)
    corr = np.corrcoef(stc.data.ravel(), stc_back.data.ravel())[0, 1]
    assert lower <= corr <= upper
    # check the round-trip power
    assert_power_preserved(stc, stc_back)
def assert_power_preserved(orig, new, limits=(1., 1.05)):
    """Assert that the power is preserved during a round-trip morph."""
    __tracebackhide__ = True
    lo, hi = limits
    for part in ('real', 'imag'):
        orig_norm = np.linalg.norm(getattr(orig.data, part))
        new_norm = np.linalg.norm(getattr(new.data, part))
        if orig_norm == 0. and new_norm == 0.:  # no data of this type
            continue
        ratio = orig_norm / new_norm
        assert lo < ratio < hi, f'Power ratio {part} = {ratio}'
@requires_h5py
@testing.requires_testing_data
def test_surface_vector_source_morph(tmpdir):
    """Test surface and vector source estimate morph."""
    inverse_operator_surf = read_inverse_operator(fname_inv_surf)
    stc_surf = read_source_estimate(fname_smorph, subject='sample')
    stc_surf.crop(0.09, 0.1)  # for faster computation
    stc_vec = _real_vec_stc()
    source_morph_surf = compute_source_morph(
        inverse_operator_surf['src'], subjects_dir=subjects_dir,
        smooth=1, warn=False)  # smooth 1 for speed
    assert source_morph_surf.subject_from == 'sample'
    assert source_morph_surf.subject_to == 'fsaverage'
    assert source_morph_surf.kind == 'surface'
    assert isinstance(source_morph_surf.src_data, dict)
    assert isinstance(source_morph_surf.src_data['vertices_from'], list)
    assert isinstance(source_morph_surf, SourceMorph)
    stc_surf_morphed = source_morph_surf.apply(stc_surf)
    assert isinstance(stc_surf_morphed, SourceEstimate)
    stc_vec_morphed = source_morph_surf.apply(stc_vec)
    # a surface morph cannot produce volume (NIfTI) output
    with pytest.raises(ValueError, match="Invalid value for the 'output'"):
        source_morph_surf.apply(stc_surf, output='nifti1')
    # check if correct class after morphing
    assert isinstance(stc_surf_morphed, SourceEstimate)
    assert isinstance(stc_vec_morphed, VectorSourceEstimate)
    # check __repr__
    assert 'surface' in repr(source_morph_surf)
    # check loading and saving for surf
    source_morph_surf.save(tmpdir.join('42.h5'))
    source_morph_surf_r = read_source_morph(tmpdir.join('42.h5'))
    # NOTE(review): sorted(obj.__dict__) iterates attribute *names*, so
    # this only verifies both objects expose the same attributes
    assert (all([read == saved for read, saved in
                 zip(sorted(source_morph_surf_r.__dict__),
                     sorted(source_morph_surf.__dict__))]))
    # check wrong subject correction
    stc_surf.subject = None
    assert isinstance(source_morph_surf.apply(stc_surf), SourceEstimate)
    # degenerate: applying a surface morph to a volume STC must fail
    stc_vol = read_source_estimate(fname_vol_w, 'sample')
    with pytest.raises(TypeError, match='stc_from must be an instance'):
        source_morph_surf.apply(stc_vol)
@requires_h5py
@requires_nibabel()
@requires_dipy()
@pytest.mark.slowtest
@testing.requires_testing_data
def test_volume_source_morph_basic(tmpdir):
    """Test volume source estimate morph, special cases and exceptions."""
    import nibabel as nib
    inverse_operator_vol = read_inverse_operator(fname_inv_vol)
    stc_vol = read_source_estimate(fname_vol_w, 'sample')
    # check for invalid input type
    with pytest.raises(TypeError, match='src must be'):
        compute_source_morph(src=42)
    # check for raising an error if neither
    # inverse_operator_vol['src'][0]['subject_his_id'] nor subject_from is set,
    # but attempting to perform a volume morph
    src = inverse_operator_vol['src']
    assert src._subject is None  # already None on disk (old!)
    with pytest.raises(ValueError, match='subject_from could not be inferred'):
        with pytest.warns(RuntimeWarning, match='recommend regenerating'):
            compute_source_morph(src=src, subjects_dir=subjects_dir)
    # check infer subject_from from src[0]['subject_his_id']
    src[0]['subject_his_id'] = 'sample'
    with pytest.raises(ValueError, match='Inter-hemispheric morphing'):
        compute_source_morph(src=src, subjects_dir=subjects_dir, xhemi=True)
    with pytest.raises(ValueError, match='Only surface.*sparse morph'):
        compute_source_morph(src=src, sparse=True, subjects_dir=subjects_dir)
    # terrible quality but fast: coarse zooms and a single iteration each
    zooms = 20
    kwargs = dict(zooms=zooms, niter_sdr=(1,), niter_affine=(1,))
    source_morph_vol = compute_source_morph(
        subjects_dir=subjects_dir, src=fname_inv_vol,
        subject_from='sample', **kwargs)
    shape = (13,) * 3  # for the given zooms
    assert source_morph_vol.subject_from == 'sample'
    # the brain used in sample data has shape (255, 255, 255)
    assert tuple(source_morph_vol.sdr_morph.domain_shape) == shape
    assert tuple(source_morph_vol.pre_affine.domain_shape) == shape
    # proofs the above
    assert_array_equal(source_morph_vol.zooms, (zooms,) * 3)
    # assure proper src shape
    mri_size = (src[0]['mri_height'], src[0]['mri_depth'], src[0]['mri_width'])
    assert source_morph_vol.src_data['src_shape_full'] == mri_size
    fwd = read_forward_solution(fname_fwd_vol)
    fwd['src'][0]['subject_his_id'] = 'sample'  # avoid further warnings
    source_morph_vol = compute_source_morph(
        fwd['src'], 'sample', 'sample', subjects_dir=subjects_dir,
        **kwargs)
    # check wrong subject_to
    with pytest.raises(IOError, match='cannot read file'):
        compute_source_morph(fwd['src'], 'sample', '42',
                             subjects_dir=subjects_dir)
    # two different ways of saving
    source_morph_vol.save(tmpdir.join('vol'))
    # check loading (save() appends the '-morph.h5' suffix)
    source_morph_vol_r = read_source_morph(tmpdir.join('vol-morph.h5'))
    # check for invalid file name handling ()
    with pytest.raises(IOError, match='not found'):
        read_source_morph(tmpdir.join('42'))
    # check morph
    stc_vol_morphed = source_morph_vol.apply(stc_vol)
    # old way, verts do not match
    assert not np.array_equal(stc_vol_morphed.vertices[0], stc_vol.vertices[0])
    # vector: replicate the scalar data along 3 orientations
    stc_vol_vec = VolVectorSourceEstimate(
        np.tile(stc_vol.data[:, np.newaxis], (1, 3, 1)),
        stc_vol.vertices, 0, 1)
    stc_vol_vec_morphed = source_morph_vol.apply(stc_vol_vec)
    assert isinstance(stc_vol_vec_morphed, VolVectorSourceEstimate)
    # each orientation must morph exactly like the scalar data
    for ii in range(3):
        assert_allclose(stc_vol_vec_morphed.data[:, ii], stc_vol_morphed.data)
    # check output as NIfTI
    assert isinstance(source_morph_vol.apply(stc_vol_vec, output='nifti2'),
                      nib.Nifti2Image)
    # check for subject_from mismatch
    source_morph_vol_r.subject_from = '42'
    with pytest.raises(ValueError, match='subject_from must match'):
        source_morph_vol_r.apply(stc_vol_morphed)
    # check if nifti is in grid morph space with voxel_size == spacing
    img_morph_res = source_morph_vol.apply(stc_vol, output='nifti1')
    # assure morph spacing
    assert isinstance(img_morph_res, nib.Nifti1Image)
    assert img_morph_res.header.get_zooms()[:3] == (zooms,) * 3
    # assure src shape
    img_mri_res = source_morph_vol.apply(stc_vol, output='nifti1',
                                         mri_resolution=True)
    assert isinstance(img_mri_res, nib.Nifti1Image)
    assert (img_mri_res.shape == (src[0]['mri_height'], src[0]['mri_depth'],
                                  src[0]['mri_width']) +
            (img_mri_res.shape[3],))
    # check if nifti is defined resolution with voxel_size == (5., 5., 5.)
    img_any_res = source_morph_vol.apply(stc_vol, output='nifti1',
                                         mri_resolution=(5., 5., 5.))
    assert isinstance(img_any_res, nib.Nifti1Image)
    assert img_any_res.header.get_zooms()[:3] == (5., 5., 5.)
    # check if morph outputs correct data
    assert isinstance(stc_vol_morphed, VolSourceEstimate)
    # check if loaded and saved objects contain the same
    # NOTE(review): sorted(obj.__dict__) compares attribute *names* only
    assert (all([read == saved for read, saved in
                 zip(sorted(source_morph_vol_r.__dict__),
                     sorted(source_morph_vol.__dict__))]))
    # check __repr__
    assert 'volume' in repr(source_morph_vol)
    # check Nifti2Image
    assert isinstance(
        source_morph_vol.apply(stc_vol, mri_resolution=True,
                               mri_space=True, output='nifti2'),
        nib.Nifti2Image)
    # Degenerate conditions
    with pytest.raises(TypeError, match='output must be'):
        source_morph_vol.apply(stc_vol, output=1)
    with pytest.raises(ValueError, match='subject_from does not match'):
        compute_source_morph(src=src, subject_from='42')
    with pytest.raises(ValueError, match='output'):
        source_morph_vol.apply(stc_vol, output='42')
    with pytest.raises(ValueError, match='subject_to cannot be None'):
        compute_source_morph(src, 'sample', None,
                             subjects_dir=subjects_dir)
    # Check if not morphed, but voxel size not boolean, raise ValueError.
    # Note that this check requires dipy to not raise the dipy ImportError
    # before checking if the actual voxel size error will raise.
    with pytest.raises(ValueError, match='Cannot infer original voxel size'):
        stc_vol.as_volume(inverse_operator_vol['src'], mri_resolution=4)
    # applying a volume morph to a surface STC must fail
    stc_surf = read_source_estimate(fname_stc, 'sample')
    with pytest.raises(TypeError, match='stc_from must be an instance'):
        source_morph_vol.apply(stc_surf)
    # src_to
    source_morph_vol = compute_source_morph(
        fwd['src'], subject_from='sample', src_to=fwd['src'],
        subject_to='sample', subjects_dir=subjects_dir, **kwargs)
    stc_vol_2 = source_morph_vol.apply(stc_vol)
    # new way, verts match
    assert_array_equal(stc_vol.vertices[0], stc_vol_2.vertices[0])
    # an STC with one vertex missing must produce an informative error
    stc_vol_bad = VolSourceEstimate(
        stc_vol.data[:-1], [stc_vol.vertices[0][:-1]],
        stc_vol.tmin, stc_vol.tstep)
    match = (
        'vertices do not match between morph \\(4157\\) and stc \\(4156\\).*'
        '\n.*\n.*\n.*Vertices were likely excluded during forward computatio.*'
    )
    with pytest.raises(ValueError, match=match):
        source_morph_vol.apply(stc_vol_bad)
    # nifti outputs and stc equiv
    img_vol = source_morph_vol.apply(stc_vol, output='nifti1')
    img_vol_2 = stc_vol_2.as_volume(src=fwd['src'], mri_resolution=False)
    assert_allclose(img_vol.affine, img_vol_2.affine)
    img_vol = img_vol.get_fdata()
    img_vol_2 = img_vol_2.get_fdata()
    assert img_vol.shape == img_vol_2.shape
    assert_allclose(img_vol, img_vol_2)
@requires_h5py
@requires_nibabel()
@requires_dipy()
@pytest.mark.slowtest
@testing.requires_testing_data
@pytest.mark.parametrize(
    'subject_from, subject_to, lower, upper, dtype, morph_mat', [
        ('sample', 'fsaverage', 5.9, 6.1, float, False),
        ('fsaverage', 'fsaverage', 0., 0.1, float, False),
        ('sample', 'sample', 0., 0.1, complex, False),
        ('sample', 'sample', 0., 0.1, float, True),  # morph_mat
        ('sample', 'fsaverage', 10, 12, float, True),  # morph_mat
    ])
def test_volume_source_morph_round_trip(
        tmpdir, subject_from, subject_to, lower, upper, dtype, morph_mat,
        monkeypatch):
    """Test volume source estimate morph round-trips well."""
    import nibabel as nib
    from nibabel.processing import resample_from_to
    src = dict()
    if morph_mat:
        # ~1.5 minutes with pos=7. (4157 morphs!) for sample, so only test
        # morph_mat computation mode with a few labels
        label_names = sorted(get_volume_labels_from_aseg(fname_aseg))[1:2]
        if 'sample' in (subject_from, subject_to):
            src['sample'] = setup_volume_source_space(
                'sample', subjects_dir=subjects_dir,
                volume_label=label_names, mri=fname_aseg)
            assert sum(s['nuse'] for s in src['sample']) == 12
        if 'fsaverage' in (subject_from, subject_to):
            src['fsaverage'] = setup_volume_source_space(
                'fsaverage', subjects_dir=subjects_dir,
                volume_label=label_names[:3], mri=fname_aseg_fs)
            assert sum(s['nuse'] for s in src['fsaverage']) == 16
    else:
        assert not morph_mat
        if 'sample' in (subject_from, subject_to):
            src['sample'] = mne.read_source_spaces(fname_vol)
            src['sample'][0]['subject_his_id'] = 'sample'
            assert src['sample'][0]['nuse'] == 4157
        if 'fsaverage' in (subject_from, subject_to):
            # Created to save space with:
            #
            # bem = op.join(op.dirname(mne.__file__), 'data', 'fsaverage',
            #               'fsaverage-inner_skull-bem.fif')
            # src_fsaverage = mne.setup_volume_source_space(
            #     'fsaverage', pos=7., bem=bem, mindist=0,
            #     subjects_dir=subjects_dir, add_interpolator=False)
            # mne.write_source_spaces(fname_fs_vol, src_fsaverage,
            #                         overwrite=True)
            #
            # For speed we do it without the interpolator because it's huge.
            src['fsaverage'] = mne.read_source_spaces(fname_fs_vol)
            src['fsaverage'][0].update(
                vol_dims=np.array([23, 29, 25]), seg_name='brain')
            _add_interpolator(src['fsaverage'])
            assert src['fsaverage'][0]['nuse'] == 6379
    src_to, src_from = src[subject_to], src[subject_from]
    del src
    # No SDR just for speed once everything works
    kwargs = dict(niter_sdr=(), niter_affine=(1,),
                  subjects_dir=subjects_dir, verbose=True)
    morph_from_to = compute_source_morph(
        src=src_from, src_to=src_to, subject_to=subject_to, **kwargs)
    morph_to_from = compute_source_morph(
        src=src_to, src_to=src_from, subject_to=subject_from, **kwargs)
    nuse = sum(s['nuse'] for s in src_from)
    assert nuse > 10
    # activate ~10 evenly spaced source vertices one at a time (one-hot cols)
    use = np.linspace(0, nuse - 1, 10).round().astype(int)
    data = np.eye(nuse)[:, use]
    if dtype is complex:
        data = data * 1j
    vertices = [s['vertno'] for s in src_from]
    stc_from = VolSourceEstimate(data, vertices, 0, 1)
    with catch_logging() as log:
        stc_from_rt = morph_to_from.apply(
            morph_from_to.apply(stc_from, verbose='debug'))
    log = log.getvalue()
    assert 'individual volume morph' in log
    # after the round trip, the peak should stay close to where it started
    maxs = np.argmax(stc_from_rt.data, axis=0)
    src_rr = np.concatenate([s['rr'][s['vertno']] for s in src_from])
    dists = 1000 * np.linalg.norm(src_rr[use] - src_rr[maxs], axis=1)
    mu = np.mean(dists)
    # fsaverage=5.99; 7.97 without additional src_ras_t fix
    # fsaverage=7.97; 25.4 without src_ras_t fix
    assert lower <= mu < upper, f'round-trip distance {mu}'
    # check that pre_affine is close to identity when subject_to==subject_from
    if subject_to == subject_from:
        for morph in (morph_to_from, morph_from_to):
            assert_allclose(
                morph.pre_affine.affine, np.eye(4), atol=1e-2)
    # check that power is more or less preserved (labelizing messes with this)
    if morph_mat:
        if subject_to == 'fsaverage':
            limits = (18, 18.5)
        else:
            limits = (7, 7.5)
    else:
        limits = (1, 1.2)
    stc_from_unit = stc_from.copy().crop(0, 0)
    stc_from_unit._data.fill(1.)
    stc_from_unit_rt = morph_to_from.apply(morph_from_to.apply(stc_from_unit))
    assert_power_preserved(stc_from_unit, stc_from_unit_rt, limits=limits)
    if morph_mat:
        # the sparse volume morph matrix must survive a save/load round trip
        fname = tmpdir.join('temp-morph.h5')
        morph_to_from.save(fname)
        morph_to_from = read_source_morph(fname)
        assert morph_to_from.vol_morph_mat is None
        morph_to_from.compute_vol_morph_mat(verbose=True)
        morph_to_from.save(fname, overwrite=True)
        morph_to_from = read_source_morph(fname)
        assert isinstance(morph_to_from.vol_morph_mat, csr_matrix), 'csr'
        # equivalence (plus automatic calling)
        assert morph_from_to.vol_morph_mat is None
        monkeypatch.setattr(mne.morph, '_VOL_MAT_CHECK_RATIO', 0.)
        with catch_logging() as log:
            with pytest.warns(RuntimeWarning, match=r'calling morph\.compute'):
                stc_from_rt_lin = morph_to_from.apply(
                    morph_from_to.apply(stc_from, verbose='debug'))
        assert isinstance(morph_from_to.vol_morph_mat, csr_matrix), 'csr'
        log = log.getvalue()
        assert 'sparse volume morph matrix' in log
        assert_allclose(stc_from_rt.data, stc_from_rt_lin.data)
        del stc_from_rt_lin
        stc_from_unit_rt_lin = morph_to_from.apply(
            morph_from_to.apply(stc_from_unit))
        assert_allclose(stc_from_unit_rt.data, stc_from_unit_rt_lin.data)
        del stc_from_unit_rt_lin
    del stc_from, stc_from_rt
    # before and after morph, check the proportion of vertices
    # that are inside and outside the brainmask.mgz
    brain = nib.load(op.join(subjects_dir, subject_from, 'mri', 'brain.mgz'))
    mask = _get_img_fdata(brain) > 0
    if subject_from == subject_to == 'sample':
        for stc in [stc_from_unit, stc_from_unit_rt]:
            img = stc.as_volume(src_from, mri_resolution=True)
            img = nib.Nifti1Image(  # abs to convert complex
                np.abs(_get_img_fdata(img)[:, :, :, 0]), img.affine)
            img = _get_img_fdata(resample_from_to(img, brain, order=1))
            assert img.shape == mask.shape
            in_ = img[mask].astype(bool).mean()
            out = img[~mask].astype(bool).mean()
            if morph_mat:
                out_max = 0.001
                in_min, in_max = 0.005, 0.007
            else:
                out_max = 0.02
                in_min, in_max = 0.97, 0.98
            assert out < out_max, f'proportion out of volume {out}'
            assert in_min < in_ < in_max, f'proportion inside volume {in_}'
@pytest.mark.slowtest
@testing.requires_testing_data
def test_morph_stc_dense():
    """Test morphing stc."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_to = read_source_estimate(fname_fmorph)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    assert_array_equal(stc_to.time_as_index([0.09, 0.1], use_rounding=True),
                       [0, len(stc_to.times) - 1])
    # After dep change this to:
    morph = compute_source_morph(
        subject_to=subject_to, spacing=3, smooth=12, src=stc_from,
        subjects_dir=subjects_dir, precompute=True)
    assert morph.vol_morph_mat is None  # a no-op for surface
    stc_to1 = morph.apply(stc_from)
    # compare to the stored reference morph result
    assert_allclose(stc_to.data, stc_to1.data, atol=1e-5)
    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert np.corrcoef(mean_to, mean_from).min() > 0.999
    vertices_to = grade_to_vertices(subject_to, grade=3,
                                    subjects_dir=subjects_dir)
    # make sure we can fill by morphing (spacing=None -> all dest vertices)
    with pytest.warns(RuntimeWarning, match='consider increasing'):
        morph = compute_source_morph(
            stc_from, subject_from, subject_to, spacing=None, smooth=1,
            subjects_dir=subjects_dir)
    stc_to5 = morph.apply(stc_from)
    # both full fsaverage hemispheres
    assert stc_to5.data.shape[0] == 163842 + 163842
    # Morph vector data
    stc_vec = _real_vec_stc()
    stc_vec_to1 = compute_source_morph(
        stc_vec, subject_from, subject_to, subjects_dir=subjects_dir,
        spacing=vertices_to, smooth=1, warn=False).apply(stc_vec)
    assert stc_vec_to1.subject == subject_to
    assert stc_vec_to1.tmin == stc_vec.tmin
    assert stc_vec_to1.tstep == stc_vec.tstep
    assert len(stc_vec_to1.lh_vertno) == 642
    assert len(stc_vec_to1.rh_vertno) == 642
    # Degenerate conditions
    # Morphing to a density that is too high should raise an informative error
    # (here we need to push to grade=6, but for some subjects even grade=5
    # will break)
    with pytest.raises(ValueError, match='Cannot use icosahedral grade 6 '):
        compute_source_morph(
            stc_to1, subject_from=subject_to, subject_to=subject_from,
            spacing=6, subjects_dir=subjects_dir)
    del stc_to1
    with pytest.raises(ValueError, match='smooth.* has to be at least 1'):
        compute_source_morph(
            stc_from, subject_from, subject_to, spacing=5, smooth=-1,
            subjects_dir=subjects_dir)
    # subject from mismatch
    with pytest.raises(ValueError, match="subject_from does not match"):
        compute_source_morph(stc_from, subject_from='foo',
                             subjects_dir=subjects_dir)
    # only one set of vertices
    with pytest.raises(ValueError, match="grade.*list must have two elements"):
        compute_source_morph(
            stc_from, subject_from=subject_from, spacing=[vertices_to[0]],
            subjects_dir=subjects_dir)
@testing.requires_testing_data
def test_morph_stc_sparse():
    """Test morphing stc with sparse=True."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    # Morph sparse data
    # Make a sparse stc
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_from.vertices[0] = stc_from.vertices[0][[100, 500]]
    stc_from.vertices[1] = stc_from.vertices[1][[200]]
    stc_from._data = stc_from._data[:3]
    stc_to_sparse = compute_source_morph(
        stc_from, subject_from=subject_from, subject_to=subject_to,
        spacing=None, sparse=True, subjects_dir=subjects_dir).apply(stc_from)
    # a sparse morph only relocates data between vertices, so the sorted
    # per-vertex sums must be preserved
    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    # bugfix: compare the morphed STC against the original -- these
    # previously read ``stc_from.tmin == stc_from.tmin`` (a tautology)
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep
    # same checks with an empty left hemisphere
    stc_from.vertices[0] = np.array([], dtype=np.int64)
    stc_from._data = stc_from._data[:1]
    stc_to_sparse = compute_source_morph(
        stc_from, subject_from, subject_to, spacing=None, sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)
    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep
    # Degenerate cases
    with pytest.raises(ValueError, match='spacing must be set to None'):
        compute_source_morph(
            stc_from, subject_from=subject_from, subject_to=subject_to,
            spacing=5, sparse=True, subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='xhemi=True can only be used with'):
        compute_source_morph(
            stc_from, subject_from=subject_from, subject_to=subject_to,
            spacing=None, sparse=True, xhemi=True, subjects_dir=subjects_dir)
@requires_nibabel()
@testing.requires_testing_data
@pytest.mark.parametrize('sl, n_real, n_mri, n_orig', [
    # First and last should add up, middle can have overlap should be <= sum
    (slice(0, 1), 37, 138, 8),
    (slice(1, 2), 51, 204, 12),
    (slice(0, 2), 88, 324, 20),
])
def test_volume_labels_morph(tmpdir, sl, n_real, n_mri, n_orig):
    """Test generating a source space from volume label."""
    import nibabel as nib
    n_use = (sl.stop - sl.start) // (sl.step or 1)
    # see gh-5224
    evoked = mne.read_evokeds(fname_evoked)[0].crop(0, 0)
    evoked.pick_channels(evoked.ch_names[:306:8])  # subset for speed
    evoked.info.normalize_proj()
    n_ch = len(evoked.ch_names)
    lut, _ = read_freesurfer_lut()
    label_names = sorted(get_volume_labels_from_aseg(fname_aseg))
    use_label_names = label_names[sl]
    src = setup_volume_source_space(
        'sample', subjects_dir=subjects_dir, volume_label=use_label_names,
        mri=fname_aseg)
    assert len(src) == n_use
    assert src.kind == 'volume'
    n_src = sum(s['nuse'] for s in src)
    sphere = make_sphere_model('auto', 'auto', evoked.info)
    fwd = make_forward_solution(evoked.info, fname_trans, src, sphere)
    assert fwd['sol']['data'].shape == (n_ch, n_src * 3)
    inv = make_inverse_operator(evoked.info, fwd, make_ad_hoc_cov(evoked.info),
                                loose=1.)
    stc = apply_inverse(evoked, inv)
    assert stc.data.shape == (n_src, 1)
    img = stc.as_volume(src, mri_resolution=True)
    assert img.shape == (86, 86, 86, 1)
    n_on = np.array(img.dataobj).astype(bool).sum()
    # count of aseg voxels belonging to the requested labels
    aseg_img = _get_img_fdata(nib.load(fname_aseg))
    n_got_real = np.in1d(
        aseg_img.ravel(), [lut[name] for name in use_label_names]).sum()
    assert n_got_real == n_real
    # - This was 291 on `main` before gh-5590
    # - Refactoring transforms it became 279 with a < 1e-8 change in vox_mri_t
    # - Dropped to 123 once nearest-voxel was used in gh-7653
    # - Jumped back up to 330 with morphing fixes actually correctly
    #   interpolating across all volumes
    assert aseg_img.shape == img.shape[:3]
    assert n_on == n_mri
    for ii in range(2):
        # should work with (ii=0) or without (ii=1) the interpolator
        if ii:
            src[0]['interpolator'] = None
        img = stc.as_volume(src, mri_resolution=False)
        n_on = np.array(img.dataobj).astype(bool).sum()
        # was 20 on `main` before gh-5590
        # then 44 before gh-7653, which took it back to 20
        assert n_on == n_orig
    # without the interpolator, this should fail
    assert src[0]['interpolator'] is None
    with pytest.raises(RuntimeError, match=r'.*src\[0\], .* mri_resolution'):
        stc.as_volume(src, mri_resolution=True)
@pytest.fixture(scope='session', params=[testing._pytest_param()])
def _mixed_morph_srcs():
    """Create a mixed (surface + volume) morph plus its source spaces."""
    # create a mixed source space: coarse cortical surface + two
    # cerebellar volume labels
    labels_vol = ['Left-Cerebellum-Cortex', 'Right-Cerebellum-Cortex']
    src = mne.setup_source_space('sample', spacing='oct3',
                                 add_dist=False, subjects_dir=subjects_dir)
    src += mne.setup_volume_source_space(
        'sample', mri=fname_aseg, pos=10.0,
        volume_label=labels_vol, subjects_dir=subjects_dir,
        add_interpolator=True, verbose=True)
    # create the destination space
    src_fs = mne.read_source_spaces(
        op.join(subjects_dir, 'fsaverage', 'bem', 'fsaverage-ico-5-src.fif'))
    src_fs += mne.setup_volume_source_space(
        'fsaverage', pos=7., volume_label=labels_vol,
        subjects_dir=subjects_dir, add_interpolator=False, verbose=True)
    del labels_vol
    # mixed morphs require an explicit destination source space
    with pytest.raises(ValueError, match='src_to must be provided .* mixed'):
        mne.compute_source_morph(
            src=src, subject_from='sample', subject_to='fsaverage',
            subjects_dir=subjects_dir)
    with pytest.warns(RuntimeWarning, match='not included in smoothing'):
        morph = mne.compute_source_morph(
            src=src, subject_from='sample', subject_to='fsaverage',
            subjects_dir=subjects_dir, niter_affine=[1, 0, 0],
            niter_sdr=[1, 0, 0], src_to=src_fs, smooth=5, verbose=True)
    return morph, src, src_fs
@requires_nibabel()
@requires_dipy()
@pytest.mark.parametrize('vector', (False, True))
def test_mixed_source_morph(_mixed_morph_srcs, vector):
    """Test mixed source space morphing."""
    import nibabel as nib
    morph, src, src_fs = _mixed_morph_srcs
    # Test some basic properties in the subject's own space
    lut, _ = read_freesurfer_lut()
    # atlas IDs of the volume parts of the mixed space (src[2:])
    ids = [lut[s['seg_name']] for s in src[2:]]
    del lut
    vertices = [s['vertno'] for s in src]
    n_vertices = sum(len(v) for v in vertices)
    data = np.zeros((n_vertices, 3, 1))
    data[:, 1] = 1.
    klass = mne.MixedVectorSourceEstimate
    if not vector:
        # collapse the orientation axis for the scalar variant
        data = data[:, 1]
        klass = klass._scalar_class
    stc = klass(data, vertices, 0, 1, 'sample')
    vol_info = _get_mri_info_data(fname_aseg, data=True)
    rrs = np.concatenate([src[2]['rr'][sp['vertno']] for sp in src[2:]])
    n_want = np.in1d(_get_atlas_values(vol_info, rrs), ids).sum()
    img = _get_img_fdata(stc.volume().as_volume(src, mri_resolution=False))
    assert img.astype(bool).sum() == n_want
    img_res = nib.load(fname_aseg)
    n_want = np.in1d(_get_img_fdata(img_res), ids).sum()
    img = _get_img_fdata(stc.volume().as_volume(src, mri_resolution=True))
    assert img.astype(bool).sum() > n_want  # way more get interpolated into
    with pytest.raises(TypeError, match='stc_from must be an instance'):
        morph.apply(1.)
    # Now actually morph
    stc_fs = morph.apply(stc)
    img = stc_fs.volume().as_volume(src_fs, mri_resolution=False)
    vol_info = _get_mri_info_data(fname_aseg_fs, data=True)
    rrs = np.concatenate([src_fs[2]['rr'][sp['vertno']] for sp in src_fs[2:]])
    n_want = np.in1d(_get_atlas_values(vol_info, rrs), ids).sum()
    # subject mismatch between STC and source space must be rejected
    with pytest.raises(ValueError, match=r'stc\.subject does not match src s'):
        stc_fs.volume().as_volume(src, mri_resolution=False)
    img = _get_img_fdata(
        stc_fs.volume().as_volume(src_fs, mri_resolution=False))
    assert img.astype(bool).sum() == n_want  # correct number of voxels
    # Morph separate parts and compare to morphing the entire one
    stc_fs_surf = morph.apply(stc.surface())
    stc_fs_vol = morph.apply(stc.volume())
    stc_fs_2 = stc_fs.__class__(
        np.concatenate([stc_fs_surf.data, stc_fs_vol.data]),
        stc_fs_surf.vertices + stc_fs_vol.vertices, stc_fs.tmin, stc_fs.tstep,
        stc_fs.subject)
    assert_allclose(stc_fs.data, stc_fs_2.data)
def _rand_affine(rng):
    """Return a small random rigid 4x4 affine (rotation + translation)."""
    out = np.eye(4)
    # keep the RNG call order identical: rotation vector first, then shift
    rot_vec = rng.randn(3)
    rot_vec /= 5 * np.linalg.norm(rot_vec)
    out[:3, 3] = rng.randn(3) / 5.
    out[:3, :3] = quat_to_rot(rot_vec)
    return out
# Volume shapes tried as both source and destination grids in
# test_resample_equiv below.
_shapes = (
    (10, 10, 10),
    (20, 5, 10),
    (5, 10, 20),
)
# Affines tried as both source and destination orientations: a scaled,
# axis-permuting affine; identity; a row permutation of identity; and
# 'rand', which signals that a random rigid affine is generated per test.
_affines = (
    [[2, 0, 0, 1],
     [0, 0, 1, -1],
     [0, -1, 0, 2],
     [0, 0, 0, 1]],
    np.eye(4),
    np.eye(4)[[0, 2, 1, 3]],
    'rand',
)
@requires_nibabel()
@requires_version('dipy', '1.3')
@pytest.mark.parametrize('from_shape', _shapes)
@pytest.mark.parametrize('from_affine', _affines)
@pytest.mark.parametrize('to_shape', _shapes)
@pytest.mark.parametrize('to_affine', _affines)
@pytest.mark.parametrize('order', [0, 1])
@pytest.mark.parametrize('seed', [0, 1])
def test_resample_equiv(from_shape, from_affine, to_shape, to_affine,
                        order, seed):
    """Test resampling equivalences."""
    rng = np.random.RandomState(seed)
    from_data = rng.randn(*from_shape)
    is_rand = False
    # 'rand' placeholders are replaced by random rigid affines
    if isinstance(to_affine, str):
        assert to_affine == 'rand'
        to_affine = _rand_affine(rng)
        is_rand = True
    if isinstance(from_affine, str):
        assert from_affine == 'rand'
        from_affine = _rand_affine(rng)
        is_rand = True
    to_affine = np.array(to_affine, float)
    assert to_affine.shape == (4, 4)
    from_affine = np.array(from_affine, float)
    assert from_affine.shape == (4, 4)
    #
    # 1. nibabel.processing.resample_from_to
    #
    # for a 1mm iso / 256 -> 5mm / 51 one sample takes ~486 ms
    from nibabel.processing import resample_from_to
    from nibabel.spatialimages import SpatialImage
    start = np.linalg.norm(from_data)
    got_nibabel = resample_from_to(
        SpatialImage(from_data, from_affine),
        (to_shape, to_affine), order=order).get_fdata()
    end = np.linalg.norm(got_nibabel)
    assert end > 0.05 * start  # not too much power lost
    #
    # 2. dipy.align.imaffine
    #
    # ~366 ms
    import dipy.align.imaffine
    interp = 'linear' if order == 1 else 'nearest'
    got_dipy = dipy.align.imaffine.AffineMap(
        None, to_shape, to_affine,
        from_shape, from_affine).transform(
            from_data, interpolation=interp, resample_only=True)
    # XXX possibly some error in dipy or nibabel (/SciPy), or some boundary
    # condition?
    nib_different = (
        (is_rand and order == 1) or
        (from_affine[0, 0] == 2. and not
         np.allclose(from_affine, to_affine))
    )
    nib_different = nib_different and not (
        is_rand and from_affine[0, 0] == 2 and order == 0)
    if nib_different:
        assert not np.allclose(got_dipy, got_nibabel), 'nibabel fixed'
    else:
        assert_allclose(got_dipy, got_nibabel, err_msg='dipy<->nibabel')
    #
    # 3. mne.source_space._grid_interp
    #
    # ~339 ms
    trans = np.linalg.inv(from_affine) @ to_affine  # to -> from
    interp = _grid_interp(from_shape, to_shape, trans, order=order)
    # flatten/reshape in Fortran order to match the sparse operator layout
    got_mne = np.asarray(
        interp @ from_data.ravel(order='F')).reshape(to_shape, order='F')
    if order == 1:
        assert_allclose(got_mne, got_dipy, err_msg='MNE<->dipy')
    else:
        # nearest-neighbor ties may resolve differently; require most
        # voxels to agree rather than all
        perc = 100 * np.isclose(got_mne, got_dipy).mean()
        assert 83 < perc <= 100
|
The search of 186 Mahoenui Road in Coatsville, Auckland finished just before midnight last night, concluding the work required there by Police and the Organised & Financial Crime Agency New Zealand.
The search followed the arrest of four men yesterday morning as part of a U.S. led investigation into criminal copyright infringement.
Police have seized a variety of equipment including computers and documents for the purposes of evidence. Assets including luxury cars and artwork have been restrained and passed to the control of the Official Assignee.
Some of the larger items restrained remain at the property and the Official Assignee will continue working to remove these in the coming days.
Detective Inspector Grant Wormald of OFCANZ says the team will continue working at the North Shore Policing Centre through the weekend.
Police located two firearms at the property yesterday and a 55-year-old New Zealand man has been charged with unlawful possession of a pistol. He was released on Police bail and is due in court on 26th January.
It is not anticipated there will be any further update or comment to give until next week.
Previous: Dotcom Mansion search continues tonight.
|
# -*- coding: utf-8 -*-
# HiPart is a program to analyze the electronic structure of molecules with
# fuzzy-atom partitioning methods.
# Copyright (C) 2007 - 2012 Toon Verstraelen <[email protected]>
#
# This file is part of HiPart.
#
# HiPart is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HiPart is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
import os, sys, subprocess
from hipart.tests.utils import setup_gaussian
# These tests just run the scripts to see if they do not crash on a simple
# example.
def test_scripts():
    """Yield one (check, script, scheme) test case per script/scheme pair.

    Nose-style generator test: each yielded tuple is executed as an
    individual test that runs the given analysis script with the given
    partitioning scheme.
    """
    script_names = (
        "hi-bond-orders.py", "hi-charges.py", "hi-dipoles.py",
        "hi-esp-test.py", "hi-multipoles.py", "hi-net-overlap.py",
        "hi-overlap-matrices-orb.py", "hi-spin-charges.py",
    )
    for partitioning in ("becke", "hirsh", "hirshi", "isa"):
        for script_name in script_names:
            yield check_script, script_name, partitioning
def check_script(fn_script, scheme):
    """Run one analysis script on the hf_sto3g example; expect exit code 0.

    The Hirshfeld-based schemes need the pro-atom density file as an extra
    command-line argument; the other schemes do not.
    """
    script_path = os.path.abspath(os.path.join("scripts", fn_script))
    tmpdir, fn_fchk, fn_densities = setup_gaussian("hf_sto3g", "sto3g")
    cmd = [script_path, fn_fchk, scheme]
    if scheme in ("hirsh", "hirshi"):
        cmd.append(fn_densities)
    retcode = run(tuple(cmd))
    assert retcode == 0
def run(args):
f = file(args[0], "r")
mod_args = ("/usr/bin/env", "python", "-") + args[1:]
proc = subprocess.Popen(mod_args, stdin=f, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.getcwd())
outdata, errdata = proc.communicate()
f.close()
print outdata
print errdata
return proc.returncode
|
Connor's entry "The Hunt" has been selected as a Regional Winner in the Puget Sound Educational Service District's Annual Art Show. His piece will be advancing to the 42nd Annual State High School Art Show in Olympia. Good Luck Connor!
|
#!/usr/bin/env python3
# bing_background.py - downoads the bing homepage background and sets it as the
# desktop backgroud.
import logging
import os
from urllib.request import urlopen
import time
from bs4 import BeautifulSoup
from selenium import webdriver
from pyvirtualdisplay import Display
def download_link(directory, link):
    """Fetch the image at *link* into *directory*, named by its basename.

    If a file with the same basename already exists the download is skipped,
    so repeated runs do not re-fetch the same wallpaper.
    """
    target = os.path.join(directory, os.path.basename(link))
    if os.path.exists(target):
        logging.info('File {} already exists, skipping.'.format(link))
        return
    logging.info('Downloading {}'.format(link))
    with urlopen(link) as response:
        with open(target, 'wb') as out:
            out.write(response.read())
def change_wp(directory, link):
    """Set the downloaded image as the desktop wallpaper (GNOME family only).

    Uses gsettings, so only gnome/ubuntu/unity sessions are supported; any
    other desktop session is logged as unsupported.
    """
    session = os.environ.get("DESKTOP_SESSION")
    if session not in ("gnome", "ubuntu", "unity"):
        logging.error('No command to change wallpaper.')
        return
    img_path = os.path.join(directory, os.path.basename(link))
    os.system("gsettings set org.gnome.desktop.background picture-uri file://{}".format(img_path))
def setup_download_dir(save_dir):
    """Create (if needed) and return the download directory under the cwd.

    Fix: os.makedirs(exist_ok=True) replaces the exists()/mkdir() pair —
    the check-then-create form races with concurrent runs and fails when
    intermediate path components are missing.
    """
    download_dir = os.path.join(os.getcwd(), save_dir)
    os.makedirs(download_dir, exist_ok=True)
    return download_dir
def main():
    """Grab the Bing homepage background image and set it as wallpaper.

    Flow: log setup -> headless display -> Firefox loads bing.com -> parse
    the background <div> style for the image URL -> download -> gsettings.
    """
    # set up logging
    logging.basicConfig(filename='bing_bg.log', filemode='w', level=logging.INFO,
                        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger(__name__)
    url = "https://www.bing.com/"
    save_dir = "images"
    dl_dir = setup_download_dir(save_dir)
    # set up a virtual display to use with selenium
    display = Display(visible=0, size=(800, 600))
    display.start()
    # Launch a Firefox instance
    driver = webdriver.Firefox()
    driver.get(url)
    logging.info('Downloading page {}'.format(url))
    time.sleep(6) #hacky sleep to allow bing homepage to load so we can grab the image
    # Parse the bing homepage
    soup = BeautifulSoup(driver.page_source, "html.parser")
    # clean up browser and stop virtual display
    driver.quit() # seems to spit out "'NoneType' object has no attribute 'path'"
    display.stop()
    # Find the URL elements.
    # NOTE(review): assumes a div with id 'bgDiv' whose inline style contains
    # url("..."); — this is tied to Bing's markup and will IndexError if the
    # page layout changes. Confirm against the current homepage.
    link = soup.find_all('div', {'id': 'bgDiv'})[0].attrs['style']
    # Slice out the URL between 'url("' and '");' in the style attribute.
    img_link = link[(link.find('url("')+5):link.find('");')]
    logging.info('Found link: {}'.format(img_link))
    # Download and change wallpaper
    download_link(dl_dir, img_link)
    change_wp(dl_dir, img_link)
if __name__ == "__main__":
    main()
|
Sierra was the target of a flood of graphic death threats over her blog about web design. Yes, that’s right — web design.
In 2007 writer, programmer, and horse trainer Kathy Sierra quit the internet because of misogynist hate trolling. She stayed off the social web for 7 years but last year she came back to see what Twitter was like. She tells us why she only lasted a few weeks and her theory about why so many women are targets online. Plus Danielle Keats Citron explains how we could use the law to drain the cesspool.
|
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shutdown adb_logcat_monitor and print accumulated logs.
To test, call './adb_logcat_printer.py <base_dir>' where
<base_dir> contains 'adb logcat -v threadtime' files named as
logcat_<deviceID>_<sequenceNum>
The script will print the files to out, and will combine multiple
logcats from a single device if there is overlap.
Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
will attempt to terminate the contained PID by sending a SIGINT and
monitoring for the deletion of the aforementioned file.
"""
# pylint: disable=W0702
import cStringIO
import logging
import optparse
import os
import re
import signal
import sys
import time
# Set this to debug for more verbose output
LOG_LEVEL = logging.INFO
def CombineLogFiles(list_of_lists, logger):
    """Splices together multiple logcats from the same device.

    Args:
      list_of_lists: list of pairs (filename, list of timestamped lines)
      logger: handler to log events

    Returns:
      list of lines with duplicates removed
    """
    timestamp_re = r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} '
    combined = ['']
    for name, lines in list_of_lists:
        # A file holding only the logcat header carries no log data.
        if len(lines) < 2:
            continue
        splice_at = 0
        # Nothing to splice against while the accumulator only has the seed.
        if len(combined) > 1:
            tail = combined[-1]
            # Only splice on a timestamped line; anything else is suspect.
            if not re.match(timestamp_re, tail):
                logger.warning('splice error - no timestamp in "%s"?', tail.strip())
            else:
                try:
                    splice_at = lines.index(tail)
                except ValueError:
                    # The last line was valid but absent from the next file.
                    combined.append('***** POSSIBLE INCOMPLETE LOGCAT *****')
                    logger.info('Unable to splice %s. Incomplete logcat?', name)
        combined.append('*'*30 + ' %s' % name)
        combined.extend(lines[splice_at:])
    return combined
def FindLogFiles(base_dir):
    """Search a directory for logcat files.

    Args:
      base_dir: directory to search

    Returns:
      Mapping of device_id to a sorted list of file paths for a given device
    """
    name_pattern = re.compile(r'^logcat_(\w+)_(\d+)$')
    # (<device_id>, <seq num>, <full path>) triples; sorting these puts each
    # device's files in numeric sequence order.
    found = []
    for entry in os.listdir(base_dir):
        m = name_pattern.match(entry)
        if m is None:
            continue
        found.append((m.group(1), int(m.group(2)), os.path.join(base_dir, entry)))
    file_map = {}
    for device_id, _, path in sorted(found):
        file_map.setdefault(device_id, []).append(path)
    return file_map
def GetDeviceLogs(log_filenames, logger):
    """Read log files, combine and format.

    Args:
      log_filenames: mapping of device_id to sorted list of file paths
      logger: logger handle for logging events

    Returns:
      list of formatted device logs, one for each device.
    """
    device_logs = []
    # items() instead of the Python-2-only iteritems(): behavior here is
    # identical (we just iterate the pairs) and it keeps the function
    # portable to Python 3.
    for device, device_files in log_filenames.items():
        logger.debug('%s: %s', device, str(device_files))
        device_file_lines = []
        for cur_file in device_files:
            with open(cur_file) as f:
                device_file_lines += [(cur_file, f.read().splitlines())]
        combined_lines = CombineLogFiles(device_file_lines, logger)
        # Prepend each line with a short unique ID so it's easy to see
        # when the device changes. We don't use the start of the device
        # ID because it can be the same among devices. Example lines:
        #   AB324: foo
        #   AB324: blah
        device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
    return device_logs
def ShutdownLogcatMonitor(base_dir, logger):
    """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
    try:
        # The monitor advertises its PID in this file; deletion of the file
        # is the monitor's acknowledgement of a clean shutdown.
        monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
        with open(monitor_pid_path) as f:
            monitor_pid = int(f.readline())
        # NOTE(review): the module docstring says SIGINT, but SIGTERM is what
        # is actually sent here.
        logger.info('Sending SIGTERM to %d', monitor_pid)
        os.kill(monitor_pid, signal.SIGTERM)
        i = 0
        while True:
            time.sleep(.2)
            # PID file removed -> monitor shut down cleanly.
            if not os.path.exists(monitor_pid_path):
                return
            # Process gone but PID file still present -> unclean termination.
            # (Relies on /proc/<pid>, i.e. Linux.)
            if not os.path.exists('/proc/%d' % monitor_pid):
                logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
                return
            logger.info('Waiting for logcat process to terminate.')
            i += 1
            # Give up after ~2s (10 polls at 0.2s) so log printing is never
            # blocked indefinitely.
            if i >= 10:
                logger.warning('Monitor pid did not terminate. Continuing anyway.')
                return
    except (ValueError, IOError, OSError):
        # A missing/garbled PID file or a failed kill() is non-fatal: we
        # still want to print whatever logs exist.
        logger.exception('Error signaling logcat monitor - continuing')
def main(argv):
    """Parse args, shut the monitor down, then print all collected logs."""
    parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
    parser.add_option('--output-path',
                      help='Output file path (if unspecified, prints to stdout)')
    options, args = parser.parse_args(argv)
    if len(args) != 1:
        parser.error('Wrong number of unparsed args')
    base_dir = args[0]
    if options.output_path:
        output_file = open(options.output_path, 'w')
    else:
        output_file = sys.stdout
    # This script's own events are buffered in memory and appended at the
    # very end, after all device logs, so they never interleave.
    log_stringio = cStringIO.StringIO()
    logger = logging.getLogger('LogcatPrinter')
    logger.setLevel(LOG_LEVEL)
    sh = logging.StreamHandler(log_stringio)
    sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
                                      ' %(message)s'))
    logger.addHandler(sh)
    try:
        # Wait at least 5 seconds after base_dir is created before printing.
        #
        # The idea is that 'adb logcat > file' output consists of 2 phases:
        #   1 Dump all the saved logs to the file
        #   2 Stream log messages as they are generated
        #
        # We want to give enough time for phase 1 to complete. There's no
        # good method to tell how long to wait, but it usually only takes a
        # second. On most bots, this code path won't occur at all, since
        # adb_logcat_monitor.py command will have spawned more than 5 seconds
        # prior to called this shell script.
        try:
            sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
        except OSError:
            sleep_time = 5
        if sleep_time > 0:
            logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
            time.sleep(sleep_time)
        assert os.path.exists(base_dir), '%s does not exist' % base_dir
        ShutdownLogcatMonitor(base_dir, logger)
        separator = '\n' + '*' * 80 + '\n\n'
        for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
            output_file.write(log)
            output_file.write(separator)
        # The monitor's own event log, if present, follows the device logs.
        with open(os.path.join(base_dir, 'eventlog')) as f:
            output_file.write('\nLogcat Monitor Event Log\n')
            output_file.write(f.read())
    except:
        # Intentionally broad (pylint W0702 disabled at module level): any
        # failure should still fall through to emit the buffered event log.
        logger.exception('Unexpected exception')
    logger.info('Done.')
    sh.flush()
    output_file.write('\nLogcat Printer Event Log\n')
    output_file.write(log_stringio.getvalue())
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
Heidi Taiger is currently living in Montreal, Quebec, working as an Owner in "Poochey Couture" and is interested in Import/Export, Retail.
This profile is owned by Heidi Taiger. Send a message to this person to delete this profile.
|
import time
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING
from vnpy.trader.constant import Direction, Interval, OrderType, Status
if TYPE_CHECKING:
# noinspection PyUnresolvedReferences
from vnpy.gateway.oanda import OandaGateway # noqa
# Map OANDA order states to vn.py order statuses.
STATUS_OANDA2VT = {
    "PENDING": Status.NOTTRADED,
    "FILLED": Status.ALLTRADED,
    "CANCELLED": Status.CANCELLED,
    # "TRIGGERED": Status.REJECTED,
}
# Stop-order states; see the inline note on the "Active" mapping.
STOP_ORDER_STATUS_OANDA2VT = {
    "Untriggered": Status.NOTTRADED,
    "Triggered": Status.NOTTRADED,
    # Active: triggered and placed.
    # since price is market price, placed == AllTraded?
    "Active": Status.ALLTRADED,
    "Cancelled": Status.CANCELLED,
    "Rejected": Status.REJECTED,
}
# Trade direction, mapped in both directions.
DIRECTION_VT2OANDA = {Direction.LONG: "Buy", Direction.SHORT: "Sell"}
DIRECTION_OANDA2VT = {v: k for k, v in DIRECTION_VT2OANDA.items()}
# NOTE(review): mapping "None" to LONG looks like a fallback for responses
# that omit a direction — confirm against the OANDA API responses.
DIRECTION_OANDA2VT.update({
    "None": Direction.LONG
})
OPPOSITE_DIRECTION = {
    Direction.LONG: Direction.SHORT,
    Direction.SHORT: Direction.LONG,
}
# Order types, both directions; OANDA also reports the *_ORDER spellings,
# added to the reverse map below.
ORDER_TYPE_VT2OANDA = {
    OrderType.LIMIT: "LIMIT",
    OrderType.MARKET: "MARKET",
    OrderType.STOP: "STOP",
}
ORDER_TYPE_OANDA2VT = {v: k for k, v in ORDER_TYPE_VT2OANDA.items()}
ORDER_TYPE_OANDA2VT.update({
    'LIMIT_ORDER': OrderType.LIMIT,
    'MARKET_ORDER': OrderType.MARKET,
    'STOP_ORDER': OrderType.STOP,
})
# Bar intervals expressed three ways: OANDA granularity codes, minute
# counts, and timedeltas.
INTERVAL_VT2OANDA = {
    Interval.MINUTE: "M1",
    Interval.HOUR: "H1",
    Interval.DAILY: "D",
    Interval.WEEKLY: "W",
}
INTERVAL_VT2OANDA_INT = {
    Interval.MINUTE: 1,
    Interval.HOUR: 60,
    Interval.DAILY: 60 * 24,
    Interval.WEEKLY: 60 * 24 * 7,
}
INTERVAL_VT2OANDA_DELTA = {
    Interval.MINUTE: timedelta(minutes=1),
    Interval.HOUR: timedelta(hours=1),
    Interval.DAILY: timedelta(days=1),
    Interval.WEEKLY: timedelta(days=7),
}
# Timezone handles: UTC for the API, local zone for display.
utc_tz = timezone.utc
local_tz = datetime.now(timezone.utc).astimezone().tzinfo
def generate_timestamp(expire_after: float = 30) -> int:
    """Millisecond timestamp *expire_after* seconds in the future.

    :param expire_after: expires in seconds.
    :return: timestamp in milliseconds
    """
    now_ms = time.time() * 1000
    return int(now_ms + expire_after * 1000)
def parse_datetime(dt: str) -> datetime:
    """Parse an OANDA RFC3339 timestamp (nanosecond precision, 'Z' suffix).

    The final four characters — three sub-microsecond digits plus the
    trailing 'Z' — are dropped so fromisoformat() can parse the remainder.
    """
    iso_compatible = dt[:-4]
    return datetime.fromisoformat(iso_compatible)
def parse_time(dt: str) -> str:
    """Return the 'HH:MM:SS.ffffff' slice of an OANDA timestamp string."""
    clock_part = dt[11:26]
    return clock_part
|
Harbour Village Campground And Waterpark - Wag!
Located in the beautiful Door County, Wisconsin, Harbour Village Campground and Waterpark is home to the largest outdoor waterpark in the county, not to mention the campground is full of amenities! The campground conveniently accommodates tents, RVs, and also has rental cabins available for those looking for a more modern experience.
There are over 200 sites in total. Both wooded and non-wooded sites are available with electric and water hookups as well as rustic sites. Laundry facilities make packing light an option and 3 bathhouses are available with free showers to use at your convenience. The campground also has a propane filling station and camp store, where firewood is available.
Dogs are permitted at the park, but they must remain on-leash and under control unless inside their sleeping quarters. They are not permitted in the pools or swimming ponds, but you are free to explore the many nature trails available with your pooch.
At the campground, you can discover a variety of outdoor activities you won't find at any other campground. Start your day off at either the children or adult pool for a morning dip. Next, you can make your way over the outdoor "jumping pillow" located right on the water, to have some bouncing fun. After that, head over to jump shop court to play some "space ball." If you're unfamiliar with "space ball", it's basketball with trampolines. If you're tired and looking for a slower activity, miniature golf is available as well as archery, disc golf, and pedal carts. Other outdoor games include volleyball, shuffle ball, basketball and much more. Of course, the campground does have a waterpark if you're wanting a day filled with splashing and sun.
If you're looking to fish, there is a fishing pond just for you, where you can catch both bluegill and perch in the catch-and-release pond. If you're looking for some sandy waters, you can also check out the swimming pond for an afternoon dip.
So, if you’re ever looking for a camping adventure in the Wisconsin area, don’t forget about Harbour Village Campground and Waterpark!
|
from __future__ import print_function
from collections import namedtuple
import multiprocessing
import platform
import socket
import sys
class System:
    """Small collection of host/platform introspection helpers."""

    @staticmethod
    def is_window():
        """True when running on Windows."""
        return platform.system() == 'Windows'

    @staticmethod
    def is_linux():
        """True when running on Linux."""
        return platform.system() == 'Linux'

    @staticmethod
    def is_mac():
        """True when running on macOS (platform name 'Darwin')."""
        return platform.system() == 'Darwin'

    @staticmethod
    def cpu_cores():
        """Number of logical CPU cores."""
        return multiprocessing.cpu_count()

    @staticmethod
    def host_info():
        """Return HostInfo(name, ipv4) for this machine."""
        HostInfo = namedtuple('HostInfo', ['name', 'ipv4'])
        hostname = socket.gethostname()
        address = socket.gethostbyname(hostname)
        return HostInfo(hostname, address)

    @staticmethod
    def version():
        """Tool version string."""
        return 'v0.1.0b'
def eprint(*args, **kwargs):
    """print() that writes to stderr by default.

    Fix: the original always passed file=sys.stderr explicitly, so any
    caller supplying its own file= raised TypeError (duplicate keyword).
    setdefault keeps the stderr default while allowing an override.
    """
    kwargs.setdefault('file', sys.stderr)
    print(*args, **kwargs)
def print_console_header(string, splitter='-', width=80):
    """Return *string* centered in a banner of *width* characters.

    The text gets one space of padding on each side and is flanked by runs
    of *splitter*; when the split is uneven the right side takes the extra
    character. Despite the name, the banner is returned, not printed.

    :param string: text to center
    :param splitter: fill character (default '-')
    :param width: total banner width (default 80, the original fixed value)
    """
    str_size = len(string) + 2  # text plus one space either side
    left_size = (width - str_size) // 2
    right_size = width - left_size - str_size
    return '{1} {0} {2}'.format(string, splitter*left_size, splitter*right_size)
|
SHERMAN, TX -- The Sherman softball team may be young, but they are playing beyond their years. With only one senior, the Lady Cats made it to the playoffs for the first time in 3 seasons. After knocking off Whitehouse on Friday, Sherman returns home to play Forney. The Lady Cats have been working on defense this week to prepare for Forney's big hitters. But the Lady Cats have also been hot at the plate lately. Sherman versus Forney kicks off at 7:30 Friday night at Old Settlers Park.
|
from pathlib import Path
from diot import Diot
from bioprocs.utils import shell2 as shell
from bioprocs.utils.reference import vcfIndex
from bioprocs.utils.tsvio2 import TsvReader, TsvWriter, TsvRecord
# PyPPL template script: {{...}} placeholders and {% %} directives are
# rendered to concrete Python literals before execution.
{% from os import path %}
{% from pyppl.utils import alwaysList %}
infile = {{i.infile | quote}}
# Allele input: read lines when it is a file, otherwise split the string;
# collapses to None when the result is a single empty entry.
afile = {{i.afile | ?path.isfile | =readlines | !alwaysList
         | ?:len(_) == 1 and not _[0] | =:None | repr}}
outfile = {{o.outfile | quote}}
outdir = {{o.outdir | quote}}
bcftools = {{args.bcftools | quote}}
allele = {{args.allele | ?path.isfile | =readlines | !alwaysList
           | ?:len(_) == 1 and not _[0] | =:None | repr}}
netmhc = {{args.netmhc | quote}}
iedb_mhc_i = {{args.iedb_mhc_i | quote}}
pvacseq = {{args.pvacseq | quote}}
bdtool = {{args.bdtool | quote}}
nthread = {{args.nthread | quote}}
params = {{args.params | repr}}
# Ensure the VCF is indexed; returns the (possibly rewritten) path.
infile = vcfIndex(infile)
# get alleles: the input allele file takes precedence over args.allele
allele = afile or allele
if not allele:
    raise ValueError('No allele has been specified.')
allele = ','.join(allele)
shell.load_config(pvacseq = pvacseq, bcftools = bcftools)
# Canonical binding-predictor names keyed by lower-cased form, making the
# bdtool argument case-insensitive.
bdtools = [ 'MHCflurry','MHCnuggetsI','MHCnuggetsII','NNalign','NetMHC',
    'NetMHCIIpan','NetMHCcons','NetMHCpan','PickPocket','SMM','SMMPMBEC','SMMalign']
bdtools = {bdt.lower():bdt for bdt in bdtools}
# get sample name (first sample listed in the VCF)
sample = shell.bcftools.query(l = infile).splitlines()[0]
# Remove stale output so pvacseq does not refuse to rerun.
shell.rm_rf(Path(outdir).joinpath('MHC_Class_I', sample + '.tsv'), _debug = True)
params.t = nthread
params._ = [infile, sample, allele, bdtools[bdtool], outdir]
params.k = params.get('k', True)
params.iedb_install_directory = Path(iedb_mhc_i).parent
shell.fg.pvacseq.run(**params)
# Keep epitopes with IC50(MT) <= 500 (strong binder) and IC50(WT) >= 2000
# (weak wild-type binder); the loop below skips rows failing either bound.
# Columns of the all_epitopes file:
# Chromosome Start Stop Reference Variant Transcript Transcript Support Level Ensembl Gene ID Variant Type Mutation Protein Position Gene Name HGVSc HGVSp HLA Allele Peptide Length Sub-peptide Position Mutation Position MT Epitope Seq WT Epitope Seq Best MT Score Method Best MT Score Corresponding WT Score Corresponding Fold Change Tumor DNA Depth Tumor DNA VAF Tumor RNA Depth Tumor RNA VAF Normal Depth Normal VAF Gene Expression Transcript Expression Median MT Score Median WT Score Median Fold Change NetMHC WT Score NetMHC MT Score cterm_7mer_gravy_score max_7mer_gravy_score difficult_n_terminal_residue c_terminal_cysteine c_terminal_proline cysteine_count n_terminal_asparagine asparagine_proline_bond_count
reader = TsvReader(Path(outdir).joinpath('MHC_Class_I', sample + '.all_epitopes.tsv'))
writer = TsvWriter(outfile)
writer.cnames = ['HLA_allele', 'Peptide', 'Affinity', 'Gene', 'ENSG', 'ENST', 'Ref_peptide', 'Ref_affinity', 'Mutation', 'AAChange']
writer.writeHead()
for r in reader:
    out = TsvRecord()
    out.HLA_allele = r['HLA Allele']
    out.Peptide = r['MT Epitope Seq']
    out.Affinity = r['Best MT Score']
    out.Gene = r['Gene Name']
    out.ENSG = r['Ensembl Gene ID']
    out.ENST = r['Transcript']
    out.Ref_peptide = r['WT Epitope Seq']
    out.Ref_affinity = r['Corresponding WT Score']
    out.Mutation = r.Chromosome + ':' + r.Start + '-' + r.Stop + '.' + r.Reference + '/' + r.Variant
    out.AAChange = r.Mutation
    if float(out.Affinity) > 500 or float(out.Ref_affinity) < 2000:
        continue
    writer.write(out)
|
The District School Board of Niagara receives requests from individuals, agencies and institutions to conduct research involving its staff and students. Internal and external requests to conduct research projects or to circulate questionnaires in schools MUST BE accompanied by the necessary application forms (see Forms).
Applications to conduct research in DSBN are reviewed by the Educational Research Committee up to six (6) times per academic year, with a maximum of six (6) proposals considered per meeting. The Committee will not normally review more than thirty-six (36) proposals per year. They will be reviewed on a first-come, first-served basis. Applicants should see Dates and Deadlines to familiarize themselves with these dates and to submit their proposals a minimum of three weeks prior to the posted Committee meeting dates. We require nine (9) copies of the application to be submitted. Any proposals received following the posted deadline dates for submission will be considered for the next Educational Research Committee Meeting.
The review involves a three-step process in which the Educational Research Committee makes recommendations that are reported to the Education Program and Planning Committee, and thereafter to the Board. Recommendations may include: approval, approval with specific conditions which must be met prior to the beginning of research, a deferral to a later meeting, or a denial. The decision of the board is final.
All approved research projects in schools must benefit education in the District School Board of Niagara and/or the community at large.
All research projects must be designed to produce a minimum of intrusiveness for staff and students.
Participation in a research project by a school, its staff and each of its students is voluntary and requires obtaining active, informed written consent.
All personal and confidential information must be administered in accordance with the Municipal Freedom of Information and Protection of Privacy Act and its Regulations. This means that the confidentiality of individual participants and class/school names and the name of the school board must be assured; that parental consent procedures must be included in the applicant's submission; and that the researcher(s) must agree to sign a Research Agreement.
The researcher(s) shall avoid the use of techniques which invade the privacy and/or threaten the integrity of the student or his/her family.
Upon completion of the research project, the researcher(s) must provide a copy of the summary of findings or executive abstract of the research study to the Principal(s) of the school(s) where the research was conducted and the Research Recording Secretary for the DSBN Educational Research Committee to discuss the findings and to provide a summary of the findings or executive abstract of the research project.
Staff wishing to conduct research within their own school/department and which promotes professional growth, but does not involve course work leading to a Graduate Degree, or will not be published or sold, or shared with outside entities, can do so with the approval of the Principal and in consultation with the Superintendent of Schools responsible for the DSBN Educational Research Committee.
Elementary and/or Secondary students doing course work in which students will be under the direct supervision of the classroom teacher, in consultation with the School Principal.
* For situations outlined in #1 and #2 above, the "Research Project Supervised by Principals" form (See Forms) must be used by the School Principal.
* The policy for Approval and Conduct of Research contains further detailed information that is available upon request by contacting Michael St. John, Superintendent of Schools responsible for the DSBN Educational Research Committee.
|
from facepy import GraphAPI
from django.conf import settings
import os
# Initialize the Graph API with a valid access token (optional,
# but will allow you to do all sorts of fun stuff).
# SECURITY NOTE(review): OAuth access tokens are hard-coded below (one live,
# one commented out). They should be revoked and loaded from Django settings
# or an environment variable rather than committed to source control.
# oauth_access_token = 'EAABZC0OOt2wQBAOcKcpbbYiuFyEONLyqOsdUrODvEBLXq6ZCPXBcI1oZA4UZCPrIkXcZBOzkF9ue0AXNRAEjeE4tfJHy4GwjGfT4CZArvkwmTDGLnU2T1eiixAPm7q4GsPQPVAsDbWdZCEWGwANtKwZAWmeo85xX8tdvfiZBc7Mu6JQZDZD'
oauth_access_token = 'EAACEdEose0cBAPRtQdvettZAmH7ZA6GiRtCx4AFUPfTZBLUPTIjBZCKIVWZCpgYXw5V3sK8c4g7q5bZBUvpMh2M1aq4ZCiYPMwLIIilhFZCFdX4SrBKi5WPFWVrEl5Y1sZACCMkIJUJm6eyPFFXNd3ankhGuJFDfZB53v86bFFtYEzZCrXQj4bU6TPw'
# Module-level client shared by the update functions below.
graph = GraphAPI(oauth_access_token)
# Get my latest posts
# graph.get('me/posts')
# Post a photo of a parrot
# graph.post(
#     path = 'me/photos',
#     source = open('Untitled.png','rb')
# )
def updateFacebook():
    """Post the contents of shelter.txt to the page feed.

    Fix: the file handle was opened and never closed; a with-block now
    guarantees it is released even if the post fails. Note the relative
    path only resolves when the CWD is the package directory — see
    updateFacebookv2 for the BASE_DIR-anchored variant.

    :return: True on success (graph.post raises on failure)
    """
    with open('../templates/shelter.txt', 'r') as fh:
        message = fh.read()
    graph.post(
        path='me/feed',
        message=message,
    )
    return True
def updateFacebookv2():
    """Post shelter.txt (resolved against settings.BASE_DIR) to the feed.

    Fix: close the file deterministically with a with-block instead of
    leaking the handle.

    :return: True on success (graph.post raises on failure)
    """
    template_path = os.path.join(settings.BASE_DIR, 'cms/templates/shelter.txt')
    with open(template_path, 'r') as fh:
        message = fh.read()
    graph.post(
        path='me/feed',
        message=message,
    )
    return True
|
Sears Home Services of Houston, TX fixes nearly any appliance, no matter where you bought it!
You read that right! No matter where you bought your appliance, Sears Home Services of Houston can fix it. We repair the major brands, makes and models. In fact, Sears Home Services has been fixing appliances since 1956.
Whether your refrigerator isn’t cooling properly or has another problem, our skilled Sears Home Services’ technicians in Houston will diagnose quickly and repair efficiently.
Is your washing machine not spinning or your dryer not heating? Sears Home Services in Houston offers dependable washing machine and dryer repair services for these and other less common issues.
Heating and cooling unit breakdowns can be a real nuisance. Rely on Sears Home Services in Houston for heating and air conditioning repairs that will restore comfort to your home.
What repair can Sears Home Services of Houston help you with today?
Sears Home Services in Houston also provides other types of repairs including small appliances, riding lawn mowers, exercise equipment and more.
Get started now! Call to book an appointment today. The Sears Home Services technicians in Houston can repair your problem—satisfaction guaranteed!
Servicing all neighborhoods around Houston: Westchase Appliance Repair, West Branch Appliance Repair, Memorial Appliance Repair, Langwood Appliance Repair, Chinatown Appliance Repair, Fairbanks/Northwest Crossing Appliance Repair, Carverdale Appliance Repair, Sharpstown Appliance Repair, West Oaks Appliance Repair, Gulfton Appliance Repair, Galleria/Uptown Appliance Repair, Lazy Brook/Timbergrove Appliance Repair, Addicks/Park Ten Appliance Repair, Braeburn Appliance Repair, Energy Corridor Appliance Repair, Bellaire Appliance Repair, Alief Appliance Repair, Greenway Appliance Repair, Upper Kirby Appliance Repair, Meyerland Appliance Repair, West University Appliance Repair, Fondren Southwest Appliance Repair, Rice Military Appliance Repair, Oak Forest/Garden Oaks Appliance Repair, Acres Homes Appliance Repair, Westbury Appliance Repair, Willow Meadows/Willowbend Appliance Repair, Braeswood Place Appliance Repair.
|
"""
pyscan
======
main.py
This file houses the core of the application
"""
# imports
import csv
import pprint
import requests
from bs4 import BeautifulSoup
from pyscan.local import *
class Barcode():
    """Lookup, persistence, and CSV export for product barcodes.

    Wraps a remote barcode-lookup API (BASE_URL / API_KEY / SAVE_FILE_PATH
    come from pyscan.local). A Barcode can be created empty, created for a
    single id (looked up immediately), or driven interactively/batch-wise.
    """

    def __init__(self, barcode_id=None, barcode_ids=None, autosave=False):
        """
        :param barcode_id: single barcode id to look up immediately
        :param barcode_ids: reserved for batch initialization (currently unused)
        :param autosave: stored for callers; not acted upon here
        """
        self.number = barcode_id
        self.autosave = autosave
        self.base_url = BASE_URL
        self.data = {}
        self.item_name = ''
        self.description = ''
        self.pp = pprint.PrettyPrinter()
        if barcode_id:
            self.data = self.retrieve()
            # Fix: the API returns str values under Python 3, where
            # str.decode() does not exist; decode only actual bytes.
            self.item_name = self._as_text(self.data.get('itemname', ''))
            self.description = self._as_text(self.data.get('description', ''))
        elif barcode_ids:
            pass
        self.save_file = SAVE_FILE_PATH

    @staticmethod
    def _as_text(value):
        """Return *value* as str, decoding ASCII bytes when needed."""
        if isinstance(value, bytes):
            return value.decode('ascii')
        return value

    def retrieve(self, barcode=None):
        """Fetch metadata for *barcode* (or self.number) from the API.

        :param barcode: optional barcode id; replaces self.number when given
        :return: decoded JSON document; also cached on self.data
        """
        if barcode:
            self.number = barcode
        url = self.base_url.format(
            API_KEY=API_KEY,
            number=self.number,
        )
        r = requests.get(url)
        document = r.json()
        self.data = document
        # self.__convert_unicode_characters()
        return document

    def save(self, file_path=None):
        """Append the current barcode number to the save file.

        Fix: *file_path* was previously accepted but ignored; it now
        overrides the default save file when provided. The handle is also
        closed deterministically via the with-block.
        """
        target = file_path or self.save_file
        with open(target, 'a') as save_file:
            save_file.write('{number}\n'.format(number=self.number))

    def continuous_input(self):
        """Interactive loop: scan/enter codes until the user types "done".

        Each code is looked up, pretty-printed, and appended to the save file.
        """
        print('Keep scanning codes to save them. When finished, type "done"!')
        while True:
            code = input('Enter a barcode ---> ')
            if code == 'done':
                break
            self.pp.pprint(self.retrieve(code))
            self.save()

    def batch_retrieve(self, barcode_ids):
        """Look up many barcodes; return their metadata documents in order."""
        return [self.retrieve(barcode) for barcode in barcode_ids]

    def csv_write(self):
        """Append (number, item name, description) as one row to foo.csv."""
        with open('foo.csv', 'a') as csvfile:
            code_writer = csv.writer(csvfile, delimiter=',')
            code_writer.writerow([
                self.number,
                self.item_name,
                self.description,
            ])

    def __convert_unicode_characters(self):
        """Normalize HTML entities in self.data values via BeautifulSoup.

        NOTE(review): currently unused (the call in retrieve() is commented
        out) and it stores BeautifulSoup objects rather than strings —
        confirm the intent before re-enabling.
        """
        for key, value in self.data.items():
            if type(value) is not int:
                converted_string = BeautifulSoup(value)
                self.data[key] = converted_string

    def __eq__(self, other):
        """Two Barcodes are equal when their numbers match.

        Fix: previously returned self.number itself (truthy for any
        non-empty number), which made == comparisons meaningless.
        """
        if isinstance(other, Barcode):
            return self.number == other.number
        return NotImplemented

    def __repr__(self):
        """Debug representation: the raw metadata document."""
        return str(self.data)
if __name__ == '__main__':
    # NOTE(review): 'pretzel' is not defined in this module; presumably it
    # is exported by pyscan.local's star import — confirm, otherwise this
    # raises NameError when run as a script.
    b = Barcode(pretzel)
|
Firm Profile | Dhaval Patel Associates | Advocate Dhaval P. Patel in Ahmedabad, Gujarat, India.
Dhaval Patel Associates is a full service Law Firm based in Ahmedabad, Gujarat, India, with a long-established practice since 1995. The Firm's approach rests on following the traditions of integrity and commitment with an aim to deliver innovative and pragmatic legal solutions to its clients.
Each State has a Bar Council of its own whose function is to enroll the Advocates willing to practice predominately within the territorial confines of that State and to perform the functions of the Bar Council of India within the territory assigned to them. Therefore each law degree holder must be enrolled with a (single) State Bar Council to practice in India. However, enrollment with any State Bar Council does not restrict the Advocate from appearing before any court in India, even though it is beyond the territorial jurisdiction of the State Bar Council with he is enrolled in.
In India, the law relating to the Advocates is the Advocates Act, 1961 introduced and thought up by Ashoke Kumar Sen, the then law minister of India, which is a law passed by the Parliament and is administered and enforced by the Bar Council of India. Under the Act, the Bar Council of India is the supreme regulatory body to regulate the legal profession in India and also to ensure the compliance of the laws and maintenance of professional standards by the legal profession in the country. For this purpose, the Bar Council of India is authorized to pass regulations and make orders in individual cases and also generally.
At Dhaval Patel Associates we aim to deliver innovative and pragmatic outcomes for our clients, with integrity and commitment, to meet the needs and intentions of its clients.
Dhaval Patel Associates is a full service Law Firm based in Ahmedabad. long established Practice since 1995. The Firm's approach rests on following the traditions of integrity and commitment with an aim to deliver innovative and pragmatic legal solutions to its clients.
At Dhaval Patel Associates we have experienced lawyers who are able to assist you to find practical solutions to your matters and help you to achieve the best possible outcome.
32, Astha complex, Opp. RTO, Nr. Collecter Office,Subhash Bridge, Ahmedabad-380027, Gujarat, India.
|
import sys
import os
PROJECT_HOME = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))
sys.path.append(PROJECT_HOME)
import unittest
from datetime import datetime
from mock import Mock, patch
from adsdata.metrics import Metrics
from adsdata.models import NonBibTable
class metrics_test(unittest.TestCase):
    """tests for generation of metrics database"""
    # Fixture 1: unrefereed, uncited record with reads/downloads history.
    t1 = NonBibTable()
    t1.bibcode = "1998PPGeo..22..553A"
    t1.refereed = False
    t1.authors = ["Arnfield, A. L."]
    t1.downloads = []
    t1.reads = [1, 2, 3, 4]
    # NOTE(review): overwrites the empty downloads list assigned two lines
    # up; the earlier assignment appears redundant.
    t1.downloads = [0, 1, 2, 3]
    t1.citations = []
    t1.id = 11
    t1.reference = ["1997BoLMe..85..475M"]
    # Fixture 2: refereed record cited by three bibcodes (including t1).
    t2 = NonBibTable()
    t2.bibcode = "1997BoLMe..85..475M"
    t2.refereed = True
    t2.authors = ["Meesters, A. G. C. A.", "Bink, N. J.", "Henneken, E. A. C.", "Vugts, H. F.", "Cannemeijer, F."]
    t2.downloads = []
    t2.reads = []
    t2.citations = ["2006QJRMS.132..779R", "2008Sci...320.1622D", "1998PPGeo..22..553A"]
    t2.id = 3
    t2.reference = ["1994BoLMe..71..393V", "1994GPC.....9...53M", "1997BoLMe..85...81M"]
    # Both fixtures, in the order the tests expect.
    test_data = [t1, t2]
def setUp(self):
# perhaps not useful with only two sample data sets
self.no_citations = [x for x in metrics_test.test_data if not x.citations]
self.citations = [x for x in metrics_test.test_data if x.citations]
def test_trivial_fields(self):
"""test fields that are not transformed"""
with patch('sqlalchemy.create_engine'):
met = Metrics()
for record in self.no_citations:
metrics_dict = met.row_view_to_metrics(record, None)
self.assertEqual(record.bibcode, metrics_dict.bibcode, 'bibcode check')
self.assertEqual(record.citations, metrics_dict.citations, 'citations check')
self.assertEqual(record.reads, metrics_dict.reads, 'reads check')
self.assertEqual(record.downloads, metrics_dict.downloads, 'downloads check')
def test_num_fields(self):
"""test fields based on length of other fields"""
with patch('sqlalchemy.create_engine'):
met = Metrics()
for record in self.no_citations:
metrics_dict = met.row_view_to_metrics(record, None)
self.assertEqual(metrics_dict.citation_num, len(record.citations), 'citation number check')
self.assertEqual(metrics_dict.reference_num, len(record.reference), 'reference number check')
self.assertEqual(metrics_dict.author_num, len(record.authors), 'author number check')
self.assertEqual(metrics_dict.refereed_citation_num, 0, 'refereed citation num')
def test_with_citations(self):
"""test a bibcode that has citations"""
test_row = metrics_test.t2
t2_year = int(metrics_test.t2.bibcode[:4])
today = datetime.today()
t2_age = max(1.0, today.year - t2_year + 1)
# we mock row view select for citation data with hard coded results
# for row_view_to_metrics to use ([refereed, len(reference), bibcode], ...)
m = Mock()
m.schema = "None"
m.execute.return_value = (
[True, 1, "1994BoLMe..71..393V"],
[False, 1, "1994GPC.....9...53M"],
[True, 1, "1997BoLMe..85...81M"])
with patch('sqlalchemy.create_engine'):
met = Metrics()
metrics_dict = met.row_view_to_metrics(metrics_test.t2, m)
self.assertEqual(len(metrics_dict.citations), 3, 'citations check')
self.assertEqual(len(metrics_dict.refereed_citations), 2, 'refereed citations check')
self.assertEqual(metrics_dict.refereed_citations[0], "1994BoLMe..71..393V", 'refereed citations check')
self.assertEqual(metrics_dict.refereed_citations[1], "1997BoLMe..85...81M", 'refereed citations check')
rn_citation_data_0 = {'ref_norm': 0.2, 'pubyear': 1997, 'auth_norm': 0.2,
'bibcode': '1994BoLMe..71..393V', 'cityear': 1994}
self.assertEqual(metrics_dict.rn_citation_data[0], rn_citation_data_0, 'rn citation data')
self.assertAlmostEqual(metrics_dict.an_refereed_citations, 2. / t2_age, 5, 'an refereed citations')
self.assertAlmostEqual(metrics_dict.rn_citations, .6, 5, 'rn citations')
def test_validate_lists(self):
"""test validation code for lists
send both matching and mismatching data to metrics list validation and verify correct responses"""
# base data
rn_citation_data1 = Mock()
rn_citation_data1.refereed_citations = ["2015MNRAS.447.1618S", "2016MNRAS.456.1886S", "2015MNRAS.451..149J"]
rn_citation_data1.rn_citation_data = [{"ref_norm": 0.0125, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.447.1618S", "cityear": 2015},
{"ref_norm": 0.012048192771084338, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2016MNRAS.456.1886S", "cityear": 2016},
{"ref_norm": 0.02702702702702703, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.451..149J", "cityear": 2015}]
# only slightly different than base, should not be a mismatch
rn_citation_data1a = Mock()
rn_citation_data1a.refereed_citations = ["2015MNRAS.447.1618S", "2016MNRAS.456.1886S", "2015MNRAS.451..149Z"]
rn_citation_data1a.rn_citation_data = [{"ref_norm": 0.0125, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.447.1618Z", "cityear": 2015},
{"ref_norm": 0.012048192771084338, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2016MNRAS.456.1886S", "cityear": 2016},
{"ref_norm": 0.02702702702702703, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.451..149J", "cityear": 2015}]
# very different from base, should be a mismatch
rn_citation_data2 = Mock()
rn_citation_data2.refereed_citations = ["2015MNRAS.447.1618Z", "2016MNRAS.456.1886Z", "2015MNRAS.451..149Z", "2015MNRAS.451..149Y"]
rn_citation_data2.rn_citation_data = [{"ref_norm": 0.0125, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.447.1618Z", "cityear": 2015},
{"ref_norm": 0.012048192771084338, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2016MNRAS.456.1886Z", "cityear": 2016},
{"ref_norm": 0.02702702702702703, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.451..149Z", "cityear": 2015},
{"ref_norm": 0.02702702702702703, "pubyear": 2014, "auth_norm": 0.3333333333333333,
"bibcode": "2015MNRAS.451..149Y", "cityear": 2015}]
logger = Mock()
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'rn_citation_data', rn_citation_data1, rn_citation_data1, logger)
self.assertFalse(mismatch, 'validate rn_citation_data')
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'rn_citation_data', rn_citation_data1, rn_citation_data2, logger)
self.assertTrue(mismatch, 'validate rn_citation_data')
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'rn_citation_data', rn_citation_data1, rn_citation_data1a, logger)
self.assertFalse(mismatch, 'validate rn_citation_data')
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'refereed_citations', rn_citation_data1a, rn_citation_data1a, logger)
self.assertFalse(mismatch, 'validate refereed_citations')
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'refereed_citations', rn_citation_data1, rn_citation_data1, logger)
self.assertFalse(mismatch, 'validate refereed_citations')
mismatch = Metrics.field_mismatch('2014AJ....147..124M', 'refereed_citations', rn_citation_data1, rn_citation_data2, logger)
self.assertTrue(mismatch, 'validate refereed_citations')
# Allow running this test module directly (e.g. `python test_metrics.py`);
# verbosity=2 prints one line per test.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
I have a banjolele in good condition for its age. It's all original except for the bridge, strings, and tuning peg head buttons, or whatever you call them. It has a great sound for its age. I did some research and it seems the Southern California Music Co. made them. I want to upgrade to a tenor uke, and since I'm a disabled veteran it's a limited income for me. I am asking $250. Here are some pics; tell me what you think. Thanks for looking. I will also trade for a decent tenor uke if anyone has one they are willing to part with.
Yup, I think their factory was in the LA area....does yours have the hole in the headstock??
Nope, it doesn't have the hole in the headstock, and it doesn't have the hardware at the tail. There are no holes where the hardware is on some of them; it strings right through the body.
|
import os
from setuptools import find_packages, setup
# Load the project README so PyPI can display it as the long description.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme_file:
    README = readme_file.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

# Package metadata gathered in one place, then handed to setup().
PACKAGE_METADATA = dict(
    name='djangocms-typedjs',
    version='1.0.0',
    license='MIT License',
    description='A Django CMS plugin that implements the Typed.js jQuery plugin.',
    long_description=README,
    url='https://github.com/tdsymonds/djangocms-typedjs',
    author='Tom Symonds',
    author_email='[email protected]',
    keywords='djangocms-typedjs, typedjs, typing, django',
    packages=[
        'djangocms_typedjs',
    ],
    include_package_data=True,
    install_requires=[
        'django-cms>=3.2',
        'djangocms-text-ckeditor',
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 1.8',
        'Framework :: Django :: 1.9',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)

setup(**PACKAGE_METADATA)
|
Present-day research and technical development _R&D_ is a core part of the modern poultry business!
FIT Farm Innovation Team R&D department invented and designed the innovative and successful broiler feeder pan for broiler cage & cage-free feeding systems.
_FITFEEDER _ broiler feeder pan prevents feed wastage!
FITFEEDER broiler feeder pan with low entrance of only 3,98 cm _amazing advantage for day old chicken!
FITFEEDER broiler feeder pan with 38 cm diameter and 1,19 m feed space _25% more feed space compared with existing feeders!
The 90° hinge _perfect for cleaning and bio security!
FITFEEDER broiler feeder pan lock system with two supports on each fastener.
FITFEEDER broiler feeder pan version _compartment feeder pan.
FITFEEDER broiler feeder pan version _smooth inner surface pan.
FITFEEDER broiler feeder pan designed for broiler cage multi-tier colony system _here with day old chicken.
FITFEEDER broiler feeder pan designed for broiler cage multi-tier colony system _here with adult birds.
FITFEEDER broiler feeder pan designed for litter _here with perfect acceptance of adult broilers.
FITFEEDER broiler feeder pan designed for slat floor _here opened for accessible cleaning and best bio security.
FITFEEDER broiler feeder pan designed for slat floor _here with GOLDEN BROILER aluminum injection feed hopper.
Complete FITFEEDER broiler feeder pan feeding line _with hopper extension up to 100 kg capacity and dust protected ball bearing feed auger drive unit.
Replace existing systems with FITFEEDER for profitable results with best FCR!
FITFEEDER is 100% designed, engineered & made in GERMANY!
Please check FIT Farm Innovation Team GOLDEN BROILER cage and cage-free systems to see FITFEEDER in operation!
|
# coding:utf-8
'''
@Copyright:LintCode
@Author: lilsweetcaligula
@Problem: http://www.lintcode.com/problem/convert-sorted-list-to-balanced-bst
@Language: Python
@Datetime: 17-02-16 16:14
'''
"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
    """
    @param head: The first node of linked list.
    @return: a tree node
    """

    def sortedListToBST(self, head, end=None):
        """Convert a sorted singly linked list into a height-balanced BST.

        Finds the middle node with a slow/fast pointer walk, makes it the
        root, and recurses on the two halves.  NOTE: the input list is
        modified in place (it is severed just before the midpoint).

        :param head: first ListNode of the sorted list, or None
        :param end: unused; kept for backward compatibility with callers
        :return: root TreeNode of a height-balanced BST, or None
        """
        if head is None:
            return None
        # Slow/fast pointer walk: when fast runs off the end, slow is the
        # middle node and prev is the node immediately before it.
        slow = head
        fast = head
        prev = slow
        while fast is not None and fast.next is not None:
            prev = slow
            slow = slow.next
            fast = fast.next.next
        # Sever the list so the left half terminates before the middle.
        prev.next = None
        mid = slow
        # When the middle is the head itself, the left half is empty.
        left = head if mid is not head else None
        right = mid.next
        root = TreeNode(mid.val)
        root.left = self.sortedListToBST(left)
        root.right = self.sortedListToBST(right)
        return root
|
You entered: keyword = geologist; age range = 0-99.
7 songs were found (see below). Songs listed are not necessarily endorsed by SingAboutScience.org.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.