text
stringlengths 29
850k
|
---|
import curses
import curses.textpad
import re
import signal
from math import floor
import yaml
from evdev import InputDevice
from approxeng.input.controllers import get_valid_devices
from approxeng.input.profiling import Profiler, ProfilerThread, Profile, BUTTON_NAMES, AXIS_NAMES
# Keyboard keys used to select an axis in the profiler GUI, in AXIS_NAMES order
# (key at index i selects AXIS_NAMES[i])
DEFAULT_AXIS_KEYS = ('z', 'x', 'c', 'v', 'b', 'n', 'm', ',')
# Keyboard keys used to select a button in the profiler GUI, in BUTTON_NAMES order
DEFAULT_BUTTON_KEYS = ('1', '2', '3', '4', '5', '6', '7', '8', 'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', 'a')
def profiler_main():
    """Entry point: list connected controllers, let the user pick one by
    number, then launch the curses profiling GUI for it.

    Exits the process with status 0 on non-numeric input or an
    out-of-range device number.
    """
    devices = list(get_valid_devices())
    if not devices:
        # Single guard replaces the original's duplicated `if devices:` checks
        print('No valid devices found, ensure your controller is connected?')
        return
    print('Available input devices:\n')
    for index, device in enumerate(devices):
        print(f'[{index}] : {device.name} (vendor={device.info.vendor}, product={device.info.product})')
    choice = input('\nEnter a device number to continue (0): ')
    try:
        # Empty input defaults to device 0
        selected = int(choice) if choice != '' else 0
    except ValueError:
        print('Input must be a number, exiting.')
        exit(0)
    if not 0 <= selected < len(devices):
        print('Device number must be one of the ones above, exiting.')
        exit(0)
    # NOTE: kept outside the try block — the original wrapped this call in the
    # ValueError handler, so a ValueError raised inside the GUI was
    # misreported as "Input must be a number".
    run_profiler_gui(devices[selected])
def run_profiler_gui(device: InputDevice, button_keys=DEFAULT_BUTTON_KEYS, axis_keys=DEFAULT_AXIS_KEYS):
    """Build the curses main function for *device* and run it under curses.wrapper."""
    gui_main = build_profiler_gui(device=device, button_keys=button_keys, axis_keys=axis_keys)
    curses.wrapper(gui_main)
def build_profiler_gui(device: InputDevice, button_keys=DEFAULT_BUTTON_KEYS, axis_keys=DEFAULT_AXIS_KEYS,
                       filename=None):
    """Create the curses main function used to profile *device*.

    Starts a background ProfilerThread watching evdev events, builds an empty
    Profile seeded with the device's identity, and returns a function suitable
    for passing to curses.wrapper. CTRL-C (SIGINT) saves the profile as YAML
    to *filename* (derived from the device name and ids when None) and exits.
    """
    profiler = Profiler(device=device)
    profiler_thread = ProfilerThread(profiler=profiler)
    profiler_thread.start()
    profile = Profile()
    profile.name = device.name
    profile.vendor_id = device.info.vendor
    profile.product_id = device.info.product

    def convert_device_name():
        # Lower-case the device name, strip punctuation, and collapse runs of
        # whitespace into single underscores so it is safe in a filename
        return re.sub(r'\s+', '_', re.sub(r'[^\w\s]', '', device.name.lower()))

    if filename is None:
        filename = f'{convert_device_name()}_v{device.info.vendor}_p{device.info.product}.yaml'

    def signal_handler(sig, frame):
        # CTRL-C: persist the profile to YAML, stop the watcher thread, exit
        with open(filename, 'w') as outfile:
            yaml.dump(profile.dict, outfile)
        profiler_thread.stop()
        exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    def curses_main(screen):
        """Redraw loop: render buttons/axes and handle selection keys."""
        try:
            display = DisplayState(screen=screen, profile=profile, profiler=profiler, axis_keys=axis_keys,
                                   button_keys=button_keys)
            curses.cbreak()
            # halfdelay makes getkey() time out after 1/10s so the UI refreshes
            curses.halfdelay(1)
            while True:
                display.start()
                display.println('Approxeng.input controller profiling tool')
                display.println('Select axis or button and activate corresponding control')
                # NOTE(review): placeholder text — presumably this was meant to
                # show {filename}; confirm before relying on it
                display.println(f'File : (unknown)')
                display.println(f'CTRL-C to exit and save YAML definition file')
                # display.println(f'{profiler.axis_changes}')
                display.newline()
                # Buttons are laid out four to a row, 20 columns apart
                display.print_header('Buttons')
                for index, button in enumerate(BUTTON_NAMES):
                    row, col = divmod(index, 4)
                    display.show_button(display.line + row, col * 20, button)
                display.line += floor((len(BUTTON_NAMES) - 1) / 4) + 1
                display.newline()
                # Axes are laid out two to a row, 40 columns apart
                display.print_header('Axes')
                for index, axis in enumerate(AXIS_NAMES):
                    row, col = divmod(index, 2)
                    display.show_axis(display.line + row, col * 40, axis)
                display.line += floor((len(AXIS_NAMES) - 1) / 2) + 1
                display.newline()
                # Context-sensitive help for the currently selected control
                if display.control_is_button:
                    display.println('Button selected - press control to assign or BACKSPACE to clear')
                elif display.control_is_axis:
                    # Axes whose name starts with 'd' are binary (d-pad style)
                    if display.control[0] == 'd':
                        display.println('Binary axis, press both corresponding buttons to assign')
                    else:
                        display.println('Analogue axis, move control to full extent to assign')
                    display.println('SPACE to toggle inversion, BACKSPACE to toggle enable / disable')
                try:
                    key = screen.getkey()
                    if key in button_keys and button_keys.index(key) < len(BUTTON_NAMES):
                        profiler.reset()
                        display.control = BUTTON_NAMES[button_keys.index(key)]
                    elif key in axis_keys and axis_keys.index(key) < len(AXIS_NAMES):
                        profiler.reset()
                        display.control = AXIS_NAMES[axis_keys.index(key)]
                    elif key == ' ' and display.control_is_axis:
                        profile.toggle_axis_invert(name=display.control)
                    elif key == 'KEY_BACKSPACE':
                        profiler.reset()
                        if display.control_is_button:
                            profile.set_button(name=display.control, code=None)
                        elif display.control_is_axis:
                            profile.toggle_axis_enable(name=display.control)
                    elif key == 'KEY_LEFT':
                        profiler.reset()
                        display.select_previous_control()
                    elif key == 'KEY_RIGHT':
                        profiler.reset()
                        display.select_next_control()
                except curses.error:
                    # Expect this when the key check times out
                    pass
        except KeyboardInterrupt:
            profiler_thread.stop()
            pass
    return curses_main
class DisplayState:
    """Curses UI state for the profiler.

    Tracks the screen, the Profile being built, the live Profiler, the
    key-to-control mappings, the next row to draw on (``line``) and which
    control is currently selected (``control``).
    """

    def __init__(self, screen, profile, profiler, axis_keys, button_keys):
        """Initialise curses modes/colours and remember collaborators.

        :param screen: curses window to draw on
        :param profile: Profile being assembled
        :param profiler: live Profiler supplying button/axis change events
        :param axis_keys: keyboard keys selecting axes (AXIS_NAMES order)
        :param button_keys: keyboard keys selecting buttons (BUTTON_NAMES order)
        """
        self.screen = screen
        self.profile = profile
        self.profiler = profiler
        self.axis_keys = axis_keys
        self.button_keys = button_keys
        # Next screen row to write to; reset by start() at each redraw
        self.line = 0
        # Flat selection order: all buttons first, then all axes
        self.all_controls = [*BUTTON_NAMES, *AXIS_NAMES]
        self.control = self.all_controls[0]
        # Disable echo to terminal
        curses.noecho()
        # Hide the cursor
        curses.curs_set(0)
        # Enable colour. FIX: start_color() must be called before any other
        # colour routine such as init_pair(); the original called it last and
        # only worked because curses.wrapper() had already initialised colour.
        curses.start_color()
        # Contrast colour for UI
        curses.init_pair(1, curses.COLOR_YELLOW, curses.COLOR_BLACK)
        # Highlight
        curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_WHITE)
        # Clear the screen
        screen.clear()
        # Enable key events for special keys i.e. arrows, backspace
        screen.keypad(True)

    def start(self):
        """Begin a redraw: clear the screen and reset the row cursor."""
        self.screen.clear()
        self.line = 0

    def println(self, string, contrast=False):
        """Write *string* on the current line and advance; ignore overflow errors."""
        try:
            if contrast:
                self.screen.addstr(self.line, 0, string, curses.color_pair(1))
            else:
                self.screen.addstr(self.line, 0, string)
        except curses.error:
            # Writing off-screen raises; skip the text but keep the layout
            pass
        self.line += 1

    def newline(self):
        """Leave a blank line."""
        self.line += 1

    def print_header(self, string):
        """Print a full-width section header padded with em-dashes."""
        s = '——' + string
        s += '—' * (80 - len(s))
        self.println(s, True)

    @property
    def control_is_button(self):
        """True when the selected control is a button."""
        return self.control in BUTTON_NAMES

    @property
    def control_is_axis(self):
        """True when the selected control is an axis."""
        return self.control in AXIS_NAMES

    def select_next_control(self):
        """Move the selection to the next control, wrapping round."""
        control_index = self.all_controls.index(self.control)
        self.control = self.all_controls[(control_index + 1) % len(self.all_controls)]

    def select_previous_control(self):
        """Move the selection to the previous control, wrapping round."""
        control_index = self.all_controls.index(self.control)
        self.control = self.all_controls[(control_index - 1) % len(self.all_controls)]

    def select_next_row(self):
        # Placeholder: row-wise navigation not implemented yet
        pass

    def select_previous_row(self):
        # Placeholder: row-wise navigation not implemented yet
        pass

    def show_axis(self, row, col, axis):
        """Draw one axis cell; when selected, fold live profiler data into the profile."""
        control = self.axis_keys[AXIS_NAMES.index(axis)]
        try:
            if self.control == axis:
                # Pick up either all changes, or binary changes only if the axis starts with 'd'
                changes = self.profiler.axis_changes if axis[0] != 'd' else self.profiler.binary_axis_changes
                if changes:
                    # Currently editing this axis, show live information if available
                    code, min_value, max_value, current_value = changes[0]
                    self.profile.set_axis_range(axis, code, min_value, max_value)
                    rep = self.profile.axes[axis].build_repr(axis=axis, control=control, current_value=current_value)
                else:
                    rep = self.profile.axes[axis].build_repr(axis=axis, control=control)
                self.screen.addstr(row, col, rep, curses.color_pair(2))
            else:
                self.screen.addstr(row, col, self.profile.axes[axis].build_repr(axis=axis, control=control))
        except curses.error:
            # Drawing outside the window is non-fatal; skip the cell
            pass

    def show_button(self, row, col, button):
        """Draw one button cell; when selected, bind the last pressed code to it."""
        control = self.button_keys[BUTTON_NAMES.index(button)]
        try:
            if self.control == button:
                if self.profiler.last_button_pressed:
                    self.profile.set_button(button, self.profiler.last_button_pressed)
                rep = f'[{control}] {button} : {self.profile.buttons[button] or "---"}'
                self.screen.addstr(row, col, rep, curses.color_pair(2))
            else:
                rep = f'[{control}] {button} : {self.profile.buttons[button] or "---"}'
                self.screen.addstr(row, col, rep)
        except curses.error:
            pass
|
“There are lot of technologies there. There are organizations with developing technologies, available in the Philippines and elsewhere. But the thing is how it will reach farmers? We are looking at that gap and how we can fill up that gap,” CIP Asia Regional Director Samarendu Mohanty said in an interview on Thursday, on the sidelines of the Opportunities for a Climate-Smart Food System in the Philippines Regional Policy Forum held in the Ortigas district of Pasig City.
“There are different technologies but farmers don’t have any clue,” Mr. Mohanty said.
“We are trying to identify who the trusted agencies are where farmers can get the information,” Mr. Mohanty said.
Mr. Mohanty said when climate change effects are becoming evident, a shift in the planting calendar is necessary. He said sweet potatoes are resilient crops that can survive floods, and are recommended for planting during the rainy season, while the dry season can be devoted to other crops.
“Kamote, or sweet potato, is a very resilient crop. It can survive typhoons and floods. (Farmers are) saying sweet potato can be very profitable. There are farmers who are making millions of pesos selling sweet potato. The problem is when you look at the Philippines, sweet potato production has been declining. It used to be 250,000 hectares in 1980. Now, it’s less than 80,000 hectares,” Mr. Mohanty said.
Mr. Mohanty said that a sweet potato farmer, on average, can earn around P100,000 from one hectare of land in 100 days.
“The best way is for the government to link the farmers to the market. There’s demand. More growth is coming from the processing industry. Farmers are saying they don’t have the linkages to go into processing. If they can get some value-addition, they can also make more money,” according to Mr. Mohanty.
CIP Senior Research Associate Arma Bertuso said that the Philippines is also not capable of exporting sweet potatoes due to its low production.
“When we had this round table discussion way back 2016, we also invited companies from the food industry. They said that our supply of kamote is not enough. There is need for kamote but at the same time, because this is an archipelagic country,” Ms. Bertuso said.
“Even if there is plenty of supply in Eastern Visayas, how do you get it to Manila? Who will shoulder the cost?” Ms. Bertuso added.
Mr. Mohanty, on the other hand, said that there is a need to tap local government units (LGUs) to facilitate loan programs and spread information to farmers with regard to processing technology and market linkages.
|
import socket
import sys
import os
# Server address and port come from the command line: client.py <host> <port>
HOST = sys.argv[1]
PORT = int(sys.argv[2])

s = socket.socket()
try:
    s.connect((HOST, PORT))
except OSError as exc:
    # FIX: narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) and chained so the root cause survives.
    raise Exception("Device not available to download from.") from exc

STAY = True  # loop flag for the interactive download prompt below
def ben(string):
    """Encode a str to UTF-8 bytes for sending on the socket.

    FIX: the original wrapped the result in bytes(), which is a no-op —
    str.encode() already returns a bytes object.
    """
    return string.encode("utf-8")
def bde(binary):
    """Decode UTF-8 bytes received from the socket into a str.

    FIX: the original wrapped the result in str(), which is a no-op —
    bytes.decode() already returns a str.
    """
    return binary.decode("utf-8")
# Interactive download loop: ask the user for a filename, query the server,
# then optionally stream the file to disk in 1 KB chunks.
while STAY == True:
    FILENAME = input("[SERVER]: Enter filename => ")
    s.send(ben(FILENAME))
    response = bde(s.recv(1024))
    # Protocol: server replies "EXISTS<size>" when the file is available
    if response[:6] == "EXISTS":
        filesize = float(response[6:])
        confirm = input("[SERVER]: "+FILENAME+" is "+str(filesize)+" bytes, download? (Y/N) => ").upper()
        if confirm == "Y":
            # Tell the server to start sending, then receive until we have
            # at least `filesize` bytes
            s.send(ben("OK"))
            newname = input("[DEVICE]: Name new file => ")
            f = open(newname, "wb")
            data = s.recv(1024)
            totalRecv = len(data)
            f.write(data)
            while totalRecv < filesize:
                data = s.recv(1024)
                totalRecv += len(data)
                f.write(data)
                print(str((totalRecv/float(filesize))*100)+"% complete")
            print("Download complete!")
            f.close()
        elif confirm == "N":
            # NOTE(review): declining sends nothing back — presumably the
            # server is left waiting for "OK"; confirm against the server code
            pass
    else:
        print("[SERVER]: Does not exist!")
# NOTE(review): STAY is never set to False, so this loop only ends when the
# process is interrupted or a socket call fails
|
Over the last few years, the number of people using social media has shown surprising growth. Whether it is for earning purposes, social networking or just an internet presence, people are connected with social media. How many people use social media? According to social media statistics, in 2015-2016 the total number of users was 181.7 to 216.5 million, while 2017 statistics put the figure at about 250.8 million social media users. The estimate for the following year is 283 million in India alone. It is expected that by 2020 there will be more than 2.95 billion social media users worldwide. This tutorial will explain how many people have social media.
As you know the social media is growing faster than in the last years. It is an essential part of our business awareness to track the important data to the relevant.
How Many People Have Social Media?
There may be half the population using the social media worldwide. The remaining will definitely be infant or children while the students can also be included in this estimated data. From the over 3 billion people, there are 2 billion active social media users. This indicates how it has made its Pan presence. Here we go to discover the aim having the social media presence.
Gone are the days when only physical strength was nominated for the earning sources. The new technologies and the talent to use these technologies can make you earn that you deserve. Social media is reshaping the present times business. No doubt internet has global accessibility. The powerful elements of social media are blogs, photo sharing platforms, business networks, social gaming, chat apps, microblogs, forums, etc.
Content is the hero for digital marketing, blindly published blogs and articles are the weapons of online marketing. Web Designing, SEO, PHP, etc. are the sources to enter into online business and improve ranking. Social Media is the best way to improve the ranking of your business. More you improve your ranking more you grow your business. The growth of online business/digital marketing also depends on how many people use social media.
Nowadays as compared to the face to face conversation, people prefer the interpersonal communication. A number of social media networks help to fulfill this purpose. Among Twitter, Linkedin, Pinterest, YouTube, Google Plus, etc. Facebook is the prominent choice of people. As of January 2017, the Facebook has 1,871 million users worldwide. The other social media networks are used less by near about 1000 million. Among the users of SMN, Facebook has the huge lead with its 89% penetration in the competitive marketing. On the other side, Instagram comes on the second rank with 32% penetration. More the people make their presence more it will grow the engagement with the audience. At present Facebook achieved the greatest engagement with the consumer panel. How many people have the Social Media networks, its simple answer is a growth of mobile devices? A number of devices including, laptop, internet compatible handsets, iPad, etc. are used to sign up with the social media sites and grow your presence. Social skills and web access begin with the mobile platforms, etc. it is the most comfortable and common accessible source.
Social Media is a dynamic platform, keep up yourself with the latest trends of social media networks. The most beneficial fact of social media is that it never goes in dark but grow ahead by leaps and bounds.
|
"""Module for calculation of Wind chill.
Wind-chill or windchill (popularly wind chill factor) is the lowering of
body temperature due to the passing-flow of lower-temperature air.
Wind chill numbers are always lower than the air temperature for values
where the formula is valid.
When the apparent temperature is higher than the air temperature,
the heat index is used instead.
Check wikipedia for more info:
https://en.wikipedia.org/wiki/Wind_chill
Formula details:
https://www.wpc.ncep.noaa.gov/html/windchill.shtml
"""
from .temperature import Temp, F
def wind_chill(temperature, wind_speed):
    """Calculate Wind Chill (feels like temperature) based on NOAA.

    The result is a Temp whose default unit is Fahrenheit; that unit is used
    when the value is cast to int/float. Convert via the Temp properties for
    Celsius (Temp.c) or Kelvin (Temp.k).

    The formula is only defined for air temperatures at or below 50 F
    combined with wind speeds above 3 mph.

    :param temperature: temperature value in Fahrenheit or Temp instance.
    :type temperature: int, float, Temp
    :param wind_speed: wind speed in mph
    :type wind_speed: int, float
    :returns: Wind chill value
    :rtype: Temp
    :raises ValueError: when outside the formula's valid range
    """
    air_temp = temperature.f if isinstance(temperature, Temp) else temperature
    if air_temp > 50 or wind_speed <= 3:
        raise ValueError(
            "Wind Chill Temperature is only defined for temperatures at"
            " or below 50 F and wind speeds above 3 mph.")
    # V ** 0.16 appears twice in the NOAA formula; compute it once
    wind_term = wind_speed ** 0.16
    feels_like = 35.74 + (0.6215 * air_temp) - 35.75 * wind_term + 0.4275 * air_temp * wind_term
    return Temp(feels_like, unit=F)
|
Ideally you should be visiting this page, and only you, the Mulle kybernetiK servers and all routers inbetween should know about this.
First of all, this weblog is tracked by a local Matomo. I use it because the webserver log contains very little useful information, due to bots and attacks. I use these statistics to figure out, which articles are popular and try to either embellish those or keep them updated. I think this is OK, because the tracking is just on this site, and the data isn’t shared with anyone. You can opt out using the “do not track” setting of your browser.
The worst offender is the "Twitter" byline. I don't think Twitter is in the tracking business yet, but it could be. Assume you are tracked by Twitter.
The page also used the Hacker News Button for a short time on 18. Nov. 2015. That buttons back-end claimed to be hosted on Heroku, which I thought to be safe. Indeed though it is hosted on Appspot.com. Which is owned by Google. Mega-fail.
How you are tracked on this weblog was published on November 15, 2015 and last modified on January 23, 2019 .
|
"""
Fabfile for deploying and setting up code that looks like the production
environment. it also makes it easy to start up the servers
If you want to run on the localhost you may need to first do::
rm -rf ~/.ssh/known_hosts
"""
from __future__ import with_statement
import os
import re
from fabric.api import local, settings, abort, run , cd, env, lcd, sudo, prompt
from fabric.contrib.console import confirm
from fabric.contrib import files
# Fabric role map; only a "local" role pointing at localhost is defined here
env.roledefs = {'local':['localhost']}
env.use_ssh_config=True
# Release tags look like <major>.<minor>.<maintenance>, e.g. 0.0.1
TAG_REGEX = re.compile('^[0-9]+\.[0-9]+\.[0-9]+')
# Marker embedded in a tag message to flag that tag as a stable release
STABLE_MSG = '**stable**'
# Absolute directory containing this fabfile
LINK_CODE_DIR = os.path.split(os.path.abspath(__file__))[0]
def dir_code_base():
    """
    Return the working code directory for the current host: the current
    directory when running against localhost, otherwise code_dir.
    """
    running_locally = 'localhost' in env.host_string
    return os.getcwd() if running_locally else code_dir
def dir_scripts():
    """
    Return the directory that houses all the runnable scripts.
    """
    return '{0}/scripts'.format(dir_code_base())
# Remote directory holding the link configuration file (see configure())
config_dir = '~/.link'
def test_install():
    """
    Smoke-test installation into a fresh virtualenv, with LNK_DIR pointed at
    a nonsense path and any previous build directory removed first.
    """
    import os
    os.environ['LNK_DIR']='saodusah'
    for command in ('echo $LNK_DIR',
                    'virtualenv env',
                    'rm -rf build',
                    'source env/bin/activate && python setup.py install'):
        local(command)
def configure():
    """
    Ensure the base configuration exists remotely: the ~/.link directory and
    an (initially empty) link.config file inside it.
    """
    config_file = '%s/link.config' % config_dir
    if not files.exists(config_dir):
        run('mkdir %s' % config_dir)
    if not files.exists(config_file):
        run('touch %s' % config_file)
def script(script_name, command = 'python', **args):
    """
    Run a script from the scripts folder, forwarding keyword args as
    command-line flags:

    fab -R local script:example.py,arg1=value1,arg2=value2

    results in running:

    <command> <scripts_directory>/<scriptname> --arg1=value1 --arg2=value2
    """
    with cd(dir_scripts()):
        flags = ''
        if args:
            flags = ' '.join('--%s=%s' % pair for pair in args.iteritems())
        run("%s %s %s" % (command , script_name, flags))
def commit(msg=None):
    """
    Commit all local changes to git after interactive confirmation.

    :param msg: commit message; prompted for when None
    :returns: True when a commit was actually created, False otherwise
    """
    print '---Commiting---'
    print
    msg = msg or prompt('Commit message: ')
    commit = False
    commit = prompt('Confirm commit? [y/n]') == 'y'
    if commit:
        # warn_only so a "nothing to commit" exit status doesn't abort the run
        with settings(warn_only=True):
            _commit = not local('git commit -a -m "%s"' % msg).failed
        if not _commit:
            #nothing was committed
            commit = False
            print "Nothing to commit"
    else:
        abort('commit aborted')
    print
    print '---Done---'
    return commit
def tag_names(number = 10, stable=False):
number = int(number)
print "fetching tags first"
local('git fetch --tags ')
print "Showing latest tags for reference"
tags = local('git tag -n1 ', capture = True)
tags = [x for x in tags.split('\n') if TAG_REGEX.findall(x) and
(not stable or STABLE_MSG in x)]
tags.sort(reverse=True)
#take the first <number> things in the list
tags = tags[0:min(len(tags), number)]
print '\n'.join(tags)
print
return tags
def check_tag_format(tag):
"""
Checks the tag format and returns the component parts
"""
parsed = tag.split('.')
try:
#allow for at most 2 minor decimals...i mean comeon
major = int(parsed[0])
minor = int(parsed[1])
build = int(parsed[2][0:2])
return (major, minor, build)
except Exception as e:
print e
abort("""Must be of the form <major_version>.<minor>.<maintence>, like
0.0.1. Only integers allowed""")
def write_version(version):
    """
    Write out the version python file to the link directory before installing.

    :param version: either a (major, minor, build) sequence of ints or a
        string in the format <major>.<minor>.<build>
    :raises Exception: when the version is malformed, or when the file does
        not contain exactly one __version__ line

    Fixes over the original: an empty match list no longer raises IndexError;
    tuple/list input (documented but previously broken — it hit a NameError
    and then a %-formatting TypeError) now works; map() is materialised so
    the length check also works on Python 3.
    """
    file_name = 'link/__init__.py'
    init = open(file_name)
    try:
        init_read = init.readlines()
    finally:
        init.close()
    version_line = [idx for idx, x in enumerate(init_read) if '__version__ = ' in x]
    if len(version_line) > 1:
        raise Exception('version is in there more than once')
    if not version_line:
        raise Exception('no __version__ line found in %s' % file_name)
    if isinstance(version, str):
        try:
            version_split = list(map(int, version.split('.')))
        except Exception:
            raise Exception("Version string must be in the format <major>.<minor>.<build>")
    else:
        version_split = list(version)
    if len(version_split) != 3:
        raise Exception('invalid version %s' % (version,))
    version_string = '.'.join(str(part) for part in version_split)
    init_read[version_line[0]] = "__version__ = '%s'\n" % version_string
    init = open(file_name, 'w')
    try:
        init.write(''.join(init_read))
    finally:
        init.close()
def prompt_for_tag(default_offset=1, stable_only = False):
"""
Prompt for the tag you want to use, offset for the default by input
"""
tags = tag_names(10, stable_only)
print "Showing latest tags for reference"
default = '0.0.1'
if tags:
default = tags[0]
(major, minor, build) = check_tag_format(default)
build = build+default_offset
new_default = '%s.%s.%s' % (major, minor, build)
tag = prompt('Tag name [in format x.xx] (default: %s) ? ' % new_default)
tag = tag or new_default
return tag
def push_to_pypi():
    """
    Push the current code to pypi, optionally tagging a new version first.
    """
    wants_tag = prompt('would you like to tag a new version first [y/n]')
    if wants_tag == 'y':
        tag()
    local('python setup.py sdist upload')
def prompt_commit():
    """
    Show git status and ask whether to commit; returns commit()'s result
    when confirmed, otherwise None.
    """
    local('git status')
    print
    print
    if prompt('Do you want to commit? [y/n]') != 'y':
        return None
    return commit(prompt('Commit message: '))
def tag(mark_stable=False):
"""
Tag a release, will prompt you for the tag version. You can mark it as
stable here as well
"""
tag = prompt_for_tag()
print "writing this tag version to version.py before commiting"
write_version(tag)
print
_commit = prompt_commit()
print
if not _commit and not tag:
print
print "Nothing commited, using default tag %s" % default
print
tag = default
else:
msg = ''
if mark_stable:
msg = STABLE_MSG + ' '
msg += prompt("enter msg for tag: ")
local('git tag %(ref)s -m "%(msg)s"' % { 'ref': tag, 'msg':msg})
local('git push --tags')
return tag
def merge(branch=None, merge_to = 'master'):
    """
    Merge *branch* (default: the current branch) into *merge_to* after
    interactive confirmation, with optional branch deletion and push.
    """
    if not branch:
        print "no branch specified, using current"
        branch = current_branch()
    if prompt('confirm merge with of branch %s to %s [y/N]' % (branch, merge_to)) == 'y':
        # Offer to commit outstanding changes before switching branches
        prompt_commit()
        local('git checkout %s ' % merge_to)
        local('git merge %s ' % branch)
        if prompt('delete the old branch locally and remotely? [y/N]') == 'y':
            local('git branch -d %s' % branch)
            local('git push origin :%s' % branch)
        else:
            print "leaving branch where it is"
        if prompt('push results [y/N]' ) == 'y':
            local('git push')
def tag_deploy(mark_stable=False):
    """
    Prompt for a tag on the current branch, then deploy that tag to that
    branch via deploy().
    """
    local('git fetch --tags')
    active_branch = local('git branch | grep "^*" | cut -d" " -f2', capture=True)
    new_tag = tag(mark_stable=mark_stable)
    deploy(new_tag, active_branch)
def retag(tag, msg):
    """
    Re-point an existing tag at itself with a new message and push tags.
    """
    retag_command = 'git tag %s %s -f -m "%s"' % (tag, tag, msg)
    local(retag_command)
    local('git push --tags')
def mark_stable(tag, msg = None):
    """
    Mark a previously created tag as stable by re-tagging it with the
    stable marker in the message.

    :param tag: existing tag name
    :param msg: optional extra message text

    FIX: when msg was omitted the original formatted None into the message,
    producing the literal tag message "**stable** None".
    """
    retag(tag, ('%s %s' % (STABLE_MSG, msg)) if msg else STABLE_MSG)
def current_branch():
current_branch = local('git branch | grep "^*"', capture=True).lstrip('* ')
print "Current branch is %s" % current_branch
return current_branch
def deploy(tag=None, branch=None, stable_only=False):
    """
    This is only for deployment on a dev box where everything can be owned by
    this user. This is NOT for production deployment. Put's the code in
    code_dir.

    :param tag: tag to check out; prompted for when None
    :param branch: branch to check out and pull before switching to the tag
    :param stable_only: restrict the tag prompt to stable tags
    """
    # NOTE(review): code_dir, deploy_user and setup_environment are not
    # defined in this chunk — presumably module-level config/helpers defined
    # elsewhere; confirm before running.
    if not tag:
        tag = prompt_for_tag(0, stable_only = stable_only)
    configure()
    setup_environment()
    #check out all the code in the right place
    with cd(code_dir):
        # i **THINK** you have to have the branch checked out before you can
        # checkout the tag
        if branch:
            #then you haven't even checkout this branch
            branches = run('git branch')
            if branch not in branches:
                run('git checkout -b %s' % branch)
            _current_branch = current_branch()
            if "* %s" % branch != _current_branch:
                run('git checkout %s' % branch)
            #pull the latest
            run('git pull origin %s' % branch)
        else:
            run("git pull origin master")
        #check out a specific tag
        if tag:
            run("git fetch --tags")
            run("git checkout %s" % tag)
    #hacky
    if env.user == 'root':
        #make sure everything is still owned by the deployer
        run('chown -R %s %s' % (deploy_user, code_dir))
###
# How to setup a fresh box. You probably have to run this as root for it to
# work
###
def install_easy_install():
    """
    Installs setup tools via the bootstrap egg; this should also go into an
    RPM eventually.
    """
    for setup_cmd in ('wget http://pypi.python.org/packages/2.7/s/setuptools/setuptools-0.6c11-py2.7.egg#md5=fe1f997bc722265116870bc7919059ea',
                      'sh setuptools-0.6c11-py2.7.egg'):
        run(setup_cmd)
def install_python():
    """
    Download, build and install Python 2.7.2 from source; should eventually
    become an RPM.
    """
    run('wget http://python.org/ftp/python/2.7.2/Python-2.7.2.tgz')
    run('tar -xvf Python-2.7.2.tgz')
    with cd('Python-2.7.2'):
        for build_step in ('./configure', 'make', 'make install'):
            run(build_step)
###
# This isn't reall necessary but i'll keep it for now
###
def install_python_dependancies():
    """
    Easy install all the python packages the box needs.
    """
    packages = ('requests', 'numpy', 'pandas', 'happybase', 'flask',
                'ipython', 'gunicorn', 'link', 'pymongo', 'mysql-python',
                'docutils')
    for package in packages:
        run('easy_install %s' % package)
def install_box_libraries():
"""
Installs the libs you need like readlines and libsqlite. This will only
run on a ubuntu machine with apt-get
"""
with settings(warn_only=True):
has_apt = run('which apt-get')
if has_apt:
run('apt-get install make')
run('apt-get install libsqlite3-dev')
run('apt-get install libreadline6 libreadline6-dev')
run('apt-get install libmysqlclient-dev')
else:
print "this is not an ubuntu system...skipping"
def setup_box():
    """
    Will install python and all libs needed to set up this box to run the
    examjam code. Eventually this needs to be more RPM based.
    """
    #place_pub_key()
    for install_step in (install_box_libraries, install_python,
                         install_easy_install, install_python_dependancies):
        install_step()
|
When You Have To Factory Reset Your SYMPHONY ZVIII?
Are you looking for a way to make your SYMPHONY ZVIII work faster? Do you wish to clear all of the data on your SYMPHONY ZVIII before selling it to someone else? Would you like your system to work faster? Well, what you need is a factory reset.
What is it? A factory reset (aka hard reset) is an operation which deletes all data (including settings, applications, calendars, pictures, etc.) on your SYMPHONY ZVIII and brings back the default settings, making your device as if it came right from the manufacturer in 2017.
When do you need to perform such operation? When you need your SYMPHONY ZVIII to work faster, when there are some difficulties in the performance of the operating system or when you just want to get rid of everithing that has been stored in your SYMPHONY ZVIII. The SYMPHONY ZVIII was powered by Qualcomm Snapdragon 430 MSM8937 with 1400.0 MHZ and 4 cores chipset and after a while it's good idea to perform the hard reset in order to speed up device. The efficient processor and 3000.0 MB of RAM provide enormous performance after returning them to factory state. After the restoring the Li-Polymer 3000.0 battery should work longer. What's all important you will have the whole 16000 MB storage available.
HardReset.Info is a website with factory reset descriptions for more than twenty thousand devices. But that is not all. You will also find some useful tricks, interesting articles, videos with tutorials, answers to frequently asked questions and more. Just click on the Hard Reset SYMPHONY ZVIII button and you will see it all.
You can click here HardReset.Info YouTube Channel to find video related with SYMPHONY ZVIII.
|
import numpy
class TerrainType:
    """Integer ids for map tile types.

    The values double as indices into the dense lookup tables below
    (ABuildCosts / ATypeColors), so they must stay small and contiguous.
    """
    DEEPW = 0
    WATER = 9
    ROCKS = 10
    BOGGY = 11
    GRASS = 1
    SANDY = 2
    SNOWY = 3
    TREES = 4
    PLANK = 5
    FLOOR = 6
    ROOFD = 12
    WALLS = 7
    GLASS = 8

# Build/traversal cost per terrain type (higher = more expensive)
BuildCosts = {
    TerrainType.DEEPW: 80,
    TerrainType.WATER: 40,
    TerrainType.ROCKS: 8,
    TerrainType.BOGGY: 24,
    TerrainType.GRASS: 4,
    TerrainType.SANDY: 6,
    TerrainType.SNOWY: 10,
    TerrainType.TREES: 8,
    TerrainType.PLANK: 1,
    TerrainType.FLOOR: 1,
    TerrainType.ROOFD: 1,
    TerrainType.WALLS: 20,
    TerrainType.GLASS: 20
}

# Dense array form: ABuildCosts[terrain_id] -> cost (0 for unmapped ids).
# Uses dict.get / max(dict) instead of the original `d[i] if i in d else 0`
# and `max(d.keys())` — same values, idiomatic single lookup.
ABuildCosts = [BuildCosts.get(i, 0) for i in range(max(BuildCosts) + 1)]

# RGB display colour per terrain type
TypeColors = {
    TerrainType.DEEPW: (0 , 0 ,255),
    TerrainType.WATER: (0 ,127,255),
    TerrainType.ROCKS: (127,127,127),
    TerrainType.BOGGY: (64 ,127,127),
    TerrainType.GRASS: (0 ,255, 0),
    TerrainType.SANDY: (127,127, 0),
    TerrainType.SNOWY: (255,255,255),
    TerrainType.TREES: (64 ,127, 64),
    TerrainType.PLANK: (127, 64, 0),
    TerrainType.FLOOR: (255,255,127),
    TerrainType.ROOFD: (128, 0 ,128),
    TerrainType.WALLS: (0 , 0 , 0),
    TerrainType.GLASS: (0 ,255,255)
}

# Structured dtype so each entry is an (r, g, b) record of uint8
colordtype = numpy.dtype([('r', numpy.uint8), ('g', numpy.uint8), ('b', numpy.uint8)])
# Dense array form of TypeColors, indexable by terrain id
ATypeColors = numpy.array([TypeColors.get(i, 0) for i in range(max(TypeColors) + 1)], dtype=colordtype)
|
Following the success of the AXA Health Tech & You Awards Launch in October - we ae running another event in January to announce our partnership with eHealth Hub and the launch of a new Challenge! eHealth Hub is an EU-funded initiative that provides long-term support to ecosystem stakeholders and addresses key challenges facing European SMEs in the digital health space.
We invite you to join us for an inspiring evening of panel discussions, exhibitions and demonstrations with pioneers and experts in the health tech space, with food, drinks and networking amongst key individuals in the industry.
Dr Mike Short, Chief Scientific Adviser from the Department for International Trade and Sylvie Donnasson from eHealth Hub will lead a discussion on Global Britain and the future of health in Europe. Reema Patel, Programme Manager at the new Ada Lovelace Institute will join our experts from the AXA Health Tech & You Expert Group to share perspectives on ethics, equity and trust in health tech. Please scroll down for full agenda.
Our new challenge for 2019 is focusing on mobility and accessibility for older citizens.
These new categories are underpinned by principles embracing diversity, health equality and social inclusion. Entries will be judged in accordance with how the entrepreneurs have succeeded in making a difference to people’s lives through their innovative approach and attention to creating solutions to address significant unmet consumer needs.
Overall, the 2019 AXA Health Tech & You Awards programme is looking for standalone solutions to help citizens take charge of their health and wellbeing, as well as smart applications that enrich the relationship between people and their carers (whether health professionals, friends or family).
AXA is working with Collider Health and The Bakery, with support from our other partners including Women of Wearables, One HealthTech and Our Mobile Health to discover exciting and innovative health technology.
Richard Cooper, Head of Digital, AXA PPP healthcare, will present an overview of the Health Tech & You Awards programme with reference to the partnerships involved, including the latest partnership with E Health Hub.
Dr Mike Short, OBE, Chief Scientific Adviser at Department, International Trade (DIT) in conversation with Sylvie Donnasson, Consortium Partner of eHealth Hub, on what partnerships and collaborations will look like between Britain and Europe in health technology in the next 5 years.
Gordon Henderson, Marketing & Innovation Director at AXA PPP healthcare, will communicate details on how to enter the Awards programme.
|
#!/usr/bin/env python3
# Copyright (c) 2016 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Wait for Cozmo to see a face, and then turn on his backpack light.
This is a script to show off faces, and how they are easy to use.
It waits for a face, and then will light up his backpack when that face is visible.
'''
import asyncio
import time
import cozmo
def light_when_face(robot: cozmo.robot.Robot):
    '''Light Cozmo's backpack blue whenever a face is in view.

    Lowers the lift out of the camera's way, tilts the head up, then loops
    forever: the backpack is lit while the most recently observed face is
    visible and switched off otherwise.
    '''
    # Get the lift out of the camera's view and look up to face height.
    robot.move_lift(-3)
    robot.set_head_angle(cozmo.robot.MAX_HEAD_ANGLE).wait_for_completed()

    observed_face = None
    print("Press CTRL-C to quit")
    while True:
        # Reflect the current visibility of the last-seen face on the backpack.
        if observed_face and observed_face.is_visible:
            robot.set_all_backpack_lights(cozmo.lights.blue_light)
        else:
            robot.set_backpack_lights_off()

        # Block until a face is observed; give up after 30 seconds.
        try:
            observed_face = robot.world.wait_for_observed_face(timeout=30)
        except asyncio.TimeoutError:
            print("Didn't find a face.")
            return

        time.sleep(.1)
cozmo.run_program(light_when_face, use_viewer=True, force_viewer_on_top=True)
|
given to the musician and his CD was mention that this recording is on.
Eckankar.org. It’s an amazing teaching. Listen to the HU chant there also.
Welcome to the Make Your Future!
This is a meditation channel. Please Subscribe, if you like my meditations!
this subject I am waiting for you on there!
|
import os
import random
import secrets
import string

import config
class TokenControllerSingleton:
    """Lazily creates and caches the server's secret key.

    The key is persisted in ``config.SECRET_KEY_FILE``: it is generated once
    (when the file is missing or empty) and re-read on subsequent runs.
    Access the key through :meth:`get_secret_key`.
    """

    # Class-level cache, populated on first instantiation.
    secret_key = None

    @staticmethod
    def get_secret_key():
        """Return the cached secret key, loading or creating it on first use."""
        if TokenControllerSingleton.secret_key is None:
            # Instantiating the class fills the class-level cache.
            TokenControllerSingleton()
        return TokenControllerSingleton.secret_key

    def get_server_secret_key(self):
        """Read and return the key stored in the secret-key file."""
        with open(config.SECRET_KEY_FILE) as file:
            key = file.read()
        return key

    def secret_key_exists(self):
        """Return True if the secret-key file exists and is non-empty."""
        return os.path.isfile(config.SECRET_KEY_FILE) and os.stat(config.SECRET_KEY_FILE).st_size != 0

    def generate_secret_key(self):
        """Create a new random key, persist it to the key file, and return it.

        Uses ``secrets.choice`` rather than ``random.choice`` because the key
        is security-sensitive and the ``random`` module is not
        cryptographically strong.
        """
        letters_and_digits = string.ascii_letters + string.digits
        key = ''.join(secrets.choice(letters_and_digits) for _ in range(config.secret_key_length))
        with open(config.SECRET_KEY_FILE, 'w+') as file:
            file.write(key)
        return key

    def __init__(self):
        # Populate the class-level cache exactly once; later instantiations
        # are no-ops.
        if TokenControllerSingleton.secret_key is None:
            if self.secret_key_exists():
                TokenControllerSingleton.secret_key = self.get_server_secret_key()
            else:
                TokenControllerSingleton.secret_key = self.generate_secret_key()
|
Late afternoon sun spotlight of a group of trees in full fall glory. This is along the Newfound Gap Road inside Great Smoky Mountains. This is below the Chimney Tops area.
|
from cave.analyzer.base_analyzer import BaseAnalyzer
from cave.analyzer.feature_analysis.feature_analysis import FeatureAnalysis
from cave.utils.helpers import check_for_features
from cave.utils.hpbandster_helpers import format_budgets
class FeatureClustering(BaseAnalyzer):
    """ Clustering instances in 2d; the color encodes the cluster assigned to each cluster. Similar to ISAC, we use
    a k-means to cluster the instances in the feature space. As pre-processing, we use standard scaling and a PCA to
    2 dimensions. To guess the number of clusters, we use the silhouette score on the range of 2 to 12 in the number
    of clusters"""

    def __init__(self, runscontainer):
        """Run the clustering analysis once per budget and store the figures.

        Parameters
        ----------
        runscontainer
            Container of configurator runs; its scenario must provide
            instance features (verified by check_for_features below).
        """
        super().__init__(runscontainer)
        # Fail early if the scenario has no instance features to cluster.
        check_for_features(runscontainer.scenario)

        formatted_budgets = format_budgets(self.runscontainer.get_budgets())
        # One aggregated run per budget (folders merged).  NOTE(review): this
        # assumes get_budgets() and get_aggregated() yield in the same
        # order -- confirm against RunsContainer.
        for budget, run in zip(self.runscontainer.get_budgets(),
                               self.runscontainer.get_aggregated(keep_budgets=True, keep_folders=False)):
            imp = run.share_information['feature_importance']
            # self.result is presumably initialized by BaseAnalyzer -- verify.
            self.result[formatted_budgets[budget]] = self.feat_analysis(
                output_dir=run.output_dir,
                scenario=run.scenario,
                feat_names=run.feature_names,
                feat_importance=imp,
            )

    def get_name(self):
        """Return the human-readable section title used in the report."""
        return "Feature Clustering"

    def feat_analysis(self,
                      output_dir,
                      scenario,
                      feat_names,
                      feat_importance,
                      ):
        """Run the feature-space clustering for one budget.

        Returns
        -------
        dict
            ``{'figure': ...}`` holding the cluster plot produced by
            FeatureAnalysis.cluster_instances().
        """
        feat_analysis = FeatureAnalysis(output_dn=output_dir,
                                        scenario=scenario,
                                        feat_names=feat_names,
                                        feat_importance=feat_importance)
        return {'figure': feat_analysis.cluster_instances()}
|
DCH Group president Oleksandr Yaroslavsky gave Dnipro journalists a tour of Kharkiv International Airport on 4 November and offered a glimpse of what they can expect in their own city if he proceeds with plans to reconstruct Dnipro International Airport.
The Kharkiv-based Ukrainian businessman has repeatedly confirmed his willingness to build a new terminal complex in Dnipro but will only begin work once the state has allocated sufficient funding to finance the necessary reconstruction of the airport’s runway facilities. “I have USD 70 million ready to invest and am ready to begin on schedule,” he underlined.
He anticipated rapid progress once an upgraded Dnipro International Airport was operational. According to Yaroslavsky, DCH Group could use its existing industry contacts to attract new airlines to what would be one of Ukraine’s most attractive regional air hubs with a catchment area population of over ten million Ukrainians.
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.10.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1Project(object):
    """Project model from the Polyaxon OpenAPI specification.

    NOTE: the original class was auto generated by OpenAPI Generator
    (https://openapi-generator.tech).  This revision consolidates the 14
    identical getter/setter pairs into a small property factory and drops
    the ``six`` shim (this file targets Python 3).  External behavior --
    attribute access, ``to_dict``, equality, ``repr`` -- is unchanged.

    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    openapi_types = {
        'uuid': 'str',
        'owner': 'str',
        'name': 'str',
        'description': 'str',
        'tags': 'list[str]',
        'created_at': 'datetime',
        'updated_at': 'datetime',
        'is_public': 'bool',
        'bookmarked': 'bool',
        'readme': 'str',
        'excluded_runtimes': 'list[str]',
        'settings': 'V1ProjectSettings',
        'role': 'str',
        'live_state': 'int'
    }

    attribute_map = {
        'uuid': 'uuid',
        'owner': 'owner',
        'name': 'name',
        'description': 'description',
        'tags': 'tags',
        'created_at': 'created_at',
        'updated_at': 'updated_at',
        'is_public': 'is_public',
        'bookmarked': 'bookmarked',
        'readme': 'readme',
        'excluded_runtimes': 'excluded_runtimes',
        'settings': 'settings',
        'role': 'role',
        'live_state': 'live_state'
    }

    def _simple_property(name):  # helper, runs at class-creation time only
        """Build a plain get/set property backed by the private '_<name>' slot."""
        private_name = '_' + name

        def getter(self):
            return getattr(self, private_name)

        def setter(self, value):
            setattr(self, private_name, value)

        return property(getter, setter,
                        doc="The %s of this V1Project." % name)

    # One pass-through property per OpenAPI attribute; same contract as the
    # generated getter/setter pairs they replace.
    uuid = _simple_property('uuid')
    owner = _simple_property('owner')
    name = _simple_property('name')
    description = _simple_property('description')
    tags = _simple_property('tags')
    created_at = _simple_property('created_at')
    updated_at = _simple_property('updated_at')
    is_public = _simple_property('is_public')
    bookmarked = _simple_property('bookmarked')
    readme = _simple_property('readme')
    excluded_runtimes = _simple_property('excluded_runtimes')
    settings = _simple_property('settings')
    role = _simple_property('role')
    live_state = _simple_property('live_state')

    del _simple_property  # not part of the public API

    def __init__(self, uuid=None, owner=None, name=None, description=None, tags=None, created_at=None, updated_at=None, is_public=None, bookmarked=None, readme=None, excluded_runtimes=None, settings=None, role=None, live_state=None, local_vars_configuration=None):  # noqa: E501
        """V1Project - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        # All private slots default to None; only explicitly passed values
        # are routed through the property setters below.
        self._uuid = None
        self._owner = None
        self._name = None
        self._description = None
        self._tags = None
        self._created_at = None
        self._updated_at = None
        self._is_public = None
        self._bookmarked = None
        self._readme = None
        self._excluded_runtimes = None
        self._settings = None
        self._role = None
        self._live_state = None
        self.discriminator = None

        if uuid is not None:
            self.uuid = uuid
        if owner is not None:
            self.owner = owner
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if tags is not None:
            self.tags = tags
        if created_at is not None:
            self.created_at = created_at
        if updated_at is not None:
            self.updated_at = updated_at
        if is_public is not None:
            self.is_public = is_public
        if bookmarked is not None:
            self.bookmarked = bookmarked
        if readme is not None:
            self.readme = readme
        if excluded_runtimes is not None:
            self.excluded_runtimes = excluded_runtimes
        if settings is not None:
            self.settings = settings
        if role is not None:
            self.role = role
        if live_state is not None:
            self.live_state = live_state

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in self.openapi_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1Project):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1Project):
            return True

        return self.to_dict() != other.to_dict()
|
Mari Inc. - Very First Biographies - Emergent Readers » Boxed Sets » Mari Inc.
Share the real-life stories of 16 must-know people with these VERY easy 8-page biographies written just for young learners! Set comes with a 48-page teaching guide filled with graphic organizers and age-perfect reproducibles. Set includes 5 copies each of 16 titles for a total of 80 books. Titles include: Susan B. Anthony, Johnny Appleseed, Roberto Clemente, Christopher Columbus, Dian Fossey, Helen Keller, Martin Luther King, Jr., Abraham Lincoln, Barack Obama, Rosa Parks, Sally Ride, Betsy Ross, Squanto, Harriet Tubman, George Washington, and The Wright Brothers. Guided Reading Levels A-E. Grades K-1.
|
"""
Acquisition modules are the modules used to acquire data from the Red Pitaya.
At the moment, they include the
* :mod:`~pyrpl.hardware_modules.scope`,
* :mod:`~pyrpl.software_modules.network_analyzer`,
* :mod:`~pyrpl.software_modules.spectrum_analyzer`.
All the acquisition modules have in common a plot area where the data is
displayed and a control panel BELOW the plot for changing acquisition
settings. Widgets for specialized acquisition modules
(e.g. :class:`~pyrpl.hardware_modules.scope.Scope`) have an additional control
panel ABOVE the plot area for settings that are only available for that module.
The different buttons in the acquisition module control panel below the plot are:
- :attr:`~.AcquisitionModule.trace_average` chooses the number of successive traces to average together.
- :attr:`~.AcquisitionModule.curve_name` is the name for the next curve that is saved.
- :code:`Run single` starts a single acquisition of :code:`trace_average` traces (calls :meth:`.AcquisitionModule.single`).
- :code:`Run continuous` starts a continuous acquisition with a running
average filter, where :code:`trace_average` is the decay constant of the
running average filter (calls :meth:`.AcquisitionModule.continuous`).
- :code:`Restart average` resets trace averages to zero to start a new
measurement from scratch.
- :code:`Save curve` saves the current measurement data to a new
:class:`pyrpl.curvedb.CurveDB` object under the name
:attr:`~.AcquisitionModule.curve_name`.
"""
from . import ModuleWidget
from qtpy import QtCore, QtWidgets
class CurrentAvgLabel(QtWidgets.QWidget):
    """Two-line widget showing 'current_avg' above a centered 'n /' counter."""

    def __init__(self, parent=None):
        super(CurrentAvgLabel, self).__init__(parent)
        layout = QtWidgets.QVBoxLayout()
        self.main_lay = layout  # keep the attribute name used by callers
        self.setLayout(layout)
        self.label = QtWidgets.QLabel("current_avg")
        layout.addWidget(self.label)
        self.value_label = QtWidgets.QLabel("0 /")
        layout.addWidget(self.value_label)
        layout.addStretch(1)
        self.value_label.setAlignment(QtCore.Qt.AlignCenter)
        layout.setContentsMargins(0, 0, 0, 0)

    def set_value(self, val):
        """Display *val* as the numerator of the 'n /' counter."""
        self.value_label.setText('{0} /'.format(val))
class AcquisitionModuleWidget(ModuleWidget):
    """Base widget for acquisition modules (scope, network/spectrum analyzer).

    Builds the run/save button row below the plot and keeps the button texts
    and enabled states in sync with the module's running state.
    """

    def init_gui(self):
        """Create the acquisition control buttons and lay them out in one row."""
        self.button_single = QtWidgets.QPushButton("Run single")
        self.button_single.clicked.connect(self.run_single_clicked)
        self.button_continuous = QtWidgets.QPushButton("Run continuous")
        self.button_continuous.clicked.connect(self.run_continuous_clicked)
        self.button_restart_averaging = QtWidgets.QPushButton(
            'Restart averaging')
        self.button_restart_averaging.clicked.connect(self.restart_clicked)
        self.button_save = QtWidgets.QPushButton("Save curve")
        self.button_save.clicked.connect(self.module.save_curve)
        self.current_avg_label = CurrentAvgLabel()
        # Move trace_average and curve_name from the attribute row into the
        # button row so all acquisition controls sit together.
        aws = self.attribute_widgets
        self.attribute_layout.removeWidget(aws["trace_average"])
        self.attribute_layout.removeWidget(aws["curve_name"])
        self.button_layout.addWidget(self.current_avg_label)
        self.button_layout.addWidget(aws["trace_average"])
        self.button_layout.addWidget(aws["curve_name"])
        self.button_layout.addWidget(self.button_single)
        self.button_layout.addWidget(self.button_continuous)
        self.button_layout.addWidget(self.button_restart_averaging)
        self.button_layout.addWidget(self.button_save)
        self.main_layout.addLayout(self.button_layout)
        self.button_layout.setStretchFactor(self.button_single, 1)
        self.button_layout.setStretchFactor(self.button_continuous, 1)
        self.button_layout.setStretchFactor(self.button_restart_averaging, 1)
        self.button_layout.setStretchFactor(self.button_save, 1)
        self.button_layout.addStretch(1)
        self.attribute_layout.setStretch(0, 0)  # since widgets are all removed
        # and re-added, the stretch ends up on the left, so better cancel it
        # and make a new one at the end

    def run_single_clicked(self):
        """Start a single acquisition, or stop it if one is already running.

        The button's current text encodes which action applies.
        """
        if str(self.button_single.text()).startswith("Run single"):
            self.module.single_async()
        else:
            self.module.stop()

    def run_continuous_clicked(self):
        """
        Toggles the button run_continuous to stop or vice versa and starts
        the acquisition timer
        """
        if str(self.button_continuous.text()).startswith("Run continuous"):
            self.module.continuous()
        else:
            self.module.pause()

    def restart_clicked(self):
        """Reset the running average, resuming acquisition if it was active."""
        old_running_state = self.module.running_state
        self.module.stop()
        # Resume in whatever mode the module was in before the reset.
        if old_running_state in ["running_single", "running_continuous"]:
            self.module.running_state = old_running_state
        self.update_current_average()

    def update_current_average(self):
        """Refresh the 'n /' counter from the module's current average count."""
        self.current_avg_label.set_value(self.module.current_avg)

    def update_running_buttons(self):
        """
        Change text of Run continuous button and visibility of run single button
        according to module.running_continuous
        """
        self.update_current_average()
        if self.module.current_avg > 0:
            number_str = ' (' + str(self.module.current_avg) + ")"
        else:
            number_str = ""
        if self.module.running_state == 'running_continuous':
            #if self.module.current_avg >= self.module.trace_average:
            #    # shows a plus sign when number of averages is available
            #    number_str = number_str[:-1] + '+)'
            self.button_continuous.setText("Pause")  # + number_str)
            self.button_single.setText("Run single")
            # Single runs are not allowed while continuous mode is active.
            self.button_single.setEnabled(False)
        else:
            if self.module.running_state == "running_single":
                self.button_continuous.setText("Run continuous")
                self.button_single.setText("Stop")  # + number_str)
                self.button_single.setEnabled(True)
            else:
                self.button_continuous.setText("Run continuous")  # + number_str)
                self.button_single.setText("Run single")
                self.button_single.setEnabled(True)
|
Justine Beauregard and her husband were sitting on the couch, watching television.
Quality time together, right? Justine thought so— until her husband glanced at her, clearly annoyed. “Are you planning on spending time with me, or will you be on your phone all night?” he asked.
Ironically, the more mobile phones connect us to the world, the more disconnected we become. And that can wreak havoc on our relationships, our happiness, and our home lives.
Looking for more incentive to step away from the phone? Here are three good reasons to disconnect.
1. You will be more interesting to friends and more appealing to lovers.
2. You won’t seem like a jerk.
No one wants to be considered boorish or inappropriate, but if you are overly attached to your phone, your behavior is probably both.
Nearly one-third of phone users admit to checking their phones while dining with others according to a 2012 study, 2 and 10 percent said they check their phones during religious services. More than one-third check their phones while using the bathroom. More than half take their phones to bed—checking them before drifting off to sleep, in the middle of the night, and when they wake up.
The researchers describe this behavior as a “new mobile mindset.” Your dining companions, lovers, and fellow worshippers would call it something else: rude.
3. You will get more out of life.
When you are gazing at your phone, you’re missing out on everything else. Your phone use “robs you of the beautiful trees, sunshine, and birds—key elements of mental health and well-being,” says Dr. Bobinet.
And your family members—especially children—miss you.
Look around. If you are in a public place, chances are you’re surrounded by similarly blank faces. “That’s what we look like when we are staring at our phones,” Burd notes.
As for Justine, today she allocates specific times for phone use, keeping it silent the rest of the day. She shuts it off at bedtime. And she wrote a book on balance in life titled To the Women Who Want It All (Amazon Digital Services, 2014; $9.99). “I wrote my book as I curbed use of my cell phone,” she says.
1.Przybylski A, Weinstein N. Can you connect with me now? How the presence of mobile communication technology influences face-to-face conversation quality. Journal of Social and Personal Relationships. 2013;30(3):237-46. doi: 10.1177/0265407512453827.
2. Mobile Mindset Study. Lookout website. Available at . Accessed April 17, 2015.
Get enterprise mobile security best practices & see the latest research from Lookout now.
3.Abigail Burd. Why I Want to Look at My Phone Less around My Baby (blog). Available at Accessed April 17, 2015.
|
import sys
from gmpy_cffi.interface import ffi, gmp
# Python 3 compatibility: map the removed Python 2 builtins onto their
# Python 3 equivalents so the rest of the module can use them unconditionally.
if sys.version > '3':
    long = int
    xrange = range

# Cache tuning knobs shared by the mpz/mpq/mpfr/mpc free-lists below.
cache_size = 100      # number of pre-allocated objects kept per type
cache_obsize = 128    # maximum per-object size kept when recycling


def get_cache():
    """
    get_cache() -> (cache_size, object_size)

    Return the current cache size (number of objects) and maximum size
    per object (number of limbs) for all GMPY2 objects.
    """
    return (cache_size, cache_obsize)
def set_cache(size, obsize):
    """
    set_cache(cache_size, object_size)

    Set the current cache size (number of objects) and the maximum size
    per object (number of limbs), then rebuild the per-type caches.

    Raises TypeError if either argument is not an integer, and ValueError
    if cache size exceeds 1000 or object size exceeds 16384.
    """
    global cache_size, cache_obsize
    if not isinstance(size, (int, long)):
        raise TypeError("integer argument expected, got %s" % type(size))
    if not isinstance(obsize, (int, long)):
        raise TypeError("integer argument expected, got %s" % type(obsize))

    if size < 0 or size > 1000:
        raise ValueError("cache size must be between 0 and 1000")
    if obsize < 0 or obsize > 16384:
        raise ValueError("object size must be between 0 and 16384")

    cache_size = size
    cache_obsize = obsize
    # Rebuild every cache so existing entries respect the new limits.
    _init_mpz_cache()
    _init_mpq_cache()
    _init_mpfr_cache()
# MPZ
def _init_mpz_cache():
    """(Re)build the mpz free-list with cache_size pre-initialized mpz_t."""
    global mpz_cache, in_mpz_cache
    mpz_cache = []
    # Index one past the last available cached entry.
    in_mpz_cache = cache_size
    for _ in xrange(cache_size):
        mpz = ffi.new("mpz_t")
        gmp.mpz_init(mpz)
        mpz_cache.append(mpz)
_init_mpz_cache()
def _new_mpz():
    """Return an initialized mpz_t, reusing a cached one when available."""
    global in_mpz_cache
    if not in_mpz_cache:
        # Cache exhausted: allocate and initialize a fresh mpz_t.
        fresh = ffi.new("mpz_t")
        gmp.mpz_init(fresh)
        return fresh
    in_mpz_cache -= 1
    return mpz_cache[in_mpz_cache]
def _del_mpz(mpz):
    """Return *mpz* to the cache if there is room, else free it."""
    global in_mpz_cache
    if in_mpz_cache < cache_size:
        # Keep the object only if it has not grown past the per-object
        # limit; otherwise park a fresh handle in its slot.
        # NOTE(review): ffi.sizeof measures the struct, not the limb count --
        # confirm this matches the documented "number of limbs" limit.
        if ffi.sizeof(mpz[0]) <= cache_obsize:
            mpz_cache[in_mpz_cache] = mpz
        else:
            mpz_cache[in_mpz_cache] = ffi.new('mpz_t')
        in_mpz_cache += 1
    else:
        gmp.mpz_clear(mpz)
# MPQ
def _init_mpq_cache():
    """(Re)build the mpq free-list with cache_size pre-initialized mpq_t."""
    global mpq_cache, in_mpq_cache
    mpq_cache = []
    # Index one past the last available cached entry.
    in_mpq_cache = cache_size
    for _ in xrange(cache_size):
        mpq = ffi.new("mpq_t")
        gmp.mpq_init(mpq)
        mpq_cache.append(mpq)
_init_mpq_cache()
def _new_mpq():
    """Return an initialized mpq_t, reusing a cached one when available."""
    global in_mpq_cache
    if in_mpq_cache:
        in_mpq_cache -= 1
        return mpq_cache[in_mpq_cache]
    else:
        # Cache exhausted: allocate and initialize a fresh mpq_t.
        mpq = ffi.new("mpq_t")
        gmp.mpq_init(mpq)
        return mpq
def _del_mpq(mpq):
    """Return *mpq* to the cache if there is room, else free it."""
    global in_mpq_cache
    if in_mpq_cache < cache_size:
        # Keep small objects; replace oversized ones with a fresh handle.
        if ffi.sizeof(mpq[0]) <= cache_obsize:
            mpq_cache[in_mpq_cache] = mpq
        else:
            mpq_cache[in_mpq_cache] = ffi.new('mpq_t')
        in_mpq_cache += 1
    else:
        gmp.mpq_clear(mpq)
# MPFR
def _init_mpfr_cache():
    """(Re)build the mpfr free-list with cache_size pre-initialized mpfr_t."""
    global mpfr_cache, in_mpfr_cache
    mpfr_cache = []
    # Index one past the last available cached entry.
    in_mpfr_cache = cache_size
    for _ in xrange(cache_size):
        mpfr = ffi.new("mpfr_t")
        gmp.mpfr_init(mpfr)
        mpfr_cache.append(mpfr)
_init_mpfr_cache()
def _new_mpfr(prec=0):
    """Return an initialized mpfr_t.

    prec=0 means "use the current default precision"; otherwise prec must
    lie in [MPFR_PREC_MIN, MPFR_PREC_MAX].  A cached mpfr_t is reused when
    available; resetting its precision also clears its value.
    """
    global in_mpfr_cache
    if isinstance(prec, (int, long)):
        if not (prec == 0 or gmp.MPFR_PREC_MIN <= prec <= gmp.MPFR_PREC_MAX):
            raise ValueError("invalid prec %i (wanted %s <= prec <= %s)" % (
                prec, gmp.MPFR_PREC_MIN, gmp.MPFR_PREC_MAX))
    else:
        raise TypeError('an integer is required')
    if in_mpfr_cache:
        in_mpfr_cache -= 1
        # Set default precision
        if prec == 0:
            gmp.mpfr_set_prec(mpfr_cache[in_mpfr_cache], gmp.mpfr_get_default_prec())
        else:
            gmp.mpfr_set_prec(mpfr_cache[in_mpfr_cache], prec)
        return mpfr_cache[in_mpfr_cache]
    else:
        # Cache exhausted: allocate a fresh mpfr_t at the requested precision.
        mpfr = ffi.new("mpfr_t")
        if prec == 0:
            gmp.mpfr_init(mpfr)
        else:
            gmp.mpfr_init2(mpfr, prec)
        return mpfr
def _del_mpfr(mpfr):
    """Return *mpfr* to the cache if there is room, else free it."""
    global in_mpfr_cache
    if in_mpfr_cache < cache_size:
        # Keep small objects; replace oversized ones with a fresh handle.
        if ffi.sizeof(mpfr[0]) <= cache_obsize:
            mpfr_cache[in_mpfr_cache] = mpfr
        else:
            mpfr_cache[in_mpfr_cache] = ffi.new('mpfr_t')
        in_mpfr_cache += 1
    else:
        gmp.mpfr_clear(mpfr)
# MPC
def _init_mpc_cache():
    """(Re)build the mpc free-list with cache_size mpc_t at default precision."""
    global mpc_cache, in_mpc_cache
    mpc_cache = []
    # Index one past the last available cached entry.
    in_mpc_cache = cache_size
    for _ in xrange(cache_size):
        mpc = ffi.new("mpc_t")
        gmp.mpc_init2(mpc, gmp.mpfr_get_default_prec())
        mpc_cache.append(mpc)
_init_mpc_cache()
def _new_mpc(prec=(0,0)):
    """Return an initialized mpc_t.

    *prec* is a (real, imaginary) precision pair; 0 in either slot means
    "use the current default precision".  A cached mpc_t is reused when
    available and re-initialized to the requested precisions.
    """
    global in_mpc_cache
    # prec is assumed to be checked already
    rprec, iprec = prec
    if not all(p == 0 or gmp.MPFR_PREC_MIN <= p <= gmp.MPFR_PREC_MAX
               for p in prec):
        raise ValueError(
            "invalid prec (wanted prec == 0 or %s <= prec <= %s)" % (
                gmp.MPFR_PREC_MIN, gmp.MPFR_PREC_MAX))
    if in_mpc_cache:
        in_mpc_cache -= 1
        # Set default precision
        if rprec == iprec:
            if rprec == 0:
                gmp.mpc_set_prec(mpc_cache[in_mpc_cache], gmp.mpfr_get_default_prec())
            else:
                gmp.mpc_set_prec(mpc_cache[in_mpc_cache], rprec)
        else:
            # Different real/imag precisions: there is no per-part set_prec,
            # so re-create the cached object with mpc_init3.
            if rprec == 0:
                rprec = gmp.mpfr_get_default_prec()
            if iprec == 0:
                iprec = gmp.mpfr_get_default_prec()
            gmp.mpc_clear(mpc_cache[in_mpc_cache])
            gmp.mpc_init3(mpc_cache[in_mpc_cache], rprec, iprec)
        return mpc_cache[in_mpc_cache]
    else:
        # Cache exhausted: allocate a fresh mpc_t at the requested precisions.
        mpc = ffi.new("mpc_t")
        if rprec == 0:
            rprec = gmp.mpfr_get_default_prec()
        if iprec == 0:
            iprec = gmp.mpfr_get_default_prec()
        if rprec == iprec:
            gmp.mpc_init2(mpc, rprec)
        else:
            gmp.mpc_init3(mpc, rprec, iprec)
        return mpc
def _del_mpc(mpc):
    """Return *mpc* to the cache if there is room, else free it.

    Fixed to mirror _del_mpfr (the "FIXME This doesn't seem to be working
    properly" version): the original assigned the object into the cache
    slot unconditionally before the size check, never advanced
    in_mpc_cache, and never cleared the object when the cache was full.
    """
    global in_mpc_cache
    if in_mpc_cache < cache_size:
        # Keep small objects; replace oversized ones with a fresh handle.
        if ffi.sizeof(mpc[0]) <= cache_obsize:
            mpc_cache[in_mpc_cache] = mpc
        else:
            mpc_cache[in_mpc_cache] = ffi.new('mpc_t')
        in_mpc_cache += 1
    else:
        gmp.mpc_clear(mpc)
|
I know Ive been going on with this men's wear craze for a while now he-he. Dont worry its a phase, I'm sure it will go away soon. Anyway, today I mixed things up a little bit, instead of your normal lapel/ collar blazer, I am wearing a collarless @forever21 blazer. I am wearing a pinstriped chaps shirt and again created an illusion of a black tie with this rue21 suede choker scarf which I put together with a ring. So according to ties.com, black Tie Creative is an opportunity to showcase your personal style in terms of color, accessories, and collar and lapel style. I opted for the collarless and I love it!! This sartorial modification still honors a formal atmosphere of a formal event, in my case a professional one. I hope you guys enjoy this look.I cant promise that this will be the last one,lol The craze is still going on.
I love and appreciate you for visiting my blog. Till next time, stay Stylish!!
|
# Author: Rob Sanderson ([email protected])
# License: Apache2
# Last Modified: 2016-09-02
import json
from rdflib import ConjunctiveGraph, URIRef
from pyld import jsonld
from pyld.jsonld import compact, expand, frame, from_rdf, to_rdf, JsonLdProcessor
import urllib
# Stop code from looking up the contexts online for every operation
docCache = {}


def fetch(url):
    """Download *url* and return the raw response body."""
    handle = urllib.urlopen(url)
    try:
        return handle.read()
    finally:
        handle.close()
def load_document_and_cache(url):
    """pyld document loader that caches each context document by URL.

    The first request for *url* fetches it over HTTP; later requests are
    served from the in-memory docCache so repeated JSON-LD operations do
    not hit the network.
    """
    if url in docCache:  # dict.has_key() was removed in Python 3
        return docCache[url]

    doc = {
        'contextUrl': None,
        'documentUrl': None,
        'document': ''
    }
    doc['document'] = fetch(url)
    docCache[url] = doc
    return doc

jsonld.set_document_loader(load_document_and_cache)
class Validator(object):
def __init__(self):
self.rdflib_class_map = {
"Annotation": "oa:Annotation",
"Dataset": "dctypes:Dataset",
"Image": "dctypes:StillImage",
"Video": "dctypes:MovingImage",
"Audio": "dctypes:Sound",
"Text": "dctypes:Text",
"TextualBody": "oa:TextualBody",
"ResourceSelection": "oa:ResourceSelection",
"SpecificResource": "oa:SpecificResource",
"FragmentSelector": "oa:FragmentSelector",
"CssSelector": "oa:CssSelector",
"XPathSelector": "oa:XPathSelector",
"TextQuoteSelector": "oa:TextQuoteSelector",
"TextPositionSelector": "oa:TextPositionSelector",
"DataPositionSelector": "oa:DataPositionSelector",
"SvgSelector": "oa:SvgSelector",
"RangeSelector": "oa:RangeSelector",
"TimeState": "oa:TimeState",
"HttpState": "oa:HttpRequestState",
"CssStylesheet": "oa:CssStyle",
"Choice": "oa:Choice",
"Composite": "oa:Composite",
"List": "oa:List",
"Independents": "oa:Independents",
"Person": "foaf:Person",
"Software": "as:Application",
"Organization": "foaf:Organization",
"AnnotationCollection": "as:OrderedCollection",
"AnnotationPage": "as:OrderedCollectionPage",
"Audience": "schema:Audience"
}
def _clean_bnode_ids(self, js):
new = {}
for (k,v) in js.items():
if k == 'id' and v.startswith("_:"):
continue
elif type(v) == dict:
# recurse
res = self._clean_bnode_ids(v)
new[k] = res
else:
new[k] = v
return new
def _mk_rdflib_jsonld(self, js):
# rdflib's json-ld implementation sucks
# Pre-process to make it work
# recurse the structure looking for types, and replacing them.
new = {}
for (k,v) in js.items():
if k == 'type':
if type(v) == list:
nl = []
for i in v:
if self.rdflib_class_map.has_key(i):
nl.append(self.rdflib_class_map[i])
new['type'] = nl
else:
if self.rdflib_class_map.has_key(v):
new['type'] = self.rdflib_class_map[v]
elif type(v) == dict:
# recurse
res = self._mk_rdflib_jsonld(v)
new[k] = res
else:
new[k] = v
return new
def json_to_rdf(self, js, fmt=None):
d2 = self._mk_rdflib_jsonld(js)
js = json.dumps(d2)
g = ConjunctiveGraph()
g.parse(data=js, format='json-ld')
if fmt:
out = g.serialize(format=fmt)
return out
else:
return g
def rdf_to_jsonld(self, rdf, fmt):
    """Parse serialized RDF and return framed, compacted JSON-LD.

    *rdf* is parsed with rdflib using format *fmt*, serialized back to
    JSON-LD, then framed against the module-level ``frame_js`` and
    compacted against ``context_js``.
    """
    graph = ConjunctiveGraph()
    graph.parse(data=rdf, format=fmt)
    serialized = graph.serialize(format='json-ld')
    parsed = json.loads(serialized)
    wrapped = {"@context": context_js, "@graph": parsed}
    framed = frame(wrapped, frame_js)
    compacted = compact(framed, context_js)
    # recursively clean blank node ids (disabled)
    # compacted = self._clean_bnode_ids(compacted)
    return compacted
def compact_and_clean(self, js):
    """Compact *js* against the annotation context and flatten @graph.

    Replaces the inline context with the context URI, then lifts every
    member of a top-level "@graph" up to the top level and removes the
    "@graph" key.
    """
    newjs = compact(js, context_js)
    newjs['@context'] = context
    # ``in`` replaces dict.has_key(), which was removed in Python 3
    if "@graph" in newjs:
        for k, v in newjs['@graph'].items():
            newjs[k] = v
        del newjs['@graph']
    return newjs
# Driver script: runs the six Web Annotation conformance tests against the
# published context, frame and ontology documents. Python 2 syntax.
validator = Validator()

# Remote fixtures: a correct example annotation (JSON and Turtle), the
# annotation context, the frame used for round-tripping, and the ontology.
example = "https://raw.githubusercontent.com/w3c/web-annotation/gh-pages/model/wd2/examples/correct/anno4.json"
example_ttl = "https://raw.githubusercontent.com/w3c/web-annotation/gh-pages/vocab/wd/examples/correct/anno1.ttl"
context = "http://www.w3.org/ns/anno.jsonld"
frameURI = "https://raw.githubusercontent.com/w3c/web-annotation/gh-pages/jsonld/annotation_frame.jsonld"
# ontology = "https://www.w3.org/ns/oa.ttl"
ontology = "https://raw.githubusercontent.com/w3c/web-annotation/gh-pages/vocab/wd/ontology/oa.ttl"

# Fetch and parse the three JSON documents.
data = fetch(context)
context_js = json.loads(data)
data = fetch(example)
example_js = json.loads(data)
data = fetch(frameURI)
frame_js = json.loads(data)

# Test1: JSON-LD context document can be parsed without errors by JSON-LD validators
# Context document is parsable if it can be loaded and used to expand the example
try:
    expanded = expand(example_js, context_js)
except:  # NOTE(review): bare except hides the actual expansion error
    print "Context is invalid, failed Test 1"

# Test2: JSON-LD context document can be used to convert JSON-LD serialized Annotations into RDF triples.
try:
    jsonld_nq = to_rdf(example_js, {"base": "http://example.org/", "format": "application/nquads"})
except:
    print "Cannot use context to convert JSON-LD to NQuads"

# Test3: Graphs produced are isomorphic
# Compare the rdflib-parsed graph with the pyld-produced N-Quads graph.
try:
    rl_g = validator.json_to_rdf(example_js)
    g = ConjunctiveGraph()
    js_g = g.parse(data=jsonld_nq, format="nt")
    rl_g_nq = rl_g.serialize(format="nquads")
    assert(len(rl_g.store) == len(js_g.store))
    assert(rl_g.isomorphic(js_g))
except:
    print "Different triples from two parsers, or non-isomorphic graphs"

# Test4: The graphs produced can be converted back into JSON-LD without loss of information
try:
    js = validator.rdf_to_jsonld(jsonld_nq, "nt")
    js2 = validator.compact_and_clean(js)
    assert(js2 == example_js)
except:
    print "Failed to recompact parsed data"
    raise

# Test5: ontology documents can be parsed without errors by validators
try:
    g = ConjunctiveGraph().parse(ontology, format="turtle")
except:
    raise

# Test6: ontology is internally consistent with respect to domains, ranges, etc
# step 1: find all the classes.
rdftype = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
rdfsdomain = URIRef("http://www.w3.org/2000/01/rdf-schema#domain")
rdfsrange = URIRef("http://www.w3.org/2000/01/rdf-schema#range")
rdfsresource = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#Resource")
rdfssco = URIRef("http://www.w3.org/2000/01/rdf-schema#subClassOf")
asColl = URIRef("http://www.w3.org/ns/activitystreams#OrderedCollection")
skosConcept = URIRef("http://www.w3.org/2004/02/skos/core#Concept")
otherClasses = [asColl, skosConcept]
classes = list(g.subjects(rdftype, URIRef("http://www.w3.org/2000/01/rdf-schema#Class")))
props = list(g.subjects(rdftype, URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")))

# Every declared domain must be a declared class.
for p in props:
    domains = list(g.objects(p, rdfsdomain))
    for d in domains:
        assert(d in classes)

# Every declared range must be a class, an XSD datatype, or rdf:Resource.
for p in props:
    ranges = list(g.objects(p, rdfsrange))
    for r in ranges:
        if not r in classes and not str(r).startswith("http://www.w3.org/2001/XMLSchema#") and \
           not r == rdfsresource:
            print "Found inconsistent property: %s has unknown range" % p

# Every superclass must be a declared class or one of the known externals.
for c in classes:
    parents = list(g.objects(c, rdfssco))
    for p in parents:
        if not p in classes and not p in otherClasses:
            print "Found inconsistent class: %s has unknown superClass" % c

print "Done."
|
Lathrop Shea & Co. v. Interior Construction Co.
Rumford Chem. Works v. Hygienic Chem. Co.
Steward v. American Lava Co.
Caliga v. Inter Ocean Newspaper Co.
Waterman v. Canal-Louisiana Bank Co.
United States v. Union Supply Co.
Southern Ry. Co. v. St. Louis Hay & Grain Co.
Weems Steamboat Co. v. People's Steamboat Co.
Bong v. Campbell Art Co.
Kreigh v. Westinghouse & Co.
Bryant v. Swofford Bros. Dry Goods Co.
Wild v. Provident Life & Trust Co.
U.S. FIDELITY & GUAR. CO. v. U.S.
Des Moines v. Des Moines City Ry. Co.
United States v. Delaware & Hudson Co.
FIDELITY & CASUALTY CO. OF NEW YORK v. SOUTHERN R NEWS CO.
DONOHOE v. EL PASO & S. W. R. CO.
EX PARTE CONSOLIDATED RUBBER TIRE CO.
Turner v. American Security & Trust Co.
Maiorano v. Baltimore & Ohio R. Co.
Hurley v. Atchison, Topeka & Santa Fe Ry. Co.
SILER v. ILLINOIS CENTRAL RAILROAD CO.
Mammoth Mining Co. v. Grand Central Mining Co.
Railroad Comm'n v. Cumberland Tel. & Tel. Co.
Bagley v. General Fire Extinguisher Co.
United States v. New York Central & Hudson River R. Co.
Continental Wall Paper Co. v. Voight & Sons Co.
Louisville & Nashville R. Co. v. Central Stock Yards Co.
Presidio County v. Noel-Young Co.
Missouri Pacific Railway Co. v. Larabee Flour Mills Co.
Knop v. Monongahela Coal Co.
Hardaway v. National Surety Co.
Murphy v. John Hofman Co.
Knoxville v. Knoxville Water Co.
|
#!/usr/bin/env python
## category General
## desc Given two separately mapped paired files, re-pair the files
"""
Given two separately mapped paired-end files, re-pair the files, selecting
the most likely pairing partners based upon strand, insert distance, and
maximizing alignment scores.
It is very important that the files are either in the same order with each
read present in both files or sorted in name order.
The value of the attribute/tag given will be used to determine which reads
should be kept and which should be discarded. The tag should be a numeric
(int/float) type. More than one tag can be used. The default is 'AS+, NM-'.
The BEST pair will be kept that maximizes the tag values and otherwise
satisfies strand and distance values.
"""
import os
import sys
import pysam
import ngsutils.bam
def usage(msg=None):
if msg:
print msg
print __doc__
print """
Usage: bamutils pair {opts} out.bam read1.bam read2.bam
Options
-tag VAL Tag to use to determine from which file reads will be
taken. (must be type :i or :f) You may have more than
one of these, in which case they will be sorted in
order. You can add a +/- at the end of the name to
signify sort order (asc/desc).
Default: AS+, NM-
-size low-high The minimum/maximum insert size to accept. By default,
this will attempt to minimize the distance between
reads, upto the lower-bound. Any pair over the upper
bound will be discarded. Note: for RNA, because it is
impossible to detect junctions that are between the
reads, this should be a very big range (ex: 50-1000000)
Default: 50-10000
-fail1 fname.bam Write all failed mappings from read1 to this file
-fail2 fname.bam Write all failed mappings from read1 to this file
(Note: -fail1 and -fail2 can be the same file.)
-reason tag Write the reason for failure to this tag (only for
failed reads/mappings) Must be a valid two char name.
"""
sys.exit(1)
def is_valid_pair(read1, read2):
    """Check whether two mappings can form a proper pair.

    Returns a ``(valid, reason)`` tuple: ``(True, '')`` when the pair is
    acceptable, otherwise ``(False, reason)`` where *reason* is one of
    'unmapped', 'chromosome', 'orientation' or 'direction'.
    """
    # both mates must be mapped
    if read1.is_unmapped or read2.is_unmapped:
        return False, 'unmapped'

    # both mates must map to the same reference sequence
    if read1.tid != read2.tid:
        return False, 'chromosome'

    # mates must be on opposite strands
    if read1.is_reverse == read2.is_reverse:
        return False, 'orientation'

    # mates must be sequenced towards each other: the leftmost mate must be
    # on the forward strand
    for left, right in ((read1, read2), (read2, read1)):
        if left.pos < right.pos and left.is_reverse:
            return False, 'direction'

    return True, ''
def find_pairs(reads1, reads2, min_size, max_size, tags):
    '''
    Score every read1 x read2 combination and collect the valid pairings.

    returns pairs, fail1, fail2 where:
      pairs - list of (tag_val, ins_size, r1, r2) tuples for valid pairings
              (tag_val is built so that sorting ascending gives the best pair)
      fail1 - list of (read, reasons-set) for reads1 entries in no valid pair
      fail2 - same for reads2
    '''
    possible = []
    fail1 = []
    fail2 = []
    valid = set()    # (mate-number, tid, pos) keys of reads used in a valid pair
    reasons = {}     # same keys -> set of failure reason strings

    for r1 in reads1:
        for r2 in reads2:
            is_valid, reason = is_valid_pair(r1, r2)
            if is_valid:
                # there can be some strange edge cases for insert size, so we'll just look
                # for the biggest
                ins_size = max(r2.aend - r1.pos, r1.aend - r2.pos)

                # This doesn't work for RNA reads - you can still have hidden introns
                # between the two reads. I'm leaving this here so that when I'm tempted
                # to add this check again, I'll remember why it's a bad idea.
                # junctionstarts = set()
                # pos = r1.pos
                # for op, size in r1.cigar:
                #     if op == 0 or op == 2:
                #         pos += size
                #     elif op == 3:
                #         junctionstarts.add(pos)
                #         ins_size -= size
                # pos = r2.pos
                # for op, size in r2.cigar:
                #     if op == 0 or op == 2:
                #         pos += size
                #     elif op == 3:
                #         if not pos in junctionstarts:
                #             ins_size -= size

                if ins_size < min_size or ins_size > max_size:
                    # insert size out of bounds: record 'size' against both mates
                    if not (1, r1.tid, r1.pos) in reasons:
                        reasons[(1, r1.tid, r1.pos)] = set()
                    if not (2, r2.tid, r2.pos) in reasons:
                        reasons[(2, r2.tid, r2.pos)] = set()
                    reasons[(1, r1.tid, r1.pos)].add('size')
                    reasons[(2, r2.tid, r2.pos)].add('size')
                    continue

                # build the sort key from the requested tags (summed over both mates)
                tag_val = []
                for tag in tags:
                    val = float(r1.opt(tag[:2]))
                    val += float(r2.opt(tag[:2]))
                    if tag[-1] == '+':
                        # we will sort ascending to minimize size, so + tags (AS) need to be reversed
                        val = -val
                    tag_val.append(val)

                possible.append((tag_val, ins_size, r1, r2))
                valid.add((1, r1.tid, r1.pos))
                valid.add((2, r2.tid, r2.pos))
            else:
                # record why this combination failed, keyed per mate
                if not (1, r1.tid, r1.pos) in reasons:
                    reasons[(1, r1.tid, r1.pos)] = set()
                if not (2, r2.tid, r2.pos) in reasons:
                    reasons[(2, r2.tid, r2.pos)] = set()
                reasons[(1, r1.tid, r1.pos)].add(reason)
                reasons[(2, r2.tid, r2.pos)].add(reason)

    # any read that never took part in a valid pairing goes to the fail lists
    for r1 in reads1:
        if not (1, r1.tid, r1.pos) in valid:
            fail1.append((r1, reasons[(1, r1.tid, r1.pos)]))
    for r2 in reads2:
        if not (2, r2.tid, r2.pos) in valid:
            fail2.append((r2, reasons[(2, r2.tid, r2.pos)]))

    return possible, fail1, fail2
def bam_pair(out_fname, read1_fname, read2_fname, tags=['AS+', 'NM-'], min_size=50, max_size=1000, fail1_fname=None, fail2_fname=None, reason_tag=None, quiet=False):
    '''
    Re-pair two independently mapped single-end BAM files into one paired BAM.

    Walks both inputs in lock-step (batches grouped by read name), picks the
    best-scoring valid pairing per name via find_pairs(), writes the winners
    to out_fname, and optionally writes failed/suboptimal mappings (with a
    reason tag) to fail1_fname/fail2_fname. Output files are written to a
    '.tmp' path and renamed into place on success.

    Both inputs must be in the same read-name order. (Python 2 generators.)
    '''
    bam1 = pysam.Samfile(read1_fname, "rb")
    bam2 = pysam.Samfile(read2_fname, "rb")
    out = pysam.Samfile('%s.tmp' % out_fname, "wb", template=bam1)

    fail1 = None
    fail2 = None
    if fail1_fname:
        fail1 = pysam.Samfile('%s.tmp' % fail1_fname, "wb", template=bam1)
    if fail2_fname:
        if fail2_fname == fail1_fname:
            # both fail streams share one file handle
            fail2 = fail1
        else:
            fail2 = pysam.Samfile('%s.tmp' % fail2_fname, "wb", template=bam1)

    gen1 = ngsutils.bam.bam_batch_reads(bam1, quiet=quiet)
    gen2 = ngsutils.bam.bam_batch_reads(bam2, quiet=True)

    reads1 = None
    reads2 = None

    while True:
        try:
            if not reads1:
                reads1 = gen1.next()
            if not reads2:
                reads2 = gen2.next()
        except StopIteration:
            break

        # names differ: advance the side with the lexically smaller name
        # (relies on both files being name-sorted the same way)
        if reads1[0].qname != reads2[0].qname:
            if reads1[0].qname < reads2[0].qname:
                reads1 = None
            else:
                reads2 = None
            continue

        pairs, failed_reads1, failed_reads2 = find_pairs(reads1, reads2, min_size, max_size, tags)
        written = set()

        if pairs:
            pairs.sort()  # default: max AS, min NM, min size
            tag_val, size, r1, r2 = pairs[0]
            best_val = (tag_val, size)

            # keep every pairing tied with the best score
            best_pairs = []
            for tag_val, size, r1, r2 in pairs:
                if (tag_val, size) == best_val:
                    best_pairs.append((size, r1, r2))

            for size, r1, r2 in best_pairs:
                # good match! set the flags and write them out
                r1.is_paired = True
                r2.is_paired = True
                r1.is_proper_pair = True
                r2.is_proper_pair = True
                r1.is_read1 = True
                r2.is_read2 = True
                if r1.pos < r2.pos:
                    r1.tlen = size
                    r2.tlen = -size
                else:
                    r1.tlen = -size
                    r2.tlen = size
                r1.mate_is_reverse = r2.is_reverse
                r2.mate_is_reverse = r1.is_reverse
                r1.mate_is_unmapped = False
                r2.mate_is_unmapped = False
                r1.rnext = r2.tid
                r2.rnext = r1.tid
                r1.pnext = r2.pos
                r2.pnext = r1.pos
                # NH = number of reported pairings for this read name
                r1.tags = r1.tags + [('NH', len(best_pairs))]
                r2.tags = r2.tags + [('NH', len(best_pairs))]
                out.write(r1)
                out.write(r2)
                written.add((1, r1.tid, r1.pos))
                written.add((2, r2.tid, r2.pos))

            # remaining valid-but-suboptimal pairings go to the fail files
            for tag_val, size, r1, r2 in pairs[1:]:
                if fail1:
                    if (1, r1.tid, r1.pos) not in written:
                        written.add((1, r1.tid, r1.pos))
                        r1.is_paired = True
                        r1.is_proper_pair = False
                        r1.is_read1 = True
                        if reason_tag:
                            r1.tags = r1.tags + [(reason_tag, 'suboptimal')]
                        fail1.write(r1)
                if fail2:
                    if (2, r2.tid, r2.pos) not in written:
                        written.add((2, r2.tid, r2.pos))
                        r2.is_paired = True
                        r2.is_proper_pair = False
                        r2.is_read2 = True
                        if reason_tag:
                            r2.tags = r2.tags + [(reason_tag, 'suboptimal')]
                        fail2.write(r2)

        # mappings that never formed any valid pair
        if failed_reads1 and fail1:
            for r1, reasons in failed_reads1:
                r1.is_paired = True
                r1.is_proper_pair = False
                r1.is_read1 = True
                if reason_tag:
                    r1.tags = r1.tags + [(reason_tag, ','.join(reasons))]
                fail1.write(r1)
        if failed_reads2 and fail2:
            for r2, reasons in failed_reads2:
                r2.is_paired = True
                r2.is_proper_pair = False
                # BUGFIX: this previously set r2.is_read1 = True; a read2
                # mapping must carry the read2 flag (matches every other
                # r2 path above).
                r2.is_read2 = True
                if reason_tag:
                    r2.tags = r2.tags + [(reason_tag, ','.join(reasons))]
                fail2.write(r2)

        reads1 = None
        reads2 = None

    bam1.close()
    bam2.close()
    out.close()
    os.rename('%s.tmp' % out_fname, out_fname)
    if fail1:
        fail1.close()
        os.rename('%s.tmp' % fail1_fname, fail1_fname)
    if fail2:
        if fail2_fname != fail1_fname:
            # shared-handle case was already closed/renamed via fail1
            fail2.close()
            os.rename('%s.tmp' % fail2_fname, fail2_fname)
if __name__ == '__main__':
    # Hand-rolled CLI parsing: 'last' remembers an option waiting for its
    # value; bare arguments fill out_fname, read1, read2 in order.
    out_fname = None
    read1_fname = None
    read2_fname = None
    fail1_fname = None
    fail2_fname = None
    min_size = 50
    max_size = 10000
    reason_tag = None
    tags = []
    last = None

    for arg in sys.argv[1:]:
        if arg == '-h':
            usage()
        elif last == '-fail1':
            fail1_fname = arg
            last = None
        elif last == '-fail2':
            fail2_fname = arg
            last = None
        elif last == '-size':
            # expects "low-high", e.g. 50-10000
            min_size, max_size = [int(x) for x in arg.split('-')]
            last = None
        elif last == '-tag':
            tags.append(arg)
            last = None
        elif last == '-reason':
            reason_tag = arg
            last = None
        elif arg in ['-tag', '-fail1', '-fail2', '-size', '-reason']:
            last = arg
        elif not out_fname:
            out_fname = arg
        elif not read1_fname and os.path.exists(arg):
            read1_fname = arg
        elif not read2_fname and os.path.exists(arg):
            read2_fname = arg
        else:
            usage('Unknown option: %s' % arg)

    # fall back to the documented default sort tags
    if not tags:
        tags = ['AS+', 'NM-']

    if not read1_fname or not read2_fname or not out_fname:
        usage()
    else:
        bam_pair(out_fname, read1_fname, read2_fname, tags, min_size, max_size, fail1_fname, fail2_fname, reason_tag)
|
More than 90% of the wine production is made from four grape varieties: Chasselas, which produces Fendant; Rhin or Sylvaner, used for Johannisberg; and Gamay and Pinot Noir, which, when blended, give Dôle.
In addition to these, many specialities are produced in this area: Petite Arvine, Amigne, Marsanne, Malvoisie, Humagne, Muscat, Païen, Pinot Blanc, Chardonnay, Cornalin, Syrah, Diolinoir, Merlot and Gamaret.
All these specialities can be discovered and tasted at La Grange. Service by the ounce (half a decilitre) gives you the opportunity to taste different wines of the Wallis region at your leisure.
|
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 7 18:33:11 2017
@author: gcng
"""
# Create_hydcond_array
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt # matlab-like plots
from readSettings import Settings
import platform
import sys
# Set input file
# Build a spatially distributed hydraulic-conductivity (K) array for the
# model grid and write it to the file named by Settings.hydcond0.
# (Python 2 print statements.)
if len(sys.argv) < 2:
    settings_input_file = 'settings.ini'
    print 'Using default input file: ' + settings_input_file
else:
    settings_input_file = sys.argv[1]
    print 'Using specified input file: ' + settings_input_file

Settings = Settings(settings_input_file)

# path separator for the current OS
if platform.system() == 'Linux':
    slashstr = '/'
else:
    slashstr = '\\'

# ***SET FOLLOWING BASED ON SITE **********************************************
sw_scheme = 2  # 1: based on elev, 2: based on streams, 3: based on both
hydcond_default = 0.1

if sw_scheme == 1:
    elev_thresh = [4500, 4200, 4000]  # elev thresholds for different K, high to low
    hydcond_by_elev = [0.1, 0.25, 0.4, 0.5]  # K for the different elevation intervals
if sw_scheme >= 2:
    # settings for hydraulic conductivity based on distance (in pixels) from stream
    npix_stream_buffer = 1  # number of different buffers, based on pixels from stream
    buffer_dist_pix = np.arange(1, npix_stream_buffer+1)  # up to buffer_dist_pix pixels from stream
    buffer_hydcond = np.array([0.4, 0.2])  # will use the first npix_stream_buffer values
    strm_hydcond = 0.6  # for stream pixels
# *****************************************************************************

# %%
# Only run this script if using spatially distributed K: if hydcond0 parses
# as a number, K is constant and there is nothing to generate.
try:
    float(Settings.hydcond0)
    fl_runscript = 0  # don't run this script, set constant K
except ValueError:
    fl_runscript = 1  # run this script to generate spatially variable K
    # NOTE(review): these assignments only make sense when hydcond0 is a
    # filename, so they are placed inside the except branch — confirm
    # against the original (source indentation was lost).
    hydcond_fil = Settings.hydcond0
    surfz_fil = Settings.GISinput_dir + slashstr + 'DEM.asc'
    NLAY = Settings.NLAY

if fl_runscript == 1:
    # read the 6-line DEM header ("key: value" per line)
    f = open(surfz_fil, 'r')
    sdata = {}
    for i in range(6):
        line = f.readline()
        line = line.rstrip()  # remove newline characters
        key, value = line.split(': ')
        try:
            value = int(value)
        except:
            value = float(value)
        sdata[key] = value
    f.close()

    NSEW = [sdata['north'], sdata['south'], sdata['east'], sdata['west']]
    NROW = sdata['rows']
    NCOL = sdata['cols']

    # - space discretization
    DELR = (NSEW[2]-NSEW[3])/NCOL  # width of column [m]
    DELC = (NSEW[0]-NSEW[1])/NROW  # height of row [m]

    TOP = np.genfromtxt(surfz_fil, skip_header=6, delimiter=' ', dtype=float)

    # start with uniform default K for all layers
    hydcond = np.ones([NROW, NCOL, NLAY]) * hydcond_default

    # %%
    # ----- Based on elevation -----
    if sw_scheme == 1 or sw_scheme == 3:
        # - domain dimensions, maybe already in surfz_fil and botm_fil{}?
        # NLAY = 2;
        # surfz_fil = '/home/gcng/workspace/ProjectFiles/AndesWaterResources/Data/GIS/topo.asc';
        hydcond0 = np.copy(hydcond[:,:,0])
        hydcond0[TOP>=elev_thresh[0]] = hydcond_by_elev[0]
        # each successive (lower) threshold overwrites the cells below it
        for ii in range(len(elev_thresh)):
            hydcond0[TOP<elev_thresh[ii]] = hydcond_by_elev[ii+1]
        hydcond[:,:,0] = np.copy(hydcond0)

    # %%
    # ----- Based on stream channel -----
    if sw_scheme == 2 or sw_scheme == 3:
        reach_fil = Settings.GISinput_dir + slashstr + 'reaches.txt'
        reach_data_all = pd.read_csv(reach_fil)

        # Set stream cell only
        # hydcond[reach_data_all.loc[:,'IRCH']-1, reach_data_all.loc[:,'JRCH']-1, 0] = 0.6

        # hydcond depends on distance from stream
        row_ind = reach_data_all.loc[:,'IRCH']-1  # file indices are 1-based
        col_ind = reach_data_all.loc[:,'JRCH']-1
        xcoord = DELR * np.arange(NCOL)
        ycoord = DELC * np.arange(NROW)
        xstrm = xcoord[col_ind]
        ystrm = ycoord[row_ind]
        # full coordinate grids for distance computation
        xcoord_ar = np.kron(np.ones((NROW,1)), xcoord)
        ycoord_ar = np.kron(np.ones((NCOL,1)), ycoord)
        ycoord_ar = ycoord_ar.transpose()
        dx = np.ceil(np.maximum(DELR, DELC))
        buffer_dist = buffer_dist_pix * dx  # up to npix pixels from stream

        # apply widest buffer first so narrower (closer) buffers overwrite it
        ind = np.argsort(buffer_dist)[::-1]
        buffer_dist = np.copy(buffer_dist[ind])
        buffer_hydcond = np.copy(buffer_hydcond[ind])

        hydcond0 = np.copy(hydcond[:,:,0])
        # buffer distances from stream:
        for d_i in range(np.size(buffer_dist)):
            for strm_i in range(np.size(xstrm)):
                dist = ((xcoord_ar-xstrm[strm_i])**2 + (ycoord_ar-ystrm[strm_i])**2)**0.5
                hydcond0[dist <= buffer_dist[d_i]] = buffer_hydcond[d_i]
        hydcond0[row_ind, col_ind] = strm_hydcond  # stream
        hydcond[:,:,0] = hydcond0

    # %% Plot
    #ax = fig.add_subplot(2,2,1)
    #im = ax.imshow(TOP_to_plot)
    for ilay in range(NLAY):
        fig = plt.figure(figsize=(12,12))
        # plt.subplot(2,2,ilay+1)
        im = plt.imshow(hydcond[:,:,ilay])
        # im.set_clim(3800, 6200)
        fig.colorbar(im, orientation='horizontal')
        plt.title('BOTM lay' + str(ilay+1));

    # %% Write to File: one header line then the NROWxNCOL block per layer
    fobj = open(hydcond_fil, 'w+')
    for ii in range(NLAY):
        fobj.write('Layer %d \n' % (ii+1))
        np.savetxt(fobj, hydcond[:,:,ii], delimiter=' ', fmt='%10g')
    fobj.close()
|
Have you ever had a quality improvement (QI) project where you needed to analyze the data and then construct a game plan for moving forward? There are several QI tools that can help.
Let’s say you have the opportunity to revamp a library service and you decide to hold a series of focus groups. This is one way to gather evidence. You’ll want to be sure to have representation from all of your key user groups, and it’s important to ask the same questions of each group. You continue holding focus group sessions until you reach saturation, i.e. you start to hear the same answers over and over.
After you have recorded the feedback from the focus groups you need to analyze the information. There are several ways to do this: one approach is to do qualitative content analysis and look for patterns within the responses. Use those patterns to define key services to concentrate on. You can then use a logic model to help you define your resources, goals and expectations.
Another approach is to use a QI tool like Value Analysis. In this case you identify the key functions that must be performed in order for the participants to be satisfied. For example, maybe several of your participants mentioned that they don’t know what the library services are. The function for this may be “Demonstrate Value.” You would then brainstorm different ways to accomplish this function.
You can use several of these QI methods together. These are just tools – and as you would with any tools, you apply them at the appropriate point in the analysis phase of your project.
|
import mxnet as mx
import numpy as np
def bn_act_conv_layer(from_layer, name, num_filter, kernel=(1,1), pad=(0,0), stride=(1,1)):
    """Build a BatchNorm -> ReLU -> Convolution symbol chain.

    Returns (conv, relu): the convolution output and the intermediate
    ReLU activation symbol.
    """
    normed = mx.symbol.BatchNorm(data=from_layer, name="bn{}".format(name))
    activated = mx.symbol.Activation(data=normed, act_type='relu')
    convolved = mx.symbol.Convolution(data=activated, kernel=kernel, pad=pad,
                                      stride=stride, num_filter=num_filter,
                                      name="conv{}".format(name))
    return convolved, activated
def conv_act_layer(from_layer, name, num_filter, kernel=(1,1), pad=(0,0), stride=(1,1), act_type="relu"):
    """Build an Activation -> Convolution symbol chain.

    Returns (conv, relu): the convolution output and the activation
    symbol applied to *from_layer*.
    """
    activated = mx.symbol.Activation(data=from_layer, act_type=act_type,
                                     name="{}{}".format(act_type, name))
    convolved = mx.symbol.Convolution(data=activated, kernel=kernel, pad=pad,
                                      stride=stride, num_filter=num_filter,
                                      name="conv{}".format(name))
    return convolved, activated
def multibox_layer(from_layers, num_classes, sizes=[.2, .95], ratios=[1], normalization=-1, num_channels=None, clip=True, interm_layer=0):
    """
    the basic aggregation module for SSD detection. Takes in multiple layers,
    generate multiple object detection targets by customized layers

    Parameters:
    ----------
    from_layers : list of mx.symbol
        generate multibox detection from layers
    num_classes : int
        number of classes excluding background, will automatically handle
        background in this function
    sizes : list or list of list
        [min_size, max_size] for all layers or [[], [], []...] for specific layers
    ratios : list or list of list
        [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
    normalization : int or list of int
        use normalization value for all layers or [...] for specific layers,
        -1 indicate no normalizations and scales
    num_channels : list of int or None
        number of input layer channels, used when normalization is enabled, the
        length of list should equal the number of normalization layers
    clip : bool
        whether to clip out-of-image boxes
    interm_layer : int
        if > 0, will add a intermediate Convolution layer

    Returns:
    ----------
    list of outputs, as [loc_preds, cls_preds, anchor_boxes]
    loc_preds : localization regression prediction
    cls_preds : classification prediction
    anchor_boxes : generated anchor boxes
    """
    assert len(from_layers) > 0, "from_layers must not be empty list"
    assert num_classes > 0, "num_classes {} must be larger than 0".format(num_classes)

    assert len(ratios) > 0, "aspect ratios must not be empty list"
    if not isinstance(ratios[0], list):
        # provided only one ratio list, broadcast to all from_layers
        ratios = [ratios] * len(from_layers)
    assert len(ratios) == len(from_layers), \
        "ratios and from_layers must have same length"

    assert len(sizes) > 0, "sizes must not be empty list"
    if len(sizes) == 2 and not isinstance(sizes[0], list):
        # provided size range, we need to compute the sizes for each layer
        assert sizes[0] > 0 and sizes[0] < 1
        assert sizes[1] > 0 and sizes[1] < 1 and sizes[1] > sizes[0]
        # BUGFIX: start_offset was previously undefined (NameError whenever a
        # size range was given). Use half the smallest size as the extra
        # margin; this equals 0.1 for the default sizes=[.2, .95].
        start_offset = sizes[0] / 2.0
        tmp = np.linspace(sizes[0], sizes[1], num=(len(from_layers)-1))
        min_sizes = [start_offset] + tmp.tolist()
        max_sizes = tmp.tolist() + [tmp[-1]+start_offset]
        # BUGFIX: materialize the zip so len()/indexing work on Python 3 too
        sizes = list(zip(min_sizes, max_sizes))
    assert len(sizes) == len(from_layers), \
        "sizes and from_layers must have same length"

    if not isinstance(normalization, list):
        normalization = [normalization] * len(from_layers)
    assert len(normalization) == len(from_layers)

    # BUGFIX: avoid the mutable default argument ([]), and copy the caller's
    # list so the .pop(0) consumption below does not mutate it.
    num_channels = list(num_channels) if num_channels else []
    assert sum(x > 0 for x in normalization) == len(num_channels), \
        "must provide number of channels for each normalized layer"

    loc_pred_layers = []
    cls_pred_layers = []
    anchor_layers = []
    num_classes += 1  # always use background as label 0

    for k, from_layer in enumerate(from_layers):
        from_name = from_layer.name
        # normalize
        if normalization[k] > 0:
            from_layer = mx.symbol.L2Normalization(data=from_layer, mode="channel", name="{}_norm".format(from_name))
            scale = mx.symbol.Variable(name="{}_scale".format(from_name), shape=(1, num_channels.pop(0), 1, 1))
            from_layer = normalization[k] * mx.symbol.broadcast_mul(lhs=scale, rhs=from_layer)
        if interm_layer > 0:
            from_layer = mx.symbol.Convolution(data=from_layer, kernel=(3,3), stride=(1,1), pad=(1,1), num_filter=interm_layer, name="{}_inter_conv".format(from_name))
            from_layer = mx.symbol.Activation(data=from_layer, act_type="relu", name="{}_inter_relu".format(from_name))

        # estimate number of anchors per location
        # here I follow the original version in caffe
        # TODO: better way to shape the anchors??
        size = sizes[k]
        assert len(size) > 0, "must provide at least one size"
        size_str = "(" + ",".join([str(x) for x in size]) + ")"
        ratio = ratios[k]
        assert len(ratio) > 0, "must provide at least one ratio"
        ratio_str = "(" + ",".join([str(x) for x in ratio]) + ")"
        num_anchors = len(size) - 1 + len(ratio)

        # create location prediction layer
        num_loc_pred = num_anchors * 4
        loc_pred = mx.symbol.Convolution(data=from_layer, kernel=(3,3), stride=(1,1), pad=(1,1), num_filter=num_loc_pred, name="{}_loc_pred_conv".format(from_name))
        loc_pred = mx.symbol.transpose(loc_pred, axes=(0,2,3,1))
        loc_pred = mx.symbol.Flatten(data=loc_pred)
        loc_pred_layers.append(loc_pred)

        # create class prediction layer
        num_cls_pred = num_anchors * num_classes
        cls_pred = mx.symbol.Convolution(data=from_layer, kernel=(3,3), stride=(1,1), pad=(1,1), num_filter=num_cls_pred, name="{}_cls_pred_conv".format(from_name))
        cls_pred = mx.symbol.transpose(cls_pred, axes=(0,2,3,1))
        cls_pred = mx.symbol.Flatten(data=cls_pred)
        cls_pred_layers.append(cls_pred)

        # create anchor generation layer
        anchors = mx.contrib.symbol.MultiBoxPrior(from_layer, sizes=size_str, ratios=ratio_str, clip=clip, name="{}_anchors".format(from_name))
        anchors = mx.symbol.Flatten(data=anchors)
        anchor_layers.append(anchors)

    loc_preds = mx.symbol.Concat(*loc_pred_layers, num_args=len(loc_pred_layers), dim=1, name="multibox_loc_pred")
    cls_preds = mx.symbol.Concat(*cls_pred_layers, num_args=len(cls_pred_layers), dim=1)
    cls_preds = mx.symbol.Reshape(data=cls_preds, shape=(0, -1, num_classes))
    cls_preds = mx.symbol.transpose(cls_preds, axes=(0, 2, 1), name="multibox_cls_pred")
    anchor_boxes = mx.symbol.Concat(*anchor_layers, num_args=len(anchor_layers), dim=1)
    anchor_boxes = mx.symbol.Reshape(data=anchor_boxes, shape=(0, -1, 4), name="multibox_anchors")
    return [loc_preds, cls_preds, anchor_boxes]
|
We all know that drugs and alcohol are bad for us. We also know that they are bad for the insides of our body, but we rarely think about how they can affect the way we look on the outside. As weird as it sounds, people are more likely to listen about how harmful drugs and alcohol are when appearance is mentioned. Excessive drinking can cause: weight gain, dry and dull skin, broken capillaries, brittle hair and nails, and premature aging. The use of marijuana can cause weight gain. Cocaine is known to cause sores and scratches, and pale, unhealthy looking skin. Heroin can cause weight loss, premature aging, bluish skin and nails, along with abscesses, track marks, and sores and acne. Meth is also known to cause weight loss, tooth decay and accelerated aging.
To learn more about how drugs and alcohol can affect you on the outside, click here.
|
import datetime
import os
import pickle
import time
from colorama import Fore, init
from .databaseCore import database
class owerview(object):
    # Portfolio overview: renders a user's coin balances and wallets with
    # prices loaded from the local database, coloured via colorama.
    def __init__(self):
        init()  # colorama init (enables ANSI colours on Windows)
        self.p = database()
        self.userid = -1  # no user selected yet
        self.coin_table = self.p.load_all_coins()
        # load all updates table, run function to group by (last in a day)
        self.updates_data = self.p.load_update_all()
        self.__dates_normalization()
        self.user_data_wall_bal = {}  # 0: balances rows, 1: wallet rows

    # update time normalization (save index of last update of a day)
    # saved back in self.updates_data
    def __dates_normalization(self):
        # Map each calendar day ("YYYY-MM-DD") to the id of the last price
        # update on that day; the mapping is cached in a pickle file.
        try:
            with open("data/tmp/updatesid_last_of_day.pickle", "rb") as f:
                updates_sorted = pickle.load(f)
        except FileNotFoundError:
            # cache miss: rebuild from the raw updates rows (id, timestamp).
            # Later rows for the same day overwrite earlier ones, so each
            # day keeps its last update id.
            updates_sorted = {}
            for x in self.updates_data:
                tmp_date = datetime.datetime.strptime(x[1], "%Y-%m-%d %H:%M:%S")
                tp_day = tmp_date.date()
                str_day = str(tp_day)
                updates_sorted[str_day] = {"date": tmp_date, "id": x[0]}
        # NOTE(review): assignment and re-dump placed outside the except so
        # the cached path also populates self.updates_data — confirm against
        # the original (source indentation was lost).
        self.updates_data = updates_sorted
        # print(str(self.updates_data))
        with open("data/tmp/updatesid_last_of_day.pickle", "wb") as f:
            pickle.dump(updates_sorted, f)

    # user detail view
    def user_view(self, userid, history=False):
        # Show the user's holdings priced either at the latest update
        # (history=False) or at the last update of every recorded day
        # (history=True). Returns 0 in all cases.
        user_balance = self.p.load_user_balance(userid)
        user_wallets = self.p.load_user_wallet(userid)
        self.user_data_wall_bal[0] = user_balance
        self.user_data_wall_bal[1] = user_wallets
        tmp_coinId_list = []
        tmp_coin_price = {}

        # Check if coins and wallets != empty
        if user_balance == [] and user_wallets == []:
            print("No coin balances avaliable.\nEnter coin in balance or\nEnter wallet publicKey")
            time.sleep(5)
            os.system('cls' if os.name == 'nt' else 'clear')
            return 0

        # get all coin ids you will need for price (deduplicated)
        try:
            for x in user_balance:
                if x[1] in tmp_coinId_list:
                    pass
                else:
                    tmp_coinId_list.append(x[1])
        except:  # NOTE(review): bare except silently skips malformed rows
            pass
        try:
            for x in user_wallets:
                if x[2] in tmp_coinId_list:
                    pass
                else:
                    tmp_coinId_list.append(x[2])
        except:
            pass

        # Quick view if history == False
        # Pull last update, save in tmp_coin_price[updateID] = { all data }
        if history == False:
            lastupdate = self.p.check_last_update_id()
            coin_price = {}
            # pull only newest update
            for elem in tmp_coinId_list:
                tmp = self.p.load_coin_price_with_mc(elem, lastupdate)
                coin_price[elem] = {"usd": tmp[0], "btc": tmp[1], "mc": tmp[2]}
            tmp_coin_price[lastupdate] = coin_price
            self.__display(tmp_coin_price)
            self.pause()
            return 0
        elif history == True:
            coin_price = {}
            # one price snapshot per recorded day (last update of that day)
            for x in self.updates_data.keys():
                tmp_id = self.updates_data[x]['id']
                tmp_tmp_coin = {}
                for elem in tmp_coinId_list:
                    tmp = self.p.load_coin_price_with_mc(elem, tmp_id)
                    tmp_tmp_coin[elem] = {"usd": tmp[0],
                                          "btc": tmp[1], "mc": tmp[2]}
                coin_price[tmp_id] = tmp_tmp_coin
            # call general view
            self.__display(coin_price)
            self.pause()
            return 0

    def pause(self):
        # Block until the user presses ENTER, then clear the terminal.
        sss = input("Press ENTER to continue...")
        time.sleep(1)
        os.system('cls' if os.name == 'nt' else 'clear')

    def __display(self, data_dic):
        # Render one section per price update in data_dic.
        # data_dic[updateID][coinID] = {"usd": ?, "btc": ?, "mc": ?}
        usr_bal = self.user_data_wall_bal[0]
        usr_wal = self.user_data_wall_bal[1]
        for i in data_dic:
            tmp_update = data_dic[i]
            date_u = self.p.check_update_date(i)
            print(Fore.YELLOW + "\nWith prices from: " + date_u)

            # Balance outprint
            tot_update_value = 0  # running USD total across balances + wallets
            if usr_bal != []:
                print(Fore.MAGENTA + "\nBalance: \n")
                for x in range(len(usr_bal)):
                    tmp_coinid = int(usr_bal[x][1])
                    tmp_name = usr_bal[x][2]
                    tmp_bal_amount = float(usr_bal[x][4])
                    pri_usd = tmp_update[tmp_coinid]['usd']
                    pri_btc = tmp_update[tmp_coinid]['btc']
                    val_usd = tmp_bal_amount * pri_usd
                    val_btc = tmp_bal_amount * pri_btc
                    tot_update_value += val_usd
                    print(Fore.BLUE + "Coin: " + Fore.GREEN + tmp_name + Fore.BLUE + " -balance: " + Fore.GREEN + str(tmp_bal_amount) +
                          Fore.BLUE + " - btc: " + Fore.GREEN + str(pri_btc) + Fore.BLUE + " - usd: " + Fore.GREEN + str(pri_usd))
                    print(Fore.BLUE + " Total USD/BTC : " + Fore.GREEN + str(val_usd) +
                          Fore.MAGENTA + "$ / " + Fore.GREEN + str(val_btc) + Fore.MAGENTA + " BTCs")
            else:
                pass

            # Wallets out print
            if usr_wal != []:
                print(Fore.MAGENTA + "\nWallets: \n")
                for x in range(len(usr_wal)):
                    tmp_coinid = int(usr_wal[x][2])
                    tmp_name = usr_wal[x][3]
                    tmp_wal_name = usr_wal[x][7]
                    tmp_wal_amount = float(usr_wal[x][5])
                    tmp_wall_addr = usr_wal[x][4]
                    pri_usd = tmp_update[tmp_coinid]['usd']
                    pri_btc = tmp_update[tmp_coinid]['btc']
                    val_usd = tmp_wal_amount * pri_usd
                    val_btc = tmp_wal_amount * pri_btc
                    tot_update_value += val_usd
                    print(Fore.BLUE + "Wallet: " + Fore.GREEN + tmp_wal_name + Fore.BLUE + " - " + Fore.GREEN + tmp_name + Fore.BLUE + " - balance: " + Fore.GREEN + str(
                        tmp_wal_amount) + Fore.BLUE + " - btc: " + Fore.GREEN + str(pri_btc) + Fore.BLUE + " - usd: " + Fore.GREEN + str(pri_usd), end=' ')
                    print(Fore.BLUE + " address: " + Fore.GREEN + tmp_wall_addr + Fore.BLUE + "\n Total USD / BTC : " +
                          str(val_usd) + Fore.MAGENTA + "$ / " + str(val_btc) + Fore.MAGENTA + " BTCs")
            else:
                pass

            print(Fore.RED + "\nTotal value on date: " + Fore.CYAN + date_u + " " + Fore.GREEN +
                  str(tot_update_value) + " $\n\n")
            time.sleep(0.8)
|
19th Century Cumbrian Antique Painted Pine Hall Bench.
A super example of a North of England antique pine hall bench still in the original paint finish which is a wonderful, grey / silver colour. A strong design with reeded vertical back splats, open arms, square legs and stretchers. All pegged and in excellent, strong and original condition. Dates from the mid 19th century and is originally from Cumbria.
Additional Information: The seat is 56cm deep.
|
#
# Proximate - Peer-to-peer social networking
#
# Copyright (c) 2008-2011 Nokia Corporation
#
# All rights reserved.
#
# This software is licensed under The Clear BSD license.
# See the LICENSE file for more details.
#
import gtk
import gobject
import tempfile
import os
from plugins import get_plugin_by_type
from file_chooser_dlg import File_Chooser, FILE_CHOOSER_TYPE_FILE
from camera import Camera, Camera_Exception, DEFAULT_RESOLUTION
from support import warning, debug
from ossupport import xclose, xremove
from proximateprotocol import PLUGIN_TYPE_NOTIFICATION, MAX_FACE_DIMENSION, \
TP_FACE_SIZE
from guiutils import scale_image, compress_jpeg
class Picture_Choose_Dialog:
    """Dialog for previewing and selecting the user's profile picture.

    The picture can come from a file (via File_Chooser) or from the camera
    (via Camera_Dialog).  Pictures larger than TP_FACE_SIZE bytes or
    MAX_FACE_DIMENSION pixels are scaled down and recompressed into a
    temporary JPEG, which is removed again when the dialog is closed.
    """

    def __init__(self, gui, got_picture_cb):
        # got_picture_cb(filename) is called when the user presses OK;
        # filename is None when the image has been cleared.
        self.notify = get_plugin_by_type(PLUGIN_TYPE_NOTIFICATION).notify
        self.filename = None
        self.gui = gui
        self.tempfile = None # file to be removed when dialog is closed
        self.got_picture_cb = got_picture_cb
        self.dialog = gtk.Dialog("Select Profile Picture",
            gui.get_main_window(),
            gtk.DIALOG_DESTROY_WITH_PARENT | gtk.DIALOG_MODAL,
            (gtk.STOCK_OK, gtk.RESPONSE_OK,
            gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
        self.dialog.set_border_width(5)
        self.dialog.vbox.set_spacing(2)
        self.dialog.action_area.set_layout(gtk.BUTTONBOX_END)
        self.dialog.set_position(gtk.WIN_POS_CENTER)
        self.initialize_widgets()
        self.dialog.connect("response", self.response_handler)
        self.dialog.connect("delete-event", self.dialog_deleted)

    def initialize_widgets(self):
        """Build the preview image and the three action buttons."""
        self.profile_image = gtk.Image()
        self.profile_image.set_size_request(300, 300)
        self.profile_image.set_from_stock(gtk.STOCK_ORIENTATION_PORTRAIT, 4)
        self.browse_button = gtk.Button("Browse")
        self.take_photo = gtk.Button("Take photo")
        self.clear_image = gtk.Button('Clear image')
        self.vbox1 = gtk.VBox()
        self.vbox1.pack_start(self.profile_image)
        self.vbox1.pack_start(self.browse_button, False, True)
        self.vbox1.pack_start(self.take_photo, False, True)
        self.vbox1.pack_start(self.clear_image, False, True)
        self.dialog.vbox.pack_start(self.vbox1)
        self.browse_button.connect("clicked", self.browse_button_clicked)
        self.take_photo.connect("clicked", self.take_photo_clicked)
        self.clear_image.connect('clicked', self.clear_image_clicked)

    def response_handler(self, widget, response_id, *args):
        """ Handles dialog responses: deliver the chosen file on OK, then hide. """
        if response_id == gtk.RESPONSE_OK:
            self.got_picture_cb(self.filename)
        self.dialog.hide()
        return True

    def dialog_deleted(self, dialog, event):
        # Swallow the delete-event so the dialog is hidden, not destroyed.
        return True

    def show(self):
        self.dialog.show_all()

    def close(self):
        """Destroy the dialog and remove any temporary picture file."""
        self.remove_temp()
        self.dialog.destroy()

    def browse_button_clicked(self, widget):
        file_dlg = File_Chooser(self.gui.main_window, FILE_CHOOSER_TYPE_FILE, False, self.browse_chooser_cb)
        file_dlg.add_supported_pixbuf_formats()

    def browse_chooser_cb(self, filename, ctx):
        """Validate the chosen file and scale/recompress it if necessary."""
        if filename is None:
            return
        # Checking if we have to scale the picture down;
        # this also checks that the file really is a picture.
        try:
            pixbuf = gtk.gdk.pixbuf_new_from_file(filename)
        except gobject.GError:
            self.notify("Error: Invalid image file", True)
            return
        larger_dimension = max((pixbuf.get_width(), pixbuf.get_height()))
        if os.path.getsize(filename) <= TP_FACE_SIZE and \
           larger_dimension <= MAX_FACE_DIMENSION:
            # small enough: use the picture directly without recompression
            self.remove_temp()
            self.set_picture(filename)
        else:
            # too large: scale down and recompress into a temporary JPEG
            pixbuf = scale_image(pixbuf, MAX_FACE_DIMENSION)
            if not self.compress_jpeg(pixbuf):
                self.notify("Error: Unable to compress JPEG picture", True)

    def remove_temp(self):
        """Delete the temporary recompressed picture, if one exists."""
        if self.tempfile is not None:
            if not xremove(self.tempfile):
                warning("Unable to remove a scaled picture\n")
            self.tempfile = None

    def take_photo_clicked(self, widget):
        self.camera_dialog = Camera_Dialog(self.dialog, DEFAULT_RESOLUTION,
            self.got_photo)

    def got_photo(self, pixbuf):
        """Camera callback: recompress the captured frame, if any."""
        if pixbuf:
            pixbuf = scale_image(pixbuf, MAX_FACE_DIMENSION)
            if not self.compress_jpeg(pixbuf):
                self.notify("Error: Unable to compress JPEG picture", True)
        self.camera_dialog = None

    def clear_image_clicked(self, widget):
        self.remove_temp()
        self.set_picture(None)

    def set_picture(self, fname):
        # fname may be None to clear the preview.
        self.filename = fname
        self.profile_image.set_from_file(fname)

    def compress_jpeg(self, pixbuf):
        """Write pixbuf as a size-limited temporary JPEG and preview it.

        Note: this method deliberately shadows the module-level
        compress_jpeg() helper from guiutils, which it calls internally.
        Returns True on success, False if compression failed.
        """
        (fd, filename) = tempfile.mkstemp(prefix = 'proximate-tmp-profile-pic-')
        xclose(fd)
        if not compress_jpeg(pixbuf, filename, TP_FACE_SIZE):
            return False
        self.remove_temp()
        self.tempfile = filename
        self.set_picture(filename)
        return True
class Camera_Dialog:
    """Modal dialog that shows a live camera feed and captures one photo.

    got_photo_cb(pixbuf) is invoked when the dialog closes: pixbuf is the
    captured frame, or None if nothing was captured.  If no camera is
    available the feed is hidden and the callback is never invoked.
    """

    def __init__(self, profile_dialog, resolution, got_photo_cb):
        self.cb = got_photo_cb
        self.dialog = gtk.Dialog('Camera', profile_dialog,
            gtk.DIALOG_DESTROY_WITH_PARENT | gtk.DIALOG_MODAL)
        self.dialog.set_has_separator(False)
        self.image = gtk.DrawingArea()
        self.image.set_size_request(resolution[0], resolution[1])
        self.help_text = gtk.Label('Click to take picture')
        try:
            self.camera = Camera(resolution, self.image)
        except Camera_Exception:
            debug('profile dialog: Unable to initialize camera\n')
            self.camera = None
            self.help_text.set_label('No camera found')
        # Center the drawing area horizontally between two expanding boxes.
        self.image_hbox = gtk.HBox()
        self.image_hbox.pack_start(gtk.HBox())
        self.image_hbox.pack_start(self.image, False, False)
        self.image_hbox.pack_start(gtk.HBox())
        if self.camera is not None:
            self.dialog.vbox.pack_start(self.image_hbox)
        self.dialog.vbox.pack_start(self.help_text, False, True)
        self.close_button = gtk.Button('Close')
        self.dialog.vbox.pack_start(self.close_button, False, True)
        self.close_button.connect('clicked', self.close_clicked)
        self.dialog.connect('response', self.dialog_response)
        self.image.add_events(gtk.gdk.BUTTON_PRESS_MASK)
        self.image.connect('button-press-event', self.image_clicked)
        self.dialog.show_all()

    def close_clicked(self, widget):
        self.close()

    def dialog_response(self, widget, response_id):
        self.close()

    def close(self):
        """Stop the camera, deliver the captured frame (or None), destroy."""
        if self.camera:
            self.camera.stop()
            if self.camera.buffer:
                # The camera buffer holds packed 24-bit RGB rows
                # (rowstride = 3 * width).
                pixbuf = gtk.gdk.pixbuf_new_from_data(self.camera.buffer,
                    gtk.gdk.COLORSPACE_RGB, False, 8, self.camera.width,
                    self.camera.height, 3*self.camera.width)
                self.cb(pixbuf)
            else:
                self.cb(None)
        self.dialog.destroy()

    def image_clicked(self, widget, data=None):
        if self.camera:
            self.camera.take_photo()
|
Pink Intrigue – We create simple, intelligent copy for your brand.
Pink Intrigue was born in 2007; Edina Jackson was the architect behind the brand. After tirelessly writing her first anthology of poems, she set out to make a change, armed with qualifications in Politics, Philosophy and International Journalism. She had already worked at various publications as a reporter — this was her foundation. But she needed more: she wanted to create informative, inspirational, transformative content for new and established brands, to help pull them out of their content-production rut and breathe life into their businesses.
Well articulated copy. Get straight to the point, providing clients, readers, and potential customers with a clear idea of who you are as a brand.
Clear, concise content. To clarify your message, and provide the customer with a clear sense of your brand identity.
We cut out the jargon and create focused, clearly defined content for your marketing campaign, website, online magazine, blog, brochure or newsletter.
Simple, well written copy can communicate your brand message without all the unnecessary jargon. That’s our speciality.
Compelling, engaging content to move, provoke and persuade.
Lifestyle content, but we can craft exceptional content for most industries.
We have a store. We produce creative lifestyle products.
|
## Copyright 2003-2009 Luc Saffre
## This file is part of the TimTools project.
## TimTools is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## TimTools is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with TimTools; if not, see <http://www.gnu.org/licenses/>.
import os
# Placeholders for site-generation paths (filled in elsewhere, if at all).
localRoot = ""
targetRoot = ""
# Absolute path to the project source root, three directory levels above
# this file.
SRC_ROOT = os.path.join(os.path.dirname(__file__),"..","..","..")
SRC_ROOT = os.path.abspath(SRC_ROOT)
# NOTE: Python 2 print statement -- this module is Python 2 only.
print "SRC_ROOT =", SRC_ROOT
def fileref(filename):
    """Return a reST/HTML link to *filename*, resolved two levels up."""
    return url("../../" + filename, filename)
def url(url,label=None,title=None):
    """Return a reST ``.. raw:: html`` snippet containing an <a> element.

    label defaults to the url itself; title defaults to the empty string.
    NOTE(review): the *url* parameter shadows the function name, and the
    values are interpolated into HTML without escaping -- assumes trusted
    input.
    """
    if label is None:
        label = url
    if title is None:
        title = ""
    r= """
.. raw:: html

  <a href="%(url)s" title="%(title)s">%(label)s</a>
""" % locals()
    # print r
    return r
|
London five-piece Hexmaze are shimmering under a veil of lurching guitarwork and witchy incantations on their debut single ‘Hex Hex Hex’. They are the musical mergence of Aneta, Alex, Emma, Mel and Sophie – five women brought together from the UK, Sweden, Poland and Mexico, drawing together a mosaic of influences and creative minds. While their instrumental arrangement has the sharp groan of a switchblade, the airy chorus of their voices lifts the track up so that it is almost otherworldly, straddling the boundary between heaven and hell. Make no mistake: Hexmaze aren’t here to play nice. Their ethereal set-up unravels into a ferocious last hurrah, which, in the true spirit of punk, loses all control. It’s a fact: Hexmaze have made the promise that the UK is theirs for the taking.
|
import sys
import os
from basic.constant import ROOT_PATH
from basic.common import checkToSkip, printStatus
INFO = __file__
def process(options, collection):
    """Count, per tag, how many distinct users and how many images use it.

    Reads ``id.userid.<tpp>tags.txt`` from the collection's TextData
    directory and writes ``<tpp>tag.userfreq.imagefreq.txt`` sorted in
    descending order with user frequency as the primary key.
    Returns 0 (or skips entirely when the result file already exists and
    --overwrite is not set).
    """
    rootpath = options.rootpath
    tpp = options.tpp
    tagfile = os.path.join(rootpath, collection, "TextData", "id.userid.%stags.txt" % tpp)
    resultfile = os.path.join(rootpath, collection, "TextData", "%stag.userfreq.imagefreq.txt" % tpp)
    if checkToSkip(resultfile, options.overwrite):
        return 0
    printStatus(INFO, "parsing " + tagfile)
    tag2imfreq = {}
    tag2users = {}
    # Each line: <photoid> <userid> <tag> <tag> ...
    with open(tagfile) as reader:  # close the file deterministically
        for line in reader:
            elems = line.strip().split()
            userid = elems[1]
            tagset = set(elems[2:])  # de-duplicate tags within one image
            for tag in tagset:
                tag2imfreq[tag] = tag2imfreq.get(tag, 0) + 1
                tag2users.setdefault(tag, []).append(userid)
    printStatus(INFO, "collecting user-freq and image-freq")
    # .items() (rather than the Python-2-only .iteritems()) keeps this
    # working under both Python 2 and 3.
    results = [(tag, len(set(users)), tag2imfreq[tag])
               for tag, users in tag2users.items()]
    printStatus(INFO, "sorting in descending order (user-freq as primary key)")
    results.sort(key=lambda v: (v[1], v[2]), reverse=True)
    printStatus(INFO, "-> %s" % resultfile)
    with open(resultfile, 'w') as fw:
        # The with-block closes the file; no explicit close() needed.
        fw.write(''.join(['%s %d %d\n' % (tag, userfreq, imfreq) for (tag, userfreq, imfreq) in results]))
    return 0
def main(argv=None):
    """Parse command-line options and run process() on the collection."""
    from optparse import OptionParser
    if argv is None:
        argv = sys.argv[1:]
    parser = OptionParser(usage="""usage: %prog [options] collection""")
    parser.add_option("--overwrite", default=0, type="int", help="overwrite existing file (default: 0)")
    parser.add_option("--tpp", default='lemm', type="string", help="tag preprocess (default: lemm)")
    parser.add_option("--rootpath", default=ROOT_PATH, type="string", help="rootpath where the train and test collections are stored (default: %s)" % ROOT_PATH)
    options, args = parser.parse_args(argv)
    if not args:
        parser.print_help()
        return 1
    return process(options, args[0])
# Script entry point: the process exit status comes from main().
if __name__ == "__main__":
    sys.exit(main())
|
The course is an 8-hour presentation and sample test review. Upon completion of the course you will receive a certificate of completion. We are planning to have a boiler inspector administer the test on 3/31/19 in the morning. A test can also be scheduled at any time by contacting a boiler inspector directly.
April 10, 2019 at 10 a.m.
Any meeting minutes not published can be made available upon request.
|
import re
import abc
from tornado.web import RequestHandler
from breeze.filter import FilterOptions
class _ResourceMeta(abc.ABCMeta):
    """Metaclass deriving ``name`` and ``url_regex`` for Resource subclasses.

    The Resource base class itself (whose only base is RequestHandler) is
    left untouched.  Concrete subclasses take their resource name from an
    optional inner ``Meta`` class, defaulting to the lowercased class name
    with an "s" suffix.
    """

    def __init__(cls, name, bases, nmspc):
        super().__init__(name, bases, nmspc)
        if bases == (RequestHandler, ):
            # This is the Resource base class itself; nothing to derive.
            return
        meta = getattr(cls, 'Meta', object())
        default_name = cls.__name__.lower() + 's'
        cls.name = getattr(meta, 'name', default_name)
        # Matches /<name>, /<name>/, /<name>/<pk> and /<name>/<pk>/.
        cls.url_regex = r'/{}(?:/(?P<pk>[^/]+))?/?$'.format(re.escape(cls.name))
class Resource(RequestHandler, metaclass=_ResourceMeta):
    """REST-style request handler exposing CRUD hooks over one resource.

    Subclasses are expected to override load/list/delete/update/create;
    the base implementations here are no-op placeholders.
    """

    def load(self, pk):
        """Return the single instance identified by *pk*."""
        pass

    def list(self, filter_options):
        """Return an iterable of instances matching *filter_options*."""
        pass

    def delete(self, instance):
        """Remove *instance* from the backing store."""
        pass

    def update(self, instance, **data):
        """Apply *data* to *instance*."""
        pass

    def create(self, **data):
        """Create and return a new instance from *data*."""
        pass

    def get(self, pk=None):
        """GET one resource (pk given) or the filtered collection."""
        if pk is not None:
            self.write(self.load(pk).to_json())
            return
        entries = self.list(self._parse_filter_options())
        self.write({'data': [entry.to_json() for entry in entries]})

    def post(self, pk=None):
        # Creation endpoint: a pk in the URL is not meaningful here.
        assert pk is None

    def _parse_filter_options(self):
        """Build the FilterOptions for a collection GET (no filters yet)."""
        return FilterOptions()
|
Aldershot Town have signed goalkeeper George Legg on a one-month loan deal from Championship side Reading, with an option to extend the deal.
The 22-year-old stopper has yet to make an appearance for the Royals’ first team but has made over 100 senior appearances across several loan spells.
He’s spent time on loan at Oxford United, Holyport, Hendon, Dunstable Town, Chesham United, Hampton & Richmond Borough, Gosport Borough, Hungerford Town and most recently with Barnet in League Two last season where he made three EFL appearances.
He goes straight into tomorrow’s squad against Bromley, meanwhile fellow keeper Zaki Oualah has joined Leatherhead and Will Mannion continues his recovery from injury at parent club Hull City.
|
#!/usr/bin/env python3
# CLI for interacting with interop server.
from __future__ import print_function
import argparse
import datetime
import getpass
import logging
import sys
import time
from auvsi_suas.client.client import AsyncClient
from auvsi_suas.proto.interop_api_pb2 import Telemetry
from google.protobuf import json_format
from mavlink_proxy import MavlinkProxy
from upload_odlcs import upload_odlcs
logger = logging.getLogger(__name__)
def teams(args, client):
    """Print every team's status as JSON, one message per line."""
    for team in client.get_teams().result():
        print(json_format.MessageToJson(team))
def mission(args, client):
    """Fetch the mission given by --mission_id and print it as JSON."""
    details = client.get_mission(args.mission_id).result()
    print(json_format.MessageToJson(details))
def odlcs(args, client):
    """Upload ODLCs from --odlc_dir, or print the server's ODLCs as JSON."""
    if not args.odlc_dir:
        for odlc in client.get_odlcs(args.mission_id).result():
            print(json_format.MessageToJson(odlc))
    else:
        upload_odlcs(client, args.odlc_dir)
def maps(args, client):
    """Upload a map image from --map_filepath, or print the stored image."""
    if not args.map_filepath:
        print(client.get_map_image(args.mission_id).result())
        return
    with open(args.map_filepath, 'rb') as img:
        logger.info('Uploading map %s', args.map_filepath)
        client.put_map_image(args.mission_id, img.read()).result()
def probe(args, client):
    """Repeatedly post dummy telemetry, pacing requests to --interop_time."""
    while True:
        start_time = datetime.datetime.now()
        # All-zero telemetry: we only measure request latency, not content.
        telemetry = Telemetry()
        telemetry.latitude = 0
        telemetry.longitude = 0
        telemetry.altitude = 0
        telemetry.heading = 0
        client.post_telemetry(telemetry).result()
        elapsed_time = (datetime.datetime.now() - start_time).total_seconds()
        logger.info('Executed interop. Total latency: %f', elapsed_time)
        # Sleep off the remainder of the period; Ctrl-C exits cleanly.
        remaining = args.interop_time - elapsed_time
        if remaining > 0:
            try:
                time.sleep(remaining)
            except KeyboardInterrupt:
                sys.exit(0)
def mavlink(args, client):
    """Forward MAVLink GLOBAL_POSITION_INT packets as interop telemetry."""
    MavlinkProxy(args.device, client).proxy()
def main():
    """Entry point: configure logging, parse args, dispatch the subcommand."""
    # Setup logging
    logging.basicConfig(
        level=logging.INFO,
        stream=sys.stdout,
        format='%(asctime)s: %(name)s: %(levelname)s: %(message)s')

    # Parse command line args.
    parser = argparse.ArgumentParser(description='AUVSI SUAS Interop CLI.')
    parser.add_argument('--url',
                        required=True,
                        help='URL for interoperability.')
    parser.add_argument('--username',
                        required=True,
                        help='Username for interoperability.')
    parser.add_argument('--password', help='Password for interoperability.')

    # Require a subcommand: without this, invoking the CLI with no
    # subcommand would crash with AttributeError on args.func.
    subparsers = parser.add_subparsers(dest='command', help='Sub-command help.')
    subparsers.required = True

    subparser = subparsers.add_parser('teams', help='Get the status of teams.')
    subparser.set_defaults(func=teams)

    subparser = subparsers.add_parser('mission', help='Get mission details.')
    subparser.set_defaults(func=mission)
    subparser.add_argument('--mission_id',
                           type=int,
                           required=True,
                           help='ID of the mission to get.')

    subparser = subparsers.add_parser(
        'odlcs',
        help='Upload odlcs.',
        description='''Download or upload odlcs to/from the interoperability
server.

Without extra arguments, this prints all odlcs that have been uploaded to the
server.

With --odlc_dir, this uploads new odlcs to the server.

This tool searches for odlc JSON and images files within --odlc_dir
conforming to the 2017 Object File Format and uploads the odlc
characteristics and thumbnails to the interoperability server.

There is no deduplication logic. Odlcs will be uploaded multiple times, as
unique odlcs, if the tool is run multiple times.''',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparser.set_defaults(func=odlcs)
    subparser.add_argument('--mission_id',
                           type=int,
                           help='Mission ID to restrict ODLCs retrieved.',
                           default=None)
    subparser.add_argument(
        '--odlc_dir',
        help='Enables odlc upload. Directory containing odlc data.')

    subparser = subparsers.add_parser(
        'map',
        help='Upload maps.',
        description='''Download or upload map images to/from the server.

With just the mission specified it prints the imagery data. With a image
filepath specified, it uploads the map to the server.''',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparser.set_defaults(func=maps)
    subparser.add_argument('--mission_id',
                           type=int,
                           help='Mission ID for the map.',
                           required=True)
    subparser.add_argument('--map_filepath',
                           type=str,
                           help='Filepath to the image to upload.')

    subparser = subparsers.add_parser('probe', help='Send dummy requests.')
    subparser.set_defaults(func=probe)
    subparser.add_argument('--interop_time',
                           type=float,
                           default=1.0,
                           help='Time between sent requests (sec).')

    subparser = subparsers.add_parser(
        'mavlink',
        help='''Receive MAVLink GLOBAL_POSITION_INT packets and
forward as telemetry to interop server.''')
    subparser.set_defaults(func=mavlink)
    subparser.add_argument(
        '--device',
        type=str,
        help='pymavlink device name to read from. E.g. tcp:localhost:8080.')

    # Parse args, get password if not provided.
    args = parser.parse_args()
    if args.password:
        password = args.password
    else:
        password = getpass.getpass('Interoperability Password: ')

    # Create client and dispatch subcommand.
    client = AsyncClient(args.url, args.username, password)
    args.func(args, client)
# Script entry point.
if __name__ == '__main__':
    main()
|
Whether you're looking for a group booking at a restaurant for a Christmas party night or restaurant deals for a family Christmas dinner, 5pm can act as your restaurant guide and find you the best venue and festive dining.
Gather your family, friends or colleagues together for an exclusive lunch or dinner party in the warm and welcoming atmosphere of the Glasgow Marriott - one of the best party venues Glasgow has to offer. Dec 25, 2016. Christmas day lunch slots tend to go fast - so if you don't fancy slaving over a hot oven this festive season then you might want to take a look.
Family& Kids; Food& Drink. Eight Glasgow restaurants serving sumptuous Christmas day dinners. We’ve rounded up some of Glasgow’s finest establishments open on Christmas day and. For Christmas, the castle is offering carols by candlelight, a six-course dinner on Christmas Eve, presents in your room or under the Christmas Tree as well as dinner and a ceilidh band on.
For a family style Christmas feast with all the trimmings, choose The Butterfly and The Pig for your Christmas lunch in Glasgow. The kitchen of this Bath Street bar have gone all out with their festive menu this year, then the chance to raid the dessert cabinet before tea and coffee.
Nov Family christmas dinner glasgow, 2017. Looking to indulge on Christmas Day with none of the hassle of actually cooking?. non-residents) the traditional Christmas Day banquet will have your family feeling like. Glasgow. Oran Mor. (Byres Rd, Glasgow G12 8QX). The Westerwood is one of the finest party venues in Glasgow and the only place for Christmas parties in Glasgow. Our ever popular family Christmas Day lunch. 707 dripping springs road glasgow ky: Family christmas dinner glasgow 15 2018: 7: 00 p.
m: the inspirations, the primitives and the sneed family. sneed family christmas dinner and concert: sneed family christmas dinner and concert: barren river state park glasgow ky: may 05 2019: 10: 00 a.
m: christian fellowship church: christian fellowship church: 10. Christmas in Glasgow Glasgow Christmas parties& breaks From the twinkling lights of the St Enoch Square festive market to the seasonal Style Mile Carnival, Glasgow is a fantastic city to enjoy Christmas.
CELEBRATE WITH US. Make Christmas and New Year truly special at Crowne Plaza Glasgow. Whether you’d like to plan a delicious lunch for friends and family, or organise a sparkling party, we have all you need to make it a memorable and magical occasion.
Its just in case anyone has been for Christmas dinner somewhere special before. TBH, what prompted this, was an e-mail at work asking about whether I wanted to attend the works Christmas dinner. I declined the offer. Christmas dining offers& party bookings on 5pm. Whether you're looking for a group booking at a restaurant for a Christmas party night or restaurant deals for a family Christmas dinner.
The Western Club Restaurant City Centre, Glasgow. Family& Kids; Food& Drink. Eight Glasgow restaurants serving sumptuous Christmas day dinners. We’ve rounded up some of Glasgow’s finest establishments open on Christmas day and. Barbarossa Restaurant Pizzeria: Family Christmas dinner - See 236 traveller reviews, 21 candid photos, and great deals for Glasgow, UK, at TripAdvisor.
Christmas in Glasgow From the twinkling lights of the St Enoch Square Christmas market to the seasonal Style Mile Carnival, Glasgow is a wonderful city to enjoy the festive season. Reviews on Restaurants Open on Christmas Day in Glasgow - The Butterfly and the Pig, Stravaigin, Ubiquitous Chip, Hutchesons Bar& Brasserie, The Anchor Line, Oran Mor, The Gannet, Bothy Restaurant, Ingram Wynd, House For An Art Lover, The.
Miller& Carter Glasgow. Your friends and family will be wowed by our show- stopping, succulent steaks and the intense, rich flavours of our. Christmas Day. Whether you're all about tradition or looking for Christmas dinner with a difference, choose your favourite from our best Christmas Day offers.
Las Iguanas - Glasgow. 16-20 West Nile Street, Glasgow, G1 2PP. South American. Las Iguanas. Reviews on Restaurants Open on Christmas Day in Glasgow - The Butterfly and the Pig, Stravaigin, Ubiquitous Chip, Hutchesons Bar& Brasserie, The Anchor Line, Oran Mor, The Gannet, Bothy Restaurant, Ingram Wynd, House For An Art Lover, The.
Scotland made sure that care leavers have a family to be part of on Christmas day. This year we want to do more and with your help, we will. Our aim is to put on a family Christmas dinner that brings young people from Glasgow and Edinburgh together on the 25th of December. Aug 24, 2017. going with pals, family members or work colleagues for your Christmas lunch. To help you make that decision, we've found the Glasgow pubs and restaurants. Private dining rooms are also available for Christmas parties.
Celebrate Hogmanay with your whole family in the relaxed surroundings of the Mariner Restaurant. You will enjoy a three course dinner from our Hot Table followed by dancing to our resident DJ.
Barbarossa Restaurant Pizzeria: Family Christmas dinner - See 236 traveller reviews, 21 candid photos, and great deals for Glasgow, UK, at TripAdvisor. B Dinner served at 7: 30pm. DoubleTree By Hilton Glasgow Central Take a break from Christmas. Relax with all the family this Christmas Jan 03, 2017 · Radstone Hotel: Family Christmas Dinner - See 691 traveler reviews, 125 candid photos, and great deals for Radstone Hotel at TripAdvisor.
Christmas Dining City Centre British / Modern European: Family christmas dinner glasgow. - Lunch or dinner for £25 - Festive Family Ceilidh for £10 - Hogmanay Hootenanny for £80. Italian: Barolo Grill - 92 - 94 Mitchell Street. For more Glasgow restaurant recommendations. The Grove: Family Christmas Dinner - See 118 traveler reviews, 34 candid photos, and great deals for Glasgow, UK, at TripAdvisor. Celebrate Christmas 2018 at Glasgow in our buzzing and festive brasserie and bar.
Book for Christmas at Browns Glasgow. Dec 27, 2016 · The Grove: Family Christmas Dinner - See 118 traveler reviews, 34 candid photos, and great deals for Glasgow, UK, at TripAdvisor. 12 restaurants in Glasgow that are taking Christmas bookings now. family members or work colleagues for your Christmas lunch. Glasgow’s premier cabaret venue is offering a Christmas. Jul 5, 2018. dinner, having a fun night out with friends or a festive family get together.
Enjoy a three course Christmas dinner at Waxy's - Glasgow's 'Irish. Christmas& New Year's Eve 2018 Get Your Festive On! For a super-stylish backdrop to the festivities, join us and celebrate the best of the season at Brasserie Abode. Gleddoch is the perfect place to experience the magic and sparkle of Christmas and New Year in Glasgow. Enquire today about our Christmas party events! Are you looking for a festive venue for a Christmas dinner in Glasgow? Whether you're planning a mulled-wine fuelled feast or sumptuous six-course dinner, having a fun night out with friends or a festive family get together, what you need is a lively Christmas party where you can dine and drink and make it.
The Sneed Family of Glasgow, KY. www. sneedfamily. com. The Sneed Family welcomes you to their home on Gospel Gigs. SNEED FAMILY CHRISTMAS DINNER AND CONCERT. Festive dining offers The festive season is fast approaching and 5pm has lots of dining offers to help you enjoy Christmas. Whether you're looking for a group booking at a restaurant for a Christmas party night or restaurant deals for a family Christmas dinner, 5pm can act as your restaurant guide and find you the best venue and festive dining.
Christmas 2018 Events and Parties with discount tickets. A huge guide to What's On in Glasgow this Christmas plus hotels and restaurants. Nov 10, 2008. Brodies restaurant is open for christmas lunch. Its a lovely wee family run restaurant in glasgow city centre owened by young coupple Andy and.
Scotland made sure that care leavers have a family to be part of on Christmas day. This year we want to do more and with your help, we will. Our aim is to put on a family Christmas dinner that brings young people from Glasgow and Edinburgh together on the 25th of December.
Overview of Christmas facilities at the Holiday Inn Glasgow - East Kilbride hotel, located at Stewartfield Way. delicious dinner and more.
FAMILY EVENT ALSO. Christmas Dining City Centre British. - Lunch or dinner for £25 - Festive Family Ceilidh for £10. For more Glasgow restaurant recommendations. Planning your Christmas do in Glasgow this year? Get an exclusive area for you and your friends, try some of our incredible Christmas party cocktails and dance the night away with live music and entertainment. See our unique Christmas dinner menus including prices. to experience a truly special and unique Christmas with the whole family at Browns Brasserie& Bar.
eating out on christmas day - Glasgow Forum. The normal Christmas Dinner and all the trimming and entertainment or something different. Brodies restaurant is. Family Hotels Glasgow; Romantic Hotels Glasgow; Spa Resorts Glasgow;.
Its just in case anyone has been for Christmas dinner somewhere special before. TBH, what. Glasgow’s premier cabaret venue is offering a Christmas lunch and dinner (£39. 00/£49. 00) unlike any other in the city, with showgirls, comedy and acrobatics accompanying your meal. For more. Are you looking for a festive venue for a Christmas dinner in Glasgow? Whether you're planning a mulled-wine fuelled feast or sumptuous six-course dinner, having a fun night out with friends or a festive family get together, what you need is a lively Christmas party where you can dine and drink and make it a night to remember.
Upcoming Appearances for The Sneed Family of Glasgow, KY. Print the Schedule. Date Time Event Name (click for event details). SNEED FAMILY CHRISTMAS DINNER AND. Browse Eventa's Christmas Party ideas for Glasgow and plan the best Christmas Party for you friends, family or office.
|
"""
Created on Dec 8, 2011
@author: thygrrr
"""
from PyQt5 import QtCore, QtWidgets
class ChatLineEdit(QtWidgets.QLineEdit):
    """
    A special promoted QLineEdit that is used in channel.ui to provide a
    mirc-style editing experience: Up/Down recall previously entered lines,
    and Tab cycles through chatter-name completions for the word being typed.
    """
    def __init__(self, parent):
        QtWidgets.QLineEdit.__init__(self, parent)
        self.returnPressed.connect(self.on_line_entered)
        self.history = []  # lines previously entered, oldest first
        self.currentHistoryIndex = None  # index into self.history, None while empty
        self.historyShown = False  # True once a history entry is displayed
        self.completionStarted = False  # True while Tab-cycling through matches
        self.chatters = {}  # completion candidates; iteration yields objects with a .name
        self.LocalChatterNameList = []  # snapshot of matching names for the current completion run
        self.currenLocalChatter = None  # index into LocalChatterNameList (name kept as-is, incl. typo)

    def set_chatters(self, chatters):
        # Replace the candidate set used for tab completion.
        self.chatters = chatters

    def event(self, event):
        if event.type() == QtCore.QEvent.KeyPress:
            # Swallow a selection of keypresses that we want for our history support.
            if event.key() == QtCore.Qt.Key_Tab:
                self.try_completion()
                return True
            elif event.key() == QtCore.Qt.Key_Space:
                # Space confirms the current completion, then is handled normally.
                self.accept_completion()
                return QtWidgets.QLineEdit.event(self, event)
            elif event.key() == QtCore.Qt.Key_Up:
                self.cancel_completion()
                self.prev_history()
                return True
            elif event.key() == QtCore.Qt.Key_Down:
                self.cancel_completion()
                self.next_history()
                return True
            else:
                # Any other key aborts a completion in progress.
                self.cancel_completion()
                return QtWidgets.QLineEdit.event(self, event)
        # All other events (non-keypress)
        return QtWidgets.QLineEdit.event(self, event)

    @QtCore.pyqtSlot()
    def on_line_entered(self):
        # Record the submitted line and point history at the newest entry.
        self.history.append(self.text())
        self.currentHistoryIndex = len(self.history) - 1

    def showEvent(self, event):
        # Grab keyboard focus whenever the widget becomes visible.
        self.setFocus(True)
        return QtWidgets.QLineEdit.showEvent(self, event)

    def try_completion(self):
        if not self.completionStarted:
            # no completion on empty line
            if self.text() == "":
                return
            # no completion if last character is a space
            if self.text().rfind(" ") == (len(self.text()) - 1):
                return
            self.completionStarted = True
            self.LocalChatterNameList = []
            self.completionText = self.text().split()[-1]  # take last word from line
            # NOTE(review): rstrip() strips by character *set*, not by suffix;
            # this works here only because the stripped run is bounded by the
            # space (or start of line) preceding the last word.
            self.completionLine = self.text().rstrip(self.completionText)  # store line to be completed without the completion string
            # make a copy of users because the list might change frequently giving all kind of problems
            for chatter in self.chatters:
                if chatter.name.lower().startswith(self.completionText.lower()):
                    self.LocalChatterNameList.append(chatter.name)
            if len(self.LocalChatterNameList) > 0:
                # case-insensitive ordering of the candidate names
                self.LocalChatterNameList.sort(key=lambda chatter: chatter.lower())
                self.currenLocalChatter = 0
                self.setText(self.completionLine + self.LocalChatterNameList[self.currenLocalChatter])
            else:
                self.currenLocalChatter = None
        else:
            # Completion already running: cycle to the next candidate.
            if self.currenLocalChatter is not None:
                self.currenLocalChatter = (self.currenLocalChatter + 1) % len(self.LocalChatterNameList)
                self.setText(self.completionLine + self.LocalChatterNameList[self.currenLocalChatter])

    def accept_completion(self):
        # Keep the inserted name; just leave completion mode.
        self.completionStarted = False

    def cancel_completion(self):
        self.completionStarted = False

    def prev_history(self):
        if self.currentHistoryIndex is not None:  # no history, nothing to do
            if self.currentHistoryIndex > 0 and self.historyShown:  # check boundaries; only move the index if history is already shown
                self.currentHistoryIndex -= 1
            self.historyShown = True
            self.setText(self.history[self.currentHistoryIndex])

    def next_history(self):
        if self.currentHistoryIndex is not None:
            if self.currentHistoryIndex < len(self.history)-1 and self.historyShown:  # check boundaries; only move the index if history is already shown
                self.currentHistoryIndex += 1
            self.historyShown = True
            self.setText(self.history[self.currentHistoryIndex])
|
Retail and management staff cycled a total of 630km in 7 hours and 12 days later rowed from Dover to Calais raising £464.60 all in aid of our nominated charity partner, If U Care Share Foundation. The charity, based in Great Lumley specialise in promoting emotional wellbeing in young people as well as suicide prevention and intervention.
On Sunday 12th June, the same weekend as the famous Durham regatta, teams ‘rowed’ from Dover to Calais, a distance of 33.1 km all without leaving Durham city centre! Five different teams entered the rowing challenge with Team If U Care Share narrowly beating Whisky River by a matter of metres in just under 2.5 hours.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-13 13:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Event and Fellow tables
    # and links each Event to a Fellow. Avoid editing by hand once applied.

    initial = True

    # No dependencies: this is the first migration of the app.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                # The event's URL doubles as its primary key.
                ('url', models.CharField(max_length=120, primary_key=True, serialize=False, unique=True)),
                ('name', models.CharField(max_length=120)),
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
                ('description', models.TextField()),
                # Requested budget: up to 10 digits, 2 decimal places.
                ('budget_request', models.DecimalField(decimal_places=2, max_digits=10)),
            ],
        ),
        migrations.CreateModel(
            name='Fellow',
            fields=[
                # The fellow's email address is the primary key.
                ('email', models.CharField(max_length=120, primary_key=True, serialize=False, unique=True)),
                ('full_name', models.CharField(max_length=120, unique=True)),
                ('year', models.IntegerField()),
            ],
        ),
        # Added after both tables exist: Event.fellow -> Fellow, with
        # cascading delete.
        migrations.AddField(
            model_name='event',
            name='fellow',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lowfat.Fellow'),
        ),
    ]
|
Russia is a multicultural country whose thousand-year history, traditions, and cultural peculiarities distinguish it from many other nations. It remains an open question whether Russian tradition is more “eastern” or “western”, because it has been influenced by many different countries in the fields of faith, arts, trade, and even language. Traditional Russian culture has a rich artistic past, immersed in literature, painting, and classical music.
Whenever arriving at someone’s house.
If you are invited to someone’s house, it is appropriate to bring a small present or a bottle of wine. If there is a woman in the house, don’t forget flowers — make sure to buy an odd number, because an even number is for sad occasions such as a funeral. Bringing flowers on the first date is also a part of Russian dating culture. When you enter the house, take off your outerwear and shoes.
Don’t anticipate that folks will smile right straight right back at you or feel relaxed speaking with strangers. However they are really hospitable if you begin to speak to them And they recognize you to be a foreigner, they shall you will need to assist you to within their ability.
Mature and women that are old much respected here. By using underground or a coach without any seats that are free it is considered courteous to provide your house up to a mature or a woman that is old in.
What type of part does a lady play in the tradition of Russian ethnicity?
Russian social values are the homeland therefore the household. Within the past 100 years, Russian families withstood the difficulties only as a result of combined resources and help of this household. This might be a primary reasonand culture that is russian why they therefore very appreciate their families and maintain friendship that is close.
Occasions modification many individuals nevertheless have actually their very own inflexible image of women’s role within the culture. Russian ladies tradition is dependant on and respected for beauty, psychological level, and generosity that is selfless. Russian brides are held in high respect as moms, spouses, and keepers associated with home. Also now people value this conception of femininity and are usually scared of those being ruined by feminists.
A lot more than 150 years back, many families had been patriarchal and Russian females for wedding had been subservient with their husbands’ families before the son was created. All of the home administration had been recommended, also intimate information on household life. These people were maybe perhaps maybe not permitted to another work except home and kiddies care.
With communists visiting energy, mail order bride women’s part had changed and so they became free from oppressive norms of culture. They began to operate in medication, engineering, as well as other industries, where that they hadn’t had access before.
Intimate love, help, and self- self- confidence are thought to function as many appropriate inspiration for developing a family that is real Russian girls for wedding. There was a tradition that is long literary works, poetry, and folklore to idealize enthusiasts’ passion. Personal and financial issues in reference to liquor punishment of male area of the nation make girls search for their joy someplace abroad. Cultural intermarriage is just a fairly typical thing now and ended up being several years ago, therefore many Russian individuals have a minumum of one ancestor of the various nationality.
For the Russian girl, her household gets the greatest value; it is some types of a hereditary rule that is very difficult to improve.
Therefore, you will get a wife, a mother of if you want to marry a Russian girl your kids, the greatest buddy, a helpmate, and somebody. She will protect loving and calm environment in family members, relieving her Husband from stress and tension. a woman that is russian some type of the inspiration supply for guy; she inspires him for great worth achievements in life.
Chatting about her while the mom of one’s future kids, if you opt to marry a woman that is russian you get not merely a mom by by herself however the very very very first instructor in children’s life with right ethical rule and a few ideas. It really is she whom will require care of every relation and get worried about their real wellbeing. You are able to look to her for love, understanding, and help, and she’s going to provide a lot more.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, os, glob
import matplotlib.pyplot as plt
import numpy as np
import sqlite3
# Conversion factor applied to the stored voltages before plotting.
# NOTE(review): named "millivolt" but equal to 1e-9 — presumably the traces
# are stored in units where dividing by 1e-9 yields mV; confirm against the
# g4sipm output schema before changing.
millivolt = 1e-9

if __name__ == "__main__":
    # Default to the most recent results database; an explicit path wins.
    filename = glob.glob('results/*.sqlite')[-1]
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    # Plot range from arguments: start time and duration.
    start = 0
    stop = 1000
    if len(sys.argv) > 3:
        start = int(sys.argv[2])
        stop = start + int(sys.argv[3])
    # Open file.
    con = sqlite3.connect(filename)
    cur = con.cursor()
    # Extract trace. FIX: use bound parameters instead of %-interpolating
    # values into the SQL string.
    voltages = []
    times = []
    for row in cur.execute("SELECT time, voltage FROM `g4sipmVoltageTraceDigis-0` WHERE time BETWEEN ? AND ?;", (start, stop)):
        voltages.append(row[1] / millivolt)
        times.append(row[0] / 1000)
    # Plot traces superposed.
    plt.plot(times, voltages, '-')
    # Annotate with the SiPM model properties.
    name, pitch, cellPitch, numberOfCells = cur.execute("SELECT name, pitch, cellPitch, numberOfCells FROM sipmModel;").fetchone()
    # FIX: "$\\mu$" avoids the invalid "\m" escape (DeprecationWarning on
    # modern Python); plt.gca() replaces plt.axes(), which now creates a
    # fresh axes instead of returning the current one.
    plt.text(0.025, 0.975, "%s\n%d x %d mm, %d $\\mu$m pitch\n%d cells" % (name, pitch, pitch, cellPitch * 1000, numberOfCells),
             ha="left", va="top", fontsize="medium", transform=plt.gca().transAxes)
    plt.ylabel('voltage / mV')
    plt.xlabel(u'time / µs')
    plt.savefig("trace.pdf")
    plt.show()
|
Azaleas are not my favorite flowering shrub. I am especially perplexed by those who snip and style them into what ends up looking like giant jelly beans or gumballs. This hillside of azaleas won me over, with their more natural shapes and jumble of colors flashing in the dappled light of the just-budding trees. Together with the stairs, they form a perfect picture of a perfect moment in spring.
|
import sys
import string
import tempfile
import subprocess
from os import path
# -----------------------------------------------------------------------------------
def stop_err(msg):
    """Write *msg* to stderr and terminate the script."""
    sys.stderr.write(msg)
    raise SystemExit
# -----------------------------------------------------------------------------------
def gi_name_to_sorted_list(file_name, gi_col, name_col):
    """ Read a tab-separated file and return [gi, name] pairs sorted by GI.

    Suppose input file looks like this:
    a 2
    b 4
    c 5
    d 5
    where column 1 is gi_col and column 0 is name_col
    output of this function will look like this:
    [[2, 'a'], [4, 'b'], [5, 'c'], [5, 'd']]
    """
    result = []
    try:
        F = open( file_name, 'r' )
        try:
            for line in F:
                # Python 2 idiom: string.split(s, sep) == s.split(sep).
                file_cols = string.split(line.rstrip(), '\t')
                # int() raises ValueError on a non-numeric GI field.
                file_cols[gi_col] = int( file_cols[gi_col] )
                result.append( [ file_cols[gi_col], file_cols[name_col] ] )
        except:
            # NOTE(review): this catch-all sits around the whole loop, so a
            # single bad line aborts processing of all remaining lines — it
            # does not merely "skip" the offending line.
            print >>sys.stderr, 'Non numeric GI field...skipping'
    except Exception, e:
        # open() failure (or anything else) is fatal; stop_err() exits.
        stop_err('%s\n' % e)
    F.close()
    # Lexicographic list sort: effectively sorts by GI, then by name.
    result.sort()
    return result
# -----------------------------------------------------------------------------------
def collapse_repeating_gis( L ):
    """ Group consecutive rows that share a GI into one row.

    Accepts 2-d array of gi-key pairs such as this
    L = [
    [gi1, 'key1'],
    [gi1, 'key2'],
    [gi2','key3']
    ]
    Returns this:
    [ [gi1, 'key1', 'key2'],
    [gi2, 'key3' ]
    ]
    The first value in each sublist MUST be int, and L MUST be sorted on
    the GI column (as produced by gi_name_to_sorted_list).
    """
    gi = []           # names accumulated for the current GI run
    i = 0             # row counter; only used to detect the first row
    result = []
    try:
        for item in L:
            if i == 0:
                # Seed `prev` with the first row's GI.
                prev = item[0]
            if prev != item[0]:
                # GI changed: flush the finished run into `result`.
                prev_L = []
                prev_L.append( prev )
                result.append( prev_L + gi )
                prev = item[0]
                gi =[]
            gi.append( item[1] )
            i += 1
    except Exception, e:
        stop_err('%s\n' % e)
    # Flush the final run.
    # NOTE(review): if L is empty, `prev` is unbound here and this raises
    # NameError — callers guard against empty input upstream.
    prev_L = []
    prev_L.append( prev )
    result.append( prev_L + gi )
    del(L)
    return result
# -----------------------------------------------------------------------------------
def get_taxId( gi2tax_file, gi_name_list, out_file ):
    """ Maps GI numbers from gi_name_list to TaxId identifiers from gi2tax_file and
    prints result to out_file (one "name<TAB>taxId<TAB>gi" line per name).

    gi2tax_file MUST be sorted on GI column
    gi_name_list is a list that looks like this:
    [[1,'a'], [2,'b','x'], [7,'c'], [10,'d'], [90,'f']]
    where the first element of each sublist is a GI number
    this list MUST also be sorted on GI

    This is a single merge-style scan over both sorted inputs: it searches
    through 117,000,000 rows of the gi2taxId file from NCBI in approximately
    4 minutes, independent of the length of gi_name_list.
    """
    # Take the first query entry; my_gi is the GI we are currently seeking.
    L = gi_name_list.pop(0)
    my_gi = L[0]
    F = open( out_file, 'w' )
    gi = 0
    # Python 2 built-in file(); each line is "gi<TAB>taxId".
    for line in file( gi2tax_file ):
        line = line.rstrip()
        gi, taxId = string.split( line, '\t' )
        gi = int( gi )
        if gi > my_gi:
            # The reference file has passed our query GI: skip queries that
            # have no match until we catch up (pop() raising IndexError
            # means the query list is exhausted — stop scanning).
            try:
                while ( my_gi < gi ):
                    L = gi_name_list.pop(0)
                    my_gi = L[0]
            except:
                break
        if gi == my_gi:
            # Emit one output line per name attached to this GI.
            for i in range( 1,len( L ) ):
                print >>F, '%s\t%s\t%d' % (L[i], taxId, gi)
            try:
                L = gi_name_list.pop(0)
                my_gi = L[0]
            except:
                break
# -----------------------------------------------------------------------------------
# ---- Script body: map GI numbers to taxonomy via the taxBuilder binary ----
# Command-line arguments use 1-based column numbers (Galaxy convention).
try:
    in_f = sys.argv[1]  # input file with GIs
    gi_col = int( sys.argv[2] ) - 1  # column in input containing GIs
    name_col = int( sys.argv[3] ) - 1  # column containing sequence names
    out_f = sys.argv[4]  # output file
    tool_data = sys.argv[5]  # root of the tool-data directory
except:
    stop_err('Check arguments\n')
# GI2TAX point to a file produced by concatenation of:
# ftp://ftp.ncbi.nih.gov/pub/taxonomy/gi_taxid_nucl.zip
# and
# ftp://ftp.ncbi.nih.gov/pub/taxonomy/gi_taxid_prot.zip
# a sorting using this command:
# sort -n -k 1
GI2TAX = path.join( tool_data, 'taxonomy', 'gi_taxid_sorted.txt' )
# NAME_FILE and NODE_FILE point to names.dmp and nodes.dmp
# files contained within:
# ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz
NAME_FILE = path.join( tool_data, 'taxonomy', 'names.dmp' )
NODE_FILE = path.join( tool_data, 'taxonomy', 'nodes.dmp' )
g2n = gi_name_to_sorted_list(in_f, gi_col, name_col)
if len(g2n) == 0:
    stop_err('No valid GI-containing fields. Please, check your column assignments.\n')
# NOTE(review): the temp file is passed by *name* while still open; this
# works on POSIX but would fail on Windows.
tb_F = tempfile.NamedTemporaryFile('w')
get_taxId( GI2TAX, collapse_repeating_gis( g2n ), tb_F.name )
# Hand the name->taxId mapping to the external taxBuilder binary, which
# resolves full lineages using names.dmp/nodes.dmp.
try:
    tb_cmd = 'taxBuilder %s %s %s %s' % ( NAME_FILE, NODE_FILE, tb_F.name, out_f )
    retcode = subprocess.call( tb_cmd, shell=True )
    if retcode < 0:
        print >>sys.stderr, "Execution of taxBuilder terminated by signal", -retcode
except OSError, e:
    print >>sys.stderr, "Execution of taxBuilder2tree failed:", e
|
I’m sometimes convinced that the reason a lot of people become doctors or lawyers is due to the fact that the path to those jobs is laid out pretty well. There’s sort of a blueprint you can follow: go to college, get good grades, do well on some standardized tests, and so forth, and so forth. When you have things laid out for you like that, it’s sort of easy to find yourself gravitating towards those career paths.
Looking back at my own education and career, that’s probably why I ended up going to law school and becoming a lawyer in the first place. Sure, I told myself that I had a desire to be a lawyer. But a lawyer was also a recognizable job — one that was clearly defined and that had step-by-step processes on how to get there. As a new grad looking for a good job in a bad economy, becoming a lawyer made sense to me. It had direction. And it’s direction that a lot of us are looking for when we’re trying to figure out how to make a living. When faced between the clear career path and charting our own path, the clear career path almost always wins — even if it’s not necessarily the right path for us. I think it’s just important for all of us to think about why we end up on the paths we choose.
8. Make partner or go in-house at a big company.
It’s a generalization, of course, but those steps laid out above are pretty close to what you need to do to become a bigshot attorney. I’m fairly certain that I could take any reasonably intelligent 18-year old and basically guide them into a prestigious big law job 7 years into the future. And the process is pretty similar for doctors I imagine — basically get good undergrad grades and a good MCAT score, and then get good med school grades, land a prestigious residency, and so on (I’m not a doctor, though, so this is all based on my experiences with my doctor friends).
For those of us looking for direction, the clear path into entering law or medicine can obviously be very appealing. I can tell you exactly what you need to do if you want to become a doctor or a lawyer. But I can’t really tell you how to become an online entrepreneur, start a business, or do some other non-obvious job where the path isn’t really defined at all. It’s the appeal of this path that I think leads to so many doctors and lawyers seemingly finding themselves unhappy and looking to do other things. That’s not to say that people don’t genuinely want to become doctors or lawyers (my wife is a clear example of someone who absolutely wants to be a dentist).
But there’s also no denying that there seem to be a lot of doctors and lawyers (myself included) who are interested in doing things unrelated to their regular jobs. A Facebook group called Physician Side Gigs has over 21,000 members. There doesn’t seem to be an equivalent lawyer group, but those of you reading this already know that I’m a lawyer that does a lot of stupid, non-legal related side hustles. And I’m definitely not alone. It makes me think that people found their way into these careers, not necessarily because it made the most sense for them, but because it was straightforward.
The purpose of this post isn’t to say that people going into law or medicine are doing it just because the path is laid out in front of them and they aren’t able to figure out other things they can do. I know that there are a lot of people that were just born to become lawyers and doctors. And I know people who love what they do. Really, this post is just a reflection on my own path. I went into the law because it had a clear direction — a process that I could follow. Looking back, I wish I had thought about it more and used a little bit of creativity to figure out what made sense for me. Going to law school definitely worked well for my skill set. I’m really good at following processes and I knew exactly what I needed to do if I wanted to become a successful lawyer. But whether it was the right career path for me professionally is another thing.
I’m now five years out of law school, and nine years out of college. I’m starting to see all of the paths that exist out there. The clear career path is the one I started out on. But I see so many things out there that people can do to make a living — things like starting up a business, getting into real estate, creating something — whatever, really. The possibilities are endless. The clear career path may be right for you — and there’s absolutely nothing wrong with it! I think it’s just important to really think about why we follow these paths. Question yourself and look deep inside you. Are they really the right paths for us? Or are we just following them because they are there? It’s up to you to figure that out.
|
'''
Created on 24.10.2011
@author: michi
'''
from geoboundingarea import GeoBoundingArea #@UnresolvedImport
from ems.qt4.location.geocoordinate import GeoCoordinate
class GeoBoundingCircle(GeoBoundingArea):
    '''
    \brief The QGeoBoundingCircle class defines a circular geographic area.
    \inmodule QtLocation
    \since 1.1
    \ingroup maps

    The circle is defined in terms of a QGeoCoordinate which specifies the
    center of the circle and a qreal which specifies the radius of the circle
    in metres.

    The circle is considered invalid if the center coordinate is invalid
    or if the radius is less than zero.
    '''

    # Class-level defaults. NOTE(review): _center defaults to the
    # GeoCoordinate *class* itself, not an instance; isValid() tolerates
    # this via an isinstance() check, but confirm it is intentional.
    _center = GeoCoordinate
    _radius = 0.0

    def __init__(self, centerOrOther=None, radius=None):
        '''
        Constructs a new, invalid bounding circle.

        GeoBoundingCircle(GeoCoordinate center, float radius)
        Constructs a new bounding circle centered at \a center and with a radius of \a
        radius metres.

        GeoBoundingCircle(GeoBoundingCircle other)
        Constructs a new bounding circle from the contents of \a other.

        @param centerOrOther: GeoCoordinate or GeoBoundingCircle (optional)
        @type centerOrOther: GeoCoordinate|GeoBoundingCircle
        @param radius: Radius in metres (required when a center is given)
        @type radius: float
        @raise TypeError: if a center is given without a numeric radius
        '''
        if isinstance(centerOrOther, GeoCoordinate):
            self._center = centerOrOther
            if not isinstance(radius, (float, int)):
                raise TypeError("If you construct with center, pass a radius")
            self._radius = float(radius)
        if isinstance(centerOrOther, GeoBoundingCircle):
            # Copy construction, implemented via the <<= helper below.
            self.__ilshift__(centerOrOther)

    def __ilshift__(self, other):
        '''
        self <<= other

        Replacement for the C++ assignment operator: copies center and
        radius from \a other into this instance.

        @param other: The right operand
        @type other: GeoBoundingCircle
        @rtype: GeoBoundingCircle
        '''
        self._center = other.center()
        self._radius = other.radius()
        return self

    def __eq__(self, other):
        '''
        Returns whether this bounding circle is equal to \a other.
        self == other

        @param other: Right operand
        @type other: GeoBoundingCircle
        @rtype: bool
        '''
        return self._center == other.center() and\
               self._radius == other.radius()

    def __ne__(self, other):
        '''
        Returns whether this bounding circle is not equal to \a other.
        self != other

        @param other: Right operand
        @type other: GeoBoundingCircle
        @rtype: bool
        '''
        return not self.__eq__(other)

    def type_(self):
        '''
        Returns GeoBoundingArea.CircleType to identify this as a
        GeoBoundingCircle instance.

        This function is provided to help find the specific type of a
        GeoBoundingArea instance.

        @rtype: int
        '''
        return GeoBoundingArea.CircleType

    def isValid(self):
        '''
        Returns whether this bounding circle is valid.

        A valid bounding circle has a valid center coordinate and a radius
        greater than or equal to zero (a tiny negative tolerance of 1e-7
        is accepted to absorb floating-point error).

        @rtype: bool
        '''
        if isinstance(self._center, GeoCoordinate):
            return (self._center.isValid() and self._radius >= -1e-7)
        return False

    def isEmpty(self):
        '''
        Returns whether this bounding circle has a geometrical area of zero
        (radius of at most 1e-7 metres).

        Returns true if this bounding circle is invalid.

        @rtype: bool
        '''
        return (not self.isValid() or (self._radius <= 1e-7))

    def setCenter(self, center):
        '''
        Sets the center coordinate of this bounding circle to \a center.

        @param center: GeoCoordinate
        @type center: GeoCoordinate
        '''
        self._center = center

    def center(self):
        '''
        Returns the center coordinate of this bounding circle.

        @rtype: GeoCoordinate
        '''
        return self._center

    def setRadius(self, radius):
        '''
        Sets the radius in metres of this bounding circle to \a radius.

        @param radius: the new radius
        @type radius: float
        '''
        self._radius = radius

    def radius(self):
        '''
        Returns the radius in metres of this bounding circle.

        @rtype: float
        '''
        return self._radius

    def contains(self, coordinate):
        '''
        Returns whether the coordinate \a coordinate is contained within this
        bounding circle (great-circle distance from the center is at most
        the radius).

        @param coordinate: The other coordinate
        @type coordinate: GeoCoordinate
        @rtype: bool
        '''
        if not self.isValid() or not coordinate.isValid():
            return False
        if self._center.distanceTo(coordinate) <= self._radius:
            return True
        return False

    def translate(self, degreesLatitude, degreesLongitude):
        '''
        Translates this bounding circle by \a degreesLatitude northwards and \a
        degreesLongitude eastwards.

        Negative values of \a degreesLatitude and \a degreesLongitude correspond to
        southward and westward translation respectively.

        @param degreesLatitude: north degrees
        @type degreesLatitude: float
        @param degreesLongitude: east degrees
        @type degreesLongitude: float
        '''
        # TODO handle dlat, dlon larger than 360 degrees
        lat = self._center.latitude()
        lon = self._center.longitude()
        lat += degreesLatitude
        lon += degreesLongitude
        # Wrap longitude back into [-180, 180].
        if lon < -180.0:
            lon += 360.0
        if lon > 180.0:
            lon -= 360.0
        # Crossing a pole reflects the latitude and flips the longitude to
        # the antipodal meridian.
        # NOTE(review): the Qt original computes "lon = 180.0 + lon" in the
        # lon < 0 branch; here the "+ lon" term is dropped — confirm whether
        # this deviation is intentional.
        if lat > 90.0:
            lat = 180.0 - lat
            if lon < 0.0:
                lon = 180.0
            else:
                lon -= 180
        if lat < -90.0:
            lat = 180.0 + lat
            if lon < 0.0:
                lon = 180.0
            else:
                lon -= 180
        self._center = GeoCoordinate(lat, lon)

    def translated(self, degreesLatitude, degreesLongitude):
        '''
        Returns a copy of this bounding circle translated by \a degreesLatitude northwards and \a
        degreesLongitude eastwards.

        Negative values of \a degreesLatitude and \a degreesLongitude correspond to
        southward and westward translation respectively.

        @param degreesLatitude: north degrees
        @type degreesLatitude: float
        @param degreesLongitude: east degrees
        @type degreesLongitude: float
        @rtype: GeoBoundingCircle
        '''
        result = GeoBoundingCircle(self)
        result.translate(degreesLatitude, degreesLongitude)
        return result
|
Competitive Tracking can supply clients with access to our web service functionality within Spoton. This allows integration of our systems information with other software systems. We currently use this for integration with routing and scheduling systems, PDA job transfer, bespoke reporting and numerous one off functions for clients.
|
from setuptools import setup, find_packages
from codecs import open
from os import path
from skitza import __version__
# Resolve paths relative to this setup.py so builds work from any CWD.
here = path.abspath(path.dirname(__file__))

# Use the README as the long description shown on PyPI.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='skitza',
    version=__version__,
    description='project description',
    long_description=long_description,
    url='https://github.com/iocube/skitza',
    author='Vladimir Zeifman',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Code Generators',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.7'
    ],
    keywords='code generation utility',
    packages=find_packages(),
    # Ship the JSON schema used for validating config files.
    package_data={
        'skitza': ['schema.json'],
    },
    install_requires=[
        'click',
        'functools32',
        'Jinja2',
        'jsonschema',
        'MarkupSafe',
        'PyYAML'
    ],
    # Console entry point: `skitza` runs skitza/__main__.py:main.
    entry_points={
        'console_scripts': [
            'skitza=skitza.__main__:main'
        ],
    },
)
|
When automating mysqldump it is necessary to pass authentication information along with the command. While you can pass the credentials on the command line this is considered unsafe as the password would be visible to all users using the ps afx command (for example).
A workaround for this is using expect, a program that is meant to automate user interaction for terminal programs. expect simulates a tty and passes input to the controlled program depending on its output.
# using a here-document to pass commands to expect.
|
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os
import sys
import platform
from pre_commit_hooks.util import entry
def quote_file(fname):
    """Wrap *fname* in single quotes for safe display in a shell command."""
    return "'%s'" % fname
def file_ends_with_newline(file_obj):
    """Return True if the binary file ends with a newline (or is empty).

    Seeking one byte back from the end of an empty file raises IOError,
    which we treat as "nothing to fix".
    """
    try:
        file_obj.seek(-1, os.SEEK_END)
    except IOError:
        # Empty file.
        return True
    final_byte = file_obj.read(1)
    # final_byte is b'' only for an empty file.
    return final_byte in (b'\n', b'')
def file_ends_with_multiple_newlines(file_obj):
    """Return True if the binary file ends with at least two newlines.

    Files shorter than two bytes raise IOError on the seek and cannot
    qualify.
    """
    try:
        file_obj.seek(-2, os.SEEK_END)
    except IOError:
        return False
    return file_obj.read(2) == b'\n\n'
# Shell one-liners suggested to the user for fixing reported files.
# NOTE(review): despite its name, this sed invocation strips trailing
# whitespace on each line; it does not append a missing final newline —
# confirm the intended command.
FIX_MISSING_NEWLINE = '''sed -i '' -e s/[[:space:]]*$// {files}'''
# Deletes runs of blank lines at end-of-file (classic sed label/loop trick).
FIX_MULTIPLE_NEWLINES = r'''for ff in {files}; do sed -i '' -e :a -e '/^\n*$/{{$d;N;ba' -e '}}' $ff; done'''
# BSD sed (macOS) requires `-i ''` for in-place edits; GNU sed wants `-i`.
if platform.system() != 'Darwin':
    FIX_MISSING_NEWLINE = FIX_MISSING_NEWLINE.replace("-i ''", "-i")
    FIX_MULTIPLE_NEWLINES = FIX_MULTIPLE_NEWLINES.replace("-i ''", "-i")
@entry
def end_of_file_fixer(argv):
    """Report files whose final newline is missing or duplicated.

    Prints the shell commands that would fix each group of offending
    files and returns 1 if any were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='Filenames to check')
    args = parser.parse_args(argv)

    extra_newlines = []
    no_final_newline = []

    for fname in args.filenames:
        # Binary mode so byte-level seeks and reads behave predictably.
        with open(fname, 'rb+') as handle:
            if not file_ends_with_newline(handle):
                no_final_newline.append(fname)
            if file_ends_with_multiple_newlines(handle):
                extra_newlines.append(fname)

    if no_final_newline:
        print("These files are missing a newline at the end:", ", ".join(no_final_newline))
        print("You can fix this with the following:")
        print(" ", FIX_MISSING_NEWLINE.format(files=' '.join(map(quote_file, no_final_newline))))
        print()

    if extra_newlines:
        print("These files have extra newlines at the end:", ", ".join(extra_newlines))
        print("You can fix this with the following:")
        print(" ", FIX_MULTIPLE_NEWLINES.format(files=' '.join(map(quote_file, extra_newlines))))
        print()

    return 1 if no_final_newline or extra_newlines else 0
if __name__ == '__main__':
    # Allow running the hook directly; the return value becomes the exit
    # status consumed by pre-commit.
    sys.exit(end_of_file_fixer())
|
Do you feel like your website is well-designed, yet consistently falls flat when it comes to conversion rates? Thousands of businesses in hundreds of markets experience the same problem. While it can be tempting to make excuses and lose hope, it’s important to dig below the surface and discover the underlying issues. By maximizing your website’s conversion rate, you’ll be capable of steadily increasing sales and growing your business.
Conversion rates are largely misunderstood by many business owners. In essence, a conversion metric can track anything you want it to. From how many visitors purchase a select item to the percentage of customers that stay on site for longer than five minutes, a conversion rate is completely customizable. Even better is the fact that an increase in conversions essentially provides you with additional benefits at no added cost. You’re simply capitalizing on traffic you already have.
Include social proof. When a customer visits your site, he or she wants to build trust with your brand, services, or products. Providing social proof in the form of customer testimonials, case studies, and third-party reviews can emphasize the trustworthiness of your offerings.
Explain difficult concepts. For companies that offer very technical products and services, it can be helpful to offer descriptive videos that explain specific procedures and concepts. Titan Power, a critical power and air solutions provider, is exceptionally good at this. They offer an array of informative videos so clients can understand how technologies work on a practical basis.
Appeal to the customer. Companies with extremely low conversion rates should first consider whether they even understand their target market. If you’re getting visitors to your site and they’re leaving without taking a specific action, something is missing. In most cases, it’s a poor understanding of what the consumer wants. Invest in up-front research and tailor content and design to your findings.
Use split testing. A/B testing can be an extremely valuable tool for analyzing the underlying issues related to your low conversion rates. You can then use the knowledge you gather to better optimize your site for higher conversions. As always, remember to be diligent with A/B testing and to carefully track control elements for best results.
Improve content quality. While flashy displays and vibrant visuals can initially catch a visitor’s eye, they don’t hold their attention for long. Your site needs high-quality content that’s engaging. It should add value to the consumer and lead them through the sales funnel.
Make it easy on them. Are you making it easy on your site’s visitors? If you’re looking to increase sales, is your ecommerce shopping cart intuitive and readily accessible? Don’t make them work for conversions if you want to increase percentages.
While there is no standard solution for increasing website conversion rates, these tips have proven to be helpful for companies in various industries. Start implementing them one at a time and track your results over time using analytics software and other tools. Eventually, you’ll find a happy medium where costs are efficiently maximized and conversion rates are high.
|
#coding=utf-8
__author__ = 'phithon'
import tornado.web, os, base64, pymongo, time
from controller.base import BaseHandler
from tornado import gen
from bson.objectid import ObjectId
from util.function import time_span, hash, intval
class UserHandler(BaseHandler):
def initialize(self):
    # Per-request setup: run BaseHandler's initialize, then reset the
    # topbar fragment to empty for this handler.
    super(UserHandler, self).initialize()
    self.topbar = ""
def get(self, *args, **kwargs):
    """Dispatch GET requests to the matching ``<action>_act`` method.

    ``args[0]`` names the action; ``args[2]`` (when the URL captured three
    groups) is the positional argument for it. Unknown actions fall back
    to the profile page.
    """
    action = "%s_act" % args[0]
    arg = args[2] if len(args) == 3 else None
    handler = getattr(self, action, None)
    if handler is None:
        # BUG FIX: detail_act requires its positional argument; calling it
        # bare raised TypeError for every unknown action. Pass arg through
        # (detail_act already treats None as "current user").
        handler = self.detail_act
    handler(arg)
def quit_act(self, arg):
    """Log the user out: drop auth cookies, clear the session, go to /login."""
    for cookie_name in ("user_info", "download_key"):
        if self.get_cookie(cookie_name):
            self.clear_cookie(cookie_name)
    self.session.delete("current_user")
    self.redirect("/login")
@tornado.web.asynchronous
@gen.coroutine
def modify_act(self, arg):
    # TODO: not implemented — this action currently does nothing and the
    # request is left to complete without output.
    pass
@tornado.web.asynchronous
@gen.coroutine
def detail_act(self, arg):
    """Render a user's profile page with a paginated list of their articles.

    The username comes from the ``u`` query argument, then *arg*, then the
    signed-in user. ``page`` selects the 10-article page to show.
    """
    if not arg : arg = self.current_user["username"]
    username = self.get_query_argument("u", default = arg)
    user = yield self.db.member.find_one({
        "username": username
    })
    if not user:
        self.custom_error("不存在这个用户")
    limit = 10
    page = intval(self.get_argument("page", default=1))
    if not page or page <= 0 : page = 1
    cursor = self.db.article.find({
        "user": username
    })
    count = yield cursor.count()
    # BUG FIX: Motor cursors have no ``tag`` method — newest-first paging
    # needs ``sort``.
    cursor.sort([('time', pymongo.DESCENDING)]).limit(limit).skip((page - 1) * limit)
    posts = yield cursor.to_list(length = limit)
    # NOTE(review): ``face`` is computed but never passed to the template —
    # presumably user.html derives the avatar path itself; confirm.
    face = "./static/face/%s/180.png" % user["_id"]
    if not os.path.exists(face): face = "./static/face/guest.png"
    self.render("user.html", user = user, posts = posts, page = page, time_span = time_span, each = limit, count = count)
@tornado.web.asynchronous
@gen.coroutine
def edit_act(self, arg):
    # Show the profile-editing form pre-filled with the signed-in user's
    # record; ``radio`` supplies the option values for the form.
    user = yield self.db.member.find_one({
        "username": self.current_user["username"]
    })
    self.render("profile.html", user = user, radio = self.radio)
def face_act(self, arg):
    # Render the avatar upload page.
    self.render("face.html")
@tornado.web.asynchronous
@gen.coroutine
def bookmark_act(self, arg):
    """Render the user's bookmarks, 10 per page; *arg* is the page number."""
    limit = 10
    page = intval(arg)
    if page <= 0 : page = 1
    user = yield self.db.member.find_one({
        "username": self.current_user["username"]
    })
    # BUG FIX: a member document without a "bookmark" field yielded None,
    # which crashed len() below — fall back to an empty list.
    bookmark = user.get("bookmark") or []
    count = len(bookmark)
    bookmark = bookmark[(page - 1) * limit:(page - 1) * limit + limit]
    # Show the newest bookmarks of the page first.
    bookmark.reverse()
    self.render("bookmark.html", bookmark = bookmark, page = page, count = count, each = limit)
@tornado.web.asynchronous
@gen.coroutine
def like_act(self, arg):
    """Render articles liked by the current user, 10 per page (*arg* = page)."""
    limit = 10
    page = intval(arg)
    if page <= 0 : page = 1
    cursor = self.db.article.find({
        "like": self.current_user["username"]
    })
    count = yield cursor.count()
    # BUG FIX: Motor cursors have no ``tag`` method — newest-first paging
    # needs ``sort``.
    cursor.sort([('_id', pymongo.DESCENDING)]).limit(limit).skip((page - 1) * limit)
    posts = yield cursor.to_list(length = limit)
    self.render("like.html", posts = posts, page = page, count = count, each = limit)
@tornado.web.asynchronous
@gen.coroutine
def download_act(self, arg=None):
    """Redirect to a finished task's file if it belongs to the current user.

    The task id comes from the ``key`` query argument. BUG FIX: the GET
    dispatcher always calls ``*_act`` methods with one positional argument,
    so the missing ``arg`` parameter made every download request raise
    TypeError; it is now accepted (and ignored). The default keeps direct
    calls backward compatible.
    """
    key = self.get_query_argument("key")
    task = yield self.db.task.find_one({
        "_id": ObjectId(key),
        "owner": self.current_user.get("username")
    })
    if task and os.path.exists(task["savepath"]):
        # Record the granted path — presumably checked by the download
        # route; confirm against the static-file handler.
        self.set_secure_cookie("download_key", task["savepath"])
        relpath = os.path.relpath(task["savepath"])
        self.redirect("/" + relpath)
    else:
        self.custom_error("File Not Found", status_code = 404)
def post(self, *args, **kwargs):
    """Dispatch POST requests to the matching ``_post_<action>`` handler."""
    handler = getattr(self, "_post_%s" % args[0], None)
    if handler is None:
        self.custom_error("参数错误")
    else:
        handler()
@tornado.web.asynchronous
@gen.coroutine
def _post_edit(self):
    """Save profile changes for the signed-in user.

    Optional password change: requires the original password plus a
    confirmed new password of at least 5 characters.
    """
    profile = {}
    profile["email"] = self.get_body_argument("email", default=None)
    profile["website"] = self.get_body_argument("website", default=None)
    profile["qq"] = self.get_body_argument("qq", default=None)
    profile["address"] = self.get_body_argument("address", default=None)
    profile["signal"] = self.get_body_argument("signal", default=None)
    orgpass = self.get_body_argument("orgpass", default=None)
    if orgpass:
        password = self.get_body_argument("password")
        repassword = self.get_body_argument("repassword")
        if not password or len(password) < 5:
            self.custom_error("新密码太短")
        if password != repassword:
            self.custom_error("两次输入的密码不相同")
        user = yield self.db.member.find_one({"username": self.current_user["username"]})
        check = yield self.backend.submit(hash.verify, orgpass, user["password"])
        if not check:
            self.custom_error("原始密码输入错误")
        profile["password"] = yield self.backend.submit(hash.get, password)
    # check email uniqueness
    # BUG FIX: the old query also matched the user's own record (and, when
    # no email was submitted, any record without an email), so saving an
    # unchanged profile always failed. Only check when an email was
    # supplied, and exclude the current user from the lookup.
    if profile["email"]:
        ufemail = yield self.db.member.find_one({
            "email": profile["email"],
            "username": {"$ne": self.current_user["username"]}
        })
        if ufemail:
            self.custom_error("邮箱已经被人使用过啦")
    # update the user profile
    yield self.db.member.update({
        "username": self.current_user["username"]
    }, {
        "$set": profile
    })
    self.redirect("/user/edit")
@tornado.web.asynchronous
@gen.coroutine
def _post_upface(self):
    """Save a base64-encoded avatar image for the current user.

    Writes the decoded bytes to ./static/face/<uid>/180.png and responds
    with the plain string "success" or "fail".
    """
    img = self.get_body_argument("img", default = None)
    try:
        img = base64.b64decode(img)
        uid = self.current_user["_id"]
        face = "./static/face/%s/" % uid
        if not os.path.isdir(face):
            os.makedirs(face)
        face += "180.png"
        with open(face, "wb") as f:
            f.write(img)
        self.write("success")
    except Exception:
        # Narrowed from a bare ``except:`` so that system-exiting
        # exceptions (KeyboardInterrupt, SystemExit) are not swallowed.
        self.write("fail")
@tornado.web.asynchronous
@gen.coroutine
def _post_message(self):
    """Persist the user's privacy/notification toggles (1 = enabled)."""
    flags = {}
    for name in ("openwebsite", "openqq", "openemail", "allowemail"):
        flags[name] = intval(self.get_body_argument(name, default=1))
    yield self.db.member.find_and_modify({
        "username": self.current_user["username"]
    }, {
        "$set": flags
    })
    self.redirect("/user/edit")
@tornado.web.asynchronous
@gen.coroutine
def _post_like(self):
    """Remove the current user's "like" from an article, then go back."""
    post_id = self.get_body_argument("postid")
    query = {"_id": ObjectId(post_id)}
    update = {"$pull": {"like": self.current_user["username"]}}
    yield self.db.article.find_and_modify(query, update)
    self.redirect("/user/like")
@tornado.web.asynchronous
@gen.coroutine
def _post_bookmark(self):
    """Delete one bookmark (by article id) from the current user."""
    post_id = self.get_body_argument("postid")
    query = {"username": self.current_user["username"]}
    update = {"$pull": {"bookmark": {"id": post_id}}}
    yield self.db.member.find_and_modify(query, update)
    self.redirect("/user/bookmark")
@gen.coroutine
def __get_tag(self, id):
    """Look up a tag document by its ObjectId string."""
    found = yield self.db.tag.find_one({"_id": ObjectId(id)})
    # Pre-Python-3.3 coroutine style: the result travels via gen.Return.
    raise gen.Return(found)
def radio(self, user, key, tr = 1):
    """Render an HTML radio input for *key*.

    The input is marked "checked" when the stored value's truthiness
    matches *tr* (1 = expect truthy, 0 = expect falsy). Missing keys are
    never checked.
    """
    checked = ""
    if key in user:
        truthy = bool(user[key])
        if (truthy and tr == 1) or (not truthy and tr == 0):
            checked = "checked"
    return '<input type="radio" name="%s" value="%d" %s>' % (key, tr, checked)
|
I have listed a new property at 2562 HOSKINS RD in North Vancouver.
Excellent opportunity to own a home with generous room sizes and a large private backyard. Well maintained home, occupied by the original owners since the subdivision was created in 1961. The home has sheltered a happy family and is now available to the next generation for occupancy and, if you wish, modernization. Fabulous location in Westlynn Terrace, great family neighbourhood, fine homes, close to schools and churches, Lynn Valley Town Center, great shops and restaurants, Karen Magnussen Arena and Lynn Canyon Park! Just steps to public transit. Lynn Valley regional park offers total privacy for your large backyard lot of 8768 sq ft. This home is just waiting for a new family to move in and make it their own!
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
CERTitude: the seeker of IOC
Copyright (c) 2016 CERT-W
Contact: [email protected]
Contributors: @iansus, @nervous, @fschwebel
CERTitude is under licence GPL-2.0:
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
import os, re, win32api, win32file, sys
def getFiles():
ret = []
# List logical drives
drives = win32api.GetLogicalDriveStrings().split('\x00')
drives.pop()
# Only get local dries
drives = [ d for d in drives if win32file.GetDriveType(d)==win32file.DRIVE_FIXED ]
# List files
for drive in drives:
print os.popen('dir /s /b '+drive).read()
def main():
    """Entry point: dump a listing of every file on all fixed drives."""
    getFiles()
# Run only when executed as a script, not when imported as a module.
if __name__=='__main__':
    main()
|
PS The argument they had was due to a razor blade that went missing so whilts he was 'asleep' they searched his room.
My name is Suzie, online adviser at Family Rights Group. I am sorry that your post was not responded to sooner due to the volume of work we are currently dealing with.
You are clearly concerned about your son’s behaviour. Do you mean that your mum and her partner have a special guardianship order? I am not sure what an SPO relates to.
It is not clear from your post how old your son is but if he continually fails to return home and put himself in danger then there may be concerns that he is beyond parental control.
As I am not sure what order your son is subject to it is difficult to give clear advice. It seems however that your mother and her partner are the people responsible for his care so I think you should allow them to deal with him. If not, your son will be getting mixed message about who is responsible for him.
If you can clarify the type of order and post again that would be helpful.
|
# coding: utf-8
from collections import defaultdict

import numpy as np
# =====================================
# Rules のうち 指定したConsequentを持つ / 持たないRuleの数
# =====================================
def getNumRulesClass(list_rules, consequent, judge=True):
    """Count rules whose consequent equals (judge=True) or differs from
    (judge=False) the given consequent."""
    if judge:
        matches = [rule for rule in list_rules if rule.getConsequent() == consequent]
    else:
        matches = [rule for rule in list_rules if rule.getConsequent() != consequent]
    return len(matches)
# =====================================
# Rules の Supportの平均数
# =====================================
def getMeanSupport(list_rules, only_avg = True):
    """Mean support size over the rules.

    With only_avg=False, return a 'mean±std' string with both values
    rounded to three decimals instead of the bare mean.
    """
    sizes = [len(rule.getSupport()) for rule in list_rules]
    if only_avg:
        return np.mean(sizes)
    mean_s = '%.3f' % round(np.mean(sizes), 3)
    std_s = '%.3f' % round(np.std(sizes), 3)
    return '{mean}±{std}'.format(mean=mean_s, std=std_s)
# =====================================
# Rules の Supportの最小値
# =====================================
def getMinSupport(list_rules):
    """Return the smallest support size among the given rules."""
    return np.min([len(rule.getSupport()) for rule in list_rules])
# =====================================
# Rules の Ruleの長さの平均数
# =====================================
def getMeanLength(list_rules, only_avg = True):
    """Mean rule length (number of attributes in each rule's key).

    With only_avg=False, return a 'mean±std' string rounded to three
    decimals instead of the bare mean.
    """
    lengths = [len(rule.getKey()) for rule in list_rules]
    if only_avg:
        return np.mean(lengths)
    return '{mean}±{std}'.format(
        mean=('%.3f' % round(np.mean(lengths), 3)),
        std=('%.3f' % round(np.std(lengths), 3)))
# =====================================
# Rules のうち k-Supportを満たす割合
# =====================================
def getPerKRules(list_rules, k):
    """Fraction of rules whose support size is at least k.

    BUG FIX: the denominator referenced an undefined name ``rules``
    (a NameError at runtime); it must be the ``list_rules`` argument.
    """
    k_rules = [r for r in list_rules if len(r.getSupport()) >= k]
    return len(k_rules) / len(list_rules)
# =====================================
# Rules のうち Suppprt = n の割合
# =====================================
def getPerNSupport(list_rules, n):
    """Fraction of rules whose support size is exactly n."""
    hits = [rule for rule in list_rules if len(rule.getSupport()) == n]
    return len(hits) / len(list_rules)
# =====================================
# Rules を構成する基本条件の頻度
# =====================================
def getRulesValueCount(list_rules):
    """Frequency of each (attribute, value) basic condition across rules.

    Returns a defaultdict mapping attribute -> {value: occurrence count}.
    """
    counts = defaultdict(dict)
    for rule in list_rules:
        for attr in rule.getKey():
            value = rule.getValue(attr)
            bucket = counts[attr]
            if value in bucket:
                bucket[value] += 1
            else:
                bucket[value] = 1
    return counts
|
You are sick with pain, grief, despair, and anger.
The winds of life, rolling tides of trouble, and crashing thunderous thoughts have come against you.
You lost your job and your spouse’s hours have been reduced.
You have children in college.
Your bills are piling up.
Have I not provided for you before?
“My grace is sufficient for thee?
The people who you thought were your best friends have suddenly stopped talking with you.
God answers more than we can ask.
|
# Scramble (build) script for the DRMAA_python egg against a local Grid
# Engine installation.  Python 2 only: uses print statements and execfile().
import os, sys, shutil

# The SGE install location must be supplied via the environment.
if "SGE_ROOT" not in os.environ:
    print "scramble(): Please set SGE_ROOT to the path of your SGE installation"
    print "scramble(): before scrambling DRMAA_python"
    sys.exit(1)

# change back to the build dir (the directory this script lives in)
if os.path.dirname( sys.argv[0] ) != "":
    os.chdir( os.path.dirname( sys.argv[0] ) )

# find setuptools in the project's shared lib directory
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
import get_platform # fixes fat python 2.5
try:
    from setuptools import *
    import pkg_resources
except:
    # Fall back to bootstrapping setuptools into scramble_lib.
    from ez_setup import use_setuptools
    use_setuptools( download_delay=8, to_dir=scramble_lib )
    from setuptools import *
    import pkg_resources

# clean, in case you're running this by hand from a dirty module source dir
for dir in [ "build", "dist", "gridengine" ]:
    if os.access( dir, os.F_OK ):
        print "scramble_it.py: removing dir:", dir
        shutil.rmtree( dir )

# patch setup.py so it points at the local SGE installation
file = "setup.py"
print "scramble(): Patching", file
if not os.access( "%s.orig" %file, os.F_OK ):
    # Keep a pristine copy so re-runs always patch from the original.
    shutil.copyfile( file, "%s.orig" %file )
i = open( "%s.orig" %file, "r" )
o = open( file, "w" )
for line in i.readlines():
    # Point SGE6_ROOT at the environment's SGE installation.
    if line == 'SGE6_ROOT="/scratch_test02/SGE6"\n':
        line = 'SGE6_ROOT="%s"\n' % os.environ["SGE_ROOT"]
    # Link against the drmaa library shipped with that installation.
    if line.startswith('link_args ='):
        line = 'link_args = [ "-L%s" % os.path.join(SGE6_ROOT, "lib", SGE6_ARCH), "-ldrmaa" ]\n'
    print >>o, line,
i.close()
o.close()

# go: run the patched setup.py "build" in this process
me = sys.argv[0]
sys.argv = [ me ]
sys.argv.append( "build" )
execfile( "setup.py", globals(), locals() )

# fix _cDRMAA.so rpath so the built extension finds libdrmaa (macOS tool).
# NOTE(review): SGE6_ROOT / SGE6_ARCH are expected to leak into this scope
# from the execfile() above — confirm setup.py defines them at top level.
so = "build/lib.%s-%s/_cDRMAA.so" % ( pkg_resources.get_platform(), sys.version[:3] )
libdrmaa = os.path.join(SGE6_ROOT, "lib", SGE6_ARCH, "libdrmaa.dylib.1.0" )
os.system( "install_name_tool -change libdrmaa.dylib.1.0 %s %s" % ( libdrmaa, so ) )

# finally build the egg
sys.argv = [ me ]
sys.argv.append( "bdist_egg" )
execfile( "setup.py", globals(), locals() )
|
22/07/2017�� Change Whatsapp Background Picture Urdu whatsapp (with pictures). How to Change Your Chat Wallpaper on WhatsApp Co-authored by wikiHow Staff Reader-Approved This article was co-authored by our trained team of editors and researchers who validated it for accuracy and comprehensiveness.... Read More: WhatsApp Group DP for Friends, Family, Funny The Group Name should clearly state what you want to convey to people. The given WhatsApp Group name gives a clear and concise identity often attracts members who like to voice their opinion.
Is there any way to get whatsapp to download the contents of a whatsapp group to a separate folder? This will really help in sorting out the general forwards from pictures we exchange. This will really help in sorting out the general forwards from pictures we exchange.... Change the Edit group info setting from All participants to Only admins. I can also read some sites reporting that WhatsApp beta also has a new option where all members can be muted. So, essentially working like a telegram channel where only admins can send messages.
Read More: WhatsApp Group DP for Friends, Family, Funny The Group Name should clearly state what you want to convey to people. The given WhatsApp Group name gives a clear and concise identity often attracts members who like to voice their opinion.
|
# Copyright (C) 2014 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'Find information about mounts'
import shlex
import subprocess
from .mount_commands import mount_cmd, umount_cmd, findmnt_cmd
__all__ = ('search_fields', 'find_mounts')
# Column names accepted by findmnt's --output option; pass a subset of
# these as the ``fields`` argument of find_mounts().
search_fields = [
    'SOURCE',       # source device
    'TARGET',       # mountpoint
    'FSTYPE',       # filesystem type
    'OPTIONS',      # all mount options
    'VFS-OPTIONS',  # VFS specific mount options
    'FS-OPTIONS',   # FS specific mount options
    'LABEL',        # filesystem label
    'UUID',         # filesystem UUID
    'PARTLABEL',    # partition label
    'PARTUUID',     # partition UUID
    'MAJ:MIN',      # major:minor device number
    'FSROOT',       # filesystem root
    'TID',          # task ID
    'ID',           # mount ID
    'OPT-FIELDS',   # optional mount fields
    'PROPAGATION',  # VFS propagation flags
]
def find_mounts(root=None, tab_file=None, task=None, fields=None,
                recurse=False, runcmd=findmnt_cmd):
    """Return mount information by invoking findmnt.

    root: mountpoint to start from; required when recurse=True.
    tab_file: alternative mount table file to read (e.g. /proc/mounts).
    task: task (process) ID whose mount table should be inspected.
    fields: iterable of search_fields names for findmnt --output.
    recurse: also list submounts of root (findmnt --submounts).
    runcmd: callable that executes the findmnt argv and returns its
        stdout; injectable for testing.

    Returns a list of dicts, one per mount line, mapping field name to
    its escape-decoded value.
    """
    argv = ['--pairs', '--nofsroot']
    if task is not None:
        argv.extend(('--task', str(task)))
    if tab_file is not None:
        argv.extend(('--tab-file', str(tab_file)))
    if fields is not None:
        argv.extend(('--output', ','.join(fields)))
    if recurse:
        if root is None:
            raise ValueError('recurse passed without root')
        argv.append('--submounts')
    if root is not None:
        argv.append(root)
    o = runcmd(argv)
    mount_list = []
    # findmnt --pairs emits shell-style KEY="VALUE" pairs, one mount per line.
    for line in o.splitlines():
        matches = dict()
        for pair in shlex.split(line):
            key, value = pair.split('=', 1)
            # NOTE(review): 'string_escape' is a Python 2-only codec on
            # byte strings; this call fails on Python 3 str values —
            # confirm this module targets Python 2.
            matches[key] = value.decode('string_escape')
        mount_list.append(matches)
    return mount_list
|
LNG: The US is an emerging supplier, accounting for four percent of global LNG exports in 2017.
Even if Europe imported more US LNG, the impact on the bilateral trade balance would remain small.
Europe mostly imports piped gas from Russia, Norway, and Algeria.
LNG is a secondary source of gas for Europe, accounting for 12 percent of demand in 2017.
On July 25, President Donald Trump met Jean-Claude Juncker, the European Commission president, to defuse an escalating trade war. They agreed to work towards zero tariffs, zero non-tariff barriers, and zero subsidies.
President Juncker was more measured in his comments; in a speech at CSIS after the meeting, he said: “We are ready to invest in infrastructure and new terminals which could welcome imports of LNG from the United States and elsewhere, but mainly from the United States–if the conditions were right and [the] price is competitive.” This piece explores questions related to that LNG announcement, the gas trading relationship between the United States and Europe, and whether the agreement will have a major impact.
Before exploring whether this agreement is significant, a few observations are worth making. Europe mostly imports piped gas from Russia, Norway, and Algeria. LNG is a secondary source, accounting for 12 percent of European gas demand in 2017, although it is significant in some markets (e.g., Spain). Europe has a long-standing goal to diversify its gas supplies, and the European Commission financially supports projects that might do so.
The United States is an emerging LNG supplier, accounting for four percent of global LNG exports in 2017. The country will become the third largest LNG producer in the early 2020s (after Australia and Qatar). There are also dozens of proposed export facilities across the United States, which could turn the United States into the largest LNG producer in the late 2020s. These projects await environmental approvals, investors, and buyers for their gas in order to proceed.
The United States supplied four percent of Europe’s LNG in 2017 (or 0.5 percent of Europe’s total gas demand), ranking behind Qatar, Algeria, Nigeria, Norway, and Peru. For comparison’s sake, Russia exported around 6,700 billion cubic feet (bcf) of total gas to Europe, while the United States exported less than 100 bcf. Even in individual countries that heralded the receipt of cargoes from the United States, US LNG had a small market share.
Around 14 percent of US LNG exports went to Europe in 2017. Mexico, Korea, and China each imported more US LNG than all of Europe combined. This number is in-line with Europe’s global market share in LNG (16 percent of imports in 2017). US LNG projects were primarily developed to supply Asia anyway, not Europe.
Europe can import more LNG using existing infrastructure. In 2017, the utilization rate for import terminals in Europe was 29 percent, and it ranged from 6 percent to nearly 100 percent for some facilities.
Despite this low utilization rate, there are many new facilities proposed in Europe—some in countries that already import LNG and some not. If all these projects were built, Europe’s LNG import capacity would grow by 50 percent (not all projects will be constructed, of course). These projects await permits and a secure base of customers that commit to use the facility over time.
Europe’s LNG imports peaked in 2010, despite a recent growth in import capacity. From 2007 to 2017, LNG imports into Europe rose by 15 percent even though Europe’s LNG import capacity more than doubled. Capacity is necessary for imports but does not guarantee them.
Will the agreement make closer gas trade more likely?
In the days after the announcement, it has become clear that nothing new or specific was agreed to. The European Commission does not purchase LNG, and President Trump does not sell it. Both, however, can support this trade in other ways. Europe has long supported infrastructure projects financially, including LNG import terminals that can diversify Europe’s gas supplies. The commission has effectively pledged to continue that policy. The Trump administration’s LNG agenda has been focused on expediting permits for export terminals and this will continue. Other US LNG export promoters have proposed to treat allies in Europe in a preferential way when it comes to LNG exports, but the administration has not indicated it will do this yet. In short, there are undercurrents supporting a closer trading partnership, but nothing in this agreement either advances or materially changes the basic elements of the relationship as it existed before the meeting.
Will US LNG lower the trade deficit with Europe?
The United States had a $151 billion goods deficit with the EU in 2017 (it also ran a small surplus, $328 million, with Turkey, which is usually included in Europe in gas analysis). US exports to the EU were $282 billion in 2017, and LNG accounted for $477 million. As explained in a previous brief, US LNG exports could generate between $15 and $22 billion in export earnings once all projects now under construction are finished. If we assume Europe imported 14 percent of all US LNG, as it did in 2017, the trade impact from US LNG to Europe would be small ($2.1 to $3.1 billion). Even if Europe imported more US LNG, the impact on the bilateral balance would remain small (and LNG diverted to Europe would do less to lower other bilateral trade deficits—like the ones with Mexico or China—since tariffs levied on US LNG might lead it to be sent elsewhere).
Will Europe import more US LNG?
LNG imports into Europe are a function of demand, of European gas production, and of whether LNG is competitive relative to other sources. Even if Europe imports more LNG, that LNG may not come from the United States because US LNG might be more profitably sold elsewhere or because other LNG suppliers are more attractive to European buyers. Importantly, US LNG could displace other LNG, so the mere fact that Europe is importing more US LNG does not tell us much about European energy security or anything else. Without knowing why the LNG is flowing, and at what prices, we cannot infer much about whether this is good for Europe or the United States (or bad for Russia for that matter).
Should the United States try to push more LNG into Europe?
There is a long-standing policy assumption in Washington that US LNG sent to Europe will bring both trade as well as geopolitical benefits. This argument is premised on the idea that Europe is too dependent on Russian gas, and that this dependence weakens Europe geopolitically. Diversification, therefore, will help mute whatever advantages Russia extracts from selling gas to Europe. If US LNG can accelerate that diversification agenda, even better because of the economic gains that accrue to the United States.
That argument has some merit, but with two important qualifications. Market share is a poor gauge for energy security or the geopolitical side-effects of an energy relationship. Competition and resilience are more important—meaning whether the gas is exchanged on market terms and whether a country has sufficient infrastructure to cope with a disruption in gas supplies. Similarly, US LNG might flow to Europe, Asia, or Latin America depending on market conditions at any given point; without context, it is hard to say for sure that more US LNG going to Europe is “good” in a broad sense. The reverse is also true: US LNG might reach Europe in limited quantities, but its latent presence can have an impact. These broader considerations are important to keep in mind.
The second qualifier is about tactics. The United States has long supported efforts to diversify Europe’s gas supplies. From time to time, those efforts have clashed with the interests of some European countries, especially when the United States has used sanctions to block projects that European companies are involved in. When that happens, energy has become a source of friction in the transatlantic alliance. Today is one of those moments, with strong support in Congress to block the proposed Nord Stream 2 pipeline between Russia and Germany. Since the United States is simultaneously trying to sell LNG to Europe, those two actions are linked in the minds of many Europeans, who see the United States advancing narrow commercial interests rather than broader geopolitical interests through that policy. That can hurt an alliance that has other important friction points as well.
In short, whether US LNG goes to Europe is less important than other considerations. For the United States, the key questions are: Is US LNG competitive in the world market? Will companies want to invest here? And will buyers see the United States as an attractive source for gas, rather than a source that is too expensive or too politically prickly? For Europe, the question is: Is there sufficient infrastructure and a well-functioning market where gas can be sourced at the lowest possible cost? That is the conversation to have, not how much US LNG might show up in Europe in one day or the next.
Nikos Tsafos is a senior fellow with the Energy and National Security Program at the Center for Strategic and International Studies in Washington, DC. This article originally appeared here.
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient import exc as ceil_exc
from ceilometerclient.openstack.common.apiclient import exceptions as c_a_exc
from cinderclient import exceptions as cinder_exc
from glanceclient import exc as glance_exc
from heatclient import exc as heat_exc
from keystoneclient import exceptions as keystone_exc
from neutronclient.common import exceptions as neutron_exc
from swiftclient import exceptions as swift_exc
from troveclient.client import exceptions as trove_exc
from heatclient import client as heatclient
import mock
from oslo.config import cfg
from testtools.testcase import skip
from heat.engine import clients
from heat.engine.clients import client_plugin
from heat.tests.common import HeatTestCase
from heat.tests.v1_1 import fakes
class ClientsTest(HeatTestCase):
    """Tests for clients.Clients and the heat client plugin URL handling."""

    def test_clients_get_heat_url(self):
        # get_heat_url(): falls back to the keystone catalogue URL when no
        # heat_url option is configured, otherwise expands %(tenant_id)s.
        con = mock.Mock()
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        c = clients.Clients(con)
        con.clients = c
        obj = c.client_plugin('heat')
        obj._get_client_option = mock.Mock()
        obj._get_client_option.return_value = None
        obj.url_for = mock.Mock(name="url_for")
        obj.url_for.return_value = "url_from_keystone"
        self.assertEqual("url_from_keystone", obj.get_heat_url())
        heat_url = "http://0.0.0.0:8004/v1/%(tenant_id)s"
        obj._get_client_option.return_value = heat_url
        tenant_id = "b363706f891f48019483f8bd6503c54b"
        result = heat_url % {"tenant_id": tenant_id}
        self.assertEqual(result, obj.get_heat_url())
        # An already-expanded URL must be passed through unchanged.
        obj._get_client_option.return_value = result
        self.assertEqual(result, obj.get_heat_url())

    @mock.patch.object(heatclient, 'Client')
    def test_clients_heat(self, mock_call):
        # client(): constructs a heat client against the keystone URL.
        self.stub_keystoneclient()
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
        c = clients.Clients(con)
        con.clients = c
        obj = c.client_plugin('heat')
        obj.url_for = mock.Mock(name="url_for")
        obj.url_for.return_value = "url_from_keystone"
        obj.client()
        self.assertEqual('url_from_keystone', obj.get_heat_url())

    @mock.patch.object(heatclient, 'Client')
    def test_clients_heat_no_auth_token(self, mock_call):
        # With no token on the context, the keystone client's token is used.
        self.stub_keystoneclient(auth_token='anewtoken')
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        con.auth_token = None
        c = clients.Clients(con)
        con.clients = c
        obj = c.client_plugin('heat')
        obj.url_for = mock.Mock(name="url_for")
        obj.url_for.return_value = "url_from_keystone"
        self.assertEqual('anewtoken', c.client('keystone').auth_token)

    @mock.patch.object(heatclient, 'Client')
    def test_clients_heat_cached(self, mock_call):
        # Repeated client() calls must return the same cached instance.
        self.stub_keystoneclient()
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
        c = clients.Clients(con)
        con.clients = c
        obj = c.client_plugin('heat')
        obj.get_heat_url = mock.Mock(name="get_heat_url")
        obj.get_heat_url.return_value = None
        obj.url_for = mock.Mock(name="url_for")
        obj.url_for.return_value = "url_from_keystone"
        obj._client = None
        heat = obj.client()
        heat_cached = obj.client()
        self.assertEqual(heat, heat_cached)

    def test_clients_auth_token_update(self):
        # auth_token always reflects the keystone client's *current* token,
        # picking up refreshes made after the first access.
        fkc = self.stub_keystoneclient(auth_token='token1')
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.trust_id = "b363706f891f48019483f8bd6503c54b"
        con.username = 'heat'
        con.password = 'verysecret'
        con.auth_token = None
        obj = clients.Clients(con)
        con.clients = obj
        self.assertIsNotNone(obj.client('heat'))
        self.assertEqual('token1', obj.auth_token)
        fkc.auth_token = 'token2'
        self.assertEqual('token2', obj.auth_token)
class FooClientsPlugin(client_plugin.ClientPlugin):
    """Minimal concrete ClientPlugin used to exercise base-class behaviour."""

    def _create(self):
        """Satisfy the abstract interface; no real client is created."""
        return None
class ClientPluginTest(HeatTestCase):
    """Tests for the ClientPlugin base-class helper methods."""

    def test_get_client_option(self):
        # Options are looked up in the client-specific group first
        # (clients_heat), then fall back to the generic [clients] group.
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
        c = clients.Clients(con)
        con.clients = c
        plugin = FooClientsPlugin(con)
        cfg.CONF.set_override('ca_file', '/tmp/bar',
                              group='clients_heat')
        cfg.CONF.set_override('ca_file', '/tmp/foo',
                              group='clients')
        cfg.CONF.set_override('endpoint_type', 'internalURL',
                              group='clients')
        # check heat group
        self.assertEqual('/tmp/bar',
                         plugin._get_client_option('heat', 'ca_file'))
        # check fallback clients group for known client
        self.assertEqual('internalURL',
                         plugin._get_client_option('glance', 'endpoint_type'))
        # check fallback clients group for unknown client foo
        self.assertEqual('/tmp/foo',
                         plugin._get_client_option('foo', 'ca_file'))

    def test_auth_token(self):
        # The plugin's auth_token comes from the keystone client.
        con = mock.Mock()
        con.auth_token = "1234"
        c = clients.Clients(con)
        con.clients = c
        c.client = mock.Mock(name="client")
        mock_keystone = mock.Mock()
        c.client.return_value = mock_keystone
        mock_keystone.auth_token = '5678'
        plugin = FooClientsPlugin(con)
        # assert token is from keystone rather than context
        # even though both are set
        self.assertEqual('5678', plugin.auth_token)
        c.client.assert_called_with('keystone')

    def test_url_for(self):
        # url_for() proxies to the keystone client's endpoint lookup.
        con = mock.Mock()
        con.auth_token = "1234"
        c = clients.Clients(con)
        con.clients = c
        c.client = mock.Mock(name="client")
        mock_keystone = mock.Mock()
        c.client.return_value = mock_keystone
        mock_keystone.url_for.return_value = 'http://192.0.2.1/foo'
        plugin = FooClientsPlugin(con)
        self.assertEqual('http://192.0.2.1/foo',
                         plugin.url_for(service_type='foo'))
        c.client.assert_called_with('keystone')

    def test_abstract_create(self):
        # ClientPlugin is abstract: instantiating it directly must fail.
        con = mock.Mock()
        c = clients.Clients(con)
        con.clients = c
        self.assertRaises(TypeError, client_plugin.ClientPlugin, c)
class TestClientPluginsInitialise(HeatTestCase):
    """Smoke tests that every registered client plugin can be created."""

    @skip('skipped until keystone can read context auth_ref')
    def test_create_all_clients(self):
        # Every plugin registered with the stevedore manager should be
        # resolvable and able to build a client.
        con = mock.Mock()
        con.auth_url = "http://auth.example.com:5000/v2.0"
        con.tenant_id = "b363706f891f48019483f8bd6503c54b"
        con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
        c = clients.Clients(con)
        con.clients = c
        for plugin_name in clients._mgr.names():
            self.assertTrue(clients.has_client(plugin_name))
            c.client(plugin_name)

    def test_create_all_client_plugins(self):
        # Each plugin starts uninitialised (_client is None) and is bound
        # to both the Clients container and its request context.
        plugin_types = clients._mgr.names()
        self.assertIsNotNone(plugin_types)
        con = mock.Mock()
        c = clients.Clients(con)
        con.clients = c
        for plugin_name in plugin_types:
            plugin = c.client_plugin(plugin_name)
            self.assertIsNotNone(plugin)
            self.assertEqual(c, plugin.clients)
            self.assertEqual(con, plugin.context)
            self.assertIsNone(plugin._client)
            self.assertTrue(clients.has_client(plugin_name))
class TestIsNotFound(HeatTestCase):
    """Scenario-driven checks of each client plugin's exception helpers.

    Each scenario names a plugin, a factory producing an exception
    instance, and the expected answers of ``is_not_found``,
    ``is_over_limit`` and ``is_client_exception`` (plus
    ``is_unprocessable_entity`` for nova).  Every test method below runs
    once per scenario.
    """

    # (scenario-name, attributes) pairs consumed by the scenario test
    # runner; the attributes become self.plugin, self.exception, etc.
    scenarios = [
        ('ceilometer_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='ceilometer',
            exception=lambda: ceil_exc.HTTPNotFound(details='gone'),
        )),
        ('ceilometer_not_found_apiclient', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='ceilometer',
            exception=lambda: c_a_exc.NotFound(details='gone'),
        )),
        ('ceilometer_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='ceilometer',
            exception=lambda: Exception()
        )),
        ('ceilometer_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='ceilometer',
            exception=lambda: ceil_exc.HTTPOverLimit(details='over'),
        )),
        ('cinder_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='cinder',
            exception=lambda: cinder_exc.NotFound(code=404),
        )),
        ('cinder_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='cinder',
            exception=lambda: Exception()
        )),
        ('cinder_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='cinder',
            exception=lambda: cinder_exc.OverLimit(code=413),
        )),
        ('glance_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='glance',
            exception=lambda: glance_exc.HTTPNotFound(details='gone'),
        )),
        ('glance_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='glance',
            exception=lambda: Exception()
        )),
        ('glance_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='glance',
            exception=lambda: glance_exc.HTTPOverLimit(details='over'),
        )),
        ('heat_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='heat',
            exception=lambda: heat_exc.HTTPNotFound(message='gone'),
        )),
        ('heat_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='heat',
            exception=lambda: Exception()
        )),
        ('heat_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='heat',
            exception=lambda: heat_exc.HTTPOverLimit(message='over'),
        )),
        ('keystone_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='keystone',
            exception=lambda: keystone_exc.NotFound(details='gone'),
        )),
        ('keystone_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='keystone',
            exception=lambda: Exception()
        )),
        ('keystone_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='keystone',
            exception=lambda: keystone_exc.RequestEntityTooLarge(
                details='over'),
        )),
        ('neutron_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='neutron',
            exception=lambda: neutron_exc.NotFound,
        )),
        ('neutron_network_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='neutron',
            exception=lambda: neutron_exc.NetworkNotFoundClient(),
        )),
        ('neutron_port_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='neutron',
            exception=lambda: neutron_exc.PortNotFoundClient(),
        )),
        ('neutron_status_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='neutron',
            exception=lambda: neutron_exc.NeutronClientException(
                status_code=404),
        )),
        ('neutron_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='neutron',
            exception=lambda: Exception()
        )),
        ('neutron_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='neutron',
            exception=lambda: neutron_exc.NeutronClientException(
                status_code=413),
        )),
        ('nova_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            is_unprocessable_entity=False,
            plugin='nova',
            exception=lambda: fakes.fake_exception(),
        )),
        ('nova_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            is_unprocessable_entity=False,
            plugin='nova',
            exception=lambda: Exception()
        )),
        ('nova_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            is_unprocessable_entity=False,
            plugin='nova',
            exception=lambda: fakes.fake_exception(413),
        )),
        ('nova_unprocessable_entity', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=True,
            is_unprocessable_entity=True,
            plugin='nova',
            exception=lambda: fakes.fake_exception(422),
        )),
        ('swift_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='swift',
            exception=lambda: swift_exc.ClientException(
                msg='gone', http_status=404),
        )),
        ('swift_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='swift',
            exception=lambda: Exception()
        )),
        ('swift_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='swift',
            exception=lambda: swift_exc.ClientException(
                msg='ouch', http_status=413),
        )),
        ('trove_not_found', dict(
            is_not_found=True,
            is_over_limit=False,
            is_client_exception=True,
            plugin='trove',
            exception=lambda: trove_exc.NotFound(message='gone'),
        )),
        ('trove_exception', dict(
            is_not_found=False,
            is_over_limit=False,
            is_client_exception=False,
            plugin='trove',
            exception=lambda: Exception()
        )),
        ('trove_overlimit', dict(
            is_not_found=False,
            is_over_limit=True,
            is_client_exception=True,
            plugin='trove',
            exception=lambda: trove_exc.RequestEntityTooLarge(
                message='over'),
        )),
    ]

    def test_is_not_found(self):
        """is_not_found(e) must match the scenario's expectation."""
        con = mock.Mock()
        c = clients.Clients(con)
        client_plugin = c.client_plugin(self.plugin)
        try:
            raise self.exception()
        except Exception as e:
            # Re-raise on mismatch so the failing test carries the
            # original exception as evidence.
            if self.is_not_found != client_plugin.is_not_found(e):
                raise

    def test_ignore_not_found(self):
        """ignore_not_found must swallow only not-found exceptions."""
        con = mock.Mock()
        c = clients.Clients(con)
        client_plugin = c.client_plugin(self.plugin)
        try:
            exp = self.exception()
            exp_class = exp.__class__
            raise exp
        except Exception as e:
            if self.is_not_found:
                client_plugin.ignore_not_found(e)
            else:
                # Anything else must be re-raised unchanged.
                self.assertRaises(exp_class,
                                  client_plugin.ignore_not_found,
                                  e)

    def test_is_over_limit(self):
        """is_over_limit(e) must match the scenario's expectation."""
        con = mock.Mock()
        c = clients.Clients(con)
        client_plugin = c.client_plugin(self.plugin)
        try:
            raise self.exception()
        except Exception as e:
            if self.is_over_limit != client_plugin.is_over_limit(e):
                raise

    def test_is_client_exception(self):
        """is_client_exception(e) must match the scenario's expectation."""
        con = mock.Mock()
        c = clients.Clients(con)
        client_plugin = c.client_plugin(self.plugin)
        try:
            raise self.exception()
        except Exception as e:
            ice = self.is_client_exception
            actual = client_plugin.is_client_exception(e)
            if ice != actual:
                raise

    def test_is_unprocessable_entity(self):
        """is_unprocessable_entity(e) must match, for nova only."""
        con = mock.Mock()
        c = clients.Clients(con)
        # only 'nova' client plugin need to check this exception
        if self.plugin == 'nova':
            client_plugin = c.client_plugin(self.plugin)
            try:
                raise self.exception()
            except Exception as e:
                iue = self.is_unprocessable_entity
                if iue != client_plugin.is_unprocessable_entity(e):
                    raise
|
THE CHANNELS LOT A4 in Cayman - Crighton Properties Ltd.
Nestled deep within the protective embrace of the famous North Sound, The Channels presents an offering of secluded waterfront home sites, perfect for the growing family or the downsizing empty-nester. These lots represent an opportunity to dive into canal front living and a boating lifestyle, with amenities such as strong and enforced covenants, underground utilities and gated entries. *Lot sizes are estimated, pending final surveys.
|
from __future__ import unicode_literals
import json
import random
import re
from .common import InfoExtractor
from ..compat import (
compat_b64decode,
compat_HTTPError,
compat_str,
)
from ..utils import (
ExtractorError,
orderedSet,
unescapeHTML,
urlencode_postdata,
urljoin,
)
class LinuxAcademyIE(InfoExtractor):
    """Extractor for linuxacademy.com lessons and course modules.

    Login goes through login.linuxacademy.com using an OAuth-style
    implicit flow (token in the redirect URL fragment); the resulting
    access token is then validated against linuxacademy.com to establish
    the site session before extraction.
    """

    _VALID_URL = r'''(?x)
                    https?://
                        (?:www\.)?linuxacademy\.com/cp/
                        (?:
                            courses/lesson/course/(?P<chapter_id>\d+)/lesson/(?P<lesson_id>\d+)|
                            modules/view/id/(?P<course_id>\d+)
                        )
                    '''
    _TESTS = [{
        'url': 'https://linuxacademy.com/cp/courses/lesson/course/1498/lesson/2/module/154',
        'info_dict': {
            'id': '1498-2',
            'ext': 'mp4',
            'title': "Introduction to the Practitioner's Brief",
        },
        'params': {
            'skip_download': True,
        },
        'skip': 'Requires Linux Academy account credentials',
    }, {
        'url': 'https://linuxacademy.com/cp/courses/lesson/course/1498/lesson/2',
        'only_matching': True,
    }, {
        'url': 'https://linuxacademy.com/cp/modules/view/id/154',
        'info_dict': {
            'id': '154',
            'title': 'AWS Certified Cloud Practitioner',
            'description': 'md5:039db7e60e4aac9cf43630e0a75fa834',
        },
        'playlist_count': 41,
        'skip': 'Requires Linux Academy account credentials',
    }]

    # OAuth endpoints and the site's public client id.
    _AUTHORIZE_URL = 'https://login.linuxacademy.com/authorize'
    _ORIGIN_URL = 'https://linuxacademy.com'
    _CLIENT_ID = 'KaWxNn1C2Gc7n83W9OFeXltd8Utb5vvx'
    _NETRC_MACHINE = 'linuxacademy'

    def _real_initialize(self):
        # Log in up front; extraction fails without a site session.
        self._login()

    def _login(self):
        """Authenticate with the configured credentials, if any."""
        username, password = self._get_login_info()
        if username is None:
            return

        def random_string():
            # 32-character values for the OAuth 'state' and 'nonce' params.
            return ''.join([
                random.choice('0123456789ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvwxyz-._~')
                for _ in range(32)])

        # Step 1: fetch the authorize page, which embeds base64-encoded
        # extra login parameters in an atob(...) call.
        webpage, urlh = self._download_webpage_handle(
            self._AUTHORIZE_URL, None, 'Downloading authorize page', query={
                'client_id': self._CLIENT_ID,
                'response_type': 'token id_token',
                'redirect_uri': self._ORIGIN_URL,
                'scope': 'openid email user_impersonation profile',
                'audience': self._ORIGIN_URL,
                'state': random_string(),
                'nonce': random_string(),
            })

        login_data = self._parse_json(
            self._search_regex(
                r'atob\(\s*(["\'])(?P<value>(?:(?!\1).)+)\1', webpage,
                'login info', group='value'), None,
            transform_source=lambda x: compat_b64decode(x).decode('utf-8')
        )['extraParams']

        login_data.update({
            'client_id': self._CLIENT_ID,
            'redirect_uri': self._ORIGIN_URL,
            'tenant': 'lacausers',
            'connection': 'Username-Password-Authentication',
            'username': username,
            'password': password,
            'sso': 'true',
        })

        login_state_url = compat_str(urlh.geturl())

        # Step 2: POST the credentials; a 401 carries a JSON error body
        # whose description is surfaced to the user.
        try:
            login_page = self._download_webpage(
                'https://login.linuxacademy.com/usernamepassword/login', None,
                'Downloading login page', data=json.dumps(login_data).encode(),
                headers={
                    'Content-Type': 'application/json',
                    'Origin': 'https://login.linuxacademy.com',
                    'Referer': login_state_url,
                })
        except ExtractorError as e:
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
                error = self._parse_json(e.cause.read(), None)
                message = error.get('description') or error['code']
                raise ExtractorError(
                    '%s said: %s' % (self.IE_NAME, message), expected=True)
            raise

        # Step 3: submit the login page's hidden form to the callback;
        # the redirect URL fragment contains the access token.
        callback_page, urlh = self._download_webpage_handle(
            'https://login.linuxacademy.com/login/callback', None,
            'Downloading callback page',
            data=urlencode_postdata(self._hidden_inputs(login_page)),
            headers={
                'Content-Type': 'application/x-www-form-urlencoded',
                'Origin': 'https://login.linuxacademy.com',
                'Referer': login_state_url,
            })

        access_token = self._search_regex(
            r'access_token=([^=&]+)', compat_str(urlh.geturl()),
            'access token')

        # Step 4: validate the token against the main site to set cookies.
        self._download_webpage(
            'https://linuxacademy.com/cp/login/tokenValidateLogin/token/%s'
            % access_token, None, 'Downloading token validation page')

    def _real_extract(self, url):
        """Dispatch to playlist (course) or single-lesson extraction."""
        mobj = re.match(self._VALID_URL, url)
        chapter_id, lecture_id, course_id = mobj.group('chapter_id', 'lesson_id', 'course_id')
        item_id = course_id if course_id else '%s-%s' % (chapter_id, lecture_id)

        webpage = self._download_webpage(url, item_id)

        # course path
        if course_id:
            # Collect every lesson link on the page (deduplicated,
            # order preserved) into a playlist.
            entries = [
                self.url_result(
                    urljoin(url, lesson_url), ie=LinuxAcademyIE.ie_key())
                for lesson_url in orderedSet(re.findall(
                    r'<a[^>]+\bhref=["\'](/cp/courses/lesson/course/\d+/lesson/\d+/module/\d+)',
                    webpage))]
            title = unescapeHTML(self._html_search_regex(
                (r'class=["\']course-title["\'][^>]*>(?P<value>[^<]+)',
                 r'var\s+title\s*=\s*(["\'])(?P<value>(?:(?!\1).)+)\1'),
                webpage, 'title', default=None, group='value'))
            description = unescapeHTML(self._html_search_regex(
                r'var\s+description\s*=\s*(["\'])(?P<value>(?:(?!\1).)+)\1',
                webpage, 'description', default=None, group='value'))
            return self.playlist_result(entries, course_id, title, description)

        # single video path
        info = self._extract_jwplayer_data(
            webpage, item_id, require_title=False, m3u8_id='hls',)
        title = self._search_regex(
            (r'>Lecture\s*:\s*(?P<value>[^<]+)',
             r'lessonName\s*=\s*(["\'])(?P<value>(?:(?!\1).)+)\1'), webpage,
            'title', group='value')
        info.update({
            'id': item_id,
            'title': title,
        })
        return info
|
This episode we continue the Joe's adventures as they fight Cobra and Destro!
Then at 39 minutes in to the show, we look at the fifth episode of the regular cartoon with the final episode of The Pyramid of Darkness, Part 5: Knotting Cobra's Coils.
This episode first aired September 20, 1985 and was written by Ron Friedman.
And finally at 2 hours and 59 minutes, I present Postbox: The Pit! Write in and I'll read your e-mail on the air also.
Also, be sure to check out my newest podcast (in which I do with Pat Sampson) called The Krypton Podcast where we will be looking at the new Sci-Fi show, Krypton. Join us for our Season 0.
Join us next month as we look at issue 18 of G.I. Joe: A Real American Hero, the sixth episode of G.I. Joe, the regular series, with me and the guys.
|
from multiprocessing import Process, Queue
class InvalidArguments(ValueError):
    """Raised when a plugin is handed arguments it cannot work with."""
class Plugin(Process):
    """A Plugin is a self-contained bit of functionality that runs in its own
    process, and runs via listening for messages and sending messages through
    Queues.

    Subclasses declare the message names they handle in ``listeners`` and
    those they emit in ``messengers``.  Each listened-for message must have
    a method of the same name taking a single ``payload`` argument; the
    ``run`` loop dispatches incoming messages to those methods.
    """

    # Message names this plugin responds to; SHUTDOWN is always included.
    listeners = set(['SHUTDOWN'])
    # Message names this plugin may emit.
    messengers = set([])
    name = 'Base Plugin'

    def __new__(cls, *args, **kwargs):
        # Guarantee the base messages (SHUTDOWN) are listened for even when
        # a subclass redefines ``listeners`` without including them.
        plug = super(Plugin, cls).__new__(cls, *args, **kwargs)
        plug.listeners.update(Plugin.listeners)
        return plug

    def __init__(self):
        """Set us up to run as a separate process, initialize our listener
        Queue, and set our runnable attribute.
        """
        super(Plugin, self).__init__()
        self.listener = Queue()
        self.runnable = True

    def send(self, message, payload=None):
        """Send a message through our messenger Queue.

        Messages are presumably descriptions of a task that just got completed,
        or a notification of status, or whatnot.

        NOTE(review): ``self.messenger`` is not created in ``__init__``;
        presumably whoever wires plugins together attaches it — confirm.
        """
        self.messenger.put((message, payload))

    def recieve(self, message, payload=None):
        """Deliver a message to this plugin via its listener Queue.

        Despite the (historically misspelled) name, this does not *read* a
        message: callers use it to hand us work, and our ``run`` loop pops
        it off the queue and dispatches it.
        """
        self.listener.put((message, payload))

    # Correctly spelled alias, kept alongside ``recieve`` so existing
    # callers continue to work.
    receive = recieve

    def SHUTDOWN(self, payload):
        """Set self.runnable to False.

        This should cause a subclass to break out of its run loop.
        """
        self.runnable = False

    def pre_run(self):
        """Code to be run before our run loop starts"""
        pass

    def pre_call_message(self):
        """Code to be run before calling a message handler"""
        pass

    def pre_first_call_message(self):
        """Code to be run before calling the first message handler"""
        pass

    def post_first_call_message(self):
        """Code to be run after the first message has been handled"""
        pass

    def post_call_message(self):
        """Code to be run after a message has been handled"""
        pass

    def post_run(self):
        """Code to be run after our run loop terminates"""
        pass

    def run(self):
        """Run our loop, dispatching each queued message to the
        identically-named handler method, and invoke any defined hooks.
        """
        self.pre_run()
        first = True
        while self.runnable:
            self.pre_call_message()
            if first:
                self.pre_first_call_message()
            # Blocks until a message arrives; handler name == message name.
            message, payload = self.listener.get()
            getattr(self, message)(payload)
            if first:
                first = False
                self.post_first_call_message()
            self.post_call_message()
        self.post_run()
|
Our Cribbage board is made from solid Mahogany with attractive brass scoring pegs, which are stored in a built-in sliding compartment. This set does not include cards. Rules are included.
|
#!/usr/bin/env python
import sys
import os
import pdf2txt
import re
# Replacement marker inserted wherever sensitive text is removed.
REDACTED_TEXT = 'REDACTED'
# Matches LinkedIn/GitHub profile URLs, with optional scheme and subdomain.
LINK_REGEX = re.compile('(https?:\/\/)?([a-zA-Z0-9]{2,4}\.)?(linkedin.com|lnkd\.in|github.com)\/.+')
# Matches email addresses; group 1 captures the whole address.
EMAIL_REGEX = re.compile('([\w\.]+@(?:[\w]+\.)+[a-zA-Z]{2,})')
# One blacklisted word/phrase per line; matches are replaced with REDACTED_TEXT.
BLACKLIST_FILE = "bad_words.txt"
def get_blacklist_words():
    """Read BLACKLIST_FILE and return its entries as a set of lowercased,
    stripped strings.

    Returns an empty set when the file cannot be read; the error is
    printed rather than raised (redaction proceeds without a blacklist).
    """
    blacklist = []
    try:
        with open(BLACKLIST_FILE) as f:
            lines = f.read().splitlines()
            for line in lines:
                if line:  # skip blank lines
                    blacklist.append(line.lower().strip())
    except Exception as e:
        print "Unable to read bad words from {0}. Error: {1}".format(BLACKLIST_FILE, e)
    return set(blacklist)
def join_newlines(array):
    """Join a sequence of strings into one newline-separated string."""
    separator = '\n'
    return separator.join(array)
def redact_initial(file_lines, lastname):
    """First redaction pass: strip profile links, email addresses, and
    reduce the candidate's first name to an initial on matching lines.

    NOTE(review): lines that match have ALL their spaces removed (the
    replace(" ", "") result is what gets appended) — presumably to defeat
    spaced-out text from the PDF extraction; confirm this is intended.
    """
    processed_file_lines = []
    fullname = ''
    firstname = ''
    for index, line in enumerate(file_lines):
        newline = line
        # Search with spaces removed so split-up URLs still match.
        links = LINK_REGEX.search(newline.replace(" ", ""))
        if links:
            matching_text = links.group()
            # print 'links!', matching_text
            newline = newline.replace(" ", "").replace(matching_text, REDACTED_TEXT + ' PROFILE')
            # print newline
        emails = EMAIL_REGEX.search(newline.replace(" ", ""))
        if emails:
            matching_text = emails.group(1)
            # print 'emails!', matching_text
            newline = newline.replace(" ", "").replace(matching_text, REDACTED_TEXT + ' EMAIL')
            # print newline
        # A line containing the last name is assumed to hold the full name;
        # everything before the last name is treated as the first name.
        if lastname.lower() in newline.lower() or lastname.lower() in newline.lower().replace(" ", ""):
            fullname = newline.replace(" ", "")
            firstname = re.split(lastname, fullname, flags=re.IGNORECASE)[0]
            print fullname
            print firstname
            # NOTE(review): firstname[0] raises IndexError when the line
            # starts with the last name (empty firstname) — confirm inputs.
            newline = newline.replace(" ", "").replace(firstname, firstname[0] + '. ')
            # print 'name',firstname
            # print newline
        processed_file_lines.append(newline)
    return processed_file_lines
def redact(list_of_lines):
    """Replace every blacklisted word in each line with REDACTED_TEXT.

    Words match case-insensitively and only when bounded by non-word
    characters on both sides.
    """
    blacklist = get_blacklist_words()
    redacted = []
    for line in list_of_lines:
        cleaned = line
        for word in blacklist:
            pattern = re.compile("[^\w]{0}[^\w]".format(word), re.IGNORECASE)
            cleaned = pattern.sub(" {} ".format(REDACTED_TEXT), cleaned)
        redacted.append(cleaned)
    return redacted
def process(fname):
    """Convert one PDF to text, redact it, and write *_redacted.txt
    next to the source file.

    The file's base name (without extension) is used as the candidate's
    last name for the name-redaction pass.
    """
    lastname = '.'.join(os.path.basename(fname).split(".")[:-1])
    print 'Using name', lastname
    pathname = os.path.dirname(fname)
    file_path = os.path.join(pathname, lastname)
    txt_file_path = file_path + '.txt'
    redacted_file_path = file_path + '_redacted.txt'
    # os.remove(redacted_file_path)
    # pdf2txt expects argv-style arguments; element 0 is the program name.
    pdf2txt.main(['', '-o', txt_file_path, fname])
    with open(txt_file_path) as f:
        lines = f.read().splitlines()
    # Pass 1: names/links/emails; pass 2: blacklist words.
    names_redacted = redact_initial(lines, lastname)
    output = redact(names_redacted)
    with open(redacted_file_path, 'w') as ofile:
        ofile.write(join_newlines(output))
if __name__ == "__main__":
    # Each command-line argument is a PDF to redact.
    filenames = []
    if len(sys.argv) > 1:
        filenames = sys.argv[1:]
    else:
        print "You must give at least one file to process"
        sys.exit(1)
    for filename in filenames:
        process(filename)
|
This indoor waterfall is 30 meters in diameter and 24 meters high with water streaming through all four levels of the building.
Thank you Resa. It is a lot more impressive when seen up close. Don’t think my photos do it justice.
Maybe if you are really scared of heights, but then you can stand at the bottom and enjoy the view up.
Dubai is full of some amazing water features!
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Minimal example of how to read samples from a dataset generated by `generate_pycarbon_dataset.py`
using pyspark"""
from __future__ import print_function
from pyspark.sql import SparkSession
def pyspark_hello_world(dataset_url='file:///tmp/carbon_pycarbon_dataset'):
    """Read the generated carbon dataset back using plain pyspark SQL."""
    spark = (SparkSession
             .builder
             .master('local[1]')
             .getOrCreate())

    # Drop the leading 'file://' scheme to obtain a local filesystem path.
    dataset_path = dataset_url[7:]

    # Create a dataframe object from carbon files
    spark.sql("create table readcarbon using carbon location '" + str(dataset_path) + "'")
    frame = spark.sql("select * from readcarbon")

    # Show a schema
    frame.printSchema()

    # Count all
    frame.count()

    # Show just some columns
    frame.select('id').show()

    # This is how you can use a standard SQL to query a dataset. Note that the data is not decoded in this case.
    number_of_rows = spark.sql(
        'SELECT count(id) '
        'from carbon.`{}` '.format(dataset_url)).collect()
    print('Number of rows in the dataset: {}'.format(number_of_rows[0][0]))
if __name__ == '__main__':
    # Allow running this example directly as a script.
    pyspark_hello_world()
|
Style your wedding day with the map locations that hold special meaning for you. We make hanging map hearts to celebrate where you met, where you marry or any place that holds particular importance. Our travel themed wedding collection includes table numbers, wooden letters and keepsakes to add unique and personalised finishing touches to your big day. Our wooden letters make beautiful cake toppers to keep as a momento.
|
"""Upload single cell BAM."""
from resolwe.process import (
Cmd,
DataField,
FileField,
Process,
SchedulingClass,
StringField,
)
class ImportScBam(Process):
    """Import scSeq BAM file and index.

    Validates that the uploaded BAM and BAI files share a base name,
    computes samtools flagstat alignment statistics, and exposes the
    files plus species/build metadata as outputs.
    """

    slug = "upload-bam-scseq-indexed"
    name = "Single cell BAM file and index"
    process_type = "data:alignment:bam:scseq"
    version = "1.2.1"
    category = "Import"
    scheduling_class = SchedulingClass.BATCH
    entity = {"type": "sample"}
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/common:2.3.1"}
        },
    }
    # Data object is named after the sample of the linked reads.
    data_name = '{{ reads|sample_name|default("?") }}'

    class Input:
        """Input fields to process Import ScBam."""

        src = FileField(
            description="A mapping file in BAM format.",
            label="Mapping (BAM)",
        )
        src2 = FileField(
            description="An index file of a BAM mapping file (ending with bam.bai).",
            label="BAM index (*.bam.bai file)",
        )
        reads = DataField(
            data_type="screads:",
            label="Single cell fastq reads",
        )
        species = StringField(
            label="Species",
            description="Species latin name.",
        )
        build = StringField(
            label="Build",
        )

    class Output:
        """Output fields to process Import ScBam."""

        bam = FileField(label="Uploaded BAM")
        bai = FileField(label="Index BAI")
        stats = FileField(label="Alignment statistics")
        build = StringField(label="Build")
        species = StringField(label="Species")

    def run(self, inputs, outputs):
        """Run the analysis."""
        bam_path = inputs.src.import_file(imported_format="extracted")
        bai_path = inputs.src2.import_file(imported_format="extracted")
        assert bam_path.endswith(".bam")
        assert bai_path.endswith(".bam.bai")
        # Strip '.bam' / '.bam.bai' so the base names can be compared.
        bam_name = bam_path[:-4]
        bai_name = bai_path[:-8]
        if bam_name != bai_name:
            self.error("BAM and BAI files should have the same name.")
        # Alignment statistics via samtools flagstat, captured to a file.
        stats = "{}_stats.txt".format(bam_name)
        (Cmd["samtools"]["flagstat"][bam_path] > stats)()
        outputs.bam = bam_path
        outputs.bai = bai_path
        outputs.stats = stats
        outputs.species = inputs.species
        outputs.build = inputs.build
|
Almost half of all SMSF trustees have unmet financial advice needs and represent a market opportunity for advisers, according to a new report from Vanguard.
The report, put together in conjunction with Investment Trends, found that 47 per cent of SMSF trustees have unmet advice needs, particularly around inheritance arrangements and estate planning.
Additionally, while 82 per cent of SMSF trustees believe diversification is important, only 54 per cent believe their portfolio is diversified enough.
Vanguard head of corporate affairs Robin Bowerman added that many trustees had a “concerning” definition of diversification, with 84 per cent of respondents believing an investment across 30 Australian shares to be a well-diversified portfolio.
“Instead it is harbouring high equity concentration risk and home country bias, in addition to very low levels of exposure to international shares and bonds,” Mr Bowerman said.
Vanguard Australia head of intermediary Rebecca Pope said this created a window for financial advisers.
“There is a great opportunity here for advisers to deliver on these unmet advice needs, and we believe that through some key changes to their practices, advisers can leverage this information and better demonstrate their value, as well as tackling some of the perceived issues around expertise and trust with clients,” she said.
|
"""File for running cppapplication.exe to create Amphitrite
data files from MassLynx raw files.
Deprecated - Use RawFileProcessor_v2.py
"""
__author__ = "Ganesh N. Sivalingam <[email protected]"
import os
import shutil
import re
import subprocess
import cPickle as pickle
import numpy as np
import time
import utils
class RawFileProcessor():
    """Convert a MassLynx .raw folder into pickled Amphitrite (.a) data
    files by driving the external cppapplication.exe converter.

    The converter writes three text files (matrix + two axes) which are
    parsed into numpy arrays and dumped as .amphi pickles.
    """

    def __init__(self,rawPath):
        self.path = rawPath
        self.rawfolder = os.path.basename(self.path.rstrip('/'))
        # Output folder mirrors the raw folder but with an '.a' suffix.
        if not self.path.rstrip('/')[-2:] == '.a':
            self.outputfolder = self.path.rstrip('.raw') + '.a'
        else:
            self.outputfolder = self.path

    def setOutputFolder(self,outputFolderPath):
        """Redirect output to outputFolderPath/<rawname>.a."""
        rawFileName = os.path.basename(self.rawfolder)
        rawFileName = rawFileName.rstrip('.raw/') + '.a'
        self.outputfolder = os.path.join(outputFolderPath,rawFileName)

    def processFolder(self,grain=2):
        '''1. Copy raw file to working directory
        2. Run CppApplication
        3. Read text files and make imObj
        4. Delete text files, remove raw file
        5. Make new folder for processed data
        6. Dump pickles there'''
        if not self._checkIfProcessed():
            # 1
            if not os.path.isdir(os.path.join('.',self.rawfolder)):
                shutil.copytree(self.path,os.path.join('.',self.rawfolder))
            # 2
            #print 'raw folder', self.rawfolder
            #print 'path', self.path
            print \
'''=================================
Arguments passed
================================='''
            print ['cppapplication.exe',self.rawfolder,"0","1",str(grain),"0"]
            print \
'''================================='''
            p = subprocess.call(['cppapplication.exe',self.rawfolder,"0","1",str(grain),"0"])
            #print p
            #print 'cwd', os.getcwd()
            # The converter drops its output in the cwd; move each file
            # into the raw folder, retrying while the converter finishes.
            for file in ['MassMobility.txt','MassMobilityXaxis.txt','MassMobilityYaxis.txt']:
                try:
                    os.rename(file, os.path.join(self.rawfolder,file))
                except:
                    print 'waiting for cppapplication'
                    # NOTE(review): this retry passes one fewer argument
                    # than the first call ("1" is missing) — confirm which
                    # argument list is correct.
                    subprocess.call(['cppapplication.exe',str(self.rawfolder),"0",str(grain),"0"])
                    time.sleep(5)
                    try:
                        os.rename(file, os.path.join(self.rawfolder,file))
                    except:
                        print 'still waiting'
                        time.sleep(10)
                        try:
                            os.rename(file, os.path.join(self.rawfolder,file))
                        except:
                            print 'Couldnt open file: %s' %self.rawfolder
                            shutil.rmtree(self.rawfolder)
            if not os.path.isdir(self.outputfolder):
                os.mkdir(self.outputfolder)
            self._processAxisX()
            self._processAxisY()
            self._processMassMobililty()
            # Remove the working copy of the raw folder.
            shutil.rmtree(path=self.rawfolder)
            print 'File processed: %s' %self.rawfolder

    def _checkIfProcessed(self):
        """Return True if .amphi pickles (or legacy .txt files) exist."""
        processed = False
        amphiFns = ['MassMobilityXaxis.amphi','MassMobilityYaxis.amphi','MassMobility.amphi']
        if os.path.isdir(self.outputfolder):
            if utils.isInDir(self.outputfolder, amphiFns):
                processed = True
        # Legacy support for text files (TO BE REMOVED)
        textFns = ['MassMobilityXaxis.txt','MassMobilityYaxis.txt','MassMobility.txt']
        if utils.isInDir(self.path,textFns):
            processed = True
        return processed

    def getAxisX(self):
        """Return the unpickled m/z (x) axis array."""
        return self._unPickle(os.path.join(self.outputfolder,'MassMobilityXaxis.amphi'))

    def getAxisY(self):
        """Return the unpickled arrival-time (y) axis array."""
        return self._unPickle(os.path.join(self.outputfolder,'MassMobilityYaxis.amphi'))

    def getMassMobility(self):
        """Return the unpickled mass-mobility intensity matrix."""
        return self._unPickle(os.path.join(self.outputfolder,'MassMobility.amphi'))

    def _processMassMobililty(self,removeTxt=1):
        """Parse MassMobility.txt into a 200-row matrix and pickle it."""
        path = os.path.join(self.rawfolder, 'MassMobility.txt')
        text = open(path,'r').readlines()
        if removeTxt:
            os.remove(path)
        lines = len(text)
        # Rewrite as a single comma-separated stream, stripping the
        # trailing comma from the final line.
        file = open('temp.xsg','w')
        for i,line in enumerate(text):
            if i != (lines-1):
                print>> file, line.rstrip('\n')
            else:
                print>> file, line.rstrip(',\n')
        file.close()
        ifile = open('temp.xsg','r')
        temp = np.fromfile(ifile,dtype=np.float64,sep=',')
        ifile.close()
        os.remove('temp.xsg')
        temp = np.array_split(temp,200)
        massMobility = np.flipud(temp)
        self._pickle(massMobility, os.path.join(self.outputfolder,'MassMobility.amphi'))

    def _processAxisX(self,removeTxt=1):
        """Parse the comma-separated x axis; the last two values are dropped."""
        path = os.path.join(self.rawfolder,'MassMobilityXaxis.txt')
        ifile = open(path,'r')
        xAxis = np.fromfile(ifile,dtype='float64',sep=',')
        ifile.close()
        if removeTxt:
            os.remove(path)
        self._pickle(xAxis[:-2], os.path.join(self.outputfolder,'MassMobilityXaxis.amphi'))

    def _processAxisY(self,removeTxt=1):
        """Parse the newline-separated y axis and store it reversed."""
        path = os.path.join(self.rawfolder,'MassMobilityYaxis.txt')
        ifile = open(path,'r')
        # NOTE(review): np.fromfile is given the path here, not the opened
        # ifile (unlike _processAxisX) — works, but confirm intent.
        yAxis = np.fromfile(path,sep='\n')
        ifile.close()
        if removeTxt:
            os.remove(path)
        yAxis = yAxis[::-1]
        self._pickle(yAxis, os.path.join(self.outputfolder,'MassMobilityYaxis.amphi'))

    def _pickle(self,obj,filename):
        # numpy's ndarray.dump pickles the array to filename.
        obj.dump(filename)

    def _unPickle(self,filename):
        ifile = open(os.path.join(filename),'rb')
        obj = pickle.load(ifile)
        ifile.close()
        return obj

    def makePreview(self):
        """Render a driftscope preview PNG next to the output folder."""
        import fast_driftscope_image as fdi
        image = fdi.Driftscope_image()
        image.load_folder(self.outputfolder)
        image.normalise_mobility()
        image.driftscope()
        imagename = self.rawfolder.rstrip('/')[:-4] + '_preview.png'
        image.savefig(os.path.dirname(self.outputfolder),imagename)
|
I see myself more as an artist than as a photographer, and this creative approach affects how I choose to use my Lensbaby lenses. When used in the right way, they allow me to create something magical.
Lensbaby slows me down in a good way. Having to manual focus makes me think more critically about every shot. I’m not just shooting like crazy and hoping to get the shot. Creativity is the force that breaks down the process, leading to better results.
I am a professional photographer and makeup artist. I use state-of-the-art equipment to ensure the highest quality work. I want to share my vision of the world with my audience. I’m passionate about capturing special moments and emphasizing the pure beauty of nature.
I’m an experienced photographer interested in taking perfect shots of the various things and people that surround me – from marine landscapes to wild nature. I have a special passion for removing the boundaries imposed by various genres of photography by combining different elements into a single masterpiece that’s designed to captivate and fascinate my audience.
|
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
class KShortcutsEditor(__PyQt4_QtGui.QWidget):
    """Auto-generated binding stub for KDE's KShortcutsEditor widget.

    Signatures are unknown to the stub generator, so every method is a
    placeholder; the real implementations live in the compiled PyKDE4
    extension module.
    """

    def addCollection(self, *args, **kwargs): # real signature unknown
        pass

    def allDefault(self, *args, **kwargs): # real signature unknown
        pass

    def clearCollections(self, *args, **kwargs): # real signature unknown
        pass

    def clearConfiguration(self, *args, **kwargs): # real signature unknown
        pass

    def commit(self, *args, **kwargs): # real signature unknown
        pass

    def exportConfiguration(self, *args, **kwargs): # real signature unknown
        pass

    def importConfiguration(self, *args, **kwargs): # real signature unknown
        pass

    def isModified(self, *args, **kwargs): # real signature unknown
        pass

    def keyChange(self, *args, **kwargs): # real signature unknown
        pass

    def printShortcuts(self, *args, **kwargs): # real signature unknown
        pass

    def resizeColumns(self, *args, **kwargs): # real signature unknown
        pass

    def save(self, *args, **kwargs): # real signature unknown
        pass

    def undoChanges(self, *args, **kwargs): # real signature unknown
        pass

    def writeConfiguration(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    # Enum values whose real representations the generator could not emit.
    ActionType = None # (!) real value is ''
    ActionTypes = None # (!) real value is ''
    AllActions = -1
    ApplicationAction = 2
    GlobalAction = 4
    LetterShortcuts = None # (!) real value is ''
    LetterShortcutsAllowed = 1
    LetterShortcutsDisallowed = 0
    WidgetAction = 0
    WindowAction = 1
|
For the remixed version of the original RandomBlackDude-produced track, Left Brain brightens it up a bit with a whiny synthesizer riff. This effort also contains slow, dragging drums, which leaves enough space for Earl to speak his mind.
"It's probably been 12 years since my father left / Left me fatherless / And I just use to say I hate him in dishonest jest / When honestly I miss this n---- like when I was 6 / And every time I had the chance to say it I would swallow it," spits the 20-year-old Odd Future member.
In addition, the folks at The FADER magazine and Sonos take a trip to Earl's Los Angeles home for their monthly 'At Home With...' series. The young spitter shows off his digs and reveals that though he's glad to have his own place, he wishes he lived somewhere besides Hollywood.
"If I got a second chance I would not live here," he admits. "I live in the middle of Hollywood. That's like living in Las Vegas. It's just nasty. Honestly, when I got an apartment I just signed and got my advance. I couldn't stay at my mom's house. We were just on separate pages, so I was staying like in between Tyler's house and Taco's house. Like sleeping on the couch."
The visual also shows different areas of Mr. Sweatshirt's apartment, including his living room, where he jokingly says his couch was handcrafted in Italy back in 1897, although it's actually just a regular sofa.
Check out his Hollywood pad and his fresh new remix below.
|
"""Add smsgateway table
Revision ID: 5402fd96fbca
Revises: 50adc980d625
Create Date: 2016-06-19 17:25:05.152889
"""
# revision identifiers, used by Alembic.
revision = '5402fd96fbca'
down_revision = '50adc980d625'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.exc import OperationalError, ProgrammingError, InternalError
def upgrade():
    """Create the ``smsgateway`` and ``smsgatewayoption`` tables.

    Written defensively: if the tables already exist (e.g. a previous,
    partially applied run), the database error is caught and reported
    instead of aborting the migration.
    """
    try:
        op.create_table('smsgateway',
                        sa.Column('id', sa.Integer(), nullable=False),
                        sa.Column('identifier', sa.Unicode(length=255), nullable=False),
                        sa.Column('description', sa.Unicode(length=1024), nullable=True),
                        sa.Column('providermodule', sa.Unicode(length=1024), nullable=False),
                        sa.PrimaryKeyConstraint('id'),
                        sa.UniqueConstraint('identifier')
                        )
        op.create_table('smsgatewayoption',
                        sa.Column('id', sa.Integer(), nullable=False),
                        sa.Column('Key', sa.Unicode(length=255), nullable=False),
                        sa.Column('Value', sa.UnicodeText(), nullable=True),
                        sa.Column('Type', sa.Unicode(length=100), nullable=True),
                        sa.Column('gateway_id', sa.Integer(), nullable=True),
                        sa.ForeignKeyConstraint(['gateway_id'], ['smsgateway.id'], ),
                        sa.PrimaryKeyConstraint('id'),
                        sa.UniqueConstraint('gateway_id', 'Key', name='sgix_1')
                        )
        op.create_index(op.f('ix_smsgatewayoption_gateway_id'), 'smsgatewayoption', ['gateway_id'], unique=False)
    except (OperationalError, ProgrammingError, InternalError) as exx:
        # Fix: exception objects have no ".message" attribute on Python 3,
        # and a failed CREATE TABLE reports "already exists" (the original
        # "duplicate column name" text is emitted by ADD COLUMN, which this
        # migration never performs).  Inspect the stringified driver error.
        if "already exists" in str(exx.orig).lower():
            print("Good. Table smsgateway already exists.")
        else:
            print("Table already exists")
            print(exx)
    except Exception as exx:
        print("Could not add Table smsgateway")
        print(exx)
    ### end Alembic commands ###
def downgrade():
    """Remove the SMS-gateway tables created by the upgrade step."""
    # Drop the dependent index first, then the child table, then the parent.
    op.drop_index(op.f('ix_smsgatewayoption_gateway_id'), table_name='smsgatewayoption')
    for table_name in ('smsgatewayoption', 'smsgateway'):
        op.drop_table(table_name)
|
The Candy Bar is one cool candy store, selling 1/4 pounds of fudge, taffy, chocolate and novelty candy. Probably more types of unique candies in one store than you have ever seen. We have an ice cream shop that sells ice cream, shakes, sundaes, etc. and is available to enjoy on the outside patio. Remember all those candies you enjoyed as a child? The Candy Bar probably has it on the shelves.
Customers can find the most well stocked unique Candy shops they have ever visited.
The Candy Bar is one of the oldest kids’ bars on the island.
There are plenty of Things To Do at Put-in-Bay.
|
#!/usr/bin/env python
"""Talk with the daemon."""
import logging
import sys
import Pyro.core
# Log everything from DEBUG upward to stdout, with timestamps and level.
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                    level=logging.DEBUG,
                    stream=sys.stdout)
def main(up):
    """Do something with bartimaeus - who lives in another realm."""
    # you have to change the URI below to match your own host/port.
    logging.info("Send up: %i", up)
    daemon_uri = "PYROLOC://localhost:7766/bartid"
    remote = Pyro.core.getProxyForURI(daemon_uri)
    print(remote.count(up))
def get_parser():
    """Get parser object for call_demon.py."""
    from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
    cli_parser = ArgumentParser(
        description=__doc__,
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    cli_parser.add_argument(
        "-n",
        dest="up",
        type=int,
        default=1,
        help="count up",
    )
    return cli_parser
if __name__ == "__main__":
    # CLI entry point: parse the arguments, then forward the count to the daemon.
    args = get_parser().parse_args()
    main(args.up)
|
Luke 8:22-25. This sermon was preached by Pastor Marty Bonner on December 16, 2018.
Today we are going to talk about the nature of faith. Faith is such that it can be strengthened through its use. Much like a muscle that is not used, our faith can atrophy to a point of spiritual danger.
This is not an attempt to inflate our egos with images of a spiritual Arnold Schwarzenegger. Some people too easily think far more of themselves than they should and others too easily think far less of themselves than they should. We must not err in either direction.
No, this is recognition that God does want believers to become stronger in trusting Him. He uses the difficulties of this life to strengthen our faith and accomplish the good work that He has for us to do. In today’s story we are going to see a day in the life of The Twelve disciples, and how Jesus used it to strengthen their faith.
By the time we get to Luke 8:22, the disciples have followed Jesus for a while. In verses 1-2 of the same passage we are told that they had already passed through every city and village “preaching and bringing the glad tidings of the Kingdom of God. And The Twelve were with Him, and certain women who had been healed of evil spirits and infirmities.” I say this because it shows us that they had a certain level of faith already. They had come to see Jesus as a wise teacher with the knowledge of God, and a powerful healer who could set people free from the worst of diseases and even evil spirits. However, they did not yet understand completely who Jesus was. Even when we know the stories of the Bible and what God can do, we did not walk through those times. Thus we have to learn to trust God for ourselves in the things that we experience.
In this passage the disciples will be tested with a new set of circumstances. Jesus tells them to get in boats and go across the Sea of Galilee towards the eastern side. Now, several of them were experienced fishermen, but not all of them were. Even then, this storm that they would encounter was the worst that they had ever encountered while in a boat.
This highlights an issue in the area of faith. We must resist the temptation to believe that if we are following God the way will be easy and without difficulty. The devil can undermine your faith by getting you first to believe that you are not where God wants you to be and second that God has abandoned you there. Even if the first part is true, God will not abandon the repentant. Remember the prodigal son. Jesus is always as close as the mention of His name. So our faith is going to be tested whether we like it or not. It is the only way to strengthen our faith and accomplish what God wants to accomplish in our lives.
However remember 1 Corinthians 10:12-13. “Therefore let him who thinks he stands take heed lest he fall. No temptation has overtaken you except such as is common to man; but God is faithful, who will not allow you to be tempted beyond what you are able, but with the temptation will also make the way of escape, that you may be able to bear it.” Notice that in verse 12 we are warned to watch out lest we fall. God does not want us to become proud and arrogant in our abilities. We can fall at any time, period. However, verse 13 is the encouragement. We can trust God’s leading. He is faithful and it is His help that will enable us to deal with the trial.
Though this verse speaks of an escape, it also speaks of being able to bear our trials. There is always a way through every test when Jesus is with us, but we should never see avoiding the test as God’s way of escape. His way always leads right through the trial and out the other side. The temptation is to treat Jesus like training wheels for the disciples. Later the wheels will come off and they will be just like Jesus. In truth, they would only do what they did later by continuing to follow Jesus by faith, and allowing Him to do what He wanted to do through them. It was still God’s power that enabled them to do anything and the same will be true for us. There will never be a time in this life that we do not need Jesus. However, the key is that Jesus is with us, just as real as He was with those disciples that day on the boat when the storm threatened to destroy them.
So let’s follow up on the idea that God has a way through our trial. The idea to boat across the sea came from Jesus and He was not ignorant of what was going to happen. He could have had them walk around. God always has options, and yet he chose this one that led to this difficult place.
It was imperative for the disciples that they come to the knowledge that Jesus was the Son of God, that in Him the fullness of the Godhead dwelt completely. They needed to know that He was much more than a wise teacher, and powerful healer. He is the Christ, the Son of the Living God.
Though the situation looks like they are going to die, God is always watchful to the end that our faith is not overwhelmed. Much like Israel pinned in against the Red Sea by Pharaoh’s army, they had nowhere to go. Let me remind us of a verse in Exodus 13:17-18. It says, “Then it came to pass, when Pharaoh had let the people go, that God did not lead them by way of the land of the Philistines, although that was near; for God said, ‘Lest perhaps the people change their minds when they see war, and return to Egypt.’ So God led the people around by way of the wilderness of the Red Sea.” God is always aware of what would be too much, what would overwhelm us. Just as He was watching out for Israel, so He was watching out for The Twelve, and so He is watching out for you and me.
The truth is not that we can’t go through what is in front of us, but that we don’t want to do it. We are capable of going through far greater things than we want to do and to be tested at levels far above our comfort zones. It is not that we can’t follow the Lord, but that our flesh doesn’t want us to do so. So the question is not can you follow the Lord, but will you?
Notice that God picked the easier way, but not an easy way. We always think we have the toughest road, but the truth is that it could have been tougher. The tests that Israel went through in the wilderness were tough, but they were also preparing them. God taught them that they could trust Him for provision, healing, leading, and defense. All these things were preparing them for the day when they would enter the Promised Land and have to fight giants. It was all God’s preparation for Israel and in our story today, God’s preparation for the disciples. They would face far greater storms than the one they faced on the Sea of Galilee that day. Israel left Egypt unready for war against Egyptians, much less giants. In the beginning God completely fought for them. Moses told them, “Stand still and see the salvation of the Lord!” In the wilderness God had them assist in the miracles of the desert by going out and gathering the manna and the birds, but in obedience to God’s commands about how much. He had them break camp and make camp, marching dutifully along, at the direction of the cloud that led them. When they reach the Promised Land, they are ready to go to war against giants, not by themselves, but with the Lord working with them. This is the careful preparation of God in all of our lives, helping us to grow in our ability to exercise our faith in Him.
If there is an impossible situation in your path, know this. God has led you to this day. If you haven’t been living for Him then repent and ask Him to help you. If you have been living for Him, then reach out to Him to teach you how to come through even this impossible situation. It may be, “stand still and watch me.” It may be “Go up and fight and I will be with you.” Regardless, we can know that He has picked the best way for us to become stronger in Him, to become more like Him.
This brings us full circle to the story in Luke 8. The Twelve are faced with a situation that is beyond them in the natural, but it is also beyond their faith. Yet, in their time of need they turn to Jesus and this is always the right answer. Satan can use fear and doubt to get you to question God’s love. He does so to get you to walk away from Jesus instead of exercising your faith in Him by calling out to Him. We may not have Jesus physically beside us, but He is just as close by the presence of the Holy Spirit and just as powerful.
How we turn to Jesus is important. When we turn to Jesus, we do need to turn to Him in faith rather than in fear and accusation. In the story the disciples are fearful and their statement to Jesus has a tinge of accusation to it. Perhaps they waited longer than they should have to turn to Jesus, while bailing water and trying to keep the boat together. Perhaps they resented the serene look on the face of Jesus as He slept. Doesn’t He know that we are perishing? Yet, Jesus was only asleep as long as they did not wake Him. Why do we delay turning to Christ in our trials? Don’t do that. We don’t always get an answer right away, but the answer doesn’t start until we turn to Him.
The good news is that we don’t have to be perfect to have God’s help. However, we should learn and do better next time. Learn to turn to Christ in expectant hope. Lord, what would you have me/us do? Is there anything? I trust in You, Lord! Clearly this is easier said than done, and none of us do it without error. Yet, we can learn to become stronger in this area as we walk with the Lord Jesus.
Let us end with the obvious lesson of this story. Jesus can handle the material world and the supernatural world in our lives. In this situation the disciples recognize that Jesus is more than a man. “He commands even winds and water, and they obey Him!” When they hit the shore only one event happens before they get in their boats and go back, and that is meeting the Gadarene demoniac. This guy was not just possessed with an evil spirit. He had a legion of spirits within him. He represents a satanic stronghold that only God could break down. Christ demonstrates to His disciples (and us by the way) in back to back situations that He can handle both the natural creation and the supernatural creation. He truly is Lord over all things.
May God help us all to exercise our faith in Him by following Him today. Whatever situation you are in, and whether you arrived there by following Jesus or running your own way, turn to Him today and let Him lead you into a place of stronger faith and victorious living.
Acts 16:16-19; Ephesians 6:10-12; Revelation 9:20-21. This sermon was preached by Pastor Marty Bonner on October 23, 2016.
It is interesting that as drug abuse has sky-rocketed, so has alternative spirituality and paranormal activities. Historically drugs have been used to make it easier to connect with the spirit world. However, America’s fascination with the paranormal began long before the 1950’s. In the 1800’s a large influx of eastern mysticism came to the United States and spawned groups that sought to explore the spirit world. Séances and contacting spirit guides began to catch fire among certain groups. Contrary to popular conception, we are not becoming more and more anti-supernatural in our society. Actually more and more people are adopting a spiritual world view that is alternative to the biblical view. These alternative views typically teach that all things are connected and that the living can tap into this cosmic connection or force in order to obtain information. They do not believe in a personal God that will judge our life. Rather, they believe that we are all part of God, along with everything else in the universe. This paranormal paradigm shift that is happening in America is really a going back to the ancient world views. I do not mean that there are huge numbers of people worshipping Zeus. However, they are reaching out to entities and conversing with “ascended masters,” whether they go by the name Zeus or not. In many cases American Christians are not prepared for the reality of where this is headed.
It is past time for Christians to get acquainted with what the Bible has to say on this issue and be ready to give an answer for the reason for the hope that we hold.
In Acts 16:16-19, we join the Apostle Paul in the Greek city of Philippi. Now a large percentage of the modern Church has rejected the supernatural element of the Bible. Yes, they will acknowledge that there is a God, but the idea of angels and demons interacting with the world is just too much for them. Miracles are too much for them. Thus they explain away the miracles and supernatural events of the Bible. We will see in our passage today that this is completely foreign to the first Christians. Such a “belief” is not true Christianity. It is something else entirely.
To set up this encounter in Acts 16, we need to understand that the Bible presents the reality of spiritual beings. Just as there is a hierarchy among the peoples of the earth, so there is a heavenly hierarchy within the spirit world. The Bible presents a cosmic problem. A rebellion against the Creator has begun in the spirit world and mankind has been caught up in it. There are good and evil spirit beings that are of lesser or greater power. There are holy and fallen angels. Now when Paul entered Philippi, he had come to preach the Gospel of Jesus to those who would listen. At some point a young girl began to follow him around loudly proclaiming that Paul was a servant of the Most High God and people should listen to him. This went on for several days. No doubt Paul was praying about what motivated this girl and what he should do. We are told that the girl was possessed with a spirit of divination and was used by her master to make him money by telling fortunes. The word translated “divination” is literally python. She had a python spirit. This snake spirit was connected to the god Apollo and was supposed to be responsible for the amazingly accurate prophecies given by the oracles of Apollo. Now this is not a story of Greek mythology. Paul meets a real slave girl who is apparently good enough at fortune telling to make her masters money. Notice that this spirit that possesses the girl operates in a “benign” way in order to give her information. This is quite the opposite of the spirits that possessed the Gadarene demoniac that Jesus met on the sea of Galilee (Luke 8). Those spirits manifested in a raging and insane manner that scared the daylights out of anyone who met him. Thus some spirits are predatory in a malignant and dysfunctional way. But, other spirits are predatory in a seemingly benign and somewhat functional way. The ancient world was afraid of the spirits. 
This fear and yet desire to know how to be safe in this world is how Satan kept the ancient world under his sway. The early Christians knew that demons and deceiving spirits existed. Any fortunes that this girl was ever able to give were part of a deceptive game plan to keep the Philippians under the control of the evil spirits.
Notice how Paul responds. He is annoyed by what the girl is doing. However, his problem is not with her. He knows that the real problem is that python spirit that possessed her. Early Christians knew that their warfare was not against people, kings, or nations. They knew that their warfare was against spiritual entities that Satan used to manipulate the people. This girl was not just a slave of men. She was a slave to a predatory, evil spirit.
The West has come under a world view that is sometimes called Scientific Materialism. It teaches that there is no spirit world and that all things can be explained through natural processes. If there is something that can’t be explained we are given a kind of promissory note that promises that science will eventually figure it out. This materialism points to the supernatural beliefs of the past and labels them all ignorance. This itself is ignorant, but let’s press on. It is partially true that the ancient supernatural beliefs were because of man’s ignorance. However, because it rejects a spirit world, materialists do not recognize that much of the ignorance was because of meddling, evil spirits.
Materialism has problems though. If humans are just biological machines and our thoughts are merely the random firing of synapses (i.e. just natural processes), then there is no right and wrong and there is no purpose or free will. A crushing hopelessness lies at the end of such a belief. It also runs into a logical contradiction. If there is only material then the material had to either bring itself into existence or it is eternal. Yet, if it has always been (if we can eternally go back to an earlier time) then time would never have gotten to us. You cannot have an infinite regression of a material thing (space, time, matter). This adds to the weight that few humans can actually follow such a world view. It is too cruel and too inhuman. Something in us cries for something beyond. The weakness of scientific materialism has given rise to a new wave of the old spiritual ideas and paranormal activity like Paul encountered in Philippi.
As I said earlier, this began in the 1800’s as gurus and Tibetan masters interacted with the West. The mystical world view of the East promoted a mushy spiritual paradigm. Everything is God and we can learn to tap into it and the collective wisdom of all the spirits. Thus séances, channeling, the occult, and many other vehicles became popular ways to connect to the spirit world. Now surely there were charlatans who were out to make a buck off of the superstitions of suckers. But, some activity could not be explained with this. Early debunkers were not entirely successful. Of course they didn’t have the scientific know how that we do today, right? But then explain why even modern debunkers have trouble explaining away every experience and story that comes along. Explain why our colleges and military still conduct experiments in the paranormal.
This alternate spiritual world view was different than the biblical world view. However, during the 1800’s many Christian denominations began to give up the idea of the supernatural and miraculous. Thus the Church was seduced to embrace scientific materialism and create a hybrid that allowed for a God, Jesus and not much else. Jesus became a mere example and most of what is said about him never happened. These moralists on one side and a harsh scientific materialism on the other side became a perfect dichotomy for funneling people toward something more and something spiritual.
The alternative spiritual world view offered something that was new and exciting to many in America and the Western countries. Thus many unwittingly opened themselves up to evil spirits with malignant designs. The Bible warned us about them, but we have been set up to disregard what the biblical prophets had to say. These spirits are deceivers and they cannot be trusted. This is why God forbids and warns against attempts to contact spirits. Christians are to pray only to God Himself. We interact with only His Holy Spirit. Don’t get me wrong. The “wisdom” of these spirit beings can be powerful and seductive. But like Satan with Eve, they only seek to steal from us what we have.
First we need to quit running away from the Bible and start believing it. You do not have to reject the reality of science in order to embrace the Bible. You might have to reject the world view that some scientists have. But, many scientists are Christians too. Some of them do not have a biblical world view, but some of them do. So what is the biblical world view? God created both the spiritual and material world. A rebellion in the spirit world came into the material world, when Satan tricked mankind into disobeying God. God then gave mankind over to the desires of their heart and they became enslaved to these spiritual beings, or false gods. Yet, in His mercy, God provided a way for humans to be saved from this slavery and His eternal judgment. Christians have been sent into the entire world to set people free from the rule of these false gods and their demonic spirits.
In fact, Revelation 9 reveals that in the last days before Christ returns, a spiritual prison called the bottomless pit will be opened up and the earth will be inundated with malevolent spirits who will angrily attack mankind because they know their time is short. The earth is not becoming less spiritual. We are headed towards a time of great spiritual deception and attack. Revelation 9:20-21 says, “The rest of mankind, who were not killed by these plagues, did not repent of the works of their hands nor give up worshiping demons and idols of gold and silver and bronze and stone and wood, which cannot see or hear or walk, nor did they repent of their murders or their sorceries or their sexual immorality or their thefts.” This is where we are headed.
We should also be busy warning people about these deceiving spirits and the deceptions they have promoted in our society. Many are enslaved by the teachings and ideas that come from these evil spirits. Science is not our enemy. Our enemy is using science and a deceptive interpretation of its findings to deceive mankind. He wants us to be caught off-guard by his true intentions.
Lastly, Christians must walk in the authority of Jesus Christ. Paul said to the demon, “I command you in the name of Jesus Christ to come out of her.” A little girl was set free that day because Paul knew that these evil spirits cannot stand up to Jesus. We have been sent on a mission from God to tell people about Jesus. If you read the entire chapter of Acts 16, you see that Paul got his first convert by preaching by the river where people gathered to pray. Lydia believed in Jesus and asked Paul to stay and teach her more. The world would call Paul a cultural murderer. But, He knew he was operating under the authority of Jesus who is Lord of all Lords and King of all kings, both on earth and in the heavens above. Let’s trust Him and be about our Father’s business.
|
''' This class is responsible for architecture-specific things such as call emulation and so forth. '''
import capstone as _capstone
import struct as _struct
import pyvex as _pyvex
import logging
# Module-level logger shared by the Arch classes below.
l = logging.getLogger('arch.Arch')
class Arch(object):
    """Base description of a CPU architecture.

    Holds architecture-specific metadata (register offsets, endness,
    canned instruction encodings, capstone/VEX identifiers) plus helpers
    for call emulation, disassembly and state preparation.  Concrete
    subclasses (ArchAMD64, ArchARM, ...) override the class-level
    attributes declared at the bottom of this class.

    NOTE(review): this module targets Python 2 (it uses `long`, true
    division on ints, and list-returning `map`); those constructs are
    kept as-is and flagged where they occur.
    """

    def __init__(self, endness):
        """Initialize for the given VEX endness.

        :param endness: either 'Iend_LE' or 'Iend_BE'
        :raises ArchError: if *endness* is not a valid VEX endness string

        The class-level defaults describe the little-endian flavour; for
        a big-endian target every endness-sensitive attribute is flipped.
        """
        if endness not in ('Iend_LE', 'Iend_BE'):
            raise ArchError('Must pass a valid VEX endness: "Iend_LE" or "Iend_BE"')
        if endness == 'Iend_BE':
            self.vex_endness = "VexEndnessBE"
            self.memory_endness = 'Iend_BE'
            self.register_endness = 'Iend_BE'
            # Swap the capstone endness mode flag from LE to BE.
            self.cs_mode -= _capstone.CS_MODE_LITTLE_ENDIAN
            self.cs_mode += _capstone.CS_MODE_BIG_ENDIAN
            # Byte-swap the canned instruction encodings to match.
            self.ret_instruction = reverse_ends(self.ret_instruction)
            self.nop_instruction = reverse_ends(self.nop_instruction)

    def __repr__(self):
        return '<Arch %s (%s)>' % (self.name, self.memory_endness[-2:])

    def __eq__(self, other):
        # NOTE(review): assumes *other* is Arch-like (has .name, .bits,
        # .memory_endness); anything else raises AttributeError rather
        # than returning NotImplemented.
        return self.name == other.name and \
               self.bits == other.bits and \
               self.memory_endness == other.memory_endness

    def __ne__(self, other):
        return not self == other

    def __getstate__(self):
        # The capstone handle wraps a C object and cannot be pickled; drop
        # it and let the `capstone` property lazily recreate it later.
        self._cs = None
        return self.__dict__

    def __setstate__(self, data):
        self.__dict__.update(data)

    def gather_info_from_state(self, state):
        """Collect the values of all persistent registers from *state*.

        :return: dict mapping register name -> loaded value
        """
        info = {}
        for reg in self.persistent_regs:
            info[reg] = state.registers.load(reg)
        return info

    def prepare_state(self, state, info=None):
        """Restore previously gathered persistent-register values into *state*."""
        if info is not None:
            # TODO: Only do this for PIC!
            for reg in self.persistent_regs:
                if reg in info:
                    state.registers.store(reg, info[reg])
        return state

    def get_default_reg_value(self, register):
        """Return the default value for *register*, or None if unknown.

        The pseudo-name 'sp' is first resolved to the concrete
        stack-pointer register via ``sp_offset``.
        """
        if register == 'sp':
            # Convert it to the corresponding register name
            registers = [r for r, v in self.registers.items() if v[0] == self.sp_offset]
            if len(registers) > 0:
                register = registers[0]
            else:
                return None
        for reg, val, _, _ in self.default_register_values:
            if reg == register:
                return val
        return None

    def struct_fmt(self, size=None):
        """Return a ``struct`` format string for an integer of *size* bits
        (defaulting to the architecture word size) in this arch's endness.

        :raises ValueError: if *size* is not 8, 16, 32 or 64
        """
        fmt = ""
        if size is None:
            size = self.bits
        if self.memory_endness == "Iend_BE":
            fmt += ">"
        else:
            fmt += "<"
        if size == 64:
            fmt += "Q"
        elif size == 32:
            fmt += "I"
        elif size == 16:
            fmt += "H"
        elif size == 8:
            fmt += "B"
        else:
            # Fix: message previously misspelled "muliple".
            raise ValueError("Invalid size: Must be a multiple of 8")
        return fmt

    @property
    def bytes(self):
        # NOTE(review): true division — on Python 3 this yields a float;
        # under Python 2 (this module's target) int/int stays an int.
        return self.bits/8

    @property
    def capstone(self):
        """Lazily created capstone disassembler handle for this arch."""
        if self.cs_arch is None:
            raise ArchError("Arch %s does not support disassembly with capstone" % self.name)
        if self._cs is None:
            self._cs = _capstone.Cs(self.cs_arch, self.cs_mode)
            self._cs.detail = True
        return self._cs

    def translate_dynamic_tag(self, tag):
        """Map a numeric ELF dynamic tag to its symbolic name, if known."""
        try:
            return self.dynamic_tag_translation[tag]
        except KeyError:
            if isinstance(tag, (int, long)):  # `long` exists on Python 2 only
                l.error("Please look up and add dynamic tag type %#x for %s", tag, self.name)
            return tag

    def translate_symbol_type(self, tag):
        """Map a numeric ELF symbol type to its symbolic name, if known."""
        try:
            return self.symbol_type_translation[tag]
        except KeyError:
            if isinstance(tag, (int, long)):  # `long` exists on Python 2 only
                l.error("Please look up and add symbol type %#x for %s", tag, self.name)
            return tag

    def translate_register_name(self, offset):
        """Return the register name at *offset*, or the offset as a string."""
        try:
            return self.register_names[offset]
        except KeyError:
            return str(offset)

    def disassemble_vex(self, string, **kwargs):
        """Lift the byte string to a pyvex IRSB for this architecture.

        :raises ArchError: if this arch has no VEX backend
        """
        if self.vex_arch is None:
            raise ArchError("Arch %s does not support VEX lifting" % self.name)
        return _pyvex.IRSB(bytes=string, arch=self, **kwargs)

    # Determined by watching the output of strace ld-linux.so.2 --list --inhibit-cache
    def library_search_path(self, pedantic=False):
        """Return the dynamic loader's library search path for this arch.

        With ``pedantic=True``, every directory is expanded with the
        tls/ and per-arch subdirectories the loader also probes.
        """
        subfunc = lambda x: x.replace('${TRIPLET}', self.triplet).replace('${ARCH}', self.linux_name)
        path = ['/lib/${TRIPLET}/', '/usr/lib/${TRIPLET}/', '/lib/', '/usr/lib', '/usr/${TRIPLET}/lib/']
        if self.bits == 64:
            path.append('/usr/${TRIPLET}/lib64/')
        elif self.bits == 32:
            path.append('/usr/${TRIPLET}/lib32/')
        if pedantic:
            path = sum([[x + 'tls/${ARCH}/', x + 'tls/', x + '${ARCH}/', x] for x in path], [])
        return map(subfunc, path)  # NOTE(review): Python 2 map() returns a list

    # various names
    name = None
    vex_arch = None
    qemu_name = None
    ida_processor = None
    linux_name = None
    triplet = None

    # instruction stuff
    max_inst_bytes = None
    ret_instruction = ''
    nop_instruction = ''
    instruction_alignment = None

    # register offsets
    ip_offset = None
    sp_offset = None
    bp_offset = None
    ret_offset = None

    # memory stuff
    bits = None
    vex_endness = 'VexEndnessLE'
    memory_endness = 'Iend_LE'
    register_endness = 'Iend_LE'
    stack_change = None

    # is it safe to cache IRSBs?
    cache_irsb = True

    function_prologs = set()
    function_epilogs = set()

    # Capstone stuff
    cs_arch = None
    cs_mode = None
    _cs = None

    call_pushes_ret = False
    initial_sp = 0x7fff0000

    # Difference of the stack pointer after a call instruction (or its equivalent) is executed
    call_sp_fix = 0

    stack_size = 0x8000000

    # Register information
    default_register_values = [ ]
    entry_register_values = { }
    default_symbolic_registers = [ ]
    registers = { }
    register_names = { }
    argument_registers = { }
    persistent_regs = [ ]
    concretize_unique_registers = set() # this is a list of registers that should be concretized, if unique, at the end of each block

    lib_paths = []
    reloc_s_a = []
    reloc_b_a = []
    reloc_s = []
    reloc_copy = []
    reloc_tls_mod_id = []
    reloc_tls_doffset = []
    reloc_tls_offset = []
    dynamic_tag_translation = {}
    symbol_type_translation = {}
    got_section_name = ''
def arch_from_id(ident, endness='', bits=''):
    # Heuristically map a free-form architecture identifier (plus optional
    # endness/bits hints, as ints or strings) to a concrete Arch instance.
    # Normalize the word size: any mention of "64" means 64 bits.
    if bits == 64 or (isinstance(bits, str) and '64' in bits):
        bits = 64
    else:
        bits = 32
    endness = endness.lower()
    endness_unsure = False
    # Order matters: longer, unambiguous substrings ('lit', 'big', 'lsb',
    # 'msb', 'le', 'be') are tested before the single-letter fallbacks,
    # which only set a tentative guess (endness_unsure).
    if 'lit' in endness:
        endness = 'Iend_LE'
    elif 'big' in endness:
        endness = 'Iend_BE'
    elif 'lsb' in endness:
        endness = 'Iend_LE'
    elif 'msb' in endness:
        endness = 'Iend_BE'
    elif 'le' in endness:
        endness = 'Iend_LE'
    elif 'be' in endness:
        endness = 'Iend_BE'
    elif 'l' in endness:
        endness = 'Iend_LE'
        endness_unsure = True
    elif 'b' in endness:
        endness = 'Iend_BE'
        endness_unsure = True
    else:
        # No usable hint at all: default to little-endian, but let the
        # per-architecture defaults below override it.
        endness = 'Iend_LE'
        endness_unsure = True
    ident = ident.lower()
    # Architecture dispatch.  PPC and MIPS default to big-endian whenever
    # the endness hint was inconclusive.
    if 'ppc64' in ident or 'powerpc64' in ident:
        if endness_unsure:
            endness = 'Iend_BE'
        return ArchPPC64(endness)
    elif 'ppc' in ident or 'powerpc' in ident:
        if endness_unsure:
            endness = 'Iend_BE'
        if bits == 64:
            return ArchPPC64(endness)
        return ArchPPC32(endness)
    elif 'mips' in ident:
        # "mipsel" explicitly names the little-endian variant.
        if 'mipsel' in ident:
            if bits == 64:
                return ArchMIPS64('Iend_LE')
            return ArchMIPS32('Iend_LE')
        if endness_unsure:
            if bits == 64:
                return ArchMIPS64('Iend_BE')
            return ArchMIPS32('Iend_BE')
        if bits == 64:
            return ArchMIPS64(endness)
        return ArchMIPS32(endness)
    elif 'arm' in ident or 'thumb' in ident:
        if bits == 64:
            return ArchAArch64(endness)
        return ArchARM(endness)
    elif 'aarch' in ident:
        return ArchAArch64(endness)
    elif 'amd64' in ident or ('x86' in ident and '64' in ident) or 'x64' in ident:
        return ArchAMD64('Iend_LE')
    elif '386' in ident or 'x86' in ident or 'metapc' in ident:
        if bits == 64:
            return ArchAMD64('Iend_LE')
        return ArchX86('Iend_LE')
    raise ArchError("Could not parse out arch!")
def reverse_ends(string):
    """Byte-swap each 4-byte word of *string* (LE words -> BE words).

    Fix: uses floor division so the word count is an int on both
    Python 2 and Python 3 — true division produced a float under
    Python 3 and broke the repeated format string.

    :param string: byte string whose length is a multiple of 4
    :return: the byte string with every 32-bit word endian-swapped
    """
    ise = 'I'*(len(string)//4)
    return _struct.pack('>' + ise, *_struct.unpack('<' + ise, string))
# pylint: disable=unused-import
from .arch_amd64 import ArchAMD64
from .arch_x86 import ArchX86
from .arch_arm import ArchARM, ArchARMEL, ArchARMHF
from .arch_aarch64 import ArchAArch64
from .arch_ppc32 import ArchPPC32
from .arch_ppc64 import ArchPPC64
from .arch_mips32 import ArchMIPS32
from .arch_mips64 import ArchMIPS64
from .archerror import ArchError
# Canonical pre-built instances of every supported architecture, one per
# endianness where the ISA supports both (AMD64 and X86 are LE-only, so they
# take their default endianness).
all_arches = [
    ArchAMD64(), ArchX86(),
    ArchARM('Iend_LE'), ArchARM('Iend_BE'),
    ArchAArch64('Iend_LE'), ArchAArch64('Iend_BE'),
    ArchPPC32('Iend_LE'), ArchPPC32('Iend_BE'),
    ArchPPC64('Iend_LE'), ArchPPC64('Iend_BE'),
    ArchMIPS32('Iend_LE'), ArchMIPS32('Iend_BE'),
    ArchMIPS64('Iend_LE'), ArchMIPS64('Iend_BE')
]
|
The Light Pink & Black Gym Bag has been designed with very high fashion quotient. The harmony of the pink top area as well as the black base and sides along with the pink padded handles contributes towards making it an irresistible inclusion in any gym wardrobe. This option is spacious and durable at the same time.
|
import pg
import getpass
import math
import string
import sys
import settingReader
import numpy
import matplotlib.pyplot as plt
import matplotlib
from MySQLdb.constants.FLAG import NUM
class BSPTree(object):
    """Helper around a PostgreSQL galaxy-tree database.

    Opens a connection on construction, runs SQL statements, builds a regular
    grid of rectangles covering the simulation volume, and maps a query
    polygon onto the grid cells it intersects (visualising the result with
    matplotlib).

    NOTE(review): written for Python 2 (`raw_input`, `string.lower`); porting
    to Python 3 would require updating those call sites.
    """
    def __init__(self,Options):
        '''
        Constructor.

        :param Options: settings dict (parsed from settings.xml); the PGDB:*
            keys supply the PostgreSQL connection parameters used below.
        '''
        self.Options=Options
        self.serverip=self.Options['PGDB:serverip']
        self.username=self.Options['PGDB:user']
        self.password=self.Options['PGDB:password']
        self.port=int(self.Options['PGDB:port'])
        self.DBName=self.Options['PGDB:NewDBName']
        # Prompt interactively when no password was configured.
        if self.password==None:
            print('Password for user:'+self.username+' is not defined')
            self.password=getpass.getpass('Please enter password:')
        # Take care that the connection will be opened to standard DB 'master'
        # This is temp. until the actual database is created
        self.CurrentConnection=pg.connect(host=self.serverip,user=self.username,passwd=self.password,port=self.port,dbname=self.DBName)
        print('Connection to DB is open...Start Creating Tables')
    def ExecuteNoQuerySQLStatment(self,SQLStatment):
        """Run a statement that returns no rows; on error, report and pause."""
        try:
            # NOTE(review): lowercasing the whole statement also lowercases
            # any quoted string literals — confirm that is acceptable.
            SQLStatment=string.lower(SQLStatment)
            self.CurrentConnection.query(SQLStatment)
        except Exception as Exp:
            print(">>>>>Error While creating New Table")
            print(type(Exp))
            print(Exp.args)
            print(Exp)
            print("Current SQL Statement =\n"+SQLStatment)
            raw_input("PLease press enter to continue.....")
    def ExecuteQuerySQLStatment(self,SQLStatment):
        """Run a query and return its result rows as a list of tuples."""
        try:
            SQLStatment=string.lower(SQLStatment)
            resultsList=self.CurrentConnection.query(SQLStatment).getresult()
            return resultsList
        except Exception as Exp:
            # On failure this falls through and implicitly returns None.
            print(">>>>>Error While creating New Table")
            print(type(Exp))
            print(Exp.args)
            print(Exp)
            print("Current SQL Statement =\n"+SQLStatment)
            raw_input("PLease press enter to continue.....")
    def GenerateRectangles(self):
        """Tile the bounding box of all trees with StepSize x StepSize cells.

        Fills self.RectArr with rows [X1, X2, Y1, Y2, gridx, gridy].
        """
        GetBoundryBox="select min(MinX), min(MinY), min(MinZ), max(MaxX), max(MaxY), max(MaxZ) from TreeSummary;"
        GlobalSummary=self.ExecuteQuerySQLStatment(GetBoundryBox)[0]
        # Round outward so the grid fully covers the volume.
        MinX=int(math.floor(GlobalSummary[0]))
        MinY=int(math.floor(GlobalSummary[1]))
        MinZ=int(math.floor(GlobalSummary[2]))
        MaxX=int(math.ceil(GlobalSummary[3]))
        MaxY=int(math.ceil(GlobalSummary[4]))
        MaxZ=int(math.ceil(GlobalSummary[5]))
        XLocation=-1
        YLocation=-1
        StepSize=20
        self.RectArr=numpy.zeros((0,6))
        ### Intersection between two Rectangles
        ### http://silentmatt.com/rectangle-intersection/
        for X in range(MinX,MaxX,StepSize):
            XLocation=XLocation+1
            YLocation=-1
            for Y in range(MinY,MaxY,StepSize):
                YLocation=YLocation+1
                BX1=X;
                BX2=X+StepSize
                BY1=Y
                BY2=Y+StepSize
                self.RectArr=numpy.vstack([self.RectArr,[BX1,BX2,BY1,BY2,XLocation,YLocation]])
    def GetRectIds(self,PolyPoints):
        """Return the (gridx, gridy) pairs of grid cells the polygon touches.

        Also draws the grid (intersecting cells in blue) and the polygon.
        """
        BoundingRect=self.GetBoundingRect(PolyPoints)
        LocationsMatrix=numpy.zeros([0,2])
        for Rect in self.RectArr:
            color='yellow'
            #if self.InsidePolygon(Rect[0],Rect[2] , PolyPoints):
            #    color='blue'
            #if self.InsidePolygon(Rect[1],Rect[2] , PolyPoints):
            #    color='blue'
            #if self.InsidePolygon(Rect[0],Rect[3] , PolyPoints):
            #    color='blue'
            #if self.InsidePolygon(Rect[1],Rect[3] , PolyPoints):
            #    color='blue'
            if self.IntersectPolyRect(PolyPoints,BoundingRect,Rect):
                color='blue'
                LocationsMatrix=numpy.vstack([LocationsMatrix,Rect[4:6]])
            plt.gca().add_patch(matplotlib.patches.Rectangle((Rect[0],Rect[2]), Rect[1]-Rect[0], Rect[3]-Rect[2],fc=color))
        #plt.gca().add_patch(matplotlib.patches.Rectangle((BoundingRect[0],BoundingRect[2]), BoundingRect[1]-BoundingRect[0], BoundingRect[3]-BoundingRect[2],fc='white'))
        plt.gca().add_patch(matplotlib.patches.Polygon(PolyPoints,fc='red'))
        plt.gca().autoscale_view()
        plt.draw()
        plt.show()
        return LocationsMatrix
    def IntersectTwoRect(self,RectA,RectB):
        """Axis-aligned rectangle overlap test; rects are [X1,X2,Y1,Y2]."""
        ## Rect=[X1,X2,Y1,Y2]
        if (RectA[0] < RectB[1] and RectA[1] > RectB[0] and RectA[2] < RectB[3] and RectA[3] > RectB[2]):
            return True;
        else:
            return False;
    def GetBoundingRect(self,PolyPoints):
        """Return the polygon's bounding box as [MinX, MaxX, MinY, MaxY]."""
        PolyMinX=PolyMaxX=PolyPoints[0][0]
        PolyMinY=PolyMaxY=PolyPoints[0][1]
        for P in PolyPoints:
            if P[0]<PolyMinX:
                PolyMinX=P[0]
            if P[0]>PolyMaxX:
                PolyMaxX=P[0]
            if P[1]<PolyMinY:
                PolyMinY=P[1]
            if P[1]>PolyMaxY:
                PolyMaxY=P[1]
        return [PolyMinX,PolyMaxX,PolyMinY,PolyMaxY]
    def IntersectPolyRect(self,PolyPoints,PolygonBoundingRect,Rect):
        """True if any polygon edge crosses any edge of the rectangle.

        The bounding-box test is a cheap rejection first pass.
        NOTE(review): edge-crossing alone misses the case where the rectangle
        lies entirely inside the polygon (no edges intersect) — confirm that
        case cannot occur with the chosen grid StepSize.
        """
        # Close the polygon so the last edge (back to the first vertex) is tested.
        PolyPoints= numpy.vstack([PolyPoints,PolyPoints[0]])
        if self.IntersectTwoRect(Rect, PolygonBoundingRect):
            IntersectionResults=False
            for i in range(0,len(PolyPoints)-1):
                # Test the polygon edge against all four rectangle edges.
                IntersectionResults= IntersectionResults or self.seg_intersect(PolyPoints[i],PolyPoints[i+1],[Rect[0],Rect[2]],[Rect[0],Rect[3]])
                IntersectionResults= IntersectionResults or self.seg_intersect(PolyPoints[i],PolyPoints[i+1],[Rect[0],Rect[3]],[Rect[1],Rect[3]])
                IntersectionResults= IntersectionResults or self.seg_intersect(PolyPoints[i],PolyPoints[i+1],[Rect[1],Rect[3]],[Rect[1],Rect[2]])
                IntersectionResults= IntersectionResults or self.seg_intersect(PolyPoints[i],PolyPoints[i+1],[Rect[1],Rect[2]],[Rect[0],Rect[2]])
            return IntersectionResults
        else:
            return False
    def ccw(self,A,B,C):
        """True if the points A, B, C are in counter-clockwise order."""
        return (C[1]-A[1])*(B[0]-A[0]) > (B[1]-A[1])*(C[0]-A[0])
    def seg_intersect(self,A,B,C,D):
        """True if segment AB properly intersects segment CD (orientation test)."""
        return self.ccw(A,C,D) != self.ccw(B,C,D) and self.ccw(A,B,C) != self.ccw(A,B,D)
    def InsidePolygon(self,x,y,points):
        """Ray-casting point-in-polygon test for point (x, y)."""
        n = len(points)
        inside = False
        p1x, p1y = points[0]
        for i in range(1, n + 1):
            p2x, p2y = points[i % n]
            if y > min(p1y, p2y):
                if y <= max(p1y, p2y):
                    if x <= max(p1x, p2x):
                        if p1y != p2y:
                            # x-coordinate where the horizontal ray crosses this edge.
                            xinters = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
                        if p1x == p2x or x <= xinters:
                            inside = not inside
            p1x, p1y = p2x, p2y
        return inside
if __name__ == '__main__':
    # Load settings and open the DB connection via the BSPTree helper.
    [CurrentSAGEStruct,Options]=settingReader.ParseParams("settings.xml")
    BSPTreeObj=BSPTree(Options)
    BSPTreeObj.GenerateRectangles()
    #PolyPoints=[(250,90),(400,300),(250,400),(150,250)]
    PolyPoints=[(0,0),(500,0),(500,50)]
    LocationsMatrix=BSPTreeObj.GetRectIds(PolyPoints)
    # Build comma-separated lists of the distinct grid columns/rows touched
    # by the polygon, for use in the SQL IN (...) clauses below.
    GridXLocationsstr=''
    GridYLocationsstr=''
    GridXLocations=numpy.unique(LocationsMatrix[:,0])
    GridYLocations=numpy.unique(LocationsMatrix[:,1])
    for r in GridXLocations:
        GridXLocationsstr=GridXLocationsstr+','+str(int(r))
    for r in GridYLocations:
        GridYLocationsstr=GridYLocationsstr+','+str(int(r))
    # Drop the leading comma introduced by the accumulation above.
    GridXLocationsstr=GridXLocationsstr[1:]
    GridYLocationsstr=GridYLocationsstr[1:]
    Query='select distinct tablename from TreeSummary where globaltreeid in (Select globaltreeid from TreeMapping where gridx in ('+GridXLocationsstr+') and gridy in ('+GridYLocationsstr+'));'
    # BUG FIX: `print Query` (statement form) is a syntax error under
    # Python 3; the function form below behaves identically on Python 2.
    print(Query)
    TablesList=BSPTreeObj.ExecuteQuerySQLStatment(Query)
    for table in TablesList:
        print(table)
    #GridData=BSPTreeObj.ExecuteQuerySQLStatment("select gridx,gridy,count(*) from TreeMapping group by gridx,gridy;")
    #Arr=numpy.zeros((25,25))
    #for GridPoint in GridData:
    #    Arr[GridPoint[0],GridPoint[1]]=GridPoint[2]
    #print Arr
    #plt.contourf(Arr)
    #plt.colorbar()
    #plt.show()
|
Many of the forms used at the Bexar Appraisal District can be printed from the Internet. In order to do so you must have Adobe Acrobat Reader installed on your machine. If you do not, it can be downloaded for free from Adobe.
Some of these documents require Adobe Acrobat to download. For a free version of Acrobat, please click on the Acrobat Reader logo.
A listing of important dates in the Tax Year Calendar.
Link to the Office of the Attorney General's site to find information regarding Open Records and Open Government. To request records from us, fill in and submit an Open Records Request Form.
Description of the cropland and livestock guidelines, as well as links to the appropriate forms.
Answers to such questions as "How do I file a Protest?" and "What is an Exemption and how do I get one?"
|
from flask import Flask
from flask import session
from flask import Response
from flask import request
from flask import redirect, url_for
from flask import render_template
from api.core import app, db
from api.models.user import User
from api.models.page import Page
import urllib
import json
import datetime
@app.route('/')
def landing():
    """Serve the public landing page."""
    template = 'landing.html'
    return render_template(template)
@app.route('/auth')
def login():
    """Serve the login page where the Facebook OAuth flow starts."""
    template = 'login.html'
    return render_template(template)
@app.route('/auth/<token>')
def get_token(token):
    """Complete the Facebook OAuth flow for *token*.

    Fetches the user's Graph API profile, creates the local User record on
    first login, stores the user id in the session, then lists the pages the
    user administers: renders a chooser when there is more than one page,
    otherwise returns the raw accounts payload as JSON. Redirects home if
    the accounts response carries no 'data' key.
    """
    params = {
        'access_token': token
    }
    params = urllib.urlencode(params)
    # Fetch the authenticated user's profile from the Graph API.
    me = json.loads(urllib.urlopen('https://graph.facebook.com/me?%s' % params).read())
    fb_user = {
        'first_name': me['first_name'],
        'last_name': me['last_name'],
        'fb_id': me['id'],
        'email': me['email'],
        'created': datetime.datetime.now()
    }
    # Upsert: only create a User row the first time this fb_id is seen.
    db_user = db.session.query(User).filter(
        User.fb_id == fb_user['fb_id']
    ).first()
    if not db_user:
        db_user = User(**fb_user)
        db.session.add(db_user)
        db.session.commit()
    session['USER_ID'] = db_user.id
    accounts = json.loads(urllib.urlopen('https://graph.facebook.com/me/accounts?%s' % params).read())
    if 'data' not in accounts:
        return redirect('/')
    # BUG FIX: count the pages themselves, not the top-level keys of the API
    # response dict (which also contains 'paging' etc., so len(accounts) was
    # almost always > 1 regardless of how many pages the user manages).
    if len(accounts['data']) > 1:
        return render_template('chooser.html', pages=accounts['data'])
    return Response(json.dumps({
        'accounts': accounts
    }), mimetype='application/json')
|
The history of Basque Linens is closely woven into the culture of a region and its people. Born of linen cultivated and woven by the peasants to create SAÏAL, a strong fabric cover used to protect the cattle from the sun. Then translated into durable fabrics to decorate the most beautiful summer tables, and now this range of super soft towels.
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
__author__ = 'francis'
def permute(xs):
    """Return a generator over the permutations of elements in the sequence.

    :param xs: a sequence of elements (sliceable and concatenable, e.g. list)
    :yields: each permutation; single-element input yields the sequence
        itself, longer inputs yield lists
    """
    if len(xs) == 1:
        yield xs
    else:
        # `range` (not the Python-2-only `xrange`) keeps this portable;
        # behaviour is identical for iteration.
        for i in range(0, len(xs)):
            # Fix element i first, then permute the remainder recursively.
            for p in permute(xs[0:i] + xs[i + 1:]):
                yield [xs[i]] + p
def longest_inc_seq(xs):
    """Find the longest (strictly) increasing subsequences in the sequence.

    :param xs: a sortable sequence of elements
    :return: list of all longest increasing subsequences, as lists
    """
    seq = []      # all increasing subsequences found so far
    indices = []  # indices into `seq` of the current longest ones
    size = 0      # current longest length
    # `range` (not the Python-2-only `xrange`) keeps this portable.
    for i in range(0, len(xs)):
        for j in range(0, len(seq)):
            # Extend every existing subsequence that xs[i] can follow.
            if xs[i] > seq[j][-1]:
                t = seq[j] + [xs[i]]
                if len(t) > size:
                    # New strict maximum: reset the winners.
                    indices = [len(seq)]
                    size = len(t)
                elif len(t) == size:
                    indices.append(len(seq))
                seq.append(t)
        # Every element also starts a subsequence of its own.
        seq.append([xs[i]])
    return [seq[k] for k in indices]
def longest_common_seq(first, second):
    """Find the longest common subsequence of the given sequences.

    Brute-force recursive search: for every matching pair of elements, take
    the match and recurse on the remaining tails, keeping the longest result.

    :param first: the first sequence
    :param second: the second sequence
    :return: the longest common subsequence, as a list
    """
    best = []
    for i, a in enumerate(first):
        for j, b in enumerate(second):
            if b == a:
                candidate = [a] + longest_common_seq(first[i + 1:], second[j + 1:])
                if len(candidate) > len(best):
                    best = candidate
    return best
def fib(n):
    """Compute the Fibonacci number for the given term.

    :param n: the (non-negative) term of the Fibonacci sequence
    :return: the n-th Fibonacci number (fib(0) == 0, fib(1) == 1)
    :raises ValueError: if n is negative
    """
    # BUG FIX: a negative n previously spun forever in the `while n - 1`
    # loop; fail fast instead.
    if n < 0:
        raise ValueError('fib is undefined for negative terms: %d' % n)
    if not n:
        return 0
    previous = 0
    current = 1
    # Iterate n - 1 times, sliding the (previous, current) window forward.
    while n - 1:
        current, previous = (previous + current), current
        n -= 1
    return current
|
Eichardt’s Bar is renowned as a Queenstown icon and a local favourite since 1867. It has a genuine atmosphere, history and ambience unlike anywhere else in the world, offering a welcoming environment for hotel guests, visitors and Queenstown locals alike.
Comfortable sofas, cosy fireplace and gracious service provide a welcoming and stylish environment for sampling mouth-watering creations and timeless cocktails. Eichardt's Bar is the only bar in Queenstown serving Champagne Bollinger by the glass.
Eichardt's Bar's award winning wine list features a huge range of world-class Otago favourites, regional classics from around New Zealand, and international varietal specialties. Mt Edward make a fantastic Pinot Noir exclusively for Eichardt’s Bar, be sure to drop in for a glass or two.
Here, cocktails are created by top New Zealand bartenders who incorporate years of experience and passion into every drink. Eichardt’s attentive bar team are always ready to serve with a welcoming smile, good humour and an utterly professional, knowledgeable manner.
Eichardt’s signature lunch dishes, tapas, and breakfast menus feature fresh and organic produce sourced from the very best local growers. Eichardt’s chef Will Eaglesfield has built a loyal following for consistent delicious and ever inspiring dishes.
Eichardt’s is located in the heart of Queenstown’s café and restaurant precinct on a premium lakefront setting.
Please note that Eichardt's Bar does not take reservations as we are a small venue and operate on a walk-in basis only - you are welcome to come in at your leisure.
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
from collections import OrderedDict
#FIXME Best way to configur pad size? right now we use half the pitch
# Require the configuration file path as the first CLI argument.
if len(sys.argv) >1:
    infile = sys.argv[1]
else:
    print('please provide a configuration file')
    sys.exit()
# Recognised configuration keys and their defaults; order is preserved.
dictParameters=OrderedDict([
    ('outLibrary',''),
    ('name',''),
    ('keywords',''),
    ('Description','_'),
    ('3dModelPath','_') ])
# Parse simple `key=value` lines; an empty line terminates the file.
with open(infile,'r+') as inf:
    while True:
        line = inf.readline()
        line = line.replace('\n','')
        if not line: break
        # Split on the first '=' only, so values may themselves contain '='.
        lsplit=[]
        lsplit.append(line[0:line.find('=')])
        lsplit.append(line[line.find('=')+1:])
        if lsplit[0] in dictParameters:
            # Only override when a non-empty, different value was given.
            if (lsplit[1] != '' and lsplit[1]!= dictParameters[lsplit[0]]):
                dictParameters[lsplit[0]]=lsplit[1]
#retrieve BGA package parameters
# The footprint name is assumed to encode the package as
# ...C<pitch*100>P<nBallx>X<nBally>_<lenx*100>X<leny*100>... — TODO confirm
# against the naming convention used by the footprint library.
string = dictParameters['name']
idx = string.find('P')
# Pitch is stored in hundredths of a millimetre between 'C' and 'P'.
pitch = float(string[string.find('C')+1:idx])/100.0
str2 = string[idx + 1 :]
idx = str2.find('X')
nBallx = int(str2[:idx])       # ball count along X
str2 = str2[idx+1:]
idx = str2.find('_')
nBally = int(str2[:idx])       # ball count along Y
str2 = str2[idx+1:]
idx = str2.find('X')
lenx = float(str2[:idx])/100.0  # package body length X, mm
str2 = str2[idx+1:]
idx = str2.find('X')
# NOTE(review): if no further 'X' exists, find() returns -1 and this slices
# off the last character — verify the expected name format here.
leny = float(str2[:idx])/100.0  # package body length Y, mm
def drawRect(x, y, layer):
    """Return KiCad fp_line s-expressions for a rectangle centred on the origin.

    The rectangle spans (-x, -y) to (x, y) on the given layer; courtyard
    layers get a thinner stroke.
    """
    print(layer)
    print(x)
    print(y)
    # Courtyard outlines use a 0.05mm stroke, everything else 0.15mm.
    width = 0.05 if layer.find('CrtYd') != -1 else 0.15
    edges = [
        ' (fp_line (start -{} -{}) (end -{} {}) (layer {}) (width {}))\n'.format(x, y, x, y, layer, width),
        ' (fp_line (start -{} -{}) (end {} -{}) (layer {}) (width {}))\n'.format(x, y, x, y, layer, width),
        ' (fp_line (start {} {}) (end -{} {}) (layer {}) (width {}))\n'.format(x, y, x, y, layer, width),
        ' (fp_line (start {} {}) (end {} -{}) (layer {}) (width {}))\n'.format(x, y, x, y, layer, width),
    ]
    return ''.join(edges)
def createPinList(nBallx, nBally):
    """Build the BGA pad name list, column by column.

    Row names use the JEDEC ball alphabet (I, O, Q, S, X and Z omitted);
    rows beyond the 20-letter alphabet get a letter prefix (AA1, AB1, ...).

    :param nBallx: number of ball columns (the numeric part of the name)
    :param nBally: number of ball rows (the letter part of the name)
    :return: list of pad names, e.g. ['A1', 'B1', ..., 'A2', ...]
    """
    letterBGA= ['A','B','C','D','E','F','G','H','J','K','L','M','N','P','R','T','U','V','W','Y']
    pinlist = []
    for i in range(nBallx):
        for j in range(nBally):
            # BUG FIX: integer division. Under Python 3 the old
            # `j / len(letterBGA)` produced a float, crashing the list
            # indexing below; `//` matches the Python 2 behaviour exactly.
            firstletter = j // len(letterBGA)
            defstr = ''
            if(firstletter != 0):
                defstr = letterBGA[firstletter-1]
            pinlist.append(defstr+letterBGA[j-firstletter*len(letterBGA)]+str(i+1))
    return pinlist
# Assemble the .kicad_mod footprint text: header, texts, outlines, pads, 3D model.
outstring = "(module " + dictParameters['name'] + ' (layer F.Cu)\n' # module name
outstring += ' (descr "'+dictParameters['Description'] + '")\n' # Description
outstring += ' (tags "'+dictParameters['keywords'] + '")\n' # keywords
outstring += ' (attr smd)\n' # attribute
# Reference text below the body, value text above it.
outstring += ' (fp_text reference REF** (at 0 {0}) (layer F.SilkS)\n'.format(int(leny/2.+2)) # reference
outstring += ' (effects (font (size 1 1) (thickness 0.15)))\n'
outstring += ' )\n'
outstring += ' (fp_text value {} (at 0 -{}) (layer F.Fab)\n'.format(dictParameters['name'],int(leny/2.+2)) # value
outstring += ' (effects (font (size 1 1) (thickness 0.15)))\n'
outstring += ' )\n'
outstring += drawRect(lenx/2.,leny/2.,'F.SilkS') # silkscreen rectangle
outstring += drawRect(lenx/2.+0.2,leny/2.+0.2,'F.CrtYd') # courtyard rectangle
# Pin-1 marker circle just outside the top-left corner.
outstring += ' (fp_circle (center -{} -{}) (end -{} -{}) (layer F.SilkS) (width 0.15))\n'.format(lenx/2.+0.5,leny/2.+0.5,lenx/2.+1,leny/2.+0.5)#silkscreen circle
pinlist = createPinList(nBallx,nBally)
# Centre the ball grid on the origin; pads are half-pitch circles.
minx = (nBallx-1)*pitch/2.; miny = (nBally-1)*pitch/2.
pn = 0 ; posx = -minx ; posy = -miny ; bsize = pitch/2.
for pin in pinlist:
    # Advance to the next column after every nBally... NOTE(review): the
    # test divides by nBallx while the pin list iterates nBally per column —
    # confirm behaviour for non-square ball grids. `pn / nBallx != 0` is
    # true for any nonzero quotient, so float division still works here.
    if pn % nBallx == 0 and pn / nBallx != 0: # if we start a new column
        posx += pitch
        posy = -miny
    if abs(posx)<0.001: #avoid python precision issue
        posx = 0
    if abs(posy)<0.001: #avoid python precision issue
        posy = 0
    outstring += ' (pad {} smd circle (at {} {}) (size {} {}) (layers F.Cu F.Paste F.Mask))\n'.format(pin,posx,posy,bsize,bsize)
    posy += pitch
    pn += 1
# Reference the 3D model with identity transform.
outstring += ' (model '+str(os.path.join(dictParameters['3dModelPath'],dictParameters['name']+'.wrl'))+'\n (at (xyz 0 0 0))\n (scale (xyz 1 1 1))\n (rotate (xyz 0 0 0))\n )\n'
outstring += ')'
outfilepath = os.path.join(dictParameters['outLibrary'],dictParameters['name']+'.kicad_mod')
print(outfilepath)
with open(outfilepath,'w+') as outfile:
    outfile.write(outstring)
|
A couple of weeks ago, I described the new concept of a “tweetchat” about lung cancer, and we’re about to have our second of these tomorrow: Thursday, August 8th, at 8 PM Eastern, 5 PM Pacific time. The topic for this one will be about the growing trend toward personalized cancer care, breaking what was previously one broad category of “lung cancer” into a federation of smaller subgroups — defined by tumor histology, molecular marker results, smoking status, etc. But do we risk losing some hard-fought unity by identifying “us” and “them” subgroups within the lung cancer community? Or can we remain subgroups who still come together toward a common goal of promoting lung cancer awareness, funding, and breakdown of stigma associated with the disease?
Specifically, we’re going to cover three subtopics that have led to identification of subgroups and the potential implications of this.
First, let’s consider smoking status, which certainly has biological relevance in predicting the probability of having certain clinically relevant molecular markers, but it’s also an emotionally charged variable. Lung cancer awareness campaigns often highlight that some victims of lung cancer never smoked, since so many people blame the victim with lung cancer as if they deserve it for smoking. Does this approach risk pitting smokers against never-smokers? Have we created antagonism within the lung cancer community between smokers and never-smoker, or is that increasingly likely as we focus on smoking status? Does the distinction of smoking status, which is relevant with regard to the biology of the cancer, cause people who smoked or continue to smoke feel worse?
Second, we’ve recently identified several important molecular markers present in only a small minority (1-10%) of lung cancer patients. In many cases, these have developed into major treatment advances, but just for these narrow patient subgroups. By testing for and identifying patients as having or not having an EGFR mutation, ALK rearrangement, etc., are we developing a lung cancer world of “haves” and “have nots”?
Finally, we now face the challenge of doing clinical trials in small subgroups that don’t have a critical mass of patients all in the same city or region with, say, an ALK or ROS1 rearrangement. We used to run broad trials for “all people with advanced non-small cell lung cancer” who have received a single line of chemotherapy. How can we feasibly do studies where there might be just 1-5 people in a single area who fit the requirements. Do those people with a rare mutation or acquired resistance to EGFR inhibitors, etc., need to travel to the handful of locations that have such trials? How can we best facilitate such trials being completed and new treatments becoming available for geographically dispersed small subgroups?
For those of you who already participate on Twitter and are able to join our live tweetchat, please join the conversation tomorrow: just filter for the hashtag “#lcsm”,follow the stream of comments that come in, and add that hashtag to your tweets. Otherwise, your insights and opinions would be very appreciated on all of these questions, whether before or after the live tweetchat, and I’d encourage you to share them here.
|
#!/usr/bin/env python
"""
_fabric_put_
Uploader plugin that uses fabric to do a remote put
"""
from cirrus.logger import get_logger
from cirrus.upload_plugins import Uploader
from cirrus.configuration import get_pypi_auth
from cirrus.scp import put
LOGGER = get_logger()
class Pypi(Uploader):
    """Uploader plugin that pushes a build artifact to a pypi host via scp."""
    PLUGGAGE_OBJECT_NAME = 'fabric'

    def upload(self, opts, build_artifact):
        """
        upload to pypi via fabric over ssh
        """
        pypi_conf = self.package_conf.pypi_config()
        pypi_auth = get_pypi_auth()
        # CLI-provided URL wins over the packaged configuration.
        pypi_url = opts.pypi_url if opts.pypi_url else pypi_conf['pypi_url']
        # Prefer a dedicated ssh user when one is configured.
        ssh_user = pypi_auth['ssh_username']
        pypi_user = ssh_user if ssh_user is not None else pypi_auth['username']
        package_dir = pypi_conf['pypi_upload_path']
        LOGGER.info("Uploading {0} to {1}".format(build_artifact, pypi_url))
        put(build_artifact,
            package_dir,
            pypi_url,
            ssh_username=pypi_user,
            ssh_keyfile=pypi_auth['ssh_key']
            )
|
The Florida Supreme Court on Friday ruled that jury recommendations must be unanimous for the death penalty to be imposed, delivering a blow to prosecutors and the Legislature by striking down a new law aimed at fixing the state's death penalty scheme.
Friday's 5-2 ruling came in the case of Timothy Lee Hurst, whose appeal spawned a U.S. Supreme Court decision in January that found Florida's death-penalty sentencing process was unconstitutional because it gave too much power to judges, instead of juries.
The Florida court on Friday ordered that Hurst be given a new sentencing hearing and also left the state's death-penalty sentencing structure in doubt, for the second time in a year.
The immediate impact of Friday's decision on Florida's 400 Death Row inmates was unclear, but the 89-page ruling left undisputed that unanimous jury recommendations are required for defendants to be ordered to death, in contrast with a hurriedly crafted law passed in March.
“We are reviewing the Florida Supreme Court ruling, but in the meantime Florida juries must make unanimous decisions in capital cases as to the appropriateness of the death penalty,” Attorney General Pam Bondi's spokesman, Whitney Ray, said in an email Friday.
Bondi's office has not ruled out the possibility of seeking a rehearing from the court, something unlikely to be granted, given the 5-2 ruling.
This issues in the Hurst case deal with the sentencing process after juries unanimously find defendants guilty of capital offenses.
Under Florida's old law, jurors by a simple majority could recommend the death penalty. Judges would then make findings of fact that "sufficient" aggravating factors, not outweighed by mitigating circumstances, existed for the death sentence to be imposed.
That system was an unconstitutional violation of the Sixth Amendment right to trial by jury, the U.S. Supreme Court decided in an 8-1 ruling in January in a case known as Hurst v. Florida.
After the Hurst ruling, the Florida Supreme Court indefinitely halted two scheduled executions, and the Legislature quickly passed a new law to address the U.S. Supreme Court decision.
Under the new law, juries have to unanimously find that at least one aggravating circumstance exists in order for defendants to be eligible for the death penalty. The law also requires juries to weigh whether sufficient mitigating factors exist to outweigh the aggravating circumstances, but the law is silent about whether those decisions must be unanimous. The law also required at least 10 jurors to recommend the death penalty, a departure from the old law, which required a simple majority.
The issue of a unanimous recommendations —- not directly addressed in the U.S. Supreme Court decision —- was a flashpoint for lawmakers, prosecutors and defense lawyers during legislative debate on the new law. While the state Senate initially wanted to adopt a requirement of unanimous jury recommendations, lawmakers later struck a deal, pushed by state attorneys, to require at least 10 jurors to recommend death.
But in Friday's highly anticipated decision, the Florida Supreme Court majority found that the state's attempt to repair the death-penalty sentencing structure remained flawed.
Chief Justice Jorge Labarga and justices Barbara Pariente, R. Fred Lewis and Peggy Quince agreed fully in the majority opinion. Justice James E.C. Perry concurred with the issue of requiring unanimous jury recommendations but dissented on another issue. Justices Charles Canady wrote a dissenting opinion that was joined by Justice Ricky Polston.
The U.S. Supreme Court decision mandated that all findings necessary for imposition of a death sentence are “elements” that must be decided by a jury, and Florida “has a longstanding history of requiring unanimous jury verdicts as the elements of a crime,” the majority wrote.
“… We conclude that under the commandments of Hurst v. Florida, Florida's constitutional right to trial by jury, and our Florida jurisprudence, the penalty phase jury must be unanimous in making the critical findings and recommendation that are necessary before a sentence of death may be considered by the judge or imposed,” the majority wrote.
Of 31 states that have the death penalty, Florida is one of just three —- along with Alabama and Delaware — that have not required unanimous recommendations for sentences of death.
“Simply put, Florida's extreme outlier status in not requiring unanimity in the jury's final recommendation renders the current imposition of the death penalty in Florida cruel and unusual under the Eighth Amendment of the United States Constitution,” Justice Barbara Pariente wrote in a concurring opinion.
Resolving the issue “now, as opposed to later, ensures that, for as long as death is a permissible punishment in the United States, Florida's death penalty will be constitutionally sound,” Pariente wrote.
But in his dissent, Canady objected that the majority's decision regarding unanimity went far beyond the bounds of the U.S. Supreme Court's ruling in the Hurst case, saying it “fundamentally misapprehends and misuses” the decision, “unnecessarily disrupting” the administration of the death penalty in Florida.
Hurst was sentenced to death for the 1998 killing of fast-food worker Cynthia Harrison in Pensacola. Harrison, an assistant manager at a Popeye's Fried Chicken restaurant where Hurst worked, was bound, gagged and stabbed more than 60 times. Her body was found in a freezer.
The jury in the Hurst case recommended a death sentence to the judge, but its vote was split seven to five.
Friday's decision was a “major shift in Florida jurisprudence,” according to 2nd Judicial Circuit Public Defender Nancy Daniels, whose office represents Hurst.
“The Hurst opinion from today makes it clear that the whole job a jury has to do has to be unanimous,” she said in a telephone interview.
State attorneys will hold a telephone conference on Monday to review the court opinion and assess how to proceed, Florida Prosecuting Attorneys Association lobbyist Buddy Jacobs said Friday.
Friday's ruling could deepen the rift between the left-leaning court and the Republican-dominated Legislature, which will be forced to address the thorny death-penalty issue again during next year's session.
“With no regard to the Legislature's constitutional duty to establish policy in this state, the Florida Supreme Court expanded the scope of its decision to issues that were not raised by the parties or considered by the U.S. Supreme Court,” Corcoran, a Republican lawyer from Land O' Lakes, said.
Florida lawmakers will have to address the issue when they reconvene, said Pete Mills, an assistant public defender in the 10th Judicial Circuit who is chairman of the Florida Public Defender Association's death penalty steering committee.
|
from __future__ import division
import numpy as np
from numpy import dot
from dipy.core.geometry import sphere2cart
from dipy.core.geometry import vec2vec_rotmat
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
diffusion_evals = np.array([1500e-6, 400e-6, 400e-6])
def _add_gaussian(sig, noise1, noise2):
"""
Helper function to add_noise
This one simply adds one of the Gaussians to the sig and ignores the other
one.
"""
return sig + noise1
def _add_rician(sig, noise1, noise2):
"""
Helper function to add_noise.
This does the same as abs(sig + complex(noise1, noise2))
"""
return np.sqrt((sig + noise1) ** 2 + noise2 ** 2)
def _add_rayleigh(sig, noise1, noise2):
"""
Helper function to add_noise
The Rayleigh distribution is $\sqrt\{Gauss_1^2 + Gauss_2^2}$.
"""
return sig + np.sqrt(noise1 ** 2 + noise2 ** 2)
def add_noise(signal, snr, S0, noise_type='rician'):
    r""" Add noise of specified distribution to the signal from a single voxel.

    Parameters
    -----------
    signal : 1-d ndarray
        The signal in the voxel.
    snr : float
        The desired signal-to-noise ratio. (See notes below.)
        If `snr` is None, return the signal as-is.
    S0 : float
        Reference signal for specifying `snr`.
    noise_type : string, optional
        The distribution of noise added. Can be either 'gaussian' for Gaussian
        distributed noise, 'rician' for Rice-distributed noise (default) or
        'rayleigh' for a Rayleigh distribution.

    Returns
    --------
    signal : array, same shape as the input
        Signal with added noise.

    Notes
    -----
    SNR is defined here, following [1]_, as ``S0 / sigma``, where ``sigma`` is
    the standard deviation of the two Gaussian distributions forming the real
    and imaginary components of the Rician noise distribution (see [2]_).

    References
    ----------
    .. [1] Descoteaux, Angelino, Fitzgibbons and Deriche (2007) Regularized,
           fast and robust q-ball imaging. MRM, 58: 497-510
    .. [2] Gudbjartson and Patz (2008). The Rician distribution of noisy MRI
           data. MRM 34: 910-914.

    Examples
    --------
    >>> signal = np.arange(800).reshape(2, 2, 2, 100)
    >>> signal_w_noise = add_noise(signal, 10., 100., noise_type='rician')
    """
    if snr is None:
        return signal

    sigma = S0 / snr
    # The first Gaussian component is always drawn; the second only when the
    # noise model requires it (this preserves the random-number stream).
    noise1 = np.random.normal(0, sigma, size=signal.shape)
    noise2 = (None if noise_type == 'gaussian'
              else np.random.normal(0, sigma, size=signal.shape))
    dispatch = {'gaussian': _add_gaussian,
                'rician': _add_rician,
                'rayleigh': _add_rayleigh}
    return dispatch[noise_type](signal, noise1, noise2)
def sticks_and_ball(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)],
                    fractions=[35, 35], snr=20):
    """ Simulate the signal for a Sticks & Ball model.

    Parameters
    -----------
    gtab : GradientTable
        Signal measurement directions.
    d : float
        Diffusivity value.
    S0 : float
        Unweighted signal value.
    angles : array (K,2) or (K, 3)
        List of K polar angles (in degrees) for the sticks or array of K
        sticks as unit vectors.
    fractions : float
        Percentage of each stick. Remainder to 100 specifies isotropic
        component.
    snr : float
        Signal to noise ratio, assuming Rician noise. If set to None, no
        noise is added.

    Returns
    --------
    S : (N,) ndarray
        Simulated signal.
    sticks : (M,3)
        Sticks in cartesian coordinates.

    References
    ----------
    .. [1] Behrens et al., "Probabilistic diffusion
           tractography with multiple fiber orientations: what can we gain?",
           Neuroimage, 2007.
    """
    # Convert percentages to volume fractions; f0 is the isotropic remainder.
    fractions = [f / 100. for f in fractions]
    f0 = 1 - np.sum(fractions)
    S = np.zeros(len(gtab.bvals))

    # Accept either polar angles (degrees) or ready-made unit vectors.
    angles = np.array(angles)
    if angles.shape[-1] == 3:
        sticks = angles
    else:
        sticks = [sphere2cart(1, np.deg2rad(pair[0]), np.deg2rad(pair[1]))
                  for pair in angles]
        sticks = np.array(sticks)

    # Skip the first entry here; all b0 measurements are set to S0 below.
    # Each signal is the isotropic 'ball' attenuation plus one anisotropic
    # 'stick' term per fiber, scaled by its fraction.
    for (i, g) in enumerate(gtab.bvecs[1:]):
        S[i + 1] = f0 * np.exp(-gtab.bvals[i + 1] * d) + \
            np.sum([fractions[j] * np.exp(-gtab.bvals[i + 1] * d * np.dot(s, g) ** 2)
                    for (j, s) in enumerate(sticks)])

        S[i + 1] = S0 * S[i + 1]

    S[gtab.b0s_mask] = S0

    S = add_noise(S, snr, S0)

    return S, sticks
def single_tensor(gtab, S0=1, evals=None, evecs=None, snr=None):
    """ Simulated Q-space signal with a single tensor.

    Parameters
    -----------
    gtab : GradientTable
        Measurement directions.
    S0 : double,
        Strength of signal in the presence of no diffusion gradient (also
        called the ``b=0`` value).
    evals : (3,) ndarray
        Eigenvalues of the diffusion tensor. By default, values typical for
        prolate white matter are used.
    evecs : (3, 3) ndarray
        Eigenvectors of the tensor, stored column-wise. You can also think
        of this as a rotation matrix that transforms the direction of the
        tensor.
    snr : float
        Signal to noise ratio, assuming Rician noise. None implies no noise.

    Returns
    --------
    S : (N,) ndarray
        Simulated signal: ``S(q, tau) = S_0 e^(-b g^T R D R.T g)``.

    References
    ----------
    .. [1] M. Descoteaux, "High Angular Resolution Diffusion MRI: from Local
           Estimation to Segmentation and Tractography", PhD thesis,
           University of Nice-Sophia Antipolis, p. 42, 2008.
    .. [2] E. Stejskal and J. Tanner, "Spin diffusion measurements: spin echos
           in the presence of a time-dependent field gradient", Journal of
           Chemical Physics, nr. 42, pp. 288--292, 1965.
    """
    if evals is None:
        evals = diffusion_evals
    if evecs is None:
        evecs = np.eye(3)
    # Remember the original gradient layout so the signal can be returned
    # with a matching shape.
    out_shape = gtab.bvecs.shape[:gtab.bvecs.ndim - 1]
    gradients = gtab.bvecs.reshape(-1, 3)
    rot = np.asarray(evecs)
    # Rotate the diagonal eigenvalue matrix into the tensor frame: R D R^T.
    tensor = dot(dot(rot, np.diag(evals)), rot.T)
    signal = np.zeros(len(gradients))
    for idx, direction in enumerate(gradients):
        attenuation = dot(dot(direction.T, tensor), direction)
        signal[idx] = S0 * np.exp(-gtab.bvals[idx] * attenuation)
    signal = add_noise(signal, snr, S0)
    return signal.reshape(out_shape)
def multi_tensor(gtab, mevals, S0=100, angles=((0, 0), (90, 0)),
                 fractions=(50, 50), snr=20):
    r"""Simulate a Multi-Tensor signal.

    Parameters
    -----------
    gtab : GradientTable
    mevals : array (K, 3)
        each tensor's eigenvalues in each row
    S0 : float
        Unweighted signal value (b0 signal).
    angles : array (K,2) or (K,3)
        List of K tensor directions in polar angles (in degrees) or unit
        vectors
    fractions : sequence of floats
        Percentage of the contribution of each tensor. The sum of fractions
        should be equal to 100%.
    snr : float
        Signal to noise ratio, assuming Rician noise. If set to None, no
        noise is added.

    Returns
    --------
    S : (N,) ndarray
        Simulated signal.
    sticks : (M,3)
        Sticks in cartesian coordinates.

    Raises
    ------
    ValueError
        If ``fractions`` does not sum to 100 (to two decimal places).

    Examples
    --------
    >>> import numpy as np
    >>> from dipy.sims.voxel import multi_tensor
    >>> from dipy.data import get_data
    >>> from dipy.core.gradients import gradient_table
    >>> from dipy.io.gradients import read_bvals_bvecs
    >>> fimg, fbvals, fbvecs = get_data('small_101D')
    >>> bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
    >>> gtab = gradient_table(bvals, bvecs)
    >>> mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]))
    >>> S = multi_tensor(gtab, mevals)
    """
    # NOTE: defaults are tuples (not lists) to avoid the shared mutable
    # default-argument pitfall; they are converted to fresh objects below.
    if np.round(np.sum(fractions), 2) != 100.0:
        raise ValueError('Fractions should sum to 100')
    fractions = [f / 100. for f in fractions]
    S = np.zeros(len(gtab.bvals))
    angles = np.array(angles)
    if angles.shape[-1] == 3:
        # Directions already given as cartesian unit vectors.
        sticks = angles
    else:
        # Convert (polar, azimuthal) angle pairs in degrees to unit vectors.
        sticks = [sphere2cart(1, np.deg2rad(pair[0]), np.deg2rad(pair[1]))
                  for pair in angles]
        sticks = np.array(sticks)
    # Accumulate the noiseless single-tensor signals weighted by their
    # fractions, then add noise once to the combined signal.
    for i in range(len(fractions)):
        S = S + fractions[i] * single_tensor(gtab, S0=S0, evals=mevals[i],
                                             evecs=all_tensor_evecs(
                                                 sticks[i]).T,
                                             snr=None)
    return add_noise(S, snr, S0), sticks
def single_tensor_odf(r, evals=None, evecs=None):
    """Simulated ODF with a single tensor.

    Parameters
    ----------
    r : (N,3) or (M,N,3) ndarray
        Measurement positions in (x, y, z), either as a list or on a grid.
    evals : (3,)
        Eigenvalues of diffusion tensor. By default, use values typical for
        prolate white matter.
    evecs : (3, 3) ndarray
        Eigenvectors of the tensor. You can also think of these as the
        rotation matrix that determines the orientation of the diffusion
        tensor.

    Returns
    -------
    ODF : (N,) ndarray
        The diffusion probability at ``r`` after time ``tau``.

    References
    ----------
    .. [1] Aganj et al., "Reconstruction of the Orientation Distribution
           Function in Single- and Multiple-Shell q-Ball Imaging Within
           Constant Solid Angle", Magnetic Resonance in Medicine, nr. 64,
           pp. 554--566, 2010.
    """
    if evals is None:
        evals = diffusion_evals
    if evecs is None:
        evecs = np.eye(3)
    # Preserve the input layout (flat list or grid) for the returned ODF.
    out_shape = r.shape[:r.ndim - 1]
    rot = np.asarray(evecs)
    # Tensor in the rotated frame: R D R^T, then its inverse.
    D = dot(dot(rot, np.diag(evals)), rot.T)
    Dinv = np.linalg.inv(D)
    directions = r.reshape(-1, 3)
    quad = np.zeros(len(directions))
    for idx, u in enumerate(directions):
        # Quadratic form u^T D^{-1} u raised to the 3/2 power.
        quad[idx] = (dot(dot(u.T, Dinv), u)) ** (3 / 2)
    odf = 1 / (4 * np.pi * np.prod(evals) ** (1 / 2) * quad)
    return odf.reshape(out_shape)
def all_tensor_evecs(e0):
    """Given the principle tensor axis, return the array of all
    eigenvectors (or, the rotation matrix that orientates the tensor).

    Parameters
    ----------
    e0 : (3,) ndarray
        Principle tensor axis.

    Returns
    -------
    evecs : (3,3) ndarray
        Tensor eigenvectors.
    """
    basis = np.eye(3)
    # Rotation that carries the x-axis onto e0; apply the same rotation to
    # the remaining basis vectors to complete the eigenvector frame.
    rot = vec2vec_rotmat(basis[0], e0)
    e1 = np.dot(rot, basis[1])
    e2 = np.dot(rot, basis[2])
    return np.array([e0, e1, e2])
def multi_tensor_odf(odf_verts, mevals, angles, fractions):
    r'''Simulate a Multi-Tensor ODF.

    Parameters
    ----------
    odf_verts : (N,3) ndarray
        Vertices of the reconstruction sphere.
    mevals : sequence of 1D arrays,
        Eigen-values for each tensor.
    angles : sequence of 2d tuples,
        Sequence of principal directions for each tensor in polar angles
        or cartesian unit coordinates.
    fractions : sequence of floats,
        Percentages of the fractions for each tensor.

    Returns
    -------
    ODF : (N,) ndarray
        Orientation distribution function.

    Examples
    --------
    Simulate a MultiTensor ODF with two peaks and calculate its exact ODF.

    >>> import numpy as np
    >>> from dipy.sims.voxel import multi_tensor_odf, all_tensor_evecs
    >>> from dipy.data import get_sphere
    >>> sphere = get_sphere('symmetric724')
    >>> vertices, faces = sphere.vertices, sphere.faces
    >>> mevals = np.array(([0.0015, 0.0003, 0.0003],[0.0015, 0.0003, 0.0003]))
    >>> angles = [(0, 0), (90, 0)]
    >>> odf = multi_tensor_odf(vertices, mevals, angles, [50, 50])
    '''
    weights = [f / 100. for f in fractions]
    angles = np.array(angles)
    if angles.shape[-1] == 3:
        # Directions already given as cartesian unit vectors.
        sticks = angles
    else:
        # Convert (polar, azimuthal) angle pairs in degrees to unit vectors.
        sticks = np.array([sphere2cart(1, np.deg2rad(pair[0]),
                                       np.deg2rad(pair[1]))
                           for pair in angles])
    # Eigenvector frame for each stick, transposed to column-wise layout.
    mevecs = [all_tensor_evecs(s).T for s in sticks]
    odf = np.zeros(len(odf_verts))
    for j, weight in enumerate(weights):
        odf += weight * single_tensor_odf(odf_verts,
                                          evals=mevals[j], evecs=mevecs[j])
    return odf
def single_tensor_rtop(evals=None, tau=1.0 / (4 * np.pi ** 2)):
    r'''Simulate the Return To Origin Probability (RTOP) of a single tensor.

    Parameters
    ----------
    evals : 1D arrays,
        Eigen-values for the tensor. By default, values typical for prolate
        white matter are used.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    rtop : float,
        Return to origin probability.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and
           Its Features in Diffusion MRI", PhD Thesis, 2012.
    '''
    if evals is None:
        evals = diffusion_evals
    # Gaussian propagator evaluated at zero displacement.
    rtop = 1.0 / np.sqrt((4 * np.pi * tau) ** 3 * np.prod(evals))
    return rtop
def multi_tensor_rtop(mf, mevals=None, tau=1 / (4 * np.pi ** 2)):
    r'''Simulate a Multi-Tensor rtop.

    Parameters
    ----------
    mf : sequence of floats, bounded [0,1]
        Percentages of the fractions for each tensor.
    mevals : sequence of 1D arrays,
        Eigen-values for each tensor. By default, values typical for prolate
        white matter are used.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    rtop : float,
        Return to origin probability.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and
           Its Features in Diffusion MRI", PhD Thesis, 2012.
    '''
    if mevals is None:
        # None entries make single_tensor_rtop fall back to its defaults.
        mevals = [None] * len(mf)
    total = 0
    for j, weight in enumerate(mf):
        total += weight * single_tensor_rtop(mevals[j], tau=tau)
    return total
def single_tensor_pdf(r, evals=None, evecs=None, tau=1 / (4 * np.pi ** 2)):
    """Simulated diffusion propagator (PDF) of a single tensor.

    Parameters
    ----------
    r : (N,3) or (M,N,3) ndarray
        Measurement positions in (x, y, z), either as a list or on a grid.
    evals : (3,)
        Eigenvalues of diffusion tensor. By default, use values typical for
        prolate white matter.
    evecs : (3, 3) ndarray
        Eigenvectors of the tensor. You can also think of these as the
        rotation matrix that determines the orientation of the diffusion
        tensor.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    pdf : (N,) ndarray
        The diffusion probability at ``r`` after time ``tau``.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and
           Its Features in Diffusion MRI", PhD Thesis, 2012.
    """
    if evals is None:
        evals = diffusion_evals
    if evecs is None:
        evecs = np.eye(3)
    # Preserve the input layout (flat list or grid) for the returned PDF.
    out_shape = r.shape[:r.ndim - 1]
    rot = np.asarray(evecs)
    # Tensor in the rotated frame: R D R^T, then its inverse.
    D = dot(dot(rot, np.diag(evals)), rot.T)
    Dinv = np.linalg.inv(D)
    points = r.reshape(-1, 3)
    exponent = np.zeros(len(points))
    for idx, u in enumerate(points):
        # Gaussian exponent: -u^T D^{-1} u / (4 tau).
        exponent[idx] = (-dot(dot(u.T, Dinv), u)) / (4 * tau)
    norm = 1 / np.sqrt((4 * np.pi * tau) ** 3 * np.prod(evals))
    pdf = norm * np.exp(exponent)
    return pdf.reshape(out_shape)
def multi_tensor_pdf(pdf_points, mevals, angles, fractions,
                     tau=1 / (4 * np.pi ** 2)):
    r'''Simulate a Multi-Tensor ODF.

    Parameters
    ----------
    pdf_points : (N, 3) ndarray
        Points to evaluate the PDF.
    mevals : sequence of 1D arrays,
        Eigen-values for each tensor. By default, values typical for prolate
        white matter are used.
    angles : sequence,
        Sequence of principal directions for each tensor in polar angles
        or cartesian unit coordinates.
    fractions : sequence of floats,
        Percentages of the fractions for each tensor.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    pdf : (N,) ndarray,
        Probability density function of the water displacement.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator
           and its Features in Diffusion MRI", PhD Thesis, 2012.
    '''
    weights = [f / 100. for f in fractions]
    angles = np.array(angles)
    if angles.shape[-1] == 3:
        # Directions already given as cartesian unit vectors.
        sticks = angles
    else:
        # Convert (polar, azimuthal) angle pairs in degrees to unit vectors.
        sticks = np.array([sphere2cart(1, np.deg2rad(pair[0]),
                                       np.deg2rad(pair[1]))
                           for pair in angles])
    # Eigenvector frame for each stick, transposed to column-wise layout.
    mevecs = [all_tensor_evecs(s).T for s in sticks]
    pdf = np.zeros(len(pdf_points))
    for j, weight in enumerate(weights):
        pdf += weight * single_tensor_pdf(pdf_points, evals=mevals[j],
                                          evecs=mevecs[j], tau=tau)
    return pdf
def single_tensor_msd(evals=None, tau=1 / (4 * np.pi ** 2)):
    r'''Simulate the mean square displacement (MSD) of a single tensor.

    Parameters
    ----------
    evals : 1D arrays,
        Eigen-values for the tensor. By default, values typical for prolate
        white matter are used.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    msd : float,
        Mean square displacement.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and
           Its Features in Diffusion MRI", PhD Thesis, 2012.
    '''
    if evals is None:
        evals = diffusion_evals
    # MSD of a Gaussian propagator: 2 * tau * trace(D).
    msd = 2 * tau * np.sum(evals)
    return msd
def multi_tensor_msd(mf, mevals=None, tau=1 / (4 * np.pi ** 2)):
    r'''Simulate the mean square displacement (MSD) of a Multi-Tensor model.

    Parameters
    ----------
    mf : sequence of floats, bounded [0,1]
        Percentages of the fractions for each tensor.
    mevals : sequence of 1D arrays,
        Eigen-values for each tensor. By default, values typical for prolate
        white matter are used.
    tau : float,
        diffusion time. By default the value that makes q=sqrt(b).

    Returns
    -------
    msd : float,
        Mean square displacement.

    References
    ----------
    .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and
           Its Features in Diffusion MRI", PhD Thesis, 2012.
    '''
    msd = 0
    if mevals is None:
        # None entries make single_tensor_msd fall back to its defaults.
        mevals = [None, ] * len(mf)
    # Fraction-weighted sum of the per-tensor MSDs.
    for j, f in enumerate(mf):
        msd += f * single_tensor_msd(mevals[j], tau=tau)
    return msd
# Use standard naming convention, but keep old names
# for backward compatibility
# These CamelCase aliases point at the snake_case implementations above;
# existing callers that import the old names keep working unchanged.
SticksAndBall = sticks_and_ball
SingleTensor = single_tensor
MultiTensor = multi_tensor
|
Goblin's Lair canyon has become incredibly popular with beginners and other groups since it became "known". The canyon offers a short approach, a short exit, and a straightforward rappel — all very good beginner-canyon qualities.
Though those are all good reasons beginners should do the canyon, the real reason to do it? The very short slot section is amazing! The rappel into the lair is unique and stunning. Beginners and experienced canyoneers alike will enjoy this short route.
Hikers can easily visit Goblin's Lair from the bottom as well, making this a good recommendation even for non-technical visitors. On a hot day, the Lair provides a welcome respite from the heat and sun.
This trip currently requires a permit. As of 2016, there is a $2 fee. Stop at the visitor center to purchase one.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.