from backend import message
from backend import helpers
from backend import player
import collections.abc
class Zombie(message.MessageDelegate):
_currentId = 0
def __init__(self, *args, **kwargs):
# call super
super().__init__(*args, **kwargs)
# general vars
self.id = self._currentId
Zombie._currentId += 1
def __str__(self):
return "Zombie%d" % self.id
### server-side methods ###
@staticmethod
def echo(callback, *args, **kwargs):
"""Echo args back to callback
For testing purposes only.
"""
callback(*args, **kwargs)
@staticmethod
    def server_info(callback: collections.abc.Callable) -> None:
"""Return information about the current running version of the server"""
from backend.server import RUN_DATE, VERSION
callback(start_date=RUN_DATE, version=VERSION)
def name_entered(self, name: str) -> None:
from backend import game_controller
# attempt to identify player object
p = game_controller.universal_controller.player_with_id(name)
# todo: handle case where player with same id is actually playing
# case 3: if new player
if not p:
# make new player object
p = player.Player(name)
# register player with game controller
game_controller.universal_controller.register_player(p)
# replace zombie by player object, this will properly set the handler on player
self._message_handler.delegate = p
# this zombie should now be living a free life
        # case 1: existing player that is already in a game
        # (p is always bound by this point, so the redundant `not p` checks are dropped)
        if p.current_game:
            p.current_game.unstash_player(p)
        # case 2: existing player not in a game
        # case 3: new player
        else:
            # display main menu
            p.display_main_menu()
### message delegate methods ###
def on_open(self):
pass
def on_close(self):
        pass
HAHAHAHAHAH CUTE CUTE CUTE!!!! I call it the Ostrich.
hahhaaa, how long was she holding this pose for??! Is she going to be a yogi?
Yay downward dog! Go Nat!
#!/usr/bin/env python
# reader.py - vote reader for Super Tuesday
import csv
import os
import re
import time
import urllib
import states
from template import *
import private
from candidates import candidates
parties = {
'dem': { 'name':'Democrats' },
'gop': { 'name':'Republicans' }
}
def fetchData():
    urllib.urlretrieve( private.csvFeedUrl, 'miniresults/text_output_for_mapping.csv' )
def readVotes():
print 'Processing vote data'
#reader = csv.reader( open( 'test.csv', 'rb' ) )
reader = csv.reader( open( 'miniresults/text_output_for_mapping.csv', 'rb' ) )
header = []
while header == []:
header = reader.next()
#print header
for row in reader:
if len(row) < 2: continue
if row[1] != '*': continue
setData( header, row )
def setData( header, row ):
state = states.byAbbr[ row[0] ]
setVotes( state, header, row )
def getPrecincts( row ):
return {
'reporting': int(row[3]),
'total': int(row[2])
}
def setVotes( entity, header, row ):
for col in xrange( 4, len(header) ):
if col >= len(row) or row[col] == '': continue
name = header[col]
        if name == 'guiliani': name = 'giuliani'  # fix a misspelling in the feed
candidate = candidates['byname'][name]
party = candidate['party']
p = entity['parties'][party]
if 'precincts' not in p: p['precincts'] = getPrecincts( row )
if 'votes' not in p: p['votes'] = {}
p['votes'][name] = int(row[col])
def linkParty( party, match ):
name = parties[party]['name']
if party == match:
return T('''
<span style="font-weight:bold;">
%(name)s
</span>
''', { 'name': name } )
else:
return T('''
<a href="#" onclick="refresh('%(party)s'); return false;">
%(name)s
</a>
''', { 'name': name, 'party': party } )
def makeMini():
    # makeMiniVersion returns nothing, so the old short/long/map assignments
    # (which also shadowed builtins) served no purpose
    makeMiniVersion( 'short', 'Election Coverage', 'CA NY IL MA' )
    makeMiniVersion( 'long', 'Results', 'AL AK AZ AR CA CO CT DE GA ID IL KS MA MN MO MT NJ NM NY ND OK TN UT WV' )
    makeMiniVersion( 'map', 'Results', 'AL AK AZ AR CA CO CT DE GA ID IL KS MA MN MO MT NJ NM NY ND OK TN UT WV' )
def makeMiniVersion( kind, title, statenames ):
writeMiniParty( kind, title, statenames, 'dem', 'clinton obama' )
writeMiniParty( kind, title, statenames,'gop' , 'huckabee mccain paul romney' )
def writeMiniParty( kind, title, statenames, partyname, names ):
text = makeMiniParty( kind, title, statenames, partyname, names )
write( 'miniresults/miniresults-%s-%s.html' %( kind, partyname ), text )
def makeMiniParty( kind, title, statenames, partyname, names ):
statelist = statenames.split()
names = names.split()
style = 'font-weight:normal; background-color:#E0E0E0;'
head = [ '<th style="text-align:left; %s">State</th>' % style ]
for name in names:
head.append( T('''
<th style="%(style)s">
%(name)s
</th>
''', {
'name': candidates['byname'][name]['lastName'],
'style': style
} ) )
rows = []
for stateabbr in statelist:
if stateabbr == 'WV' and partyname == 'dem': continue
state = states.byAbbr[stateabbr]
cols = []
winner = { 'name': None, 'votes': 0 }
party = state['parties'][partyname]
if 'votes' not in party: continue
        votes = party['votes']
        total = 0  # ensure total is bound even if the feed lacks a total-d/total-r column
for name in votes:
if name == 'total-d' or name == 'total-r':
total = party['total'] = votes[name]
else:
vote = votes[name]
if vote > winner['votes']:
winner = { 'name': name, 'votes': vote }
precincts = party['precincts']
for name in names:
win = check = ''
if name == winner['name']:
if partyname == 'dem':
win = 'color:white; background-color:#3366CC;'
else:
win = 'color:white; background-color:#AA0031;'
if precincts['reporting'] == precincts['total']:
check = '<img src="http://googlemaps.github.io/js-v2-samples/elections/2008/images/checkmark.gif" style="width:7px; height:6px; margin:0 3px 2px 0" />'
if name in votes and total > 0:
percent = '%d%%' % percentage( float(votes[name]) / float(total) )
else:
percent = '--'
cols.append( T('''
<td style="width:%(width)s%%; text-align:center; %(win)s">
<div>
%(check)s
%(percent)s
</div>
</td>
''', {
'width': 80 / len(names),
'win': win,
'check': check,
'percent': percent
}) )
reporting = percentage( float(precincts['reporting']) / float(precincts['total']) )
rows.append( T('''
<tr style="background-color:#F1EFEF;">
<td style="width:20%%;">
<div>
<span>
%(state)s
</span>
<span style="font-size:11px; color:#666666;">
%(reporting)s%%
</span>
</div>
</td>
%(cols)s
</tr>
''', {
'state': stateabbr,
'reporting': reporting,
'cols': ''.join(cols)
}) )
if kind == 'short':
details = S('''
<a href="http://news.google.com/?ned=us&topic=el" target="_top" style="color:green;">
Full election coverage and results »
</a>
''')
else:
details = ''
if kind == 'map':
follow = '<span id="spanFollow" style="display:none;"><input type="checkbox" checked="checked" id="chkFollow" /><label for="chkFollow">Follow</label></span>'
viewmap = ''
else:
follow = ''
viewmap = S('''
<a href="http://maps.google.com/decision2008" target="_blank" style="color:green;">
View on a map »
</a>
''')
return T('''
<div style="font-family:arial,sans-serif; font-size:13px;">
<div style="margin-bottom:4px;">
<table style="width:100%%;">
<tbody>
<tr style="vertical-align: baseline;">
<td>
<div style="font-size:16px; font-weight:bold;">
%(title)s
</div>
</td>
<td style="text-align:center;">
<div style="font-size:13px;">
%(follow)s
</div>
</td>
<td style="text-align:right;">
<div style="font-size:13px;">
%(dem)s | %(gop)s
</div>
</td>
</tr>
</tbody>
</table>
</div>
<table style="width:100%%; font-size:13px;">
<thead>
%(head)s
</thead>
<tbody>
%(rows)s
</tbody>
</table>
<div>
%(details)s
%(viewmap)s
</div>
</div>
''', {
'title': title + ': ',
'follow': follow,
'dem': linkParty( 'dem', partyname ),
'gop': linkParty( 'gop', partyname ),
'head': ''.join(head),
'rows': ''.join(rows),
'details': details,
'viewmap': viewmap
})
def percentage( n ):
pct = int( round( 100.0 * float(n) ) )
if pct == 100 and n < 1: pct = 99
return pct
def write( name, text ):
#print 'Writing ' + name
f = open( name, 'w' )
f.write( text )
f.close()
def update():
print 'Retrieving data...'
fetchData()
print 'Parsing data...'
readVotes()
print 'Creating Miniresults HTML...'
makeMini()
print 'Checking in Miniresults HTML...'
os.system( 'svn ci -m "Miniresults update" miniresults/*' )
print 'Done!'
def main():
while 1:
update()
        print 'Waiting 10 minutes...'
time.sleep( 600 )
if __name__ == "__main__":
main()
A database snapshot is a very useful feature in SQL Server. This can be used to fulfill many requirements like point-in-time reporting requirements and reverting a database back to a point in time. In this tip I will explain how to revert a database snapshot to recover your database.
Database snapshots were introduced with SQL Server 2005 and this is still an Enterprise Edition-only feature. This feature addresses many requirements such as reporting, data safeguarding and point-in-time recovery if you do something wrong in your database, and it lets us decrease database recovery time by reverting to a database snapshot.
A database snapshot is basically a full point-in-time view of a source database. Because it is a point-in-time view, the snapshot data does not change as changes occur on the source database. So when you revert to a database snapshot, the database is recovered to the point in time when the snapshot was created.
As changes are made to the source database, a copy of the original source data page is moved to the database snapshot sparse file, so the snapshot remains intact. This way a database snapshot provides the correct point in time data. One sparse file is associated with one data file, so if your source database has multiple data files, then you will have to create a separate sparse file for each corresponding data file.
Below is an example to test this scenario. Suppose you create a database snapshot before running bulk DML operations on your database. If something goes wrong, you can revert to this database snapshot to recover your database to the state it was in before running the bulk DML.
There are some limitations when recovering using a database snapshot. When using a database snapshot to recover the source database, you must drop all database snapshots except the one you want to recover from.
Here is a step by step process to create a snapshot, delete some data and then recover the database using a database snapshot.
First we will check how many data files we have in our source database. Make sure to create a separate sparse file for each data file.
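A sketch of that check (the source database name KPO_Master is assumed here, matching the snapshot names used later in this tip):

USE KPO_Master;
GO
-- list the data files and note the database ID for later steps
SELECT DB_ID('KPO_Master') AS database_id, name AS logical_name, physical_name, type_desc
FROM sys.database_files;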
Since we only have one data file, we will create the database snapshot with one sparse file. The command below creates the database snapshot for the database.
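A minimal sketch of that command (the snapshot name, logical file name and sparse file path here are illustrative; NAME must match the logical name of the source data file found in the previous step):

-- create the snapshot with one sparse (.ss) file for the single data file
CREATE DATABASE KPO_Master_DATABASESNAPSHOT_2012060211AM
ON ( NAME = KPO_Master, FILENAME = 'C:\MSSQL\Data\KPO_Master_2012060211AM.ss' )
AS SNAPSHOT OF KPO_Master;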
Here we can see we have 5 rows in this table.
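That check is a plain SELECT (the dbo schema is assumed for the table):

SELECT * FROM KPO_Master.dbo.tbladmin;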
Now I will delete all rows from a table, so that we can revert the database snapshot to recover the deleted rows.
Delete all rows from this table.
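For example:

DELETE FROM KPO_Master.dbo.tbladmin;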
Let's again run a SELECT statement to see the table contents. We can see we have no data in this table.
As you can see, all rows of the tbladmin table have been deleted from the source database by the query above. Now you can run the same SELECT statement on the database snapshot. Here you can see that all of the data which we deleted in the source database still exists. This is what we will recover using the database snapshot.
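A sketch of both queries, using the illustrative snapshot name from above:

-- the source table is now empty
SELECT * FROM KPO_Master.dbo.tbladmin;
-- the snapshot still returns the original 5 rows
SELECT * FROM KPO_Master_DATABASESNAPSHOT_2012060211AM.dbo.tbladmin;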
If you have multiple database snapshots created then you must first drop all unwanted database snapshots before reverting it to the source database. To test this step, create another database snapshot using Step 2. I have created a separate database snapshot at 12 PM named "KPO_Master_DATABASESNAPSHOT_2012060212PM".
Now suppose you have multiple database snapshots from multiple source databases on your SQL Server Instance. First you have to find the correct database snapshots for your source database. As you can see the database ID is 45 for our source database which we can get in Step 1. Run the below command to get a list of all database snapshots for your source database.
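The query can be written against sys.databases (45 is the source database ID noted in the first step):

SELECT name, database_id, source_database_id, create_date
FROM sys.databases
WHERE source_database_id = 45;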
Here you can see that two database snapshots exist for our source database. We must drop the unwanted database snapshot, because we have to recover our database from the first snapshot, which was created before all rows were deleted from the table above. To drop a database snapshot, run a normal DROP DATABASE statement using the database snapshot name, as shown below.
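For example, dropping the unwanted 12 PM snapshot:

DROP DATABASE KPO_Master_DATABASESNAPSHOT_2012060212PM;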
The final step is to restore the database snapshot to our source database to recover all data. It is a simple RESTORE statement using the DATABASE_SNAPSHOT option.
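A minimal sketch, again using the illustrative snapshot name from the creation step:

RESTORE DATABASE KPO_Master
FROM DATABASE_SNAPSHOT = 'KPO_Master_DATABASESNAPSHOT_2012060211AM';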
Now you can run the SELECT statement again to check that the data was recovered.
Remember, all data which was updated after the database snapshot creation will be lost if you restore a database using a snapshot.
For this example, we only had a few rows and we could have selected the data from the snapshot back into the source database, but if this occurred for multiple tables or a large amount of data, restoring a snapshot is a much simpler and cleaner approach.
Make it a best practice to create a database snapshot before running any bulk operations, to safeguard your database and reduce your recovery time.
Use database snapshots to run reports to reduce your source database load.
I really liked this article and tried and tested the database snapshot. What a neat tool. We currently run a system of live update time and I was always nervous about what we would do if our database crashed. Now, I have a tool in place to recover by scheduling a task of a database snapshot hourly!!
I need info on the below point. Please reply as soon as possible.
> Setting up a recovery mechanism if anything goes wrong.
#!python
# -*- coding: utf-8 -*-
"""File: pic_service_api.py
Description:
Online picture services
History:
0.1.2 Support tweetphoto.com (plixi.com)
0.1.0 The first version.
"""
__version__ = '0.1.2'
__author__ = 'SpaceLis'
from tcrawl.api import api_call, urlsplit
def get_twit_pic(**kargs):
"""Retrieve the picture from TwitPic"""
twitpage = api_call(*urlsplit(kargs['url'])).read()
anchor = '<img class="photo" id="photo-display" src="'
start = twitpage.index(anchor) + len(anchor)
end = twitpage.index('"', start)
imgurl = twitpage[start:end]
return api_call(*urlsplit(imgurl)).read()
def get_yfrog_pic(**kargs):
"""Retrieve the picture from YFrog
"""
host, path, secure = urlsplit(kargs['url'])
pic = api_call(host, path +':iphone', secure).read()
return pic
def get_twitgoo_pic(**kargs):
"""Retrieve the picture from TwitGoo
"""
host, path, secure = urlsplit(kargs['url'])
pic = api_call(host, path +'/img', secure).read()
return pic
def get_tweetphoto_pic(**kargs):
"""Retrieve the picture from TweetPhoto or Plixi.com
"""
pic_page = api_call(*urlsplit(kargs['url'])).read()
anchor = '" alt="" id="photo"'
end = pic_page.find(anchor)
start = pic_page.rfind('"', 0, end) + 1
imgurl = pic_page[start:end]
return api_call(*urlsplit(imgurl)).read()
# map of picture services supported by this crawling module
_SERVICEPROVIDERS = {'twitpic.com':get_twit_pic, \
'yfrog.com':get_yfrog_pic, \
'tweetphoto.com': get_tweetphoto_pic, \
'plixi.com': get_tweetphoto_pic}
def get_pic(**kargs):
""" Retrieving Pictures from the right site
"""
urlpart = kargs['url'].split('/')
pic_api = _SERVICEPROVIDERS[urlpart[2]]
return pic_api(**kargs)
def test():
    """A test
    """
    fout = open('test.jpg', 'wb')
    #fout.write(get_twitgoo_pic(url='http://twitgoo.com/216kxf'))
    # use write() instead of `print >>`: print would str() the bytes and append
    # a newline, corrupting the JPEG data
    fout.write(get_tweetphoto_pic(url='http://tweetphoto.com/36367177'))
    fout.close()
if __name__ == '__main__':
test()
Hailing from Chicago's Southside, Infinito and Thaione could very well be the next Windy City warriors to break through into the national consciousness. Though they've put out other projects individually, Low Income Housing is their debut as a group, highlighting their skills on the mic (mostly Infinito) and with the beats (mostly Thaione). Soulful, loop-driven production drives rhymes that are smart without being preachy, extolling the virtues of self-empowerment and real hip-hop. "Casual Liberation," "You Are Not It," and the horn and clap-driven "My Life Creation #63" are standouts, but the whole LP is sharp. Keep an eye on these cats.
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class SavedSearchApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_saved_search(self, **kwargs): # noqa: E501
"""Create a saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_saved_search(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_saved_search_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_saved_search_with_http_info(**kwargs) # noqa: E501
return data
def create_saved_search_with_http_info(self, **kwargs): # noqa: E501
"""Create a saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_saved_search_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_saved_search" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_saved_search(self, id, **kwargs): # noqa: E501
"""Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_entity_type_saved_searches(self, entitytype, **kwargs): # noqa: E501
"""Get all saved searches for a specific entity type for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_entity_type_saved_searches(entitytype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entitytype: (required)
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs) # noqa: E501
else:
(data) = self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs) # noqa: E501
return data
def get_all_entity_type_saved_searches_with_http_info(self, entitytype, **kwargs): # noqa: E501
"""Get all saved searches for a specific entity type for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_entity_type_saved_searches_with_http_info(entitytype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entitytype: (required)
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entitytype', 'offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_entity_type_saved_searches" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entitytype' is set
if ('entitytype' not in params or
params['entitytype'] is None):
raise ValueError("Missing the required parameter `entitytype` when calling `get_all_entity_type_saved_searches`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entitytype' in params:
path_params['entitytype'] = params['entitytype'] # noqa: E501
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/type/{entitytype}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_saved_searches(self, **kwargs): # noqa: E501
"""Get all saved searches for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_saved_searches(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_saved_searches_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_saved_searches_with_http_info(**kwargs) # noqa: E501
return data
def get_all_saved_searches_with_http_info(self, **kwargs): # noqa: E501
"""Get all saved searches for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_saved_searches_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_saved_searches" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_saved_search(self, id, **kwargs): # noqa: E501
"""Get a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def get_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_saved_search(self, id, **kwargs): # noqa: E501
"""Update a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def update_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Update a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
One hundred years ago a child born in England could expect to live to 55 if it was a girl, or 51 if it was a boy. A girl born in the UK in 2006-8 is expected to live, on average, to 81 and a boy to 77; this increase is due, in part, to modern medicines.
In the 21st century medicines are available to treat a wide variety of diseases, but most of them only treat the symptoms of the disease; they don't provide a long-term cure. Because infectious diseases are caused by pathogens that can be killed, medicines to treat infectious disease aim not only to make you feel better, but to actually cure the disease.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Paul Petring
from pprint import pprint
from lxml import etree
import hashlib
import requests
import json
from lxml import html
from datetime import datetime, timedelta
import pytz
class FritzBox:
"""
    allows interacting with FritzBox OS 6.30 - 6.60 using HTTP requests
"""
_url="" #URL of the FritzBox
_username="" #username of the FritzBox
_password="" #password of the FritzBox
_sid="" #current session identifier
_last_calls=[] #calls buffer
_last_phonebook_entries=[] #devices buffer
_last_devices=[] #devices buffer
_request_session = requests.Session() #request session object
_request_headers = { #default headers, feel free to modify these
'Referer': 'http://fritz.box/',
'Pragma' : 'no-cache',
'User-Agent': 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36',
'Accept-Language': 'en-US,en;q=0.8,de;q=0.6',
'Accept-Encoding': 'gzip, deflate, sdch',
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
}
def __init__(self,password="",url="http://fritz.box",username="",login=True):
"""
@param password of your fritz.box (no username support yet)
@param url of your fritzbox (defaults to "http://fritz.box")
"""
self._url = url
self._password = password
self._username = username
if(login):
self._sid = self.login()
if(self._url!="http://fritz.box"):
self._request_headers["Referer"] = self._url + "/"
def getSID(self):
""" returnes current SID status and challenge
required for logging into the FritzBox
"""
status_url = self._url + "/login_sid.lua"
r = self._request_session.get(status_url)
"""Expected response:
<?xml version="1.0" encoding="utf-8"?>
<SessionInfo>
<SID>0000000000000000</SID>
<Challenge>443a0e07</Challenge>
<BlockTime>0</BlockTime>
<Rights></Rights>
</SessionInfo>
"""
#Parsing XML
parser = etree.XMLParser(recover=True)
root = etree.fromstring(str(r.content), parser=parser)
ret_sid=root.find('SID').text
challenge=root.find('Challenge').text
return (ret_sid,challenge)
def login(self):
"""
        performs a login by fetching the FritzBox session challenge,
        hashing it in combination with the provided password and
        returning the newly obtained session identifier
"""
sid_status = self.getSID()
self._sid = sid_status[0]
challenge = sid_status[1]
if(sid_status[0]=="0000000000000000"): # login procedure required
#following the login javascript of OS 6.30+
#encoding it to utf-16E does the trick to get the correct hash
cp_str = challenge + "-" + self._password
md5_str = hashlib.md5(cp_str.encode("utf-16LE")).hexdigest()
response = challenge + "-" + md5_str
#preparing POST statement
post = "response="+response+"&lp=&username="+self._username
data = dict(response=response, lp='',username=self._username)
r = self._request_session.post(self._url, data=data, allow_redirects=True,headers=self._request_headers)
#extracting SID from response (mostly in the last few lines of response)
self._sid = r.content[r.content.find('"sid":'):]
self._sid = self._sid[8:self._sid.find("});")-2]
        return self._sid
def get_devices(self,device_type="active"):
"""
returns a list of the current home network devices as FritzBoxDevice objects
@device_type defaults to active devices, else returns inactive (passive) devices of home network
"""
data = dict(xhr=1, sid=self._sid, lang='en',page='netDev',type="cleanup")
r = self._request_session.post(self._url+"/data.lua", data=data, allow_redirects=True,headers=self._request_headers)
#r.content should contain valid json string with active and passive devices
parsed=json.loads(r.content)
ret_list=[]
if(device_type=="active"):
for active in parsed["data"]["active"]:
ret_list.append(FritzBoxDevice(active))
else:
for passive in parsed["data"]["passive"]:
ret_list.append(FritzBoxDevice(passive))
return ret_list
def get_foncalls(self):
"""
returns a list of last 400(?) fon calls as FritzBoxCall objects
"""
data = dict(sid=self._sid)
r = self._request_session.post(self._url+"/fon_num/foncalls_list.lua?sid="+self._sid+"&csv=", data=data, allow_redirects=True,headers=self._request_headers)
#r.content contains semicolon separated values, surrounding head and tail line
ret_list = []
for line in r.content.split('\n')[2:-1]:
ret_list.append(FritzBoxCall(line))
_last_calls = ret_list
return ret_list
def get_fonbook(self):
""" #downloading it from the button ended in timeout
data = dict(sid=self._sid,PhonebookId=0,PhonebookExportName="Telefonbuch",PhonebookExport="")
print data
r = self._request_session.post(self._url+"/cgi-bin/firmwarecfg", data=data, allow_redirects=True,headers=self._request_headers)
print(r.content)
"""
# as a workaround we parse the delivered table
data = dict(sid=self._sid,xhr=1,page="bookLi",no_sidrenew="",lang="en")
r = self._request_session.post(self._url+"/data.lua", data=data, allow_redirects=True,headers=self._request_headers)
tree = html.fromstring(r.content.decode('utf-8'))
tree_names = tree.xpath('//table[@id="uiInnerTable"]/tr')
ret_list = []
        for name_row in tree_names[:-1]:  # drop the trailing "no entries" row
entry = FritzBoxFonBookEntry( )
entry.name = ''.join(name_row.xpath('td[@class="tname"]/text()')).encode('utf-8')
entry.numbers = name_row.xpath('td[@class="tnum"]/text()') #string list!
entry.type = ''.join(name_row.xpath('td[@class="ttype"]/text()')).encode('utf-8')
entry.code = ''.join(name_row.xpath('td[@class="tcode"]/text()')).encode('utf-8')
entry.vanity = ''.join(name_row.xpath('td[@class="tvanity"]/text()')).encode('utf-8')
entry.imp = ''.join(name_row.xpath('td[@class="timp"]/text()')).encode('utf-8')
ret_list.append(entry)
self._last_phonebook_entries = ret_list
return ret_list
class FritzBoxFonBookEntry:
name = ""
numbers = []
type = ""
vanity = ""
code = ""
imp = ""
def __init__(self, name="", numbers="",type="",code="",vanity="",imp=""):
self.name=name
self.numbers=numbers
self.type=type
self.code=code
self.vanity=vanity
self.imp=imp
def __repr__(self): #debug purposes
return str(self.name) #+ " " +''.join(str(e) for e in self.numbers)
def __str__(self): #debug purposes
return str(self.name) #+ " " +''.join(str(e) for e in self.numbers)
class FritzBoxCall:
call_type="" #int
date="" #dateTime
caller_name="" #name of the caller set by FritzBox fon book
caller_number="" #number of the caller as string, as it can be anonymous
fon="" #name of the called internal device
number="" #number of the called internal devices
duration="" #duration as python timespan
UID="" #unique identifier of the call
def __init__(self,csv_line):
parts=csv_line.split(';')
self.call_type = int(parts[0])
parse_date = datetime.now(pytz.timezone('Europe/Berlin'))
tz = pytz.timezone('Europe/Berlin')
self.date = parse_date.strptime(parts[1] + " CET", "%d.%m.%y %H:%M %Z")
tzoffset = tz.utcoffset(self.date)
self.date = self.date-tzoffset
self.caller_name = parts[2]
self.caller_number = parts[3]
self.fon = parts[4]
self.number = parts[5]
t = datetime.strptime(parts[6],"%H:%M")
self.duration = timedelta(hours=t.hour, minutes=t.minute, seconds=t.second)
self.UID = self.get_UID()
def get_UID(self):
return hashlib.md5(self.date.isoformat()+self.caller_number).hexdigest()
def __repr__(self): #debug purposes
return str(self.date) + " " +self.caller_name + " " +self.caller_number + " " +str(self.duration)
def __str__(self): #debug purposes
return str(self.date) + " " +self.caller_name + " " +self.caller_number + " " +str(self.duration)
class FritzBoxDevice:
mac="" #mac adress as string
ipv6="" #ipv6 adress of the device as string
state="" # state as string
name="" # name as string
port="" # port as string
summarypropertie="" # summarypropertie as string (no typo! missing r is real)
classes="" # classes as string
url="" # url as string
type="" # type as string (lan|wlan etc)
ipv4="" # ipv4 as string
UID="" #UID as string
def __init__(self,parsed_json):
"""
expected parsed json and inits values as string
"""
self.mac=parsed_json["mac"]
self.ipv6=parsed_json["ipv6"]
self.UID=parsed_json["UID"]
self.state=parsed_json["state"]
self.port=parsed_json["port"]
self.name=parsed_json["name"]
self.summarypropertie=parsed_json["summarypropertie"]
self.classes=parsed_json["classes"]
self.url=parsed_json["url"]
self.type=parsed_json["type"]
self.ipv4=parsed_json["ipv4"]
self.UID=self.get_UID()
def get_UID(self):
if self.UID:
return self.UID
return str(self) #if vpn UID seems to be empty
def __repr__(self): #debug purposes
return self.UID + " " +self.ipv4 + " " +self.type + " " +self.name
def __str__(self): #debug purposes
return self.UID + " " +self.ipv4 + " " +self.type + " " +self.name
Wow the sweater is gorgeous!!! I love the color and the 3/4 sleeves too. It fits you so nicely but 1's and 0's!!!! I am impressed all around.
I can see why you're so pleased with this sweater...You've done a fantastic job!
I love the colour...mmm you may have given me the inspiration I need to try wool dying too!!
Wow!!! That's gorgeous, and size 1 and ZERO needles for a sweater??? You're my latest knitting hero!!
It looks so good on you, and those buttons compliment it perfectly! Lovely, beautiful and classic - what a great first sweater!
That is so gorgeous! Beautiful work (as usual)!
Oh, Shirley. That is absolutely gorgeous!
Wow, that sweater is freaking awesome! I love the fit. It's unbelievable that this is your first sweater ever. I'm so nervous about my first sweater; I don't think it will turn out half as professional looking as yours. Great job.
I cannot believe you knit that with your own two hands. wow. Wow. WOW.
This is stunning! The color is fabby! I have been wanting to dye for ages but have been too frightened. Is it really easy? The shorter sleeves totally set it off!
It's so nice to see you crafting and posting again! I think the knitted pizza slice really completes the set - so cute! Fun to see crochet and knit together, so few people are interested in both, it seems!
Your sweater is beautiful. I am currently knitting this pattern and I hope mine looks as great as yours when it's done.
#
import MySQLdb, os, re, json
from functools import *
from tableinfo import *
from sys import argv
from graph import *
from extra import *
from defines import *
import readline
from optparse import OptionParser
usage = """
Usage:
python3 relations.py --source=<database> [--options]
<source> format:
username:password@host[:port]/database
    python3 relations.py --source=root:root@localhost/mydb
"""
def fetch_database_info(extra_info, user, password, server, db):
"""
Fetch database info and mixin extra info from json config
"""
host = server
port = 3306
if ':' in server:
host, port = server.split(':')
port = int(port)
db = MySQLdb.connect(host=host, user=user, passwd=password, db=db, port=port, charset="utf8")
print("#Reading database scheme")
ct = db.cursor()
ct.execute("SHOW TABLES")
table_info_list = []
id_table_map = {} # Stores id-field names => tableInfo mapping
for (table,) in ct.fetchall():
ct.execute("SHOW FULL COLUMNS FROM " + table)
fields = ct.fetchall()
table_info = TableInfo(table, fields, extra_info)
id_fields = table_info.get_id_fields()
for id_field_name in id_fields:
if id_field_name not in id_table_map:
id_table_map[id_field_name] = [table_info]
else:
id_table_map[id_field_name].append(table_info)
table_info_list.append(table_info)
ct.close()
return table_info_list, id_table_map, db
def calc_tables_relations(tables, id_table_map):
"""
Calc the tables' relations
"""
for table in tables:
primary_key = table.primary_key[0]
if primary_key not in id_table_map:
continue
follower_tables = id_table_map[primary_key]
for follower_table in follower_tables:
table.add_follower_table(follower_table)
def update_logic_foreign_key(table_info_list, table_info, uncertain_id, keys, extra):
keys = keys.split(',')
for key in keys:
key = key.strip()
table_name, field_name = key.split(".")
if table_name not in map(lambda x: x.table_name, table_info_list):
raise Exception("Table `%s` not found" % red_text(table_name))
this_table_info = list(filter(lambda x: x.table_name==table_name, table_info_list))[0]
if field_name not in this_table_info.id_fields and field_name != this_table_info.primary_key[0]:
raise Exception("Field `%s`.`%s` not found" % (red_text(table_name), red_text(field_name)))
extra.set_virtual_foreign_key(table_info, uncertain_id, table_name, field_name)
extra.update_table_extra_info()
return True
def query_uncertain_id_fields(table_info_list, extra):
    """
    Interactively ask the user to resolve id fields that have no matching primary key
    """
for table_info in table_info_list:
id_fields = table_info.get_id_fields()
depends = table_info.depends
if len(id_fields) == len(depends):
continue
depends_ids = list(map(lambda x: x[0], depends.keys()))
uncertain_ids = list(set(id_fields) - set(depends_ids))
if len(uncertain_ids) == 0:
continue
index = 0
while index < len(uncertain_ids):
uncertain_id = uncertain_ids[index]
try:
print("Could you point out `%s`.`%s` corresponds to which primary key?"
% (green_text(table_info.table_name), green_text(uncertain_id)))
keys = input('')
if len(keys) > 0 and '.' in keys:
if update_logic_foreign_key(table_info_list, table_info, uncertain_id, keys, extra):
index += 1
elif keys == 'i':
# Ignore it this time
index += 1
elif keys == 'n':
# It's not an Id.
index += 1
elif keys == 'e':
# The fields means an id from extra system
extra.set_virtual_foreign_key(table_info, uncertain_id, '', '')
extra.update_table_extra_info()
index += 1
except Exception as e:
print(e)
# show all tables' followers and depends
def print_relations(results):
for table in results:
print(table)
for f in table.followers:
print("\t", f)
# print("\t", '-' * 30)
# for d in table.depends:
# print("\t", d)
print("=" * 40, end='\n\n')
def init_graph_from_relations(results):
graph = Graph()
for table in results:
graph.add_vertex(table.table_name, table)
for table in results:
for follow in table.followers:
graph.add_edge(table.table_name, follow.table_name)
return graph
def plot(graph, filename="social_network.png"):
from igraph import plot
layout = graph.layout("circle")
visual_style = dict()
visual_style["vertex_size"] = 20
visual_style["vertex_label_size"] = 30
visual_style["vertex_label_dist"] = 2
visual_style["vertex_color"] = "white"
visual_style["vertex_label_color"] = "blue"
visual_style["vertex_label"] = graph.vs["name"]
visual_style["edge_width"] = 2
visual_style["layout"] = layout
visual_style["bbox"] = (1200, 1000)
visual_style["margin"] = 100
plot(graph, filename, **visual_style)
def calc_database_table_relations(db_args):
extra = ExtraTableInfo(db_args[3])
extra_info = extra.load_table_extra_info()
table_info_list, id_table_map, db = fetch_database_info(extra_info, *db_args)
calc_tables_relations(table_info_list, id_table_map)
return table_info_list, extra
def main(options, other_args):
# For local test
u = re.compile("(.*):(.*)@(.*)/(.*)")
a = u.match(options.source)
db_args = a.groups()
table_info_list, extra = calc_database_table_relations(db_args)
print("Press [i] to ignore this time, [n] means not an id(key), [e] means an id from an external system.")
print("")
try:
query_uncertain_id_fields(table_info_list, extra)
except KeyboardInterrupt as e:
print('Ignore all uncertain foreign keys')
table_info_list, extra = calc_database_table_relations(db_args)
    # build the graph if either option needs it (previously --way without --graph
    # raised a NameError because the graph was never created)
    if options.graph or options.way:
        graph = init_graph_from_relations(table_info_list)
    if options.graph:
        plot(graph, options.graph)
    if options.way:
        begin_point, end_point = options.way.split(',')
        paths = graph.all_paths(begin_point, end_point)
        count = 1
        for path in paths:
            print('-' * 5, "Way %d" % count, '-' * 5)
            graph.prints(path)
            count += 1
#
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-s", "--source", action="store", dest="source", help="Provide source database")
parser.add_option("-g", "--graph", action="store", dest="graph", help="Render the relations in a graph")
parser.add_option("-w", "--way", action="store", dest="way", help="Provide a way from a begin point to the end point")
options, args = parser.parse_args()
main(options, argv[2:])
Stop your bin going walkabout with these Mickey Mouse Clubhouse Wheelie Self Adhesive Bin Numbers! Fully water and weatherproof, each self-adhesive sticker features a Clubhouse character!
Fed up of your wheelie bin going walkabout on collection day? Never worry about losing your wheelie bin again with these great Disney Mickey Mouse Clubhouse Wheelie Bin Stickers, featuring Mickey, Minnie and all their Clubhouse friends! Weatherproof and waterproof, each sticker features one of the Clubhouse characters: either Mickey, Minnie, Donald Duck, Daisy Duck or Goofy. Measuring 17cm high, each number features a quality adhesive that sticks to any clean and dry surface. Printed on to self-adhesive waterproof and weatherproof vinyl, they are designed to withstand whatever the weather throws at them, keeping your bin looking fresh and fun year after year.
Great for identifying your bin from your neighbour's bin, the numbers are manufactured here in the UK, using the highest quality waterproof vinyl. Quick and easy to apply, they will rejuvenate your wheelie bin in seconds and will make sure it never goes walkabout again!
from datetime import datetime
import json
from os.path import isfile
def output_json(obj):
if isinstance(obj, datetime):
if obj.utcoffset() is not None:
obj = obj - obj.utcoffset()
return obj.strftime('%Y-%m-%d %H:%M:%S')
return str(obj)
def input_json(obj):
new_dic = {}
for key in obj:
try:
if float(key) == int(float(key)):
new_key = int(key)
else:
new_key = float(key)
new_dic[new_key] = obj[key]
continue
except ValueError:
pass
try:
new_dic[str(key)] = datetime.strptime(obj[key], '%Y-%m-%d %H:%M:%S')
continue
except (TypeError, ValueError):
pass
new_dic[str(key)] = obj[key]
return new_dic
def load_saved(file):
    if isfile(file):
        try:
            # use a context manager so the file handle is closed after loading
            with open(file, 'r') as f:
                saved = json.load(f, object_hook=input_json)
        except ValueError:
            saved = []
    else:
        saved = []
ids = set()
for suchar in saved:
ids.add(suchar['id'])
return saved, ids
def convert_to_date_time(date):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a datetime"""
year = int(date[:4])
month = int(date[5:7])
day = int(date[8:10])
hour = int(date[11:13])
minute = int(date[14:16])
second = int(date[17:19])
suchar_date = datetime(year, month, day, hour, minute, second)
return suchar_date
def create_suchar_to_save(id, date, votes, body):
dst = {'id': id, 'date': date, 'votes': votes, 'body': body}
    return dst
Are you looking for Massage Therapy Schools in Poindexter, Kentucky? Simply enter your zip code to see what schools are available near you. You can select free information from multiple schools, so you can compare later on to decide which ones are right for you.
You decided that you really hate your current career and that you want to go back to school near Poindexter, KY to become a massage therapist. Where do you learn about this new and exciting career you want to embrace? And, how hard could it really be, you are just giving a massage. Before getting started, you will need to talk to other therapists or do some internet research on the requirements of this profession.
Now, you have to be realistic about this. There will always be fly-by-night organizations ready to take your money and to give you a piece of paper certifying that you are completely trained. You should remember that you can not be certified overnight and as with any type of training, you need to make sure that the massage therapy school you select is accredited near Poindexter or Kentucky where you plan to work.
There are more than 80 different types of massage-some of them include the Swedish massage, deep tissue massage, reflexology, acupressure, sports massage, and neuromuscular massage. Massage schools will teach you the basics of these and other types of massage, but individuals usually focus on 3-5 specific types and primarily give that variety. You do not have to determine your specialty up-front, but getting an education as a massage therapist is a good way to begin and to set the groundwork for your new career in Poindexter, KY. After your initial training, you can expand from your primary therapy training and add new varieties to your repertoire.
You can even locate web sites that offer training as a massage therapist over the internet and they tout that you can learn your skills and become certified using them as your instructor or massage therapy school in Poindexter, KY. Is this possible? Well, maybe. Again, check what the requirements are in Kentucky and then follow their guidance.
If there are several reputable massage therapy schools near Poindexter, KY and you cannot figure out which one to choose, ask other licensed therapists. They should be able to provide you with enough information to help you narrow down the possibilities. Once you narrow your search, check with the Better Business Bureau about the schools; they keep a record of people's complaints concerning businesses.
Poindexter massage therapy schools are great to attend because they prepare you not only for the field but also for how to get a job. Most schools that specialize in something like massage therapy have school hours similar to high school.
Many people who live in small towns will open their own business because of the low competition in Poindexter; however, sometimes a student will join a spa or a similar business even though there is a lot of competition in the area. When it comes to working for yourself, you will probably want to further your education and obtain a degree in accounting or business management.
When you are planning to work for someone, you need to take the time to write a better resume than anyone else who may apply. Think of your fellow students as competition for jobs once you get out in the field. While you are attending a massage therapy school near Poindexter, KY, you need to look at your fellow classmates and identify their pros and cons. If you see someone who is good at doing a hot stone massage, but you are better at another type of special massage, try to work on your hot stone techniques. By looking at your fellow classmates, you can prepare yourself to be the one who gets hired. In addition, it will make you better in all areas of massage therapy.
If you are still not sure what you want to do with your life after high school in Poindexter, or just in general, consider looking into massage therapy if you find spa services interesting. When you attend massage therapy school, you will learn about pressure points and the many different types of massage. You will be able to try the different types yourself, because you will usually partner with a classmate and you will practice on and rate each other.
Massage Therapy Schools near Poindexter, KY reviewed by Melissa Taku. |
# -*- coding: utf-8 -*-
#
# Copyright 2009 - 2013 Clark Williams <[email protected]>
# Copyright 2009 - 2013 David Sommerseth <[email protected]>
# Copyright 2012 - 2013 Raphaël Beamonte <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# For the avoidance of doubt the "preferred form" of this code is one which
# is in an open unpatent encumbered format. Where cryptographic key signing
# forms part of the process of creating an executable the information
# including keys needed to generate an equivalently functional executable
# are deemed to be part of the source code.
#
import sys, subprocess, os, glob, fnmatch, libxml2
from rteval.sysinfo.tools import getcmdpath
from rteval.Log import Log
class SystemServices(object):
def __init__(self, logger=None):
self.__logger = logger
self.__init = "unknown"
def __log(self, logtype, msg):
if self.__logger:
self.__logger.log(logtype, msg)
def __get_services_sysvinit(self):
reject = ('functions', 'halt', 'killall', 'single', 'linuxconf', 'kudzu',
'skeleton', 'README', '*.dpkg-dist', '*.dpkg-old', 'rc', 'rcS',
'single', 'reboot', 'bootclean.sh')
        servicesdir = None
        for sdir in ('/etc/init.d', '/etc/rc.d/init.d'):
if os.path.isdir(sdir):
servicesdir = sdir
break
if not servicesdir:
raise RuntimeError, "No services dir (init.d) found on your system"
self.__log(Log.DEBUG, "Services located in %s, going through each service file to check status" % servicesdir)
ret_services = {}
for service in glob.glob(os.path.join(servicesdir, '*')):
servicename = os.path.basename(service)
if not [1 for p in reject if fnmatch.fnmatch(servicename, p)] and os.access(service, os.X_OK):
cmd = '%s -qs "\(^\|\W\)status)" %s' % (getcmdpath('grep'), service)
c = subprocess.Popen(cmd, shell=True)
c.wait()
if c.returncode == 0:
cmd = ['env', '-i', 'LANG="%s"' % os.environ['LANG'], 'PATH="%s"' % os.environ['PATH'], 'TERM="%s"' % os.environ['TERM'], service, 'status']
c = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
c.wait()
if c.returncode == 0 and (c.stdout.read() or c.stderr.read()):
ret_services[servicename] = 'running'
else:
ret_services[servicename] = 'not running'
else:
ret_services[servicename] = 'unknown'
return ret_services
def __get_services_systemd(self):
ret_services = {}
cmd = '%s list-unit-files -t service --no-legend' % getcmdpath('systemctl')
self.__log(Log.DEBUG, "cmd: %s" % cmd)
c = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for p in c.stdout:
# p are lines like "servicename.service status"
v = p.strip().split()
ret_services[v[0].split('.')[0]] = v[1]
return ret_services
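    # Example (illustrative): a line such as "sshd.service enabled" from
    # `systemctl list-unit-files -t service --no-legend` yields
    # ret_services['sshd'] == 'enabled'.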
def services_get(self):
cmd = [getcmdpath('ps'), '-ocomm=', '1']
c = subprocess.Popen(cmd, stdout=subprocess.PIPE)
self.__init = c.stdout.read().strip()
if self.__init == 'systemd':
self.__log(Log.DEBUG, "Using systemd to get services status")
return self.__get_services_systemd()
elif self.__init == 'init':
self.__init = 'sysvinit'
self.__log(Log.DEBUG, "Using sysvinit to get services status")
return self.__get_services_sysvinit()
else:
raise RuntimeError, "Unknown init system (%s)" % self.__init
return {}
def MakeReport(self):
srvs = self.services_get()
rep_n = libxml2.newNode("Services")
rep_n.newProp("init", self.__init)
for s in srvs:
srv_n = libxml2.newNode("Service")
srv_n.newProp("state", srvs[s])
srv_n.addContent(s)
rep_n.addChild(srv_n)
return rep_n
def unit_test(rootdir):
from pprint import pprint
try:
syssrv = SystemServices()
pprint(syssrv.services_get())
srv_xml = syssrv.MakeReport()
xml_d = libxml2.newDoc("1.0")
xml_d.setRootElement(srv_xml)
xml_d.saveFormatFileEnc("-", "UTF-8", 1)
return 0
except Exception, e:
print "** EXCEPTION: %s" % str(e)
return 1
if __name__ == '__main__':
sys.exit(unit_test(None))
|
On Tuesday, April 4th, Dane County voters will go to the polls to elect a new Circuit Court Judge. As the polls close, the campaign will gather with the friends and supporters who have made an incredible effort to build a grassroots movement to elect Marilyn Townsend, a labor and civil rights attorney and Municipal Court Judge, to the Dane County bench. All are invited.
#! /usr/bin/env python
#-----------------------------------------------------------------------
# COPYRIGHT_BEGIN
# Copyright (C) 2017, FixFlyer, LLC.
# All rights reserved.
# COPYRIGHT_END
#-----------------------------------------------------------------------
"""Flyer remote protocol definitions."""
# Carriage return + linefeed, used in properties serialisation.
FLYER_CRLF = "\r\n"
# SOH, used as field separator in FIX-style serialisation.
FLYER_SOH = "\x01"
# End-of-message marker for Flyer protocol messages.
FLYER_EOF = "EOF"
PAYLOAD_EVENT_ID = 0
RESEND_EVENT_ID = 104
SESSION_LOGON_EVENT_ID = 105
SESSION_LOGOUT_EVENT_ID = 106
RESTORE_EVENT_ID = 111
LOGON_RESPONSE_EVENT_ID = 200
HEARTBEAT_EVENT_ID = 201
HEARTBEAT_ACK_EVENT_ID = 202
LOGON_REQUEST_EVENT_ID = 203
LOGOUT_REQUEST_EVENT_ID = 204
ERROR_EVENT_ID = 301
HEARTBEAT_MESSAGE_TYPE = 0
PAYLOAD_MESSAGE_TYPE = 1
COMMON_MESSAGE_TYPE = 2
HEARTBEAT_ACK_MESSAGE_TYPE = 3
COMMIT_MESSAGE_TYPE = 4
FLYER_MESSAGE_TYPE_TAG = 50001
FLYER_REQUEST_ID_TAG = 50002
FLYER_FIX_MESSAGE_TYPE_TAG = 50003
FLYER_MESSAGE_SEQUENCE_NUMBER_TAG = 50004
FLYER_SENDER_COMP_ID_TAG = 50005
FLYER_TARGET_COMP_ID_TAG = 50006
FLYER_MESSAGE_TAG = 50007
FLYER_POSS_DUP_TAG = 50008
FLYER_POSS_RESEND_TAG = 50009
FLYER_LAST_APP_SEQ_NO_TAG = 50010
FLYER_EVENT_TYPE_TAG = 50011
FLYER_BEGIN_STRING_TAG = 50012
FLYER_SERIAL_EVENT_DATA_TAG = 50013
FLYER_ROOT_FIELD_TAG = 50014
FLYER_EOF_TAG = 50015
FLYER_CLIENT_MESSAGE_ID_TAG = 50016
FLYER_LAST_OUTGOING_MSG_SEQ_NUM_TAG = 50017
FLYER_APPLVER_ID_TAG = 50018
FLYER_CUSTOMER_APPLVER_ID_TAG = 50019
FLYER_SESSION_QUALIFIER_TAG = 50020
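# As a rough sketch of how these definitions combine, the hypothetical helper
# below serialises an event as FIX-style tag=value pairs joined by SOH and
# terminated with the EOF marker. The actual Flyer framing is not defined in
# this file, so treat this as illustrative only.
def encode_event(fields):
    """Join integer-tag/string-value pairs with SOH and append the EOF marker."""
    body = FLYER_SOH.join("%d=%s" % (tag, value)
                          for tag, value in sorted(fields.items()))
    return body + FLYER_SOH + FLYER_EOF

# Example: a heartbeat frame with an illustrative request id.
#   encode_event({FLYER_MESSAGE_TYPE_TAG: str(HEARTBEAT_MESSAGE_TYPE),
#                 FLYER_REQUEST_ID_TAG: "42"})
#   -> '50001=0\x0150002=42\x01EOF'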
|
The Internet Services Providers’ Association (ISPA UK) has announced the finalists for the 2016 annual awards, the longest-running and most prestigious Internet industry awards in the UK. Fifty companies have made the shortlist across fifteen categories and the winners will be announced at the gala ceremony on 7 July.
Luminet have been nominated in the Best Wireless category.
The ISPA Awards recognise ISPs of all shapes and sizes, and the shortlist highlights the true breadth of the industry and the services that keep the UK at the leading edge of the digital economy. Over two million tests were carried out by thinkbroadband and Malden Electronics to identify the shortlists for the broadband, hosting and VoIP categories, and this testing, along with the expert judging panel, keeps the ISPAs truly independent.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.engine.resources import signal_responder
from heat.engine.resources import wait_condition as wc_base
from heat.engine import support
class WaitConditionHandle(wc_base.BaseWaitConditionHandle):
"""AWS WaitConditionHandle resource.
the main point of this class is to :
have no dependencies (so the instance can reference it)
generate a unique url (to be returned in the reference)
then the cfn-signal will use this url to post to and
WaitCondition will poll it to see if has been written to.
"""
support_status = support.SupportStatus(version='2014.1')
METADATA_KEYS = (
DATA, REASON, STATUS, UNIQUE_ID
) = (
'Data', 'Reason', 'Status', 'UniqueId'
)
def get_reference_id(self):
if self.resource_id:
wc = signal_responder.WAITCONDITION
return six.text_type(self._get_ec2_signed_url(signal_type=wc))
else:
return six.text_type(self.name)
def metadata_update(self, new_metadata=None):
"""DEPRECATED. Should use handle_signal instead."""
self.handle_signal(details=new_metadata)
def handle_signal(self, details=None):
"""Validate and update the resource metadata.
metadata must use the following format:
{
"Status" : "Status (must be SUCCESS or FAILURE)",
"UniqueId" : "Some ID, should be unique for Count>1",
"Data" : "Arbitrary Data",
"Reason" : "Reason String"
}
"""
if details is None:
return
return super(WaitConditionHandle, self).handle_signal(details)
def resource_mapping():
return {
'AWS::CloudFormation::WaitConditionHandle': WaitConditionHandle,
}
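# For reference, a signal payload matching the metadata format documented in
# handle_signal() might look like this (all values illustrative):
#
#   {"Status": "SUCCESS",
#    "UniqueId": "instance-0",
#    "Data": "Application configured",
#    "Reason": "Configuration complete"}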
|
Raising Baitys | A Blog About Life in the Baity Family: earth day is everyday!
so yesterday was earth day....something i am conscious of on a daily basis. although i had to work alllll day and all night yesterday i made my earth day impact on every person i talked to....well at least i like to think that i did.
after i got my son off to school in his earth day shirt i made some cupcakes. i baked them in recycled wrappers. i also made little "earths" on top of them using white chocolate that i melted, colored blue, and let set. i then used a buttercream icing recipe that i love because it's so simple and delicious.
i encouraged everyone i worked with to go buy flowers or plants (we always do a tree and this year my son wanted a golden delicious apple tree so my mother helped him plant it yesterday since i was gone all day). several of the people i talked to said they were going to do it so i hope they did.
well that's the extent of my earth day blog. since i was not able to be more active myself i used my powerful tool of persuasion to get people to be more aware of the earth. i told them to try and be more conscious of the daily choices they make that could easily be changed without effort to be more eco friendly!
from model.group import Group
import random
import pytest
def test_modify_group_name(app, db, check_ui):
if app.group.count() == 0:
with pytest.allure.step("Create new group if group list is empty"):
app.group.create(Group(name="Test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("choose random group"):
group = random.choice(old_groups)
with pytest.allure.step("modify random group"):
group.name = "new name"
app.group.modify_group_by_id(group.id, group)
with pytest.allure.step("Get group list again"):
new_groups = db.get_group_list()
with pytest.allure.step("Compare length of old list with length of new list "):
assert len(old_groups) == len(new_groups)
if check_ui:
app_groups = app.group.get_group_list()
for new_group in new_groups:
for app_group in app_groups:
if new_group.id == app_group.id:
if new_group.id == group.id:
assert new_group.name == group.name
else:
assert new_group.name == app_group.name
break
#def test_modify_group_header(app):
# if app.group.count() == 0:
# app.group.create(Group(name="Test"))
# old_groups = app.group.get_group_list()
# app.group.modify_first_group(Group(header="new header"))
# new_groups = app.group.get_group_list()
# assert len(old_groups) == len(new_groups)
# assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
|
Planica - Simon Ammann leads at the halfway point of the FIS Ski Flying World Championships in Planica. After two rounds he leads ahead of Adam Malysz and Gregor Schlierenzauer. Malysz is only 2.8 points behind the Swiss, while Schlierenzauer is already about 20 points behind Ammann.
import time
import decimal
import sys
import json
import logging
import apsw
import collections
import inspect
import requests
from datetime import datetime
from dateutil.tz import tzlocal
from operator import itemgetter
import fractions
import warnings
import binascii
import hashlib
from . import (config, exceptions)
D = decimal.Decimal
b26_digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
dhash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
# Obsolete in Python 3.4, with enum module.
BET_TYPE_NAME = {0: 'BullCFD', 1: 'BearCFD', 2: 'Equal', 3: 'NotEqual'}
BET_TYPE_ID = {'BullCFD': 0, 'BearCFD': 1, 'Equal': 2, 'NotEqual': 3}
BLOCK_LEDGER = []
# TODO: This doesn’t timeout properly. (If server hangs, then unhangs, no result.)
def api (method, params):
headers = {'content-type': 'application/json'}
payload = {
"method": method,
"params": params,
"jsonrpc": "2.0",
"id": 0,
}
response = requests.post(config.RPC, data=json.dumps(payload), headers=headers)
if response == None:
raise exceptions.RPCError('Cannot communicate with {} server.'.format(config.XCP_CLIENT))
elif response.status_code != 200:
if response.status_code == 500:
raise exceptions.RPCError('Malformed API call.')
else:
raise exceptions.RPCError(str(response.status_code) + ' ' + response.reason)
response_json = response.json()
if 'error' not in response_json.keys() or response_json['error'] == None:
try:
return response_json['result']
except KeyError:
raise exceptions.RPCError(response_json)
else:
raise exceptions.RPCError('{}'.format(response_json['error']))
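# Example (illustrative): issue a JSON-RPC 2.0 call through api(); the method
# name and parameters depend on the server's API, e.g.
#   info = api('get_running_info', {})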
def price (numerator, denominator, block_index):
if block_index >= 294500 or config.TESTNET: # Protocol change.
return fractions.Fraction(numerator, denominator)
else:
numerator = D(numerator)
denominator = D(denominator)
return D(numerator / denominator)
def log (db, command, category, bindings):
cursor = db.cursor()
# Slow?!
def output (quantity, asset):
try:
if asset not in ('fraction', 'leverage'):
return str(devise(db, quantity, asset, 'output')) + ' ' + asset
else:
return str(devise(db, quantity, asset, 'output'))
except exceptions.AssetError:
return '<AssetError>'
except decimal.DivisionByZero:
return '<DivisionByZero>'
if command == 'update':
if category == 'order':
logging.debug('Database: set status of order {} to {}.'.format(bindings['tx_hash'], bindings['status']))
elif category == 'bet':
logging.debug('Database: set status of bet {} to {}.'.format(bindings['tx_hash'], bindings['status']))
elif category == 'order_matches':
logging.debug('Database: set status of order_match {} to {}.'.format(bindings['order_match_id'], bindings['status']))
elif category == 'bet_matches':
logging.debug('Database: set status of bet_match {} to {}.'.format(bindings['bet_match_id'], bindings['status']))
# TODO: elif category == 'balances':
# logging.debug('Database: set balance of {} in {} to {}.'.format(bindings['address'], bindings['asset'], output(bindings['quantity'], bindings['asset']).split(' ')[0]))
elif command == 'insert':
if category == 'credits':
logging.debug('Credit: {} to {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event']))
elif category == 'debits':
logging.debug('Debit: {} from {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event']))
elif category == 'sends':
logging.info('Send: {} from {} to {} ({}) [{}]'.format(output(bindings['quantity'], bindings['asset']), bindings['source'], bindings['destination'], bindings['tx_hash'], bindings['status']))
elif category == 'orders':
logging.info('Order: {} ordered {} for {} in {} blocks, with a provided fee of {} {} and a required fee of {} {} ({}) [{}]'.format(bindings['source'], output(bindings['give_quantity'], bindings['give_asset']), output(bindings['get_quantity'], bindings['get_asset']), bindings['expiration'], bindings['fee_provided'] / config.UNIT, config.BTC, bindings['fee_required'] / config.UNIT, config.BTC, bindings['tx_hash'], bindings['status']))
elif category == 'order_matches':
logging.info('Order Match: {} for {} ({}) [{}]'.format(output(bindings['forward_quantity'], bindings['forward_asset']), output(bindings['backward_quantity'], bindings['backward_asset']), bindings['id'], bindings['status']))
elif category == 'btcpays':
logging.info('{} Payment: {} paid {} to {} for order match {} ({}) [{}]'.format(config.BTC, bindings['source'], output(bindings['btc_amount'], config.BTC), bindings['destination'], bindings['order_match_id'], bindings['tx_hash'], bindings['status']))
elif category == 'issuances':
if bindings['transfer']:
logging.info('Issuance: {} transfered asset {} to {} ({}) [{}]'.format(bindings['source'], bindings['asset'], bindings['issuer'], bindings['tx_hash'], bindings['status']))
elif bindings['locked']:
logging.info('Issuance: {} locked asset {} ({}) [{}]'.format(bindings['issuer'], bindings['asset'], bindings['tx_hash'], bindings['status']))
else:
if bindings['divisible']:
divisibility = 'divisible'
unit = config.UNIT
else:
divisibility = 'indivisible'
unit = 1
if bindings['callable'] and (bindings['block_index'] > 283271 or config.TESTNET): # Protocol change.
callability = 'callable from {} for {} XCP/{}'.format(isodt(bindings['call_date']), bindings['call_price'], bindings['asset'])
else:
callability = 'uncallable'
try:
quantity = devise(db, bindings['quantity'], None, dest='output', divisible=bindings['divisible'])
except Exception as e:
quantity = '?'
logging.info('Issuance: {} created {} of asset {}, which is {} and {}, with description ‘{}’ ({}) [{}]'.format(bindings['issuer'], quantity, bindings['asset'], divisibility, callability, bindings['description'], bindings['tx_hash'], bindings['status']))
elif category == 'broadcasts':
if bindings['locked']:
logging.info('Broadcast: {} locked his feed ({}) [{}]'.format(bindings['source'], bindings['tx_hash'], bindings['status']))
else:
if not bindings['value']: infix = '‘{}’'.format(bindings['text'])
else: infix = '‘{}’ = {}'.format(bindings['text'], bindings['value'])
suffix = ' from ' + bindings['source'] + ' at ' + isodt(bindings['timestamp']) + ' with a fee of {}%'.format(output(D(bindings['fee_fraction_int'] / 1e8) * D(100), 'fraction')) + ' (' + bindings['tx_hash'] + ')' + ' [{}]'.format(bindings['status'])
logging.info('Broadcast: {}'.format(infix + suffix))
elif category == 'bets':
# Last text
broadcasts = list(cursor.execute('''SELECT * FROM broadcasts WHERE (status = ? AND source = ?) ORDER BY tx_index ASC''', ('valid', bindings['feed_address'])))
try:
last_broadcast = broadcasts[-1]
text = last_broadcast['text']
except IndexError:
text = '<Text>'
# Suffix
end = 'in {} blocks ({}) [{}]'.format(bindings['expiration'], bindings['tx_hash'], bindings['status'])
if 'CFD' not in BET_TYPE_NAME[bindings['bet_type']]:
log_message = 'Bet: {} against {}, by {}, on {} that ‘{}’ will {} {} at {}, {}'.format(output(bindings['wager_quantity'], config.XCP), output(bindings['counterwager_quantity'], config.XCP), bindings['source'], bindings['feed_address'], text, BET_TYPE_NAME[bindings['bet_type']], str(output(bindings['target_value'], 'value').split(' ')[0]), isodt(bindings['deadline']), end)
else:
log_message = 'Bet: {}, by {}, on {} for {} against {}, leveraged {}x, {}'.format(BET_TYPE_NAME[bindings['bet_type']], bindings['source'], bindings['feed_address'],output(bindings['wager_quantity'], config.XCP), output(bindings['counterwager_quantity'], config.XCP), output(bindings['leverage']/ 5040, 'leverage'), end)
logging.info(log_message)
elif category == 'bet_matches':
placeholder = ''
if bindings['target_value'] >= 0: # Only non‐negative values are valid.
placeholder = ' that ' + str(output(bindings['target_value'], 'value'))
if bindings['leverage']:
placeholder += ', leveraged {}x'.format(output(bindings['leverage'] / 5040, 'leverage'))
logging.info('Bet Match: {} for {} against {} for {} on {} at {}{} ({}) [{}]'.format(BET_TYPE_NAME[bindings['tx0_bet_type']], output(bindings['forward_quantity'], config.XCP), BET_TYPE_NAME[bindings['tx1_bet_type']], output(bindings['backward_quantity'], config.XCP), bindings['feed_address'], isodt(bindings['deadline']), placeholder, bindings['id'], bindings['status']))
elif category == 'dividends':
logging.info('Dividend: {} paid {} per unit of {} ({}) [{}]'.format(bindings['source'], output(bindings['quantity_per_unit'], bindings['dividend_asset']), bindings['asset'], bindings['tx_hash'], bindings['status']))
elif category == 'burns':
logging.info('Burn: {} burned {} for {} ({}) [{}]'.format(bindings['source'], output(bindings['burned'], config.BTC), output(bindings['earned'], config.XCP), bindings['tx_hash'], bindings['status']))
elif category == 'cancels':
logging.info('Cancel: {} ({}) [{}]'.format(bindings['offer_hash'], bindings['tx_hash'], bindings['status']))
elif category == 'callbacks':
logging.info('Callback: {} called back {}% of {} ({}) [{}]'.format(bindings['source'], float(D(bindings['fraction']) * D(100)), bindings['asset'], bindings['tx_hash'], bindings['status']))
elif category == 'rps':
log_message = 'RPS: {} opens game with {} possible moves and a wager of {}'.format(bindings['source'], bindings['possible_moves'], output(bindings['wager'], 'XCP'))
logging.info(log_message)
elif category == 'rps_matches':
log_message = 'RPS Match: {} is playing a {}-moves game with {} with a wager of {} ({}) [{}]'.format(bindings['tx0_address'], bindings['possible_moves'], bindings['tx1_address'], output(bindings['wager'], 'XCP'), bindings['id'], bindings['status'])
logging.info(log_message)
elif category == 'rpsresolves':
if bindings['status'] == 'valid':
rps_matches = list(cursor.execute('''SELECT * FROM rps_matches WHERE id = ?''', (bindings['rps_match_id'],)))
assert len(rps_matches) == 1
rps_match = rps_matches[0]
log_message = 'RPS Resolved: {} is playing {} on a {}-moves game with {} with a wager of {} ({}) [{}]'.format(rps_match['tx0_address'], bindings['move'], rps_match['possible_moves'], rps_match['tx1_address'], output(rps_match['wager'], 'XCP'), rps_match['id'], rps_match['status'])
else:
log_message = 'RPS Resolved: {} [{}]'.format(bindings['tx_hash'], bindings['status'])
logging.info(log_message)
elif category == 'order_expirations':
logging.info('Expired order: {}'.format(bindings['order_hash']))
elif category == 'order_match_expirations':
logging.info('Expired Order Match awaiting payment: {}'.format(bindings['order_match_id']))
elif category == 'bet_expirations':
logging.info('Expired bet: {}'.format(bindings['bet_hash']))
elif category == 'bet_match_expirations':
logging.info('Expired Bet Match: {}'.format(bindings['bet_match_id']))
elif category == 'bet_match_resolutions':
# DUPE
cfd_type_id = BET_TYPE_ID['BullCFD'] + BET_TYPE_ID['BearCFD']
equal_type_id = BET_TYPE_ID['Equal'] + BET_TYPE_ID['NotEqual']
if bindings['bet_match_type_id'] == cfd_type_id:
if bindings['settled']:
logging.info('Bet Match Settled: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
else:
logging.info('Bet Match Force‐Liquidated: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
elif bindings['bet_match_type_id'] == equal_type_id:
logging.info('Bet Match Settled: {} won the pot of {}; {} credited to the feed address ({})'.format(bindings['winner'], output(bindings['escrow_less_fee'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
elif category == 'rps_expirations':
logging.info('Expired RPS: {}'.format(bindings['rps_hash']))
elif category == 'rps_match_expirations':
logging.info('Expired RPS Match: {}'.format(bindings['rps_match_id']))
cursor.close()
def message (db, block_index, command, category, bindings, tx_hash=None):
cursor = db.cursor()
# Get last message index.
messages = list(cursor.execute('''SELECT * FROM messages
WHERE message_index = (SELECT MAX(message_index) from messages)'''))
if messages:
assert len(messages) == 1
message_index = messages[0]['message_index'] + 1
else:
message_index = 0
# Not to be misleading…
if block_index == config.MEMPOOL_BLOCK_INDEX:
try:
del bindings['status']
del bindings['block_index']
del bindings['tx_index']
except KeyError:
pass
bindings_string = json.dumps(collections.OrderedDict(sorted(bindings.items())))
cursor.execute('insert into messages values(:message_index, :block_index, :command, :category, :bindings, :timestamp)',
(message_index, block_index, command, category, bindings_string, curr_time()))
# Log only real transactions.
if block_index != config.MEMPOOL_BLOCK_INDEX:
log(db, command, category, bindings)
cursor.close()
def rowtracer(cursor, sql):
"""Converts fetched SQL data into dict-style"""
dictionary = {}
for index, (name, type_) in enumerate(cursor.getdescription()):
dictionary[name] = sql[index]
return dictionary
def exectracer(cursor, sql, bindings):
# This means that all changes to database must use a very simple syntax.
# TODO: Need sanity checks here.
sql = sql.lower()
# Parse SQL.
array = sql.split('(')[0].split(' ')
if 'insert' in sql:
command, category = array[0], array[2]
elif 'update' in sql:
command, category = array[0], array[1]
else:
return True
db = cursor.getconnection()
dictionary = {'command': command, 'category': category, 'bindings': bindings}
# Skip blocks, transactions.
if 'blocks' in sql or 'transactions' in sql: return True
# Record alteration in database.
if category not in ('balances', 'messages', 'mempool'):
if not (command in ('update') and category in ('orders', 'bets', 'rps', 'order_matches', 'bet_matches', 'rps_matches')): # List message manually.
message(db, bindings['block_index'], command, category, bindings)
return True
def connect_to_db(flags=None):
"""Connects to the SQLite database, returning a db Connection object"""
logging.debug('Status: Creating connection to `{}`.'.format(config.DATABASE.split('/').pop()))
if flags == None:
db = apsw.Connection(config.DATABASE)
elif flags == 'SQLITE_OPEN_READONLY':
db = apsw.Connection(config.DATABASE, flags=0x00000001)
else:
raise exceptions.DatabaseError
cursor = db.cursor()
# For speed.
cursor.execute('''PRAGMA count_changes = OFF''')
# For integrity, security.
cursor.execute('''PRAGMA foreign_keys = ON''')
cursor.execute('''PRAGMA defer_foreign_keys = ON''')
# So that writers don’t block readers.
if flags != 'SQLITE_OPEN_READONLY':
cursor.execute('''PRAGMA journal_mode = WAL''')
# Make case sensitive the LIKE operator.
# For insensitive queries use 'UPPER(fieldname) LIKE value.upper()''
cursor.execute('''PRAGMA case_sensitive_like = ON''')
rows = list(cursor.execute('''PRAGMA foreign_key_check'''))
if rows: raise exceptions.DatabaseError('Foreign key check failed.')
# Integrity check
integral = False
for i in range(10): # DUPE
try:
logging.debug('Status: Checking database integrity.')
cursor.execute('''PRAGMA integrity_check''')
rows = cursor.fetchall()
if not (len(rows) == 1 and rows[0][0] == 'ok'):
raise exceptions.DatabaseError('Integrity check failed.')
integral = True
break
except exceptions.DatabaseIntegrityError:
time.sleep(1)
continue
if not integral:
raise exceptions.DatabaseError('Could not perform integrity check.')
cursor.close()
db.setrowtrace(rowtracer)
db.setexectrace(exectracer)
return db
def version_check (db):
try:
host = 'https://counterpartyxcp.github.io/counterpartyd/version.json'
response = requests.get(host, headers={'cache-control': 'no-cache'})
versions = json.loads(response.text)
except Exception as e:
raise exceptions.VersionError('Unable to check version. How’s your Internet access?')
# Check client version.
passed = True
if config.VERSION_MAJOR < versions['minimum_version_major']:
passed = False
elif config.VERSION_MAJOR == versions['minimum_version_major']:
if config.VERSION_MINOR < versions['minimum_version_minor']:
passed = False
elif config.VERSION_MINOR == versions['minimum_version_minor']:
if config.VERSION_REVISION < versions['minimum_version_revision']:
passed = False
if not passed:
explanation = 'Your version of counterpartyd is v{}, but, as of block {}, the minimum version is v{}.{}.{}. Reason: ‘{}’. Please upgrade to the latest version and restart the server.'.format(
config.VERSION_STRING, versions['block_index'], versions['minimum_version_major'], versions['minimum_version_minor'],
versions['minimum_version_revision'], versions['reason'])
if last_block(db)['block_index'] >= versions['block_index']:
raise exceptions.VersionUpdateRequiredError(explanation)
else:
warnings.warn(explanation)
logging.debug('Status: Version check passed.')
return
def database_check (db, blockcount):
    """Check whether the database has caught up with Bitcoind."""
if last_block(db)['block_index'] + 1 < blockcount:
raise exceptions.DatabaseError('{} database is behind Bitcoind. Is the {} server running?'.format(config.XCP_NAME, config.XCP_CLIENT))
return
def isodt (epoch_time):
return datetime.fromtimestamp(epoch_time, tzlocal()).isoformat()
def curr_time():
return int(time.time())
def date_passed(date):
return date <= time.time()
def sortkeypicker(keynames):
"""http://stackoverflow.com/a/1143719"""
negate = set()
for i, k in enumerate(keynames):
if k[:1] == '-':
keynames[i] = k[1:]
negate.add(k[1:])
def getit(adict):
composite = [adict[k] for k in keynames]
for i, (k, v) in enumerate(zip(keynames, composite)):
if k in negate:
composite[i] = -v
return composite
return getit
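# Example: sort holders descending by quantity, then ascending by address:
#   holders.sort(key=sortkeypicker(['-address_quantity', 'address']))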
def last_block (db):
cursor = db.cursor()
blocks = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = (SELECT MAX(block_index) from blocks)'''))
if blocks:
assert len(blocks) == 1
last_block = blocks[0]
else:
raise exceptions.DatabaseError('No blocks found.')
cursor.close()
return last_block
def last_message (db):
cursor = db.cursor()
messages = list(cursor.execute('''SELECT * FROM messages WHERE message_index = (SELECT MAX(message_index) from messages)'''))
if messages:
assert len(messages) == 1
last_message = messages[0]
else:
raise exceptions.DatabaseError('No messages found.')
cursor.close()
return last_message
def asset_id (asset):
# Special cases.
if asset == config.BTC: return 0
elif asset == config.XCP: return 1
if asset[0] == 'A': raise exceptions.AssetNameError('starts with ‘A’')
# Checksum
"""
if not checksum.verify(asset):
raise exceptions.AssetNameError('invalid checksum')
else:
asset = asset[:-1] # Strip checksum character.
"""
# Convert the Base 26 string to an integer.
n = 0
for c in asset:
n *= 26
if c not in b26_digits:
raise exceptions.AssetNameError('invalid character:', c)
digit = b26_digits.index(c)
n += digit
if n < 26**3:
raise exceptions.AssetNameError('too short')
return n
def asset_name (asset_id):
if asset_id == 0: return config.BTC
elif asset_id == 1: return config.XCP
if asset_id < 26**3:
raise exceptions.AssetIDError('too low')
# Divide that integer into Base 26 string.
res = []
n = asset_id
while n > 0:
n, r = divmod (n, 26)
res.append(b26_digits[r])
asset_name = ''.join(res[::-1])
"""
return asset_name + checksum.compute(asset_name)
"""
return asset_name
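# Round-trip example: 'BBBB' encodes the base-26 digits (1, 1, 1, 1), so
#   asset_id('BBBB') == 18279 and asset_name(18279) == 'BBBB'.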
def debit (db, block_index, address, asset, quantity, action=None, event=None):
debit_cursor = db.cursor()
assert asset != config.BTC # Never BTC.
assert type(quantity) == int
assert quantity >= 0
if asset == config.BTC:
raise exceptions.BalanceError('Cannot debit bitcoins from a {} address!'.format(config.XCP_NAME))
debit_cursor.execute('''SELECT * FROM balances \
WHERE (address = ? AND asset = ?)''', (address, asset))
balances = debit_cursor.fetchall()
if not len(balances) == 1: old_balance = 0
else: old_balance = balances[0]['quantity']
if old_balance < quantity:
raise exceptions.BalanceError('Insufficient funds.')
balance = round(old_balance - quantity)
balance = min(balance, config.MAX_INT)
assert balance >= 0
bindings = {
'quantity': balance,
'address': address,
'asset': asset
}
sql='update balances set quantity = :quantity where (address = :address and asset = :asset)'
debit_cursor.execute(sql, bindings)
# Record debit.
bindings = {
'block_index': block_index,
'address': address,
'asset': asset,
'quantity': quantity,
'action': action,
'event': event
}
sql='insert into debits values(:block_index, :address, :asset, :quantity, :action, :event)'
debit_cursor.execute(sql, bindings)
debit_cursor.close()
BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity))
def credit (db, block_index, address, asset, quantity, action=None, event=None):
credit_cursor = db.cursor()
assert asset != config.BTC # Never BTC.
assert type(quantity) == int
assert quantity >= 0
credit_cursor.execute('''SELECT * FROM balances \
WHERE (address = ? AND asset = ?)''', (address, asset))
balances = credit_cursor.fetchall()
if len(balances) == 0:
assert balances == []
#update balances table with new balance
bindings = {
'address': address,
'asset': asset,
'quantity': quantity,
}
sql='insert into balances values(:address, :asset, :quantity)'
credit_cursor.execute(sql, bindings)
elif len(balances) > 1:
assert False
else:
old_balance = balances[0]['quantity']
assert type(old_balance) == int
balance = round(old_balance + quantity)
balance = min(balance, config.MAX_INT)
bindings = {
'quantity': balance,
'address': address,
'asset': asset
}
sql='update balances set quantity = :quantity where (address = :address and asset = :asset)'
credit_cursor.execute(sql, bindings)
# Record credit.
bindings = {
'block_index': block_index,
'address': address,
'asset': asset,
'quantity': quantity,
'action': action,
'event': event
}
sql='insert into credits values(:block_index, :address, :asset, :quantity, :action, :event)'
credit_cursor.execute(sql, bindings)
credit_cursor.close()
BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity))
def devise (db, quantity, asset, dest, divisible=None):
# For output only.
def norm(num, places):
# Round only if necessary.
num = round(num, places)
fmt = '{:.' + str(places) + 'f}'
num = fmt.format(num)
return num.rstrip('0')+'0' if num.rstrip('0')[-1] == '.' else num.rstrip('0')
# TODO: remove price, odds
if asset in ('leverage', 'value', 'fraction', 'price', 'odds'):
if dest == 'output':
return norm(quantity, 6)
elif dest == 'input':
# Hackish
if asset == 'leverage':
return round(quantity)
else:
return float(quantity) # TODO: Float?!
if asset in ('fraction',):
return norm(fraction(quantity, 1e8), 6)
if divisible == None:
if asset in (config.BTC, config.XCP):
divisible = True
else:
cursor = db.cursor()
cursor.execute('''SELECT * FROM issuances \
WHERE (status = ? AND asset = ?)''', ('valid', asset))
issuances = cursor.fetchall()
cursor.close()
if not issuances: raise exceptions.AssetError('No such asset: {}'.format(asset))
divisible = issuances[0]['divisible']
if divisible:
if dest == 'output':
quantity = D(quantity) / D(config.UNIT)
if quantity == quantity.to_integral():
return str(quantity) + '.0' # For divisible assets, display the decimal point.
else:
return norm(quantity, 8)
elif dest == 'input':
quantity = D(quantity) * config.UNIT
if quantity == quantity.to_integral():
return int(quantity)
else:
raise exceptions.QuantityError('Divisible assets have only eight decimal places of precision.')
else:
return quantity
else:
quantity = D(quantity)
if quantity != round(quantity):
raise exceptions.QuantityError('Fractional quantities of indivisible assets.')
return round(quantity)
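# Example (illustrative), assuming config.UNIT == 100000000:
#   devise(db, 150000000, config.XCP, 'output')  ->  '1.5'
#   devise(db, '1.5', config.XCP, 'input')       ->  150000000
# (For XCP and BTC the database is not consulted, so any db handle works.)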
def holders(db, asset):
holders = []
cursor = db.cursor()
# Balances
cursor.execute('''SELECT * FROM balances \
WHERE asset = ?''', (asset,))
for balance in list(cursor):
holders.append({'address': balance['address'], 'address_quantity': balance['quantity'], 'escrow': None})
# Funds escrowed in orders. (Protocol change.)
cursor.execute('''SELECT * FROM orders \
WHERE give_asset = ? AND status = ?''', (asset, 'open'))
for order in list(cursor):
holders.append({'address': order['source'], 'address_quantity': order['give_remaining'], 'escrow': order['tx_hash']})
# Funds escrowed in pending order matches. (Protocol change.)
cursor.execute('''SELECT * FROM order_matches \
WHERE (forward_asset = ? AND status = ?)''', (asset, 'pending'))
for order_match in list(cursor):
holders.append({'address': order_match['tx0_address'], 'address_quantity': order_match['forward_quantity'], 'escrow': order_match['id']})
cursor.execute('''SELECT * FROM order_matches \
WHERE (backward_asset = ? AND status = ?)''', (asset, 'pending'))
for order_match in list(cursor):
holders.append({'address': order_match['tx1_address'], 'address_quantity': order_match['backward_quantity'], 'escrow': order_match['id']})
# Bets and RPS (and bet/rps matches) only escrow XCP.
if asset == config.XCP:
cursor.execute('''SELECT * FROM bets \
WHERE status = ?''', ('open',))
for bet in list(cursor):
holders.append({'address': bet['source'], 'address_quantity': bet['wager_remaining'], 'escrow': bet['tx_hash']})
cursor.execute('''SELECT * FROM bet_matches \
WHERE status = ?''', ('pending',))
for bet_match in list(cursor):
holders.append({'address': bet_match['tx0_address'], 'address_quantity': bet_match['forward_quantity'], 'escrow': bet_match['id']})
holders.append({'address': bet_match['tx1_address'], 'address_quantity': bet_match['backward_quantity'], 'escrow': bet_match['id']})
cursor.execute('''SELECT * FROM rps \
WHERE status = ?''', ('open',))
for rps in list(cursor):
holders.append({'address': rps['source'], 'address_quantity': rps['wager'], 'escrow': rps['tx_hash']})
cursor.execute('''SELECT * FROM rps_matches \
WHERE status IN (?, ?, ?)''', ('pending', 'pending and resolved', 'resolved and pending'))
for rps_match in list(cursor):
holders.append({'address': rps_match['tx0_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']})
holders.append({'address': rps_match['tx1_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']})
cursor.close()
return holders
def xcp_supply (db):
cursor = db.cursor()
# Add burns.
cursor.execute('''SELECT * FROM burns \
WHERE status = ?''', ('valid',))
burn_total = sum([burn['earned'] for burn in cursor.fetchall()])
# Subtract issuance fees.
cursor.execute('''SELECT * FROM issuances\
WHERE status = ?''', ('valid',))
issuance_fee_total = sum([issuance['fee_paid'] for issuance in cursor.fetchall()])
# Subtract dividend fees.
cursor.execute('''SELECT * FROM dividends\
WHERE status = ?''', ('valid',))
dividend_fee_total = sum([dividend['fee_paid'] for dividend in cursor.fetchall()])
cursor.close()
return burn_total - issuance_fee_total - dividend_fee_total
def supplies (db):
cursor = db.cursor()
supplies = {config.XCP: xcp_supply(db)}
cursor.execute('''SELECT * from issuances \
WHERE status = ?''', ('valid',))
for issuance in list(cursor):
asset = issuance['asset']
quantity = issuance['quantity']
if asset in supplies.keys():
supplies[asset] += quantity
else:
supplies[asset] = quantity
cursor.close()
return supplies
class GetURLError(Exception):
    """Raised by get_url() on request or status errors."""

def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5):
try:
r = requests.get(url, timeout=fetch_timeout)
except Exception as e:
raise GetURLError("Got get_url request error: %s" % e)
else:
if r.status_code != 200 and abort_on_error:
raise GetURLError("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.text))
result = json.loads(r.text) if is_json else r.text
return result
def dhash_string(text):
return binascii.hexlify(hashlib.sha256(hashlib.sha256(bytes(text, 'utf-8')).digest()).digest()).decode()
### Bitcoin Addresses ###
def validate_address(address, block_index):
# Get array of pubkeyhashes to check.
if is_multisig(address):
if not (config.TESTNET and block_index >= config.FIRST_MULTISIG_BLOCK_TESTNET):
raise MultiSigAddressError('Multi‐signature addresses are currently disabled.')
pubkeyhashes = pubkeyhash_array(address)
else:
pubkeyhashes = [address]
# Check validity by attempting to decode.
    for pubkeyhash in pubkeyhashes:
        base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
def base58_encode(binary):
# Convert big‐endian bytes to integer
n = int('0x0' + binascii.hexlify(binary).decode('utf8'), 16)
# Divide that integer into base58
res = []
while n > 0:
n, r = divmod (n, 58)
res.append(b58_digits[r])
res = ''.join(res[::-1])
return res
def base58_check_encode(original, version):
b = binascii.unhexlify(bytes(original, 'utf-8'))
d = version + b
binary = d + dhash(d)[:4]
res = base58_encode(binary)
# Encode leading zeros as base58 zeros
czero = 0
pad = 0
for c in d:
if c == czero: pad += 1
else: break
address = b58_digits[0] * pad + res
if bytes(original, 'utf-8') != binascii.hexlify(base58_check_decode(address, version)):
raise exceptions.AddressError('encoded address does not decode properly')
return address
def base58_check_decode (s, version):
# Convert the string to an integer
n = 0
for c in s:
n *= 58
if c not in b58_digits:
raise exceptions.InvalidBase58Error('Not a valid base58 character:', c)
digit = b58_digits.index(c)
n += digit
# Convert the integer to bytes
h = '%x' % n
if len(h) % 2:
h = '0' + h
res = binascii.unhexlify(h.encode('utf8'))
# Add padding back.
pad = 0
for c in s[:-1]:
if c == b58_digits[0]: pad += 1
else: break
k = version * pad + res
addrbyte, data, chk0 = k[0:1], k[1:-4], k[-4:]
if addrbyte != version:
raise exceptions.VersionByteError('incorrect version byte')
chk1 = dhash(addrbyte + data)[:4]
if chk0 != chk1:
raise exceptions.Base58ChecksumError('Checksum mismatch: %r ≠ %r' % (chk0, chk1))
return data
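# Example (illustrative): the two Base58Check helpers are inverses for a given
# version byte:
#   data = base58_check_decode(address, config.ADDRESSVERSION)
#   base58_check_encode(binascii.hexlify(data).decode('utf-8'),
#                       config.ADDRESSVERSION) == address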
### Bitcoin Addresses ###
### Multi‐signature Addresses ###
# NOTE: a `pub` is either a pubkey or a pubkeyhash
class MultiSigAddressError (exceptions.AddressError):
pass
def is_multisig(address):
array = address.split('_')
return (len(array) > 1)
def canonical_address(address):
if is_multisig(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
if not all([base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]):
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
return address
def test_array(signatures_required, pubs, signatures_possible):
try:
signatures_required, signatures_possible = int(signatures_required), int(signatures_possible)
except ValueError:
raise MultiSigAddressError('Signature values not integers.')
if signatures_required < 1 or signatures_required > 3:
raise MultiSigAddressError('Invalid signatures_required.')
if signatures_possible < 2 or signatures_possible > 3:
raise MultiSigAddressError('Invalid signatures_possible.')
if signatures_possible != len(pubs):
raise exceptions.InputError('Incorrect number of pubkeys/pubkeyhashes in multi‐signature address.')
def construct_array(signatures_required, pubs, signatures_possible):
test_array(signatures_required, pubs, signatures_possible)
address = '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])
return address
def extract_array(address):
assert is_multisig(address)
array = address.split('_')
signatures_required, pubs, signatures_possible = array[0], sorted(array[1:-1]), array[-1]
test_array(signatures_required, pubs, signatures_possible)
return int(signatures_required), pubs, int(signatures_possible)
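# Example (illustrative): a 2-of-3 multi-signature address is serialised as
#   '2_<pubkeyhash1>_<pubkeyhash2>_<pubkeyhash3>_3'
# construct_array() sorts the pubkeyhashes before joining, so extract_array()
# always returns them in a canonical order.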
def pubkeyhash_array(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
if not all([base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]):
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return pubkeyhashes
### Multi‐signature Addresses ###
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Sara Gates, founder of the social networking service Wisegate, is creating an invitation-only private community of security and IT professionals. Gates believes that senior executives, such as CIOs and CISOs, need other people at their peer level to share war stories and get firsthand feedback on what works and what doesn't work.
Wisegate uses a mix of old and new methods to communicate and collaborate. There’s the online social media side, which includes forums, interactive discussions, member polls and the like. Members also can meet via roundtable conference calls, on private phone calls and in person at forums that are hosted in various cities.
The community is exclusive; you need an invitation to join. “We don’t want to be another Facebook,” says Gates. “We are looking for quality, not quantity, in our membership.” The most desirable applicants are those in high-level technology-related positions in their companies, namely IT and information security executive decision-makers.
The Wisegate network is vendor-free. “We want our members to talk openly and honestly about their experiences with various products and services,” says Gates. “They can’t do that if a vendor joins the discussion. Rather than having vendors influence our members, we want our members to influence the vendors.” The no vendors rule keeps members from being bombarded by sales pitches.
One aspect of Wisegate that members really like is that they can use “polling questions” to ask what other companies are doing about an issue. For example, one member asked the others about their policies on encrypted USB drives. The frank information she got in the responses helped her formulate the policies and processes she eventually implemented at her own firm. In another instance, members helped each other devise correlation rules for their SIEM products.
If you meet the membership profile and want to apply, go to Wisegate to submit your request to join the community.
# encoding: utf-8
"""
Copyright 2015 iACT, universite de Montreal, Olivier Belanger, Jean Piche
This file is part of Cecilia4Csound.
Cecilia4Csound is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Cecilia4Csound is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cecilia4Csound. If not, see <http://www.gnu.org/licenses/>.
"""
import wx, os, time, math, sys
from constants import *
import CeciliaLib
from Widgets import *
from types import ListType
from TogglePopup import SamplerPopup, SamplerToggle
from Plugins import *
import wx.lib.scrolledpanel as scrolled
from pyo.lib._wxwidgets import ControlSlider
def powerOf2(value):
for i in range(24):
p2 = int(math.pow(2,(i+1)))
if p2 > value:
break
return p2
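# Example: returns the next power of two strictly greater than `value`,
# capped at 2**24: powerOf2(1000) == 1024, powerOf2(1024) == 2048.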
def chooseColourFromName(name):
    def clip(x):
        val = int(x*255)
        if val < 0: val = 0
        elif val > 255: val = 255
        return val
def colour(name):
vals = COLOUR_CLASSES[name]
hue = vals[0]
bright = vals[1]
sat = vals[2]
segment = int(math.floor(hue / 60))
fraction = hue / 60 - segment
t1 = bright * (1 - sat)
t2 = bright * (1 - (sat * fraction))
t3 = bright * (1 - (sat * (1 - fraction)))
if segment == 0:
r, g, b = bright, t3, t1
elif segment == 1:
r, g, b = t2, bright, t1
elif segment == 2:
r, g, b = t1, bright, t3
elif segment == 3:
r, g, b = t1, t2, bright
elif segment == 4:
r, g, b = t3, t1, bright
elif segment == 5:
r, g, b = bright, t1, t2
return wx.Colour(clip(r),clip(g),clip(b))
lineColour = colour(name)
midColour = colour(name)
knobColour = colour(name)
sliderColour = colour(name)
return [lineColour, midColour, knobColour, sliderColour]
class CECControl(scrolled.ScrolledPanel):
def __init__(self, parent, id=-1, size=wx.DefaultSize, style=wx.SIMPLE_BORDER):
scrolled.ScrolledPanel.__init__(self, parent, id, size=size, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.outputFilename = ''
self.cfileinList = []
self.peak = ''
self.time = 0
self.charNumForLabel = 34
self.sizerMain = wx.FlexGridSizer(0,1)
self.sizerMain.Add(Separator(self, (230,1), colour=TITLE_BACK_COLOUR), 1, wx.EXPAND)
##### Control Panel #####
controlPanel = wx.Panel(self, -1)
controlPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
controlSizer = wx.FlexGridSizer(1,3)
self.transportButtons = Transport(controlPanel, outPlayFunction=self.onPlayStop,
outRecordFunction=self.onRec,
backgroundColour=TITLE_BACK_COLOUR,
borderColour=WIDGET_BORDER_COLOUR)
self.clocker = Clocker(controlPanel, backgroundColour=TITLE_BACK_COLOUR, borderColour=WIDGET_BORDER_COLOUR)
controlSizer.Add(self.transportButtons, 0, wx.ALIGN_LEFT | wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5)
fakePanel = wx.Panel(controlPanel, -1, size=(10, self.GetSize()[1]))
fakePanel.SetBackgroundColour(TITLE_BACK_COLOUR)
controlSizer.Add(fakePanel)
controlSizer.Add(self.clocker, 1, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
controlSizer.AddGrowableCol(1)
controlPanel.SetSizer(controlSizer)
self.sizerMain.Add(controlPanel, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 0)
self.sizerMain.Add(Separator(self, (230,1), colour=TITLE_BACK_COLOUR), 1, wx.EXPAND)
self.sizerMain.Add(Separator(self, (230,2), colour=BORDER_COLOUR), 1, wx.EXPAND)
self.sizerMain.AddSpacer((5,1))
self.tabs = TabsPanel(self, outFunction=self.onTogglePanels)
self.sizerMain.Add(self.tabs, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 0)
##### Input Panel #####
self.inOutSeparators = []
isEmpty = self.createInputPanel()
self.sizerMain.Add(self.inputPanel, 1, wx.EXPAND | wx.ALL, 0)
if not isEmpty:
sep = Separator(self, (230,2), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,2), colour=BORDER_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,1), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
###### Output Panel #####
self.createOutputPanel()
self.sizerMain.Add(self.outputPanel, 1, wx.EXPAND | wx.ALL, 0)
sep = Separator(self, (230,2), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,2), colour=BORDER_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,1), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
### Plugins panel ###
self.createPluginPanel()
self.sizerMain.Add(self.pluginsPanel, 1, wx.EXPAND | wx.ALL, 0)
self.sizerMain.Show(self.pluginsPanel, False)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
controlPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.inputPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.outputPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.peakLabel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.durationSlider.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.gainSlider.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.vuMeter.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.pluginsPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.SetSizer(self.sizerMain)
self.SetAutoLayout(1)
self.SetupScrolling(scroll_x = False)
wx.CallAfter(self.updateOutputFormat)
def listenSoundfile(self):
CeciliaLib.listenSoundfile(self.outputFilename)
def editSoundfile(self):
CeciliaLib.editSoundfile(self.outputFilename)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win != None:
win = win.GetTopLevelParent()
if win not in [CeciliaLib.getCeciliaEditor(), CeciliaLib.getInterface()]:
win.Raise()
event.Skip()
def onTogglePanels(self, state):
if state == 0:
self.sizerMain.Show(self.pluginsPanel, False, True)
self.sizerMain.Show(self.inputPanel, True, True)
self.sizerMain.Show(self.outputPanel, True, True)
[self.sizerMain.Show(sep, True, True) for sep in self.inOutSeparators]
else:
self.sizerMain.Show(self.pluginsPanel, True, True)
self.sizerMain.Show(self.inputPanel, False, True)
self.sizerMain.Show(self.outputPanel, False, True)
[self.sizerMain.Show(sep, False, True) for sep in self.inOutSeparators]
self.sizerMain.Layout()
def createGrapherLines(self, plugin):
knobs = [plugin.knob1, plugin.knob2, plugin.knob3]
grapher = CeciliaLib.getGrapher()
choice = grapher.toolbar.getPopupChoice()
choice.extend([knob.getLongLabel() for knob in knobs])
grapher.toolbar.setPopupChoice(choice)
tableNum = CeciliaLib.getSliderTableNum()
for knob in knobs:
tableNum += 1
knob.setTable(tableNum)
func = '0 %f 1 %f' % (knob.GetValue(), knob.GetValue())
func = [float(v.replace('"', '')) for v in func.split()]
func = [[func[i*2] * CeciliaLib.getTotalTime(), func[i*2+1]] for i in range(len(func) / 2)]
mini = knob.getRange()[0]
maxi = knob.getRange()[1]
colour = chooseColourFromName('red')
label = knob.getLongLabel()
log = knob.getLog()
name = knob.getName()
size = 8192
sl = knob
grapher.plotter.createLine(func, (mini, maxi), colour, label, log, name, tableNum, size, sl, '')
grapher.plotter.getData()[-1].setShow(0)
grapher.plotter.draw()
CeciliaLib.setSliderTableNum(tableNum+1)
def removeGrapherLines(self, plugin):
knobs = [plugin.knob1, plugin.knob2, plugin.knob3]
tmp = [knob.getLongLabel() for knob in knobs]
names = [knob.getName() for knob in knobs]
grapher = CeciliaLib.getGrapher()
choice = grapher.toolbar.getPopupChoice()
for label in tmp:
if label in choice:
choice.remove(label)
grapher.toolbar.setPopupChoice(choice)
grapher.plotter.removeLines(names)
def replacePlugin1(self, i, new):
self.pluginsParams[0][self.oldPlugins[0]] = self.plugin1.getParams()
oldPlugin1 = self.plugin1
if self.oldPlugins[0] != 0:
self.removeGrapherLines(oldPlugin1)
if new == 'None':
self.plugin1 = NonePlugin(self.pluginsPanel, self.replacePlugin1, 0)
CeciliaLib.setPlugins(None, 0)
elif new == 'Reverb':
self.plugin1 = ReverbPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Filter':
self.plugin1 = FilterPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Chorus':
self.plugin1 = ChorusPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Para EQ':
self.plugin1 = EQPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == '3 Bands EQ':
self.plugin1 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Compress':
self.plugin1 = CompressPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Gate':
self.plugin1 = GatePlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Disto':
self.plugin1 = DistoPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'AmpMod':
self.plugin1 = AmpModPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Phaser':
self.plugin1 = PhaserPlugin(self.pluginsPanel, self.replacePlugin1, 0)
if new != 'None':
CeciliaLib.setPlugins(self.plugin1, 0)
self.createGrapherLines(self.plugin1)
ind = PLUGINS_CHOICE.index(self.plugin1.getName())
self.oldPlugins[0] = ind
self.plugin1.setParams(self.pluginsParams[0][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin1, self.plugin1)
else:
item = self.pluginSizer.GetItem(oldPlugin1)
item.DeleteWindows()
self.pluginSizer.Insert(2, self.plugin1, 0)
self.pluginsPanel.Layout()
def replacePlugin2(self, i, new):
self.pluginsParams[1][self.oldPlugins[1]] = self.plugin2.getParams()
oldPlugin2 = self.plugin2
if self.oldPlugins[1] != 0:
self.removeGrapherLines(oldPlugin2)
if new == 'None':
self.plugin2 = NonePlugin(self.pluginsPanel, self.replacePlugin2, 1)
CeciliaLib.setPlugins(None, 1)
elif new == 'Reverb':
self.plugin2 = ReverbPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Filter':
self.plugin2 = FilterPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Chorus':
self.plugin2 = ChorusPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Para EQ':
self.plugin2 = EQPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == '3 Bands EQ':
self.plugin2 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Compress':
self.plugin2 = CompressPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Gate':
self.plugin2 = GatePlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Disto':
self.plugin2 = DistoPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'AmpMod':
self.plugin2 = AmpModPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Phaser':
self.plugin2 = PhaserPlugin(self.pluginsPanel, self.replacePlugin2, 1)
if new != 'None':
CeciliaLib.setPlugins(self.plugin2, 1)
self.createGrapherLines(self.plugin2)
ind = PLUGINS_CHOICE.index(self.plugin2.getName())
self.oldPlugins[1] = ind
self.plugin2.setParams(self.pluginsParams[1][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin2, self.plugin2)
else:
item = self.pluginSizer.GetItem(oldPlugin2)
item.DeleteWindows()
self.pluginSizer.Insert(8, self.plugin2, 0)
self.pluginsPanel.Layout()
def replacePlugin3(self, i, new):
self.pluginsParams[2][self.oldPlugins[2]] = self.plugin3.getParams()
oldPlugin3 = self.plugin3
if self.oldPlugins[2] != 0:
self.removeGrapherLines(oldPlugin3)
if new == 'None':
self.plugin3 = NonePlugin(self.pluginsPanel, self.replacePlugin3, 2)
CeciliaLib.setPlugins(None, 2)
elif new == 'Reverb':
self.plugin3 = ReverbPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Filter':
self.plugin3 = FilterPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Chorus':
self.plugin3 = ChorusPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Para EQ':
self.plugin3 = EQPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == '3 Bands EQ':
self.plugin3 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Compress':
self.plugin3 = CompressPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Gate':
self.plugin3 = GatePlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Disto':
self.plugin3 = DistoPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'AmpMod':
self.plugin3 = AmpModPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Phaser':
self.plugin3 = PhaserPlugin(self.pluginsPanel, self.replacePlugin3, 2)
if new != 'None':
CeciliaLib.setPlugins(self.plugin3, 2)
self.createGrapherLines(self.plugin3)
ind = PLUGINS_CHOICE.index(self.plugin3.getName())
self.oldPlugins[2] = ind
self.plugin3.setParams(self.pluginsParams[2][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin3, self.plugin3)
else:
item = self.pluginSizer.GetItem(oldPlugin3)
item.DeleteWindows()
self.pluginSizer.Insert(13, self.plugin3, 0)
self.pluginsPanel.Layout()
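    # Restore the three plugin slots from a saved dict of
    # {slot: [name, params, states]} entries.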
def setPlugins(self, pluginsDict):
for key in pluginsDict.keys():
if key == 0:
self.replacePlugin1(None, pluginsDict[key][0])
self.plugin1.setParams(pluginsDict[key][1])
self.plugin1.setStates(pluginsDict[key][2])
elif key == 1:
self.replacePlugin2(None, pluginsDict[key][0])
self.plugin2.setParams(pluginsDict[key][1])
self.plugin2.setStates(pluginsDict[key][2])
elif key == 2:
self.replacePlugin3(None, pluginsDict[key][0])
self.plugin3.setParams(pluginsDict[key][1])
self.plugin3.setStates(pluginsDict[key][2])
def updateTime(self, time):
self.setTime(time)
self.GetParent().grapher.plotter.drawCursor(time)
def updateAmps(self, amps):
self.vuMeter.setAmplitude(amps)
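    # Build the INPUT section: one Cfilein or CSampler row per 'cfilein' or
    # 'csampler' widget declared in the module's interface.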
def createInputPanel(self):
isEmpty = True
self.inputPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
inputSizer = wx.FlexGridSizer(5,1)
self.cfileinList = []
samplersList = []
widgets = CeciliaLib.getInterfaceWidgets()
for w in range(len(widgets)):
if widgets[w]['type'] == 'cfilein':
cFileIn = Cfilein(self.inputPanel, label=widgets[w].get('label', ''), name=widgets[w]['name'])
self.cfileinList.append(cFileIn)
elif widgets[w]['type'] == 'csampler':
cSampler = CSampler(self.inputPanel, label=widgets[w].get('label', ''), name=widgets[w]['name'])
self.cfileinList.append(cSampler)
samplersList.append(cSampler)
CeciliaLib.setUserSamplers(samplersList)
if self.cfileinList != []:
isEmpty = False
# Section title
inputTextPanel = wx.Panel(self.inputPanel, -1, style=wx.NO_BORDER)
inputTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
inputTextSizer = wx.FlexGridSizer(1,1)
inputText = wx.StaticText(inputTextPanel, -1, 'INPUT')
inputText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
inputText.SetBackgroundColour(TITLE_BACK_COLOUR)
inputText.SetForegroundColour(SECTION_TITLE_COLOUR)
inputTextSizer.Add(inputText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
inputTextSizer.AddGrowableCol(0)
inputTextPanel.SetSizer(inputTextSizer)
inputSizer.Add(inputTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT | wx.ALL, 0)
for i in range(len(self.cfileinList)):
inputSizer.Add(self.cfileinList[i], 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP, -1)
if i != len(self.cfileinList)-1:
inputSizer.Add(Separator(self.inputPanel, size=(230,1)), 1, wx.EXPAND)
inputSizer.AddGrowableCol(0)
self.inputPanel.SetSizer(inputSizer)
return isEmpty
def createOutputPanel(self):
self.outputPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
self.outputPanel.SetBackgroundColour(BACKGROUND_COLOUR)
outputSizer = wx.FlexGridSizer(0,1)
outputTextPanel = wx.Panel(self.outputPanel, -1, style=wx.NO_BORDER)
outputTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
outputTextSizer = wx.FlexGridSizer(1,1)
outputText = wx.StaticText(outputTextPanel, -1, 'OUTPUT')
outputText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
outputText.SetBackgroundColour(TITLE_BACK_COLOUR)
outputText.SetForegroundColour(SECTION_TITLE_COLOUR)
outputTextSizer.Add(outputText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
outputTextSizer.AddGrowableCol(0)
outputTextPanel.SetSizer(outputTextSizer)
outputSizer.Add(outputTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT | wx.ALL, 0)
outputSizer.AddSpacer((5,7))
outLine1 = wx.BoxSizer(wx.HORIZONTAL)
# File Name Label
self.filenameLabel = OutputLabel(self.outputPanel, label='', size=(130,20),
colour=CONTROLLABEL_BACK_COLOUR, outFunction=self.onSelectOutputFilename)
self.filenameLabel.SetToolTip(CECTooltip(TT_OUTPUT))
self.filenameLabel.setItalicLabel('File name')
outLine1.Add(self.filenameLabel, 0, wx.LEFT | wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL, 0)
outLine1.AddSpacer((25,1))
outToolbox = ToolBox(self.outputPanel,
tools=['play','edit','recycle'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onReuseOutputFile])
outLine1.Add(outToolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
outputSizer.Add(outLine1, 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM, 7)
# Duration Static Text
durationText = wx.StaticText(self.outputPanel, -1, 'Duration (sec) :')
durationText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
durationText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
outputSizer.Add(durationText, 0, wx.ALIGN_LEFT | wx.LEFT, 9)
# Duration Slider
outputSizer.AddSpacer((3,1))
self.durationSlider = ControlSlider(self.outputPanel,
0.001, 3600, CeciliaLib.getDefaultTotalTime(),
size=(220,15),
log=True,
backColour=BACKGROUND_COLOUR,
outFunction=self.setTotalTime)
self.durationSlider.setSliderHeight(10)
self.durationSlider.SetToolTip(CECTooltip(TT_DUR_SLIDER))
outputSizer.Add(self.durationSlider, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
# Gain Static Text
gainText = wx.StaticText(self.outputPanel, -1, 'Gain (dB) :')
gainText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
gainText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
outputSizer.Add(gainText, 0, wx.ALIGN_LEFT | wx.LEFT, 9)
# Gain Slider
outputSizer.AddSpacer((3,1))
self.gainSlider = ControlSlider(self.outputPanel,
-48, 18, 0,
size=(220,15),
log=False,
backColour=BACKGROUND_COLOUR,
outFunction=self.onChangeGain)
self.gainSlider.setSliderHeight(10)
self.gainSlider.SetToolTip(CECTooltip(TT_GAIN_SLIDER))
CeciliaLib.setGainSlider(self.gainSlider)
outputSizer.Add(self.gainSlider, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
# VU Meter
self.meterSizer = wx.BoxSizer()
self.vuMeter = VuMeter(self.outputPanel)
self.meterSizer.Add(self.vuMeter, 0, wx.EXPAND | wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 8)
# Format choice
self.lineSizer = wx.BoxSizer(wx.HORIZONTAL)
formatList, selectedNchnls = self.defineFormatsList()
formatSizer = wx.BoxSizer(wx.VERTICAL)
self.formatText = wx.StaticText(self.outputPanel, -1, 'Format :')
self.formatText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
self.formatText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
formatSizer.Add(self.formatText, 0, wx.ALIGN_LEFT | wx.LEFT, 2)
self.formatChoice = CustomMenu(self.outputPanel,
choice=formatList,
init=selectedNchnls,
outFunction=self.onFormatChange,
colour=CONTROLLABEL_BACK_COLOUR)
self.formatChoice.SetToolTip(CECTooltip(TT_CHANNELS))
formatSizer.Add(self.formatChoice, 0, wx.ALIGN_LEFT | wx.TOP, 1)
self.lineSizer.Add(formatSizer, 0, wx.ALIGN_LEFT | wx.RIGHT, 10)
# Peak
peakSizer = wx.BoxSizer(wx.VERTICAL)
self.peakText = wx.StaticText(self.outputPanel, -1, 'Peak :')
self.peakText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
self.peakText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
peakSizer.Add(self.peakText, 0, wx.ALIGN_LEFT | wx.LEFT, 2)
self.peakLabel = PeakLabel(self.outputPanel,
label=self.peak,
size=(100,20),
font=None,
colour=CONTROLLABEL_BACK_COLOUR,
gainSlider=self.gainSlider)
self.peakLabel.SetToolTip(CECTooltip(TT_PEAK))
peakSizer.Add(self.peakLabel, 0, wx.ALIGN_LEFT | wx.TOP, 1)
self.lineSizer.Add(peakSizer, 0, wx.ALIGN_LEFT | wx.LEFT, 10)
outputSizer.Add(self.meterSizer, 1, wx.EXPAND)
outputSizer.Add(self.lineSizer, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
outputSizer.AddGrowableRow(9)
self.outputPanel.SetSizer(outputSizer)
outputTextPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
outToolbox.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
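    # Build the POST-PROCESSING section with three plugin slots. The numbered
    # comments below track sizer positions, which replacePlugin1/2/3 rely on
    # when re-inserting a new plugin widget.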
def createPluginPanel(self):
self.oldPlugins = [0,0,0]
for i in range(3):
CeciliaLib.setPlugins(None, i)
self.pluginsParams = { 0: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]],
1: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]],
2: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]]}
self.pluginsPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
self.pluginsPanel.SetBackgroundColour(BACKGROUND_COLOUR)
self.pluginSizer = wx.BoxSizer(wx.VERTICAL)
pluginTextPanel = wx.Panel(self.pluginsPanel, -1, style=wx.NO_BORDER)
pluginTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
pluginTextSizer = wx.BoxSizer(wx.HORIZONTAL)
pluginText = wx.StaticText(pluginTextPanel, -1, 'POST-PROCESSING')
pluginText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
pluginText.SetBackgroundColour(TITLE_BACK_COLOUR)
pluginText.SetForegroundColour(SECTION_TITLE_COLOUR)
pluginTextSizer.Add(pluginText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
pluginTextPanel.SetSizer(pluginTextSizer)
self.pluginSizer.Add(pluginTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT, 0) # 1
self.pluginSizer.AddSpacer((5,3)) # 2
self.plugin1 = NonePlugin(self.pluginsPanel, self.replacePlugin1, 0)
self.pluginSizer.Add(self.plugin1, 0) # 3
self.pluginSizer.AddSpacer((5,7)) # 4
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 5
self.pluginSizer.AddSpacer((5,3)) # 6
self.plugin2 = NonePlugin(self.pluginsPanel, self.replacePlugin2, 1)
self.pluginSizer.Add(self.plugin2, 0) # 7
self.pluginSizer.AddSpacer((5,7)) # 8
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 9
self.pluginSizer.AddSpacer((5,3)) # 10
self.plugin3 = NonePlugin(self.pluginsPanel, self.replacePlugin3, 2)
self.pluginSizer.Add(self.plugin3, 0) # 11
self.pluginSizer.AddSpacer((5,7)) # 12
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 13
self.pluginSizer.AddSpacer((5,1)) # 14
self.pluginsPanel.SetSizer(self.pluginSizer)
def getCfileinList(self):
return self.cfileinList
def getCfileinFromName(self, name):
good = None
for cfilein in self.cfileinList:
if name == cfilein.getName():
good = cfilein
break
return good
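    # Map supported channel counts to format names and pick the entry matching
    # the current nchnls, falling back to 'Custom...'.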
def defineFormatsList(self):
formats=[]
self.formatDict=dict()
selectedNCHNLS = ''
for format in CeciliaLib.getSupportedFormats().items():
if format[0]!='Custom...':
self.formatDict[format[1]] = format[0]
if self.formatDict.has_key(CeciliaLib.getNchnls()):
selectedNCHNLS = self.formatDict[CeciliaLib.getNchnls()]
else:
selectedNCHNLS = 'Custom...'
formatsNCHNLS=self.formatDict.keys()
formatsNCHNLS.sort()
for i in formatsNCHNLS:
formats.append(self.formatDict[i])
formats.append('Custom...')
return formats, selectedNCHNLS
def getTime(self):
return self.time
def setTime(self,curTime=0):
self.time = curTime
self.clocker.setTime(curTime)
def resetMeter(self):
self.updatePeak(0)
self.resetVuMeter()
def onPlayStop(self, value):
if value:
CeciliaLib.setOutputFile('dac')
CeciliaLib.startCeciliaSound()
else:
CeciliaLib.stopCeciliaSound()
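    # Record button: auto-rename the previous output file if requested, prompt
    # for a filename when none is set, then start rendering to disk.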
def onRec(self, value):
if value:
if CeciliaLib.getAutoRenameFlag() and self.outputFilename != '':
filename = CeciliaLib.autoRename(self.outputFilename)
self.filenameLabel.setLabel(CeciliaLib.shortenName(os.path.split(filename)[1],self.charNumForLabel))
if self.outputFilename == '':
filename = self.onSelectOutputFilename()
if filename == None:
CeciliaLib.stopCeciliaSound()
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(False)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(False)
return
self.outputFilename = filename
CeciliaLib.setOutputFile(filename)
CeciliaLib.startCeciliaSound()
self.updatePeak(0)
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(True)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(True)
else:
CeciliaLib.stopCeciliaSound()
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(False)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(False)
def onSelectOutputFilename(self):
if CeciliaLib.getFileType() == 'wav':
wildcard = "Wave file|*.wave;*.WAV;*.WAVE;*.Wav;*.Wave*.wav|" \
"All files|*.*"
elif CeciliaLib.getFileType() == 'aiff':
wildcard = "AIFF file|*.aiff;*.aifc;*.AIF;*.AIFF;*.Aif;*.Aiff*.aif|" \
"All files|*.*"
file = CeciliaLib.saveFileDialog(self, wildcard, type='Save audio')
if file != None:
CeciliaLib.setSaveAudioFilePath(os.path.split(file)[0])
self.filenameLabel.setLabel(CeciliaLib.shortenName(os.path.split(file)[1],self.charNumForLabel))
self.outputFilename = file
return file
def updateOutputFormat(self):
self.vuMeter.updateNchnls()
x, y = self.meterSizer.GetPosition()
w, h = self.vuMeter.GetSize()
self.meterSizer.SetMinSize((w, h+8))
self.meterSizer.SetDimension(x, y, w, h+8)
w2, h2 = self.lineSizer.GetSize()
self.lineSizer.SetDimension(7, y+h+10, w2, h2)
self.Layout()
wx.CallAfter(self.Refresh)
def onFormatChange(self, idx, choice):
if choice == 'Custom...':
nchnls = CeciliaLib.dialogSelectCustomNchnls(self)
if nchnls==None:
nchnls = CeciliaLib.getNchnls()
if nchnls in self.formatDict.keys():
self.formatChoice.setStringSelection(self.formatDict[nchnls])
else:
self.formatChoice.setStringSelection('Custom...')
return
if not nchnls in self.formatDict.keys():
CeciliaLib.setCustomSupportedFormats(nchnls)
self.formatChoice.setStringSelection('Custom...')
else:
self.formatChoice.setStringSelection(self.formatDict[nchnls])
else:
nchnls = CeciliaLib.getSupportedFormats()[choice]
CeciliaLib.setNchnls(nchnls)
self.updateOutputFormat()
def onReuseOutputFile(self):
if os.path.isfile(self.outputFilename):
if self.cfileinList != []:
self.cfileinList[0].updateMenuFromPath(self.outputFilename)
    def setTotalTime(self, time):
        if self.cfileinList != [] and time == 0:
            # a duration of 0 means "use the length of the first sound file"
            dur = self.cfileinList[0].getDuration()
            CeciliaLib.setTotalTime(dur)
            self.durationSlider.SetValue(dur)
        else:
            CeciliaLib.setTotalTime(time)
def updateDurationSlider(self):
self.durationSlider.SetValue(CeciliaLib.getTotalTime())
def updateNchnls(self):
nchnls = CeciliaLib.getNchnls()
if nchnls==1:
format = 'Mono'
elif nchnls==2:
format = 'Stereo'
elif nchnls==4:
format = 'Quad'
elif nchnls==6:
format = '5.1'
elif nchnls==8:
format = 'Octo'
else:
format = 'Custom...'
self.formatChoice.setStringSelection(format)
self.updateOutputFormat()
def onChangeGain(self, gain):
CeciliaLib.getCsound().setChannel("masterVolume", gain)
def updatePeak(self, peak):
self.peak = peak - 90.
label = ''
if self.peak > 0:
label += '+'
label += '%2.2f dB' % self.peak
self.peakLabel.setLabel(label)
def resetVuMeter(self):
self.vuMeter.resetMax()
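# Sound file input row: popup file menu, play/edit/open toolbox and a floating
# frame holding the offset slider.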
class Cfilein(wx.Panel):
def __init__(self, parent, id=-1, label='', size=(-1,-1), style = wx.NO_BORDER, name=''):
wx.Panel.__init__(self, parent, id, size=size, style=style, name=name)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.label = label
self.name = name
self.duration = None
self.chnls = None
self.type = None
self.samprate = None
self.bitrate = None
self.filePath = ''
self.folderInfo = None
mainSizer = wx.FlexGridSizer(4,1)
mainSizer.AddSpacer((200,4))
# Static label for the popup menu
textLabel = wx.StaticText(self, -1, "%s :" % self.label)
textLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textLabel.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
mainSizer.Add(textLabel, 0, wx.LEFT, 9)
# Popup menu
line2 = wx.BoxSizer(wx.HORIZONTAL)
self.fileMenu = FolderPopup(self, path=None, init='', outFunction=self.onSelectSound,
emptyFunction=self.onLoadFile, backColour=CONTROLLABEL_BACK_COLOUR, tooltip=TT_SEL_SOUND)
line2.Add(self.fileMenu, 0, wx.ALIGN_CENTER | wx.TOP, 1)
line2.AddSpacer((25,5))
self.toolbox = ToolBox(self, tools=['play','edit','open'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onShowSampler])
line2.Add(self.toolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
mainSizer.Add(line2, 1, wx.LEFT, 8)
mainSizer.AddSpacer((5,2))
self.createSamplerFrame()
self.SetSizer(mainSizer)
CeciliaLib.getUserInputs()[self.name] = dict()
CeciliaLib.getUserInputs()[self.name]['type'] = 'cfilein'
CeciliaLib.getUserInputs()[self.name]['path'] = ''
def listenSoundfile(self):
CeciliaLib.listenSoundfile(self.filePath)
def editSoundfile(self):
CeciliaLib.editSoundfile(self.filePath)
def createSamplerFrame(self):
self.samplerFrame = CfileinFrame(self, self.name)
def onShowSampler(self):
if self.samplerFrame.IsShown():
self.samplerFrame.Hide()
else:
pos = wx.GetMousePosition()
framepos = (pos[0]+10, pos[1]+20)
self.samplerFrame.SetPosition(framepos)
self.samplerFrame.Show()
def getDuration(self):
return self.duration
def setTotalTime(self):
if self.duration:
CeciliaLib.getControlPanel().setTotalTime(self.duration)
CeciliaLib.getControlPanel().updateDurationSlider()
def onSelectSound(self, idx, file):
self.filePath = self.folderInfo[file]['path']
self.duration = self.folderInfo[file]['dur']
self.chnls = self.folderInfo[file]['chnls']
self.type = self.folderInfo[file]['type']
self.samprate = self.folderInfo[file]['samprate']
self.bitrate = self.folderInfo[file]['bitrate']
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetRange(0,self.duration)
self.samplerFrame.offsetSlider.SetValue(self.getOffset())
self.samplerFrame.update(path=self.filePath,
dur=self.duration,
type=self.type,
bitDepth=self.bitrate,
chanNum=self.chnls,
sampRate=self.samprate)
nsamps = self.samprate * self.duration
tableSize = powerOf2(nsamps)
fracPart = float(nsamps) / tableSize
CeciliaLib.getUserInputs()[self.name]['gensize%s' % self.name] = tableSize
CeciliaLib.getUserInputs()[self.name]['sr%s' % self.name] = self.samprate
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = self.duration
CeciliaLib.getUserInputs()[self.name]['nchnls%s' % self.name] = self.chnls
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = self.getOffset()
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
def onLoadFile(self, filePath=''):
wildcard = "All files|*.*|" \
"AIFF file|*.aif;*.aiff;*.aifc;*.AIF;*.AIFF;*.Aif;*.Aiff|" \
"Wave file|*.wav;*.wave;*.WAV;*.WAVE;*.Wav;*.Wave"
if filePath == '':
path = CeciliaLib.openAudioFileDialog(self, wildcard, defaultPath=CeciliaLib.getOpenAudioFilePath())
elif not os.path.isfile(filePath):
return
else:
path = filePath
if path:
CeciliaLib.setOpenAudioFilePath(os.path.split(path)[0])
self.updateMenuFromPath(path)
def reset(self):
self.fileMenu.reset()
self.filePath = ''
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
def updateMenuFromPath(self, path):
root = os.path.split(path)[0]
pathList = []
for p in os.listdir(root):
pathList.append(os.path.join(root,p))
self.folderInfo = CeciliaLib.getCsound().getSoundsFromList(pathList)
files = self.folderInfo.keys()
files.sort()
self.fileMenu.setChoice(files)
self.fileMenu.setLabel(CeciliaLib.ensureNFD(os.path.split(path)[1]))
def onOffsetSlider(self, value):
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = value
if self.duration != None:
newMaxDur = self.duration - value
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = newMaxDur
try:
self.samplerFrame.loopOutSlider.setRange(0, newMaxDur)
except:
pass
def setOffset(self, value):
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = value
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetValue(value)
def getOffset(self):
try:
off = CeciliaLib.getUserInputs()[self.name]['off%s' % self.name]
except:
off = self.samplerFrame.offsetSlider.GetValue()
return off
def getName(self):
return self.name
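# Sampler variant of Cfilein: adds loop controls, gain/transposition sliders and
# reserves five slider tables for its automation curves.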
class CSampler(Cfilein):
def __init__(self, parent, id=-1, label='', size=(-1,-1), style = wx.NO_BORDER, name=''):
wx.Panel.__init__(self, parent, id, size=size, style=style, name=name)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.frameOpen = False
self.samplerFrame = None
self.folderInfo = None
self.label = label
self.name = name
self.duration = 0.
self.chnls = 0
self.outputChnls = 1
self.gainMod = None
self.transMod = None
self.startPos = None
self.type = ''
self.samprate = 0
self.bitrate = 0
self.tableNums = [CeciliaLib.getSamplerSliderTableNum() + i for i in range(5)]
CeciliaLib.setSamplerSliderTableNum(self.tableNums[-1]+1)
self.filePath = ''
mainSizer = wx.FlexGridSizer(4,1)
mainSizer.AddSpacer((200,4))
# Static label for the popup menu
textLabel = wx.StaticText(self, -1, "%s :" % self.label)
textLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textLabel.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
mainSizer.Add(textLabel, 0, wx.LEFT, 9)
# Popup menu
line2 = wx.BoxSizer(wx.HORIZONTAL)
self.fileMenu = FolderPopup(self, path=None, init='', outFunction=self.onSelectSound,
emptyFunction=self.onLoadFile, backColour=CONTROLLABEL_BACK_COLOUR, tooltip=TT_SEL_SOUND)
line2.Add(self.fileMenu, 0, wx.ALIGN_CENTER | wx.TOP, 1)
line2.AddSpacer((25,5))
self.toolbox = ToolBox(self, tools=['play','edit','open'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onShowSampler],
openSampler=True)
self.toolbox.setOpen(False)
line2.Add(self.toolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
mainSizer.Add(line2, 1, wx.LEFT, 8)
mainSizer.AddSpacer((5,2))
self.createSamplerFrame()
self.SetSizer(mainSizer)
CeciliaLib.getUserInputs()[self.name] = dict()
CeciliaLib.getUserInputs()[self.name]['type'] = 'csampler'
CeciliaLib.getUserInputs()[self.name]['path'] = ''
def setOutputChnls(self, chnls):
self.outputChnls = chnls
def getOutputChnls(self):
return self.outputChnls
def setGainAndTrans(self, values):
if values == []:
self.gainMod = ''
self.transMod = ''
self.startPos = ''
elif len(values) == 1:
self.gainMod = values[0]
self.transMod = ''
self.startPos = ''
elif len(values) == 2:
self.gainMod = values[0]
self.transMod = values[1]
self.startPos = ''
else:
self.gainMod = values[0]
self.transMod = values[1]
self.startPos = values[2]
def createSamplerFrame(self):
self.samplerFrame = SamplerFrame(self, self.name, self.tableNums)
def onShowSampler(self):
if self.samplerFrame.IsShown():
self.samplerFrame.Hide()
else:
pos = wx.GetMousePosition()
framepos = (pos[0]+10, pos[1]+20)
self.samplerFrame.SetPosition(framepos)
self.samplerFrame.Show()
def onSelectSound(self, idx, file):
self.filePath = self.folderInfo[file]['path']
self.duration = self.folderInfo[file]['dur']
self.chnls = self.folderInfo[file]['chnls']
self.type = self.folderInfo[file]['type']
self.samprate = self.folderInfo[file]['samprate']
self.bitrate = self.folderInfo[file]['bitrate']
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetRange(0,self.duration)
self.samplerFrame.offsetSlider.SetValue(self.getOffset())
self.samplerFrame.update(path=self.filePath,
dur=self.duration,
type=self.type,
bitDepth=self.bitrate,
chanNum=self.chnls,
sampRate=self.samprate)
nsamps = self.samprate * self.duration
tableSize = powerOf2(nsamps)
fracPart = float(nsamps) / tableSize
CeciliaLib.getUserInputs()[self.name]['gensize%s' % self.name] = tableSize
CeciliaLib.getUserInputs()[self.name]['sr%s' % self.name] = self.samprate
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = self.duration
CeciliaLib.getUserInputs()[self.name]['nchnls%s' % self.name] = self.chnls
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = self.getOffset()
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
if CeciliaLib.getGrapher():
for line in CeciliaLib.getGrapher().plotter.getData():
if line.getName() == self.samplerFrame.loopInSlider.getCName() or \
line.getName() == self.samplerFrame.loopOutSlider.getCName():
line.changeYrange((0, self.duration))
def getSamplerInfo(self):
info = {}
info['loopMode'] = self.samplerFrame.getLoopMode()
info['startFromLoop'] = self.samplerFrame.getStartFromLoop()
info['loopX'] = self.samplerFrame.getLoopX()
info['loopIn'] = self.samplerFrame.getLoopIn()
info['loopOut'] = self.samplerFrame.getLoopOut()
info['gain'] = self.samplerFrame.getGain()
info['transp'] = self.samplerFrame.getTransp()
return info
def getSamplerFrame(self):
return self.samplerFrame
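    # Generate the Csound text for this sampler: ftgen statements that load each
    # channel of the sound file into a table, plus a user-defined opcode whose
    # input signature grows with the optional gain/transposition/start-position
    # modulators. Loop mode 'Off' plays the tables through lphasor/tablei; any
    # looping mode uses flooper2. A tonex lowpass tames aliasing when
    # transposing, and the final section mixes the sound channels down (or
    # spreads them up) to the output channel count.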
def getText(self):
offset = self.getOffset()
sndnchnls = self.chnls
nchnls = self.getOutputChnls()
udoText = ''
loadTableText = ''
if self.samplerFrame.loopMenu.popup.getIndex() == 0:
tableLen = powerOf2(self.duration*self.samprate)
else:
tableLen = 0
for i in range(sndnchnls):
chnNum = i + 1
genNum = CeciliaLib.getSamplerTableNum() + i
path = CeciliaLib.convertWindowsPath(CeciliaLib.getUserInputs()[self.name]['path'])
loadTableText += 'gitab%d ftgen %d, 0, %d, -1, "%s", %f, 0, %d\n' % (chnNum, genNum, tableLen, path, offset, chnNum)
if not self.gainMod and not self.transMod:
udoText += '\nopcode Sampler_%s, ' % self.name + 'a'*nchnls + ', S\n'
udoText += 'Spath xin\n\n'
elif self.gainMod and not self.transMod and not self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Sk\n'
udoText += 'Spath, kgainMod xin\n\n'
elif self.gainMod and self.transMod and not self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Skk\n'
udoText += 'Spath, kgainMod, ktransMod xin\n\n'
elif self.gainMod and self.transMod and self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Skki\n'
udoText += 'Spath, kgainMod, ktransMod, istartpos xin\n\n'
udoText += 'iHalfSr = sr / 2.2\n'
udoText += 'iSrAdjust = %f / sr\n\n' % self.samprate
istart = 0
if self.gainMod:
gain = 'ampdbfs(gk%sgain) / 0dbfs * kgainMod' % self.name
else:
gain = 'ampdbfs(gk%sgain) / 0dbfs' % self.name
if self.transMod:
udoText += 'kSamplerTrans = semitone(gk%strans) * ktransMod\n' % self.name
else:
udoText += 'kSamplerTrans = semitone(gk%strans)\n' % self.name
udoText += 'kend = gk%sstart + gk%send\n' % (self.name, self.name)
if not self.startPos:
udoText += 'if gi%sstartpoint == 0 then\n' % self.name
udoText += 'istart = 0\nelse\nistart = i(gk%sstart)\nendif\n' % self.name
else:
udoText += 'istart = istartpos\n'
for i in range(sndnchnls):
chnNum = i + 1
genNum = CeciliaLib.getSamplerTableNum() + i
if self.samplerFrame.loopMenu.popup.getIndex() == 0:
#udoText += 'aSampler%d loscil %s, kSamplerTrans * iSrAdjust, %d, 1\n' % (chnNum, gain, genNum)
udoText += 'iend = nsamp(%d)\n' % genNum
udoText += 'aphase%d lphasor kSamplerTrans * iSrAdjust, 0, iend\n' % chnNum
udoText += 'aSampler%d tablei aphase%d, %d\n' % (chnNum, chnNum, genNum)
udoText += 'aSampler%d = aSampler%d * %s\n' % (chnNum, chnNum, gain)
else:
udoText += 'aSampler%d flooper2 %s, kSamplerTrans * iSrAdjust, gk%sstart * iSrAdjust, kend * iSrAdjust, gk%sxfade * iSrAdjust, %d, istart * iSrAdjust, gi%sloopi-1\n' %(chnNum, gain, self.name, self.name, genNum, self.name)
CeciliaLib.setSamplerTableNum(genNum+1)
udoText += """
if kSamplerTrans < 1 then
kSamplerAlias = iHalfSr/kSamplerTrans
else
kSamplerAlias = iHalfSr
endif
"""
for i in range(sndnchnls):
chnNum = i + 1
udoText += 'aSampler%d tonex aSampler%d, kSamplerAlias, 4\n' % (chnNum, chnNum)
            udoText += 'aSampler%d dcblock aSampler%d\n' % (chnNum, chnNum)
samplerslist = [[] for i in range(nchnls)]
outputslist = ['aOut%d' % (i+1) for i in range(nchnls)]
if sndnchnls >= nchnls:
for i in range(sndnchnls):
mod = i % nchnls
samplerslist[mod].append('aSampler%d' % (i+1))
else:
for i in range(nchnls):
mod = i % sndnchnls
samplerslist[i].append('aSampler%d' % (mod+1))
for i in range(nchnls):
if len(samplerslist[i]) > 1:
div = len(samplerslist[i])
udoText += '\naOut%d = (' % (i+1) + '+'.join(samplerslist[i]) + ') / %d' % div
else:
udoText += '\naOut%d = (' % (i+1) + '+'.join(samplerslist[i]) + ')'
udoText += '\n\nxout ' + ', '.join(outputslist)
udoText += '\nendop\n\n'
return udoText, loadTableText
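# Floating frame opened from a Cfilein row: shows sound file info and the
# offset slider.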
class CfileinFrame(wx.Frame):
def __init__(self, parent, name, pos=wx.DefaultPosition):
style = ( wx.CLIP_CHILDREN | wx.FRAME_NO_TASKBAR | wx.FRAME_SHAPED | wx.NO_BORDER | wx.FRAME_FLOAT_ON_PARENT)
wx.Frame.__init__(self, parent, title='', pos=pos, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.name = name
self.SetClientSize((385, 143))
panel = wx.Panel(self, -1)
w, h = self.GetSize()
panel.SetBackgroundColour(BACKGROUND_COLOUR)
box = wx.BoxSizer(wx.VERTICAL)
# Header
self.title = FrameLabel(panel, '', size=(w-2, 50))
box.Add(self.title, 0, wx.ALL, 1)
box.AddSpacer((200,2))
#toolbox
toolsBox = wx.BoxSizer(wx.HORIZONTAL)
tools = ToolBox(panel, size=(80,20), tools=['play', 'edit', 'time' ],
outFunction=[self.parent.listenSoundfile,
self.parent.editSoundfile,
self.parent.setTotalTime])
toolsBox.Add(tools, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 320)
box.Add(toolsBox, 0, wx.TOP, 5)
# Static label for the offset slider
textOffset = wx.StaticText(panel, -1, '%s Offset :' % self.parent.label)
textOffset.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textOffset.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
box.Add(textOffset, 0, wx.LEFT, 20)
box.AddSpacer((200,2))
# Offset slider
self.offsetSlider = ControlSlider(self, minvalue=0, maxvalue=100, size=(222,15), init=0,
outFunction=self.parent.onOffsetSlider, backColour=BACKGROUND_COLOUR)
self.offsetSlider.setSliderHeight(10)
self.offsetSlider.Disable()
box.Add(self.offsetSlider, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 20)
box.AddSpacer((200,10))
self.close = CloseBox(panel, outFunction=self.close)
box.Add(self.close, 0, wx.LEFT, 330)
box.AddSpacer((200,7))
panel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.title.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
panel.SetSizerAndFit(box)
self.Show(False)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win.GetTopLevelParent() in [self, CeciliaLib.getCeciliaEditor()]:
pass
else:
win = CeciliaLib.getInterface()
win.Raise()
def close(self):
self.Hide()
self.GetParent().toolbox.setOpen(False)
def update(self, path, dur, type, bitDepth, chanNum, sampRate):
self.path = path
self.dur = dur
self.type = type
self.bitDepth = bitDepth
self.chanNum = chanNum
self.sampRate = sampRate
soundInfoText = self.createHeader()
self.title.setLabel(soundInfoText)
def createHeader(self):
if self.sampRate > 1000:
self.sampRate = self.sampRate / 1000.
header = '%s\n' % CeciliaLib.shortenName(self.path,48)
header += '%0.2f sec - %s - %dBit - %d ch. - %2.1fkHz' % (self.dur, self.type, self.bitDepth, self.chanNum, self.sampRate)
return header
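# Floating frame opened from a CSampler row: sound file info, loop mode and
# 'start from loop' controls, and the five automatable sampler sliders.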
class SamplerFrame(wx.Frame):
def __init__(self, parent, name, tableNums, pos=wx.DefaultPosition, size=(390, 295)):
style = ( wx.CLIP_CHILDREN | wx.FRAME_NO_TASKBAR | wx.FRAME_SHAPED | wx.NO_BORDER | wx.FRAME_FLOAT_ON_PARENT)
wx.Frame.__init__(self, parent, title='', pos=pos, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.SetClientSize(size)
self.size = size
self.name = name
self.tableNums = tableNums
self.loopList = ['Off', 'Forward', 'Backward', 'Back and Forth']
panel = wx.Panel(self, -1)
w, h = size
panel.SetBackgroundColour(BACKGROUND_COLOUR)
box = wx.BoxSizer(wx.VERTICAL)
# Header
self.title = FrameLabel(panel, '', size=(w-2, 50))
box.Add(self.title, 0, wx.ALL, 1)
# Static label for the offset slider
textOffset = wx.StaticText(panel, -1, '%s Offset :' % self.parent.label)
textOffset.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textOffset.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
box.Add(textOffset, 0, wx.LEFT, 20)
box.AddSpacer((200,2))
# Offset slider
offBox = wx.BoxSizer(wx.HORIZONTAL)
self.offsetSlider = ControlSlider(panel, minvalue=0, maxvalue=100, size=(345,15), init=0,
outFunction=self.parent.onOffsetSlider, backColour=BACKGROUND_COLOUR)
self.offsetSlider.SetToolTip(CECTooltip(TT_SAMPLER_OFFSET))
self.offsetSlider.setSliderHeight(10)
self.offsetSlider.Disable()
offBox.Add(self.offsetSlider, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 20)
box.Add(offBox)
box.AddSpacer((200,10))
#Loop type + toolbox
loopBox = wx.FlexGridSizer(1,5,5,5)
loopLabel = wx.StaticText(panel, -1, "Loop")
loopLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
loopLabel.SetForegroundColour("#FFFFFF")
loopBox.Add(loopLabel, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 10)
self.loopMenu = SamplerPopup(panel, self.loopList, self.loopList[1], self.name)
self.loopMenu.popup.SetToolTip(CECTooltip(TT_SAMPLER_LOOP))
loopBox.Add(self.loopMenu.popup, 0, wx.ALIGN_CENTER_VERTICAL, 20)
startLabel = wx.StaticText(panel, -1, "Start from loop")
startLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
startLabel.SetForegroundColour("#FFFFFF")
loopBox.Add(startLabel, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 20)
self.startToggle = SamplerToggle(panel, 0, self.name)
self.startToggle.toggle.SetToolTip(CECTooltip(TT_SAMPLER_START))
loopBox.Add(self.startToggle.toggle, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT , 30)
tools = ToolBox(panel, size=(80,20), tools=['play', 'edit', 'time' ],
outFunction=[self.parent.listenSoundfile,
self.parent.editSoundfile,
self.parent.setTotalTime])
loopBox.Add(tools, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT)
loopBox.AddGrowableCol(2)
box.Add(loopBox, 0, wx.ALL, 10)
# Sliders
slidersBox = wx.FlexGridSizer(5, 4, 5, 5)
self.loopInSlider = SamplerSlider(panel, self.name, "Loop In", "sec", 0, 1, 0, self.tableNums[0])
self.loopInSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_LOOP_IN))
slidersBox.AddMany([(self.loopInSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopInSlider.buttons, 0, wx.CENTER),
(self.loopInSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopInSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.loopOutSlider = SamplerSlider(panel, self.name, "Loop Time", "sec", 0, 1, 1, self.tableNums[1])
self.loopOutSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_LOOP_DUR))
slidersBox.AddMany([(self.loopOutSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopOutSlider.buttons, 0, wx.CENTER),
(self.loopOutSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopOutSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.loopXSlider = SamplerSlider(panel, self.name, "Loop X", "sec", 0, 1, .05, self.tableNums[2])
self.loopXSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_CROSSFADE))
slidersBox.AddMany([(self.loopXSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopXSlider.buttons, 0, wx.CENTER),
(self.loopXSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopXSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.gainSlider = SamplerSlider(panel, self.name, "Gain", "dB", -48, 18, 0, self.tableNums[3])
self.gainSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_GAIN))
slidersBox.AddMany([(self.gainSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.gainSlider.buttons, 0, wx.CENTER),
(self.gainSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.gainSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.transpSlider = SamplerSlider(panel, self.name, "Transpo", "cents", -48, 48, 0, self.tableNums[4], integer=False)
self.transpSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_TRANSPO))
slidersBox.AddMany([(self.transpSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.transpSlider.buttons, 0, wx.CENTER),
(self.transpSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.transpSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
box.Add(slidersBox, 0, wx.EXPAND | wx.ALL, 6)
self.close = CloseBox(panel, outFunction=self.close)
box.Add(self.close, 0, wx.LEFT, 330)
box.AddSpacer((200,7))
sliderlist = [self.loopInSlider, self.loopOutSlider, self.loopXSlider, self.gainSlider, self.transpSlider]
samplerSliders = CeciliaLib.getSamplerSliders()
CeciliaLib.setSamplerSliders(samplerSliders + sliderlist)
userSliders = CeciliaLib.getUserSliders()
CeciliaLib.setUserSliders(userSliders + sliderlist)
samplerTogPop = CeciliaLib.getSamplerTogglePopup()
CeciliaLib.setSamplerTogglePopup(samplerTogPop + [self.loopMenu, self.startToggle])
panel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.title.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
panel.SetSizerAndFit(box)
self.Show(False)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win.GetTopLevelParent() in [self, CeciliaLib.getCeciliaEditor()]:
pass
else:
win = CeciliaLib.getInterface()
win.Raise()
def close(self):
self.Hide()
self.GetParent().toolbox.setOpen(False)
def update(self, path, dur, type, bitDepth, chanNum, sampRate):
self.path = path
self.dur = dur
self.type = type
self.bitDepth = bitDepth
self.chanNum = chanNum
self.sampRate = sampRate
soundInfoText = self.createHeader()
self.title.setLabel(soundInfoText)
self.loopInSlider.setRange(0, self.dur)
self.loopInSlider.setValue(0)
self.loopOutSlider.setRange(0, self.dur)
self.loopOutSlider.setValue(self.dur)
self.loopXSlider.setRange(0, self.dur)
def createHeader(self):
if self.sampRate > 1000:
self.sampRate = self.sampRate / 1000.
header = '%s\n' % CeciliaLib.shortenName(self.path,48)
header += '%0.2f sec - %s - %dBit - %d ch. - %2.1fkHz' % (self.dur, self.type, self.bitDepth, self.chanNum, self.sampRate)
return header
def setLoopMode(self, index):
self.loopMenu.popup.setByIndex(index)
def getLoopMode(self):
return self.loopMenu.getValue()
def setStartFromLoop(self, value):
self.startToggle.setValue(value)
def getStartFromLoop(self):
return self.startToggle.getValue()
def setLoopX(self, values):
self.loopXSlider.setValue(values[0])
self.loopXSlider.setPlay(values[1])
def getLoopX(self):
return [self.loopXSlider.getValue(), self.loopXSlider.getPlay()]
def setLoopIn(self, values):
self.loopInSlider.setValue(values[0])
self.loopInSlider.setPlay(values[1])
def getLoopIn(self):
return [self.loopInSlider.getValue(), self.loopInSlider.getPlay()]
def setLoopOut(self, values):
self.loopOutSlider.setValue(values[0])
self.loopOutSlider.setPlay(values[1])
def getLoopOut(self):
return [self.loopOutSlider.getValue(), self.loopOutSlider.getPlay()]
def setGain(self, values):
self.gainSlider.setValue(values[0])
self.gainSlider.setPlay(values[1])
def getGain(self):
return [self.gainSlider.getValue(), self.gainSlider.getPlay()]
def setTransp(self, values):
self.transpSlider.setValue(values[0])
self.transpSlider.setPlay(values[1])
def getTransp(self):
return [self.transpSlider.getValue(), self.transpSlider.getPlay()]
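# Small owner-drawn play/record toggle pair shown at the left of each sampler
# slider (play = read recorded automation, rec = write automation).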
class SamplerPlayRecButtons(wx.Panel):
def __init__(self, parent, id=wx.ID_ANY, pos=(0,0), size=(40,20)):
wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY, pos=pos, size=size)
self.SetMaxSize(self.GetSize())
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_MOTION, self.OnMotion)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
self.playColour = SLIDER_PLAY_COLOUR_HOT
self.recColour = SLIDER_REC_COLOUR_HOT
self.playOver = False
self.recOver = False
self.playOverWait = True
self.recOverWait = True
self.play = False
self.rec = False
if CeciliaLib.getPlatform() == "win32":
self.dcref = wx.BufferedPaintDC
else:
self.dcref = wx.PaintDC
def setOverWait(self, which):
if which == 0:
self.playOverWait = False
elif which == 1:
self.recOverWait = False
def checkForOverReady(self, pos):
if not wx.Rect(2, 2, 17, 17).Contains(pos):
self.playOverWait = True
if not wx.Rect(21, 2, 38, 17).Contains(pos):
self.recOverWait = True
def setPlay(self, x):
if x == 0:
self.play = False
self.playColour = SLIDER_PLAY_COLOUR_HOT
elif x == 1:
self.play = True
self.playColour = SLIDER_PLAY_COLOUR_NO_BIND
wx.CallAfter(self.Refresh)
def setRec(self, x):
if x == 0:
self.rec = False
self.recColour = SLIDER_REC_COLOUR_HOT
else:
self.rec = True
self.recColour = SLIDER_REC_COLOUR_PRESSED
def MouseDown(self, evt):
pos = evt.GetPosition()
if wx.Rect(2, 2, 17, 17).Contains(pos):
if self.play:
self.play = False
self.playColour = SLIDER_PLAY_COLOUR_HOT
else:
self.play = True
self.playColour = SLIDER_PLAY_COLOUR_NO_BIND
self.setOverWait(0)
elif wx.Rect(21, 2, 38, 17).Contains(pos):
if self.rec:
self.rec = False
self.recColour = SLIDER_REC_COLOUR_HOT
else:
self.rec = True
self.recColour = SLIDER_REC_COLOUR_PRESSED
self.setOverWait(1)
self.playOver = False
self.recOver = False
wx.CallAfter(self.Refresh)
self.CaptureMouse()
evt.Skip()
def MouseUp(self, evt):
if self.HasCapture():
self.ReleaseMouse()
def OnMotion(self, evt):
pos = evt.GetPosition()
if wx.Rect(2, 2, 17, 17).Contains(pos) and self.playOverWait:
self.playOver = True
self.recOver = False
elif wx.Rect(21, 2, 38, 17).Contains(pos) and self.recOverWait:
self.playOver = False
self.recOver = True
self.checkForOverReady(pos)
wx.CallAfter(self.Refresh)
evt.Skip()
def OnLeave(self, evt):
self.playOver = False
self.recOver = False
self.playOverWait = True
self.recOverWait = True
wx.CallAfter(self.Refresh)
evt.Skip()
def OnPaint(self, evt):
w,h = self.GetSize()
dc = self.dcref(self)
gc = wx.GraphicsContext_Create(dc)
dc.SetBrush(wx.Brush(BACKGROUND_COLOUR, wx.SOLID))
dc.Clear()
dc.SetPen(wx.Pen(BACKGROUND_COLOUR, width=0, style=wx.SOLID))
dc.DrawRectangle(0, 0, w, h)
# Draw triangle
if self.playOver: playColour = SLIDER_PLAY_COLOUR_OVER
else: playColour = self.playColour
gc.SetPen(wx.Pen(playColour, width=1, style=wx.SOLID))
gc.SetBrush(wx.Brush(playColour, wx.SOLID))
tri = [(14,h/2), (9,6), (9,h-6), (14,h/2)]
gc.DrawLines(tri)
dc.SetPen(wx.Pen('#333333', width=1, style=wx.SOLID))
dc.DrawLine(w/2,4,w/2,h-4)
# Draw circle
if self.recOver: recColour = SLIDER_REC_COLOUR_OVER
else: recColour = self.recColour
gc.SetPen(wx.Pen(recColour, width=1, style=wx.SOLID))
gc.SetBrush(wx.Brush(recColour, wx.SOLID))
gc.DrawEllipse(w/4+w/2-4, h/2-4, 8, 8)
evt.Skip()
def getPlay(self):
return self.play
def getRec(self):
return self.rec
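# One sampler parameter: a label, play/rec buttons, a ControlSlider and the
# bookkeeping needed to record and replay its automation through a Csound table.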
class SamplerSlider:
def __init__(self, parent, name, label, unit, mini, maxi, init, tableNum, integer=False):
self.name = name
self.tableNum = tableNum
self.automationLength = None
self.automationData = []
self.path = None
self.label = name + ' ' + label
self.cname = {'Loop In': name+'start', 'Loop Time': name+'end',
'Loop X': name+'xfade', 'Gain': name+'gain', 'Transpo': name+'trans'}[label]
self.labelText = wx.StaticText(parent, -1, label)
self.labelText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
self.labelText.SetForegroundColour("#FFFFFF")
self.buttons = SamplerPlayRecButtons(parent)
self.slider = ControlSlider(parent, mini, maxi, init, size=(236, 15),
integer=integer, outFunction=self.sendValue,
backColour=BACKGROUND_COLOUR)
self.slider.setSliderHeight(10)
self.unit = wx.StaticText(parent, -1, unit)
self.unit.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
self.unit.SetForegroundColour("#FFFFFF")
def getLog(self):
return False
def getMinValue(self):
return self.slider.getMinValue()
def getMaxValue(self):
return self.slider.getMaxValue()
def setAutomationLength(self, x):
self.automationLength = x
def getAutomationLength(self):
return self.automationLength
def sendValue(self, value):
        if not self.getPlay() or self.getRec():
CeciliaLib.getCsound().setChannel("%s_value" % self.getCName(), value)
def setRange(self, minval, maxval):
self.slider.SetRange(minval, maxval)
self.setValue(self.getValue())
def setValue(self, val):
self.slider.SetValue(val)
def getValue(self):
return self.slider.GetValue()
def getLabel(self):
return self.label
def getCName(self):
return self.cname
def getName(self):
return self.name
def setPlay(self, x):
self.buttons.setPlay(x)
def getPlay(self):
return self.buttons.getPlay()
def setRec(self, x):
self.buttons.setRec(x)
def getRec(self):
return self.buttons.getRec()
def getTable(self):
return self.tableNum
def getPath(self):
return self.path
def setAutomationData(self, data):
        # normalize recorded automation into (position, value) pairs in the 0-1
        # range, padding the tail so the curve spans the whole performance time
temp = []
log = self.getLog()
minval = self.getMinValue()
maxval = self.getMaxValue()
automationlength = self.getAutomationLength()
frac = automationlength / CeciliaLib.getTotalTime()
virtuallength = len(data) / frac
data.extend([data[-1]] * int(((1 - frac) * virtuallength)))
totallength = float(len(data))
oldpos = 0
oldval = data[0]
if log:
maxOnMin = maxval / minval
torec = math.log10(oldval/minval) / math.log10(maxOnMin)
else:
maxMinusMin = maxval - minval
torec = (oldval - minval) / maxMinusMin
temp.append([0.0, torec])
for i, val in enumerate(data):
length = (i - oldpos) / totallength
pos = oldpos / totallength + length
if log:
torec = math.log10(val/minval) / math.log10(maxOnMin)
else:
torec = (val - minval) / maxMinusMin
temp.append([pos, torec])
oldval = val
oldpos = i
self.automationData = temp
def getAutomationData(self):
return [[x[0],x[1]] for x in self.automationData]
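    # Emit a small Csound instrument that either follows the GUI slider channel
    # (through 'port' smoothing) or reads back recorded automation, and dumps
    # the control signal to an .auto file when recording is armed.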
def getCeciliaText(self):
orchtext = ''
scotext = ''
if self.cname.find('start') != -1 or self.cname.find('end') != -1 or self.cname.find('xfade') != -1:
porta = 0.
else:
porta = .05
orchtext += 'instr Cecilia_%s\n' % self.cname
orchtext += 'ksliderValue init %f\n' % self.getValue()
if self.getPlay():
orchtext += 'ksliderDown init 0\n'
orchtext += 'kreadValue init %f\n' % self.getValue()
orchtext += 'ksliderValue chnget "%s_value"\n' % self.cname
if self.getPlay():
orchtext += 'ksliderDown chnget "%s_down"\n' % self.cname
orchtext += '\nkreadValue \t oscil1i \t 0, 1, p3, %d\n' % self.getTable()
        if self.getPlay():
            orchtext += '\nif ksliderDown == 1 then\n'
            orchtext += 'gk%s port ksliderValue, %f, %f\n' % (self.cname, porta, self.getValue())
            orchtext += 'else\n'
            orchtext += 'gk%s = kreadValue\n' % self.cname
            orchtext += 'endif\n\n'
        else:
            # ksliderDown is only generated in playback mode; without it the
            # slider channel drives the global variable directly
            orchtext += '\ngk%s port ksliderValue, %f, %f\n\n' % (self.cname, porta, self.getValue())
if self.getRec():
self.path = os.path.join(AUTOMATION_SAVE_PATH, '%s.auto' % self.cname)
orchtext += 'dumpk gk%s, "%s", 8, 4/kr\n\n' % (self.cname, self.path)
orchtext += 'endin\n\n'
scotext += 'i "Cecilia_%s" 0 %f\n' % (self.cname, CeciliaLib.getTotalTime())
return orchtext, scotext
|
import numpy as np
from PyQt4.QtCore import Qt, QTimer
from PyQt4 import QtGui
from PyQt4.QtGui import QApplication
from sklearn.neighbors import NearestNeighbors
from sklearn.metrics import r2_score
import Orange
from Orange.data import Table, Domain, StringVariable, ContinuousVariable, \
DiscreteVariable
from Orange.canvas import report
from Orange.data.sql.table import SqlTable, AUTO_DL_LIMIT
from Orange.preprocess.score import ReliefF, RReliefF
from Orange.widgets import gui
from Orange.widgets.settings import \
DomainContextHandler, Setting, ContextSetting, SettingProvider
from Orange.widgets.visualize.owscatterplotgraph import OWScatterPlotGraph
from Orange.widgets.visualize.utils import VizRankDialogAttrPair
from Orange.widgets.widget import OWWidget, Default, AttributeList
def font_resize(font, factor, minsize=None, maxsize=None):
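    """Return a copy of *font* scaled by *factor*, clamped between *minsize* and *maxsize*."""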
font = QtGui.QFont(font)
fontinfo = QtGui.QFontInfo(font)
size = fontinfo.pointSizeF() * factor
if minsize is not None:
size = max(size, minsize)
if maxsize is not None:
size = min(size, maxsize)
font.setPointSizeF(size)
return font
class ScatterPlotVizRank(VizRankDialogAttrPair):
captionTitle = "Score plots"
K = 10
def check_preconditions(self):
if not super().check_preconditions():
return False
if not self.master.data.domain.class_var:
self.information(33, "Data with a class variable is required.")
return False
self.master.information(33)
return True
def iterate_states(self, initial_state):
# If we put initialization of `self.attrs` to `initialize`,
# `score_heuristic` would be run on every call to `set_data`.
if initial_state is None: # on the first call, compute order
self.attrs = self.score_heuristic()
yield from super().iterate_states(initial_state)
def compute_score(self, state):
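        # k-NN score for the attribute pair (lower is better): for a discrete
        # class, the negated count of neighbours sharing each instance's label;
        # for a continuous class, the negated R^2 of the mean-of-neighbours
        # prediction, weighted by the share of valid instances.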
graph = self.master.graph
ind12 = [graph.data_domain.index(self.attrs[x]) for x in state]
valid = graph.get_valid_list(ind12)
X = graph.scaled_data[ind12, :][:, valid].T
Y = self.master.data.Y[valid]
if X.shape[0] < self.K:
return
n_neighbors = min(self.K, len(X) - 1)
knn = NearestNeighbors(n_neighbors=n_neighbors).fit(X)
ind = knn.kneighbors(return_distance=False)
if self.master.data.domain.has_discrete_class:
return -np.sum(Y[ind] == Y.reshape(-1, 1))
else:
return -r2_score(Y, np.mean(Y[ind], axis=1)) * \
(len(Y) / len(self.master.data))
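    # Pre-order attributes by ReliefF (discrete class) or RReliefF (continuous)
    # so that promising pairs are evaluated first.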
def score_heuristic(self):
X = self.master.graph.scaled_data.T
Y = self.master.data.Y
mdomain = self.master.data.domain
dom = Domain([ContinuousVariable(str(i)) for i in range(X.shape[1])],
mdomain.class_vars)
data = Table(dom, X, Y)
relief = ReliefF if isinstance(dom.class_var, DiscreteVariable) \
else RReliefF
weights = relief(n_iterations=100, k_nearest=self.K)(data)
attrs = sorted(zip(weights, mdomain.attributes), reverse=True)
return [a for _, a in attrs]
class OWScatterPlot(OWWidget):
name = 'Scatter Plot'
description = 'Scatterplot visualization with explorative analysis and intelligent data visualization enhancements.'
icon = "icons/ScatterPlot.svg"
inputs = [("Data", Table, "set_data", Default),
("Data Subset", Table, "set_subset_data"),
("Features", AttributeList, "set_shown_attributes")]
outputs = [("Selected Data", Table, Default),
("Other Data", Table),
("Features", Table)]
settingsHandler = DomainContextHandler()
auto_send_selection = Setting(True)
auto_sample = Setting(True)
toolbar_selection = Setting(0)
attr_x = ContextSetting("")
attr_y = ContextSetting("")
graph = SettingProvider(OWScatterPlotGraph)
jitter_sizes = [0, 0.1, 0.5, 1, 2, 3, 4, 5, 7, 10]
graph_name = "graph.plot_widget.plotItem"
def __init__(self):
super().__init__()
box = gui.vBox(self.mainArea, True, margin=0)
self.graph = OWScatterPlotGraph(self, box, "ScatterPlot")
box.layout().addWidget(self.graph.plot_widget)
plot = self.graph.plot_widget
axispen = QtGui.QPen(self.palette().color(QtGui.QPalette.Text))
axis = plot.getAxis("bottom")
axis.setPen(axispen)
axis = plot.getAxis("left")
axis.setPen(axispen)
self.data = None # Orange.data.Table
self.subset_data = None # Orange.data.Table
self.data_metas_X = None # self.data, where primitive metas are moved to X
self.sql_data = None # Orange.data.sql.table.SqlTable
self.attribute_selection_list = None # list of Orange.data.Variable
self.__timer = QTimer(self, interval=1200)
self.__timer.timeout.connect(self.add_data)
common_options = dict(
labelWidth=50, orientation=Qt.Horizontal, sendSelectedValue=True,
valueType=str)
box = gui.vBox(self.controlArea, "Axis Data")
self.cb_attr_x = gui.comboBox(box, self, "attr_x", label="Axis x:",
callback=self.update_attr,
**common_options)
self.cb_attr_y = gui.comboBox(box, self, "attr_y", label="Axis y:",
callback=self.update_attr,
**common_options)
self.vizrank = ScatterPlotVizRank(self)
vizrank_box = gui.hBox(box)
gui.separator(vizrank_box, width=common_options["labelWidth"])
self.vizrank_button = gui.button(
vizrank_box, self, "Score Plots", callback=self.vizrank.reshow,
tooltip="Find informative projections", enabled=False)
self.vizrank.pairSelected.connect(self.set_attr)
gui.separator(box)
gui.valueSlider(
box, self, value='graph.jitter_size', label='Jittering: ',
values=self.jitter_sizes, callback=self.reset_graph_data,
labelFormat=lambda x:
"None" if x == 0 else ("%.1f %%" if x < 1 else "%d %%") % x)
gui.checkBox(
gui.indentedBox(box), self, 'graph.jitter_continuous',
'Jitter continuous values', callback=self.reset_graph_data)
self.sampling = gui.auto_commit(
self.controlArea, self, "auto_sample", "Sample", box="Sampling",
callback=self.switch_sampling, commit=lambda: self.add_data(1))
self.sampling.setVisible(False)
box = gui.vBox(self.controlArea, "Points")
self.cb_attr_color = gui.comboBox(
box, self, "graph.attr_color", label="Color:",
emptyString="(Same color)", callback=self.update_colors,
**common_options)
self.cb_attr_label = gui.comboBox(
box, self, "graph.attr_label", label="Label:",
emptyString="(No labels)", callback=self.graph.update_labels,
**common_options)
self.cb_attr_shape = gui.comboBox(
box, self, "graph.attr_shape", label="Shape:",
emptyString="(Same shape)", callback=self.graph.update_shapes,
**common_options)
self.cb_attr_size = gui.comboBox(
box, self, "graph.attr_size", label="Size:",
emptyString="(Same size)", callback=self.graph.update_sizes,
**common_options)
g = self.graph.gui
box2 = g.point_properties_box(self.controlArea, box)
box = gui.vBox(self.controlArea, "Plot Properties")
g.add_widgets([g.ShowLegend, g.ShowGridLines], box)
gui.checkBox(
box, self, value='graph.tooltip_shows_all',
label='Show all data on mouse hover')
self.cb_class_density = gui.checkBox(
box, self, value='graph.class_density', label='Show class density',
callback=self.update_density)
gui.checkBox(
box, self, 'graph.label_only_selected',
'Label only selected points', callback=self.graph.update_labels)
self.zoom_select_toolbar = g.zoom_select_toolbar(
gui.vBox(self.controlArea, "Zoom/Select"), nomargin=True,
buttons=[g.StateButtonsBegin, g.SimpleSelect, g.Pan, g.Zoom,
g.StateButtonsEnd, g.ZoomReset]
)
buttons = self.zoom_select_toolbar.buttons
buttons[g.Zoom].clicked.connect(self.graph.zoom_button_clicked)
buttons[g.Pan].clicked.connect(self.graph.pan_button_clicked)
buttons[g.SimpleSelect].clicked.connect(self.graph.select_button_clicked)
buttons[g.ZoomReset].clicked.connect(self.graph.reset_button_clicked)
self.controlArea.layout().addStretch(100)
self.icons = gui.attributeIconDict
p = self.graph.plot_widget.palette()
self.graph.set_palette(p)
gui.auto_commit(self.controlArea, self, "auto_send_selection",
"Send Selection", "Send Automatically")
def zoom(s):
"""Zoom in/out by factor `s`."""
viewbox = plot.getViewBox()
# scaleBy scales the view's bounds (the axis range)
viewbox.scaleBy((1 / s, 1 / s))
def fit_to_view():
viewbox = plot.getViewBox()
viewbox.autoRange()
zoom_in = QtGui.QAction(
"Zoom in", self, triggered=lambda: zoom(1.25)
)
zoom_in.setShortcuts([QtGui.QKeySequence(QtGui.QKeySequence.ZoomIn),
QtGui.QKeySequence(self.tr("Ctrl+="))])
zoom_out = QtGui.QAction(
"Zoom out", self, shortcut=QtGui.QKeySequence.ZoomOut,
triggered=lambda: zoom(1 / 1.25)
)
zoom_fit = QtGui.QAction(
"Fit in view", self,
shortcut=QtGui.QKeySequence(Qt.ControlModifier | Qt.Key_0),
triggered=fit_to_view
)
self.addActions([zoom_in, zoom_out, zoom_fit])
# def settingsFromWidgetCallback(self, handler, context):
# context.selectionPolygons = []
# for curve in self.graph.selectionCurveList:
# xs = [curve.x(i) for i in range(curve.dataSize())]
# ys = [curve.y(i) for i in range(curve.dataSize())]
# context.selectionPolygons.append((xs, ys))
# def settingsToWidgetCallback(self, handler, context):
# selections = getattr(context, "selectionPolygons", [])
# for (xs, ys) in selections:
# c = SelectionCurve("")
# c.setData(xs,ys)
# c.attach(self.graph)
# self.graph.selectionCurveList.append(c)
def reset_graph_data(self, *_):
self.graph.rescale_data()
self.update_graph()
def set_data(self, data):
self.information(1)
self.__timer.stop()
self.sampling.setVisible(False)
self.sql_data = None
if isinstance(data, SqlTable):
if data.approx_len() < 4000:
data = Table(data)
else:
self.information(1, "Large SQL table (showing a sample)")
self.sql_data = data
data_sample = data.sample_time(0.8, no_cache=True)
data_sample.download_data(2000, partial=True)
data = Table(data_sample)
self.sampling.setVisible(True)
if self.auto_sample:
self.__timer.start()
if data is not None and (len(data) == 0 or len(data.domain) == 0):
data = None
if self.data and data and self.data.checksum() == data.checksum():
return
self.closeContext()
same_domain = (self.data and data and
data.domain.checksum() == self.data.domain.checksum())
self.data = data
self.data_metas_X = self.move_primitive_metas_to_X(data)
if not same_domain:
self.init_attr_values()
self.vizrank.initialize()
self.vizrank_button.setEnabled(
self.data is not None and self.data.domain.class_var is not None
and len(self.data.domain.attributes) > 1 and len(self.data) > 1)
self.openContext(self.data)
def add_data(self, time=0.4):
if self.data and len(self.data) > 2000:
return self.__timer.stop()
data_sample = self.sql_data.sample_time(time, no_cache=True)
if data_sample:
data_sample.download_data(2000, partial=True)
data = Table(data_sample)
self.data = Table.concatenate((self.data, data), axis=0)
self.data_metas_X = self.move_primitive_metas_to_X(self.data)
self.handleNewSignals()
def switch_sampling(self):
self.__timer.stop()
if self.auto_sample and self.sql_data:
self.add_data()
self.__timer.start()
def move_primitive_metas_to_X(self, data):
if data is not None:
new_attrs = [a for a in data.domain.attributes + data.domain.metas
if a.is_primitive()]
new_metas = [m for m in data.domain.metas if not m.is_primitive()]
data = Table.from_table(Domain(new_attrs, data.domain.class_vars,
new_metas), data)
return data
def set_subset_data(self, subset_data):
self.warning(0)
if isinstance(subset_data, SqlTable):
if subset_data.approx_len() < AUTO_DL_LIMIT:
subset_data = Table(subset_data)
else:
self.warning(0, "Data subset does not support large Sql tables")
subset_data = None
self.subset_data = self.move_primitive_metas_to_X(subset_data)
# called when all signals are received, so the graph is updated only once
def handleNewSignals(self):
self.graph.new_data(self.data_metas_X, self.subset_data)
if self.attribute_selection_list and \
all(attr in self.graph.data_domain
for attr in self.attribute_selection_list):
self.attr_x = self.attribute_selection_list[0].name
self.attr_y = self.attribute_selection_list[1].name
self.attribute_selection_list = None
self.update_graph()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
self.unconditional_commit()
def set_shown_attributes(self, attributes):
if attributes and len(attributes) >= 2:
self.attribute_selection_list = attributes[:2]
else:
self.attribute_selection_list = None
def get_shown_attributes(self):
return self.attr_x, self.attr_y
def init_attr_values(self):
self.cb_attr_x.clear()
self.cb_attr_y.clear()
self.attr_x = None
self.attr_y = None
self.cb_attr_color.clear()
self.cb_attr_color.addItem("(Same color)")
self.cb_attr_label.clear()
self.cb_attr_label.addItem("(No labels)")
self.cb_attr_shape.clear()
self.cb_attr_shape.addItem("(Same shape)")
self.cb_attr_size.clear()
self.cb_attr_size.addItem("(Same size)")
if not self.data:
return
for var in self.data.domain.metas:
if not var.is_primitive():
self.cb_attr_label.addItem(self.icons[var], var.name)
for attr in self.data.domain.variables:
self.cb_attr_x.addItem(self.icons[attr], attr.name)
self.cb_attr_y.addItem(self.icons[attr], attr.name)
self.cb_attr_color.addItem(self.icons[attr], attr.name)
if attr.is_discrete:
self.cb_attr_shape.addItem(self.icons[attr], attr.name)
else:
self.cb_attr_size.addItem(self.icons[attr], attr.name)
self.cb_attr_label.addItem(self.icons[attr], attr.name)
for var in self.data.domain.metas:
if var.is_primitive():
self.cb_attr_x.addItem(self.icons[var], var.name)
self.cb_attr_y.addItem(self.icons[var], var.name)
self.cb_attr_color.addItem(self.icons[var], var.name)
if var.is_discrete:
self.cb_attr_shape.addItem(self.icons[var], var.name)
else:
self.cb_attr_size.addItem(self.icons[var], var.name)
self.cb_attr_label.addItem(self.icons[var], var.name)
self.attr_x = self.cb_attr_x.itemText(0)
if self.cb_attr_y.count() > 1:
self.attr_y = self.cb_attr_y.itemText(1)
else:
self.attr_y = self.cb_attr_y.itemText(0)
if self.data.domain.class_var:
self.graph.attr_color = self.data.domain.class_var.name
else:
self.graph.attr_color = ""
self.graph.attr_shape = ""
self.graph.attr_size = ""
self.graph.attr_label = ""
def set_attr(self, attr_x, attr_y):
self.attr_x, self.attr_y = attr_x.name, attr_y.name
self.update_attr()
def update_attr(self):
self.update_graph()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
self.send_features()
def update_colors(self):
self.graph.update_colors()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
def update_density(self):
self.update_graph(reset_view=False)
def update_graph(self, reset_view=True, **_):
self.graph.zoomStack = []
if not self.graph.have_data:
return
self.graph.update_data(self.attr_x, self.attr_y, reset_view)
def selection_changed(self):
self.send_data()
def send_data(self):
selected = unselected = None
# TODO: Implement selection for sql data
if isinstance(self.data, SqlTable):
selected = unselected = self.data
elif self.data is not None:
selection = self.graph.get_selection()
selected = self.data[selection]
unselection = np.full(len(self.data), True, dtype=bool)
unselection[selection] = False
unselected = self.data[unselection]
self.send("Selected Data", selected)
self.send("Other Data", unselected)
def send_features(self):
features = None
if self.attr_x or self.attr_y:
dom = Domain([], metas=(StringVariable(name="feature"),))
features = Table(dom, [[self.attr_x], [self.attr_y]])
features.name = "Features"
self.send("Features", features)
def commit(self):
self.send_data()
self.send_features()
def closeEvent(self, ce):
self.vizrank.close()
super().closeEvent(ce)
def hideEvent(self, he):
self.vizrank.hide()
super().hideEvent(he)
def get_widget_name_extension(self):
if self.data is not None:
return "{} vs {}".format(self.combo_value(self.cb_attr_x),
self.combo_value(self.cb_attr_y))
def send_report(self):
disc_attr = False
if self.data:
domain = self.data.domain
disc_attr = domain[self.attr_x].is_discrete or \
domain[self.attr_y].is_discrete
caption = report.render_items_vert((
("Color", self.combo_value(self.cb_attr_color)),
("Label", self.combo_value(self.cb_attr_label)),
("Shape", self.combo_value(self.cb_attr_shape)),
("Size", self.combo_value(self.cb_attr_size)),
("Jittering", (self.graph.jitter_continuous or disc_attr) and
self.graph.jitter_size)))
self.report_plot()
if caption:
self.report_caption(caption)
def onDeleteWidget(self):
super().onDeleteWidget()
self.graph.plot_widget.getViewBox().deleteLater()
self.graph.plot_widget.clear()
def test_main(argv=None):
import sys
if argv is None:
argv = sys.argv
argv = list(argv)
a = QApplication(argv)
if len(argv) > 1:
filename = argv[1]
else:
filename = "iris"
ow = OWScatterPlot()
ow.show()
ow.raise_()
data = Orange.data.Table(filename)
ow.set_data(data)
ow.set_subset_data(data[:30])
ow.handleNewSignals()
rval = a.exec()
ow.set_data(None)
ow.set_subset_data(None)
ow.handleNewSignals()
ow.saveSettings()
ow.onDeleteWidget()
return rval
if __name__ == "__main__":
test_main()
Designed as a compact, discreet pocket knife, the Salute Mini is still capable of delivering big-time performance. The handle is machined G-10 with scalloped, full-length steel liners. In addition to the detailed handle, the Salute Mini is completed with a strong lockback mechanism, smooth opening and operation, and a proven Bowie-style blade. The Salute Mini is a great tactical folder for everyday use.
import json
import re
from bs4 import BeautifulSoup
from oauth_tokens.models import AccessToken
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0',
    'Accept-Language': 'en'
}
IDS_RE = re.compile(r'data-tweet-id="(\d+)"')
def get_replies(status):
"Return all replies ids of tweet"
replies_ids = set()
url = 'https://twitter.com/i/%s/conversation/%s' % (status.author.screen_name, status.pk)
ar = AccessToken.objects.get_token('twitter').auth_request
headers = dict(HEADERS)
headers['X-Requested-With'] = 'XMLHttpRequest'
    # Fetch the tweet's permalink page first to obtain the initial stream position.
    resp = ar.authorized_request(url=status.get_url(), headers=headers)
    soup = BeautifulSoup(resp.content, 'html.parser')
    params = {'max_position': soup.find('div', **{'class': 'stream-container'})['data-min-position']}
while True:
r = ar.authorized_request(url=url, params=params, headers=headers)
response = r.json()
if 'descendants' in response:
response = response['descendants']
ids = IDS_RE.findall(response['items_html'])
        replies_ids.update(ids)
if response['has_more_items'] and len(ids):
params = {'max_position': response['min_position']}
else:
break
return list(replies_ids)
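# Usage sketch (hypothetical): `status` is assumed to be an object exposing
# `pk`, `author.screen_name` and `get_url()`, matching the attributes used
# above; the model lookup is illustrative only.
#
#     status = Status.objects.get(pk=...)
#     reply_ids = get_replies(status)
#     print('%d replies found' % len(reply_ids))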
Desire Street Ministries continues in its calling to make every neighborhood a desirable place to live. From a single Bible study in the Desire neighborhood of New Orleans, it has grown into a stable organization that helps local ministry partners reach thousands of kids and families living in impoverished neighborhoods, and together they are striding forward on the path toward revitalization of urban communities across America.
Transforming impoverished urban neighborhoods into flourishing, healthy communities is the goal, and Desire Street believes that bolstering the leaders in those neighborhoods is the best route toward that goal. Urban ministry leaders walk amidst some of the toughest challenges in the United States today: concentrated poverty, failing schools, unsafe streets, deteriorated housing, high unemployment, inadequate health care. In this maze of need, those who commit themselves to ministry in today’s cities navigate toward glimmers of light and hope. They walk in God’s promises to uncover His hidden beauty in places marred by outward decay.
Our Mission – To love our neighbor by revitalizing impoverished neighborhoods, through spiritual and community development.
Our Vision – To partner with leaders in developing, thriving and sustainable urban ministries.
Our Purpose – Desire Street Ministries gathers and directs resources toward the goal of revitalizing impoverished neighborhoods. We seek, educate and engage individuals and organizations with a heart for the inner city, and coach and care for urban ministry leaders called there to live and work. Our partnership approach encourages, equips, empowers and connects leaders to build effectiveness and sustainability in their efforts toward spiritual and community development. Currently partnering with 7 ministries in 4 cities across 3 states, we continue the work where our roots are: rebuilding the Ninth Ward of New Orleans.
from collections import Counter, OrderedDict
from django.contrib import admin
from django.http import HttpResponse
from main.models import FoodOrder
@admin.register(FoodOrder)
class FoodOrderAdmin(admin.ModelAdmin):
list_filter = ('time', 'paid', 'collected')
list_display = ('pk', 'time', 'get_lan', 'order', 'get_profile', 'price', 'paid', 'collected')
list_display_links = ('pk', 'time', 'order')
search_fields = ('lanprofile__profile__user__first_name', 'lanprofile__profile__user__username', 'order')
def get_queryset(self, request):
return (super().get_queryset(request)
.select_related('lanprofile')
.select_related('lanprofile__profile')
.select_related('lanprofile__profile__user'))
def get_profile(self, food_order):
return food_order.lanprofile.profile
get_profile.short_description = 'profil'
get_profile.admin_order_field = 'lanprofile__profile'
def get_lan(self, food_order):
return food_order.lanprofile.lan
get_lan.short_description = 'lan'
get_lan.admin_order_field = 'lanprofile__lan'
actions = ['paid', 'not_paid', 'collected', 'not_collected', 'generate_summary']
def paid(self, request, queryset):
queryset.update(paid=True)
    paid.short_description = "Markér som betalt."
def not_paid(self, request, queryset):
queryset.update(paid=False)
not_paid.short_description = "Markér som ikke betalt."
def collected(self, request, queryset):
queryset.update(collected=True)
    collected.short_description = "Markér som afhentet."
def not_collected(self, request, queryset):
queryset.update(collected=False)
not_collected.short_description = "Markér som ikke afhentet."
def generate_summary(self, request, queryset):
out = Counter()
for order in queryset:
out[str(order)] += 1
out = OrderedDict(sorted(out.items(), key=lambda x: x[0]))
texts, last = [], ''
for key, value in out.items():
splitted = [x.strip() for x in key.split('-')]
if splitted[0] != last:
texts.append('')
last = splitted[0]
key = ' - '.join(splitted[1:])
texts.append('{} stk. {}'.format(value, key))
texts = texts[1:]
response = HttpResponse('\r\n'.join(texts), content_type='text/plain')
response['Content-Disposition'] = 'attachment; filename="Madbestillinger.txt"'
return response
generate_summary.short_description = "Vis oversigt."
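    # Example (hypothetical data): for a queryset whose orders stringify as
    # "Pizza - Margherita" (twice) and "Burger - Cheese", orders are counted,
    # sorted, grouped by the text before the first '-' (which is dropped), and
    # groups are separated by a blank line:
    #
    #     1 stk. Cheese
    #
    #     2 stk. Margherita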
Given a rectangular boundary partitioned into rectangles, the Minimum-Length Corridor (MLC-R) problem consists of finding a corridor of least total length. A corridor is a set of connected line segments, each of which must lie along the line segments that form the rectangular boundary and/or the boundaries of the rectangles, and it must include at least one point from the boundary of every rectangle and from the rectangular boundary. The MLC-R problem has been shown to be NP-hard. In this paper we present the first polynomial-time constant-ratio approximation algorithm for the MLC-R and MLC_n problems, where MLC_n generalizes MLC-R to rectangles that are rectilinear k-gons, for k <= n and a constant n. We also present a polynomial-time constant-ratio approximation algorithm for the Group Traveling Salesperson Problem (GTSP) for a rectangular boundary partitioned into rectilinear k-gons as in the MLC_n problem.
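As a small illustration of the objective (not of the approximation algorithm itself), the following sketch sums the length of a candidate corridor given as axis-parallel segments; connectivity and coverage of every rectangle boundary are assumed rather than checked, and all names are illustrative.

def corridor_length(segments):
    # Each segment ((x1, y1), (x2, y2)) is horizontal or vertical, so one of
    # the two terms below is always zero and the sum is the segment's length.
    return sum(abs(x2 - x1) + abs(y2 - y1)
               for (x1, y1), (x2, y2) in segments)

# An L-shaped corridor along two rectangle edges: length 4 + 3 = 7.
print(corridor_length([((0, 0), (4, 0)), ((4, 0), (4, 3))]))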
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""This module defines StoragePluginManager which loads and provides
access to StoragePlugins and their StorageResources"""
import sys
import traceback
from django.conf import settings
from chroma_core.lib.storage_plugin.api import relations
from chroma_core.lib.storage_plugin.base_resource import (
BaseStorageResource,
BaseScannableResource,
ResourceProgrammingError,
)
from chroma_core.lib.storage_plugin.base_plugin import BaseStoragePlugin
from chroma_core.lib.storage_plugin.log import storage_plugin_log
from chroma_core.lib.util import all_subclasses
from chroma_core.models.storage_plugin import StoragePluginRecord
from chroma_core.models.storage_plugin import StorageResourceRecord, StorageResourceClass
class PluginNotFound(Exception):
    def __str__(self):
        # Exception has no `message` attribute on Python 3; use args[0] instead.
        return "PluginNotFound: %s" % (self.args[0] if self.args else "")
class PluginProgrammingError(Exception):
pass
class VersionMismatchError(PluginProgrammingError):
"""Raised when a plugin is loaded that declares a different version.
The version requested by the plugin is saved to the Error.
"""
def __init__(self, version):
self.version = version
class VersionNotFoundError(PluginProgrammingError):
"""Raised when a plugin doesn't declare a version attribute."""
pass
class LoadedResourceClass(object):
"""Convenience store of introspected information about BaseStorageResource
subclasses from loaded modules."""
def __init__(self, resource_class, resource_class_id):
self.resource_class = resource_class
self.resource_class_id = resource_class_id
class LoadedPlugin(object):
"""Convenience store of introspected information about loaded
plugin modules."""
def __init__(self, plugin_manager, module, module_name, plugin_class):
# Populate _resource_classes from all BaseStorageResource in the same module
# (or leave it untouched if the plugin author overrode it)
if not hasattr(plugin_class, "_resource_classes"):
import inspect
plugin_class._resource_classes = []
for name, cls in inspect.getmembers(module):
if inspect.isclass(cls) and issubclass(cls, BaseStorageResource) and cls != BaseStorageResource:
plugin_class._resource_classes.append(cls)
# Map of name string to class
self.resource_classes = {}
self.plugin_class = plugin_class
self.plugin_record, created = StoragePluginRecord.objects.get_or_create(module_name=module_name)
if created:
self.plugin_record.internal = plugin_class.internal
self.plugin_record.save()
self.scannable_resource_classes = []
for cls in plugin_class._resource_classes:
if not hasattr(cls._meta, "identifier"):
raise ResourceProgrammingError(cls.__name__, "No Meta.identifier")
# Populate database records for the classes
vrc, created = StorageResourceClass.objects.get_or_create(
storage_plugin=self.plugin_record, class_name=cls.__name__
)
if created:
vrc.user_creatable = issubclass(cls, BaseScannableResource)
vrc.save()
plugin_manager.resource_class_id_to_class[vrc.id] = cls
plugin_manager.resource_class_class_to_id[cls] = vrc.id
self.resource_classes[cls.__name__] = LoadedResourceClass(cls, vrc.id)
if issubclass(cls, BaseScannableResource):
self.scannable_resource_classes.append(cls.__name__)
class StoragePluginManager(object):
def __init__(self):
self.loaded_plugins = {}
self.errored_plugins = []
self.resource_class_id_to_class = {}
self.resource_class_class_to_id = {}
from settings import INSTALLED_STORAGE_PLUGINS
for plugin in INSTALLED_STORAGE_PLUGINS:
try:
self.load_plugin(plugin)
except (ImportError, SyntaxError, ResourceProgrammingError, PluginProgrammingError) as e:
storage_plugin_log.error("Failed to load plugin '%s': %s" % (plugin, traceback.format_exc()))
self.errored_plugins.append((plugin, e))
for id, klass in self.resource_class_id_to_class.items():
klass._meta.relations = list(klass._meta.orig_relations)
def can_satisfy_relation(klass, attributes):
for attribute in attributes:
                if attribute not in klass._meta.storage_attributes:
return False
return True
for id, klass in self.resource_class_id_to_class.items():
for relation in klass._meta.relations:
# If ('linux', 'ScsiDevice') form was used, substitute the real class
if isinstance(relation, relations.Provide):
if isinstance(relation.provide_to, tuple):
prov_klass, prov_klass_id = self.get_plugin_resource_class(*relation.provide_to)
relation.provide_to = prov_klass
elif isinstance(relation, relations.Subscribe):
if isinstance(relation.subscribe_to, tuple):
sub_klass, sub_klass_id = self.get_plugin_resource_class(*relation.subscribe_to)
relation.subscribe_to = sub_klass
# Generate reverse-Subscribe relations
if isinstance(relation, relations.Provide):
                    # Synthesize Subscribe objects on the classes which might
                    # be on the receiving end of a Provide relation.  The original
                    # Provide object plays no further role.
subscription = relations.Subscribe(klass, relation.attributes, relation.ignorecase)
if can_satisfy_relation(relation.provide_to, relation.attributes):
relation.provide_to._meta.relations.append(subscription)
for sc in all_subclasses(relation.provide_to):
if can_satisfy_relation(sc, relation.attributes):
sc._meta.relations.append(subscription)
@property
def loaded_plugin_names(self):
return self.loaded_plugins.keys()
def get_errored_plugins(self):
return [e[0] for e in self.errored_plugins]
def get_resource_class_id(self, klass):
try:
return self.resource_class_class_to_id[klass]
except KeyError:
raise PluginNotFound("Looking for class %s" % klass.__name__)
def get_resource_class_by_id(self, id):
try:
return self.resource_class_id_to_class[id]
except KeyError:
raise PluginNotFound("Looking for class id %s " % id)
def get_scannable_resource_ids(self, plugin):
loaded_plugin = self.loaded_plugins[plugin]
records = (
StorageResourceRecord.objects.filter(resource_class__storage_plugin=loaded_plugin.plugin_record)
.filter(resource_class__class_name__in=loaded_plugin.scannable_resource_classes)
.filter(parents=None)
.values("id")
)
return [r["id"] for r in records]
def get_resource_classes(self, scannable_only=False, show_internal=False):
"""Return a list of StorageResourceClass records
:param scannable_only: Only report BaseScannableResource subclasses
:param show_internal: Include plugins with the internal=True attribute (excluded by default)
"""
class_records = []
for k, v in self.loaded_plugins.items():
if not show_internal and v.plugin_class.internal:
continue
filter = {}
filter["storage_plugin"] = v.plugin_record
if scannable_only:
filter["class_name__in"] = v.scannable_resource_classes
class_records.extend(list(StorageResourceClass.objects.filter(**filter)))
return class_records
def register_plugin(self, plugin_instance):
"""Register a particular instance of a BaseStoragePlugin"""
# FIXME: session ID not really used for anything, it's a vague
# nod to the future remote-run plugins.
session_id = plugin_instance.__class__.__name__
storage_plugin_log.info("Registered plugin instance %s with id %s" % (plugin_instance, session_id))
return session_id
def get_plugin_resource_class(self, plugin_module, resource_class_name):
"""Return a BaseStorageResource subclass"""
try:
loaded_plugin = self.loaded_plugins[plugin_module]
except KeyError:
raise PluginNotFound("Plugin %s not found (not one of %s)" % (plugin_module, self.loaded_plugins.keys()))
try:
loaded_resource = loaded_plugin.resource_classes[resource_class_name]
except KeyError:
raise PluginNotFound(
"Resource %s not found in %s (not one of %s)"
% (resource_class_name, plugin_module, loaded_plugin.resource_classes.keys())
)
return loaded_resource.resource_class, loaded_resource.resource_class_id
# FIXME: rename to get_all_resource_classes
def get_all_resources(self):
for plugin in self.loaded_plugins.values():
for loaded_res in plugin.resource_classes.values():
yield (loaded_res.resource_class_id, loaded_res.resource_class)
def get_plugin_class(self, module):
try:
return self.loaded_plugins[module].plugin_class
except KeyError:
raise PluginNotFound(module)
def validate_plugin(self, module):
errors = []
try:
self.load_plugin(module)
except ResourceProgrammingError as e:
errors.append(e.__str__())
except VersionNotFoundError as e:
            errors.append(
                "Add version=<int> to your plugin module. Consult the "
                "Command Center documentation for the supported API "
                "version."
            )
except VersionMismatchError as e:
plugin_version = e.version
command_center_version = settings.STORAGE_API_VERSION
errors.append(
"The plugin declares version %s. "
"However, this manager server version supports "
"version %s of the Plugin API." % (plugin_version, command_center_version)
)
except PluginProgrammingError as e:
errors.append(e.__str__())
except SyntaxError as e:
errors.append("SyntaxError: %s:%s:%s: %s" % (e.filename, e.lineno, e.offset, e.text))
except ImportError as e:
errors.append(e.__str__())
return errors
def _validate_api_version(self, module):
if not hasattr(module, "version"):
raise VersionNotFoundError()
if type(module.version) != int or settings.STORAGE_API_VERSION != module.version:
raise VersionMismatchError(module.version)
def _load_plugin(self, module, module_name, plugin_klass):
storage_plugin_log.debug("_load_plugin %s %s" % (module_name, plugin_klass))
self.loaded_plugins[module_name] = LoadedPlugin(self, module, module_name, plugin_klass)
    def load_plugin(self, module):
        """Load a BaseStoragePlugin class from a module given a
        python path like 'chroma_core.lib.lvm'.  Raises
        PluginProgrammingError if the module was already loaded.  Note that
        the BaseStoragePlugin within the module will not be instantiated
        when this returns; the caller is responsible for instantiating it.

        @return A subclass of BaseStoragePlugin"""
if module in self.loaded_plugins:
raise PluginProgrammingError("Duplicate storage plugin module %s" % module)
if module in sys.modules:
storage_plugin_log.warning("Reloading module %s (okay if testing)" % module)
mod = sys.modules[module]
else:
# Load the module
try:
mod = __import__(module)
except (ImportError, ResourceProgrammingError, SyntaxError) as e:
storage_plugin_log.error("Error importing %s: %s" % (module, e))
raise
components = module.split(".")
plugin_name = module
for comp in components[1:]:
mod = getattr(mod, comp)
plugin_name = comp
plugin_module = mod
self._validate_api_version(plugin_module)
# Find all BaseStoragePlugin subclasses in the module
from chroma_core.lib.storage_plugin.api.plugin import Plugin
plugin_klasses = []
import inspect
for name, cls in inspect.getmembers(plugin_module):
if (
inspect.isclass(cls)
and issubclass(cls, BaseStoragePlugin)
and cls != BaseStoragePlugin
and cls != Plugin
):
plugin_klasses.append(cls)
# Make sure we have exactly one BaseStoragePlugin subclass
if len(plugin_klasses) > 1:
raise PluginProgrammingError(
"Module %s defines more than one BaseStoragePlugin: %s!" % (module, plugin_klasses)
)
elif len(plugin_klasses) == 0:
raise PluginProgrammingError("Module %s does not define a BaseStoragePlugin!" % module)
else:
plugin_klass = plugin_klasses[0]
# Hook in a logger to the BaseStoragePlugin subclass
if not plugin_klass._log:
import logging
import settings
log = logging.getLogger("storage_plugin_log_%s" % module)
if module in settings.STORAGE_PLUGIN_DEBUG_PLUGINS or settings.STORAGE_PLUGIN_DEBUG:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.WARNING)
plugin_klass._log = log
plugin_klass._log_format = "[%%(asctime)s: %%(levelname)s/%s] %%(message)s" % module
else:
storage_plugin_log.warning("Double load of %s (okay if testing)" % plugin_name)
try:
self._load_plugin(plugin_module, plugin_name, plugin_klass)
except ResourceProgrammingError as e:
storage_plugin_log.error("Error loading %s: %s" % (plugin_name, e))
raise
else:
return plugin_klass
storage_plugin_manager = StoragePluginManager()
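# A minimal plugin-module sketch (an illustrative assumption, not a shipped
# plugin): `load_plugin` requires the module to declare an integer `version`
# equal to settings.STORAGE_API_VERSION and to define exactly one
# BaseStoragePlugin subclass, and every resource class needs a Meta.identifier
# (see the checks above).  The identifier value is elided here because its
# construction is plugin-API specific.
#
#     # example_storage_plugin.py  (hypothetical module)
#     version = 1  # must equal settings.STORAGE_API_VERSION
#
#     class ExampleResource(BaseStorageResource, BaseScannableResource):
#         class Meta:
#             identifier = ...  # plugin-API specific
#
#     class ExamplePlugin(BaseStoragePlugin):
#         pass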
How are you, sister???? I know that you are having the best time. You posted some amazing photos. It looks like the weather was amazing over there, right? Tell me more.
Have a great vacation. Tell me everything once you come back!!!!
Just finished Van Helsing last week, I was late on this. The finale was a letdown.
I am thrilled for you! That exam went so well for you! Now you have more exams ahead? OMG, you will do well in your next exams; don't think about them too much, take a break.
Sister, I am so thrilled that your exam went so well, tell me more about it. I am sure you feel a lot more relaxed now.
Happy Sunday to you, dear!!!
Feeling better now that you have a date in mind? Everything will be just fine; as soon as you take the exam, just relax!
Did you have a cool week? I hope you did. I know, until you have news about the exam, it will be a crazy time. Try to be positive, everything will be all right!!!!
You are probably having a sunny weekend. Here we're having a great weekend too.
Hope you're having a great weekend, hun! How's the weather over there? Last week we had some very warm days, almost like springtime.
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
Discovery modifies the original `nova.db.api.py' in order to choose
between a MySQL backend and a Redis backend. The operator should
specify the backend in the `nova.conf' file.
```
[discovery]
db_backend = (redis | mysql)
gen_logs = (True | False)
```
- `db_backend' selects the database backend.
- `gen_logs' enables logging of the execution time of db methods.
-----
Functions in this module are imported into the nova.db namespace. Call
these functions from nova.db namespace, not the nova.db.api namespace.
All functions in this module return objects that implement a
dictionary-like interface. Currently, many of these objects are
sqlalchemy objects that implement a dictionary interface. However, a
future goal is to have all of these objects be simple dictionaries.
"""
from oslo_config import cfg
from oslo_log import log as logging
from nova.cells import rpcapi as cells_rpcapi
from nova.i18n import _LE
import json
import time
import inspect
def get_time_ms():
    # Current wall-clock time in whole milliseconds.
    return int(round(time.time() * 1000))
db_opts = [
cfg.BoolOpt('enable_new_services',
default=True,
help='Services to be added to the available pool on create'),
cfg.StrOpt('instance_name_template',
default='instance-%08x',
help='Template string to be used to generate instance names'),
cfg.StrOpt('snapshot_name_template',
default='snapshot-%s',
help='Template string to be used to generate snapshot names')]
# Discovery parameters in the nova.conf file
discovery_opts = [
cfg.StrOpt('db_backend',
default='redis',
help='Database backend'),
cfg.BoolOpt('gen_logs',
default=False,
help='Generates logs')
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
CONF.register_opts(discovery_opts, group='discovery')
class ApiProxy:
"""Class that enables the comparison between MySQL and Discovery
implementations. It logs the execution time of db methods.
"""
def __init__(self):
self.backend = None
self.label = ''
def _init_backend(self):
# Proxy that targets the correct backend
if CONF.discovery.db_backend.upper() == 'REDIS':
from nova.db.discovery import api as discovery_api
self.backend = discovery_api
self.label = "[Discovery_impl]"
else:
from nova.db.sqlalchemy import api as mysql_api
self.backend = mysql_api
self.label = "[MySQL_impl]"
def __getattr__(self, attr):
if attr in ["backend", "label"]:
return self.__dict__[attr]
self._init_backend()
ret = object.__getattribute__(self.backend, attr)
if hasattr(ret, "__call__") and CONF.discovery.gen_logs:
return self.FunctionWrapper(ret, attr, self.label)
return ret
class FunctionWrapper:
"""Class used to measure the execution time of a method and log it
inside `opt.logs.db_api_<backend>.log'.
"""
        def __init__(self, func, call_name, label):
            # `func' is the wrapped backend method; the name avoids shadowing
            # the built-in `callable'.
            self.func = func
            self.call_name = call_name
            self.label = label
        def __call__(self, *args, **kwargs):
            time_before = get_time_ms()
            result_callable = self.func(*args, **kwargs)
            time_after = get_time_ms()
            duration = time_after - time_before
frm = inspect.stack()[1]
mod = inspect.getmodule(frm[0])
dct = {
'backend': self.label,
'class': mod.__name__,
'method': self.call_name,
'args': str(args),
'kwargs': str(kwargs),
'result': str(result_callable),
'timestamp': get_time_ms(),
'duration': duration
}
ppjson = json.dumps(dct)
print(ppjson)
            if self.label == "[MySQL_impl]":
                log_path = "/opt/logs/db_api_mysql.log"
            else:
                log_path = "/opt/logs/db_api_disco.log"
            with open(log_path, "a") as f:
                f.write(ppjson + "\n")
return result_callable
IMPL = ApiProxy()
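# Dispatch sketch (assumes a valid request `context' is in scope): attribute
# access on IMPL resolves the backend lazily via ApiProxy.__getattr__, so the
# call below is routed to either the Discovery (Redis) or the MySQL
# implementation depending on [discovery]/db_backend, and is wrapped by
# FunctionWrapper for timing whenever gen_logs is True.
#
#     svc = IMPL.service_get(context, 42)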
LOG = logging.getLogger(__name__)
# The maximum value a signed INT type may have
MAX_INT = 0x7FFFFFFF
###################
def constraint(**conditions):
"""Return a constraint object suitable for use with some updates."""
return IMPL.constraint(**conditions)
def equal_any(*values):
"""Return an equality condition object suitable for use in a constraint.
Equal_any conditions require that a model object's attribute equal any
one of the given values.
"""
return IMPL.equal_any(*values)
def not_equal(*values):
"""Return an inequality condition object suitable for use in a constraint.
Not_equal conditions require that a model object's attribute differs from
all of the given values.
"""
return IMPL.not_equal(*values)
def create_context_manager(connection):
"""Return a context manager for a cell database connection."""
return IMPL.create_context_manager(connection=connection)
###################
def select_db_reader_mode(f):
"""Decorator to select synchronous or asynchronous reader mode.
The kwarg argument 'use_slave' defines reader mode. Asynchronous reader
will be used if 'use_slave' is True and synchronous reader otherwise.
"""
return IMPL.select_db_reader_mode(f)
###################
def service_destroy(context, service_id):
"""Destroy the service or raise if it does not exist."""
return IMPL.service_destroy(context, service_id)
def service_get(context, service_id):
"""Get a service or raise if it does not exist."""
return IMPL.service_get(context, service_id)
def service_get_minimum_version(context, binary):
"""Get the minimum service version in the database."""
return IMPL.service_get_minimum_version(context, binary)
def service_get_by_host_and_topic(context, host, topic):
"""Get a service by hostname and topic it listens to."""
return IMPL.service_get_by_host_and_topic(context, host, topic)
def service_get_by_host_and_binary(context, host, binary):
"""Get a service by hostname and binary."""
return IMPL.service_get_by_host_and_binary(context, host, binary)
def service_get_all(context, disabled=None):
"""Get all services."""
return IMPL.service_get_all(context, disabled)
def service_get_all_by_topic(context, topic):
"""Get all services for a given topic."""
return IMPL.service_get_all_by_topic(context, topic)
def service_get_all_by_binary(context, binary, include_disabled=False):
"""Get services for a given binary.
Includes disabled services if 'include_disabled' parameter is True
"""
return IMPL.service_get_all_by_binary(context, binary,
include_disabled=include_disabled)
def service_get_all_by_host(context, host):
"""Get all services for a given host."""
return IMPL.service_get_all_by_host(context, host)
def service_get_by_compute_host(context, host):
"""Get the service entry for a given compute host.
Returns the service entry joined with the compute_node entry.
"""
return IMPL.service_get_by_compute_host(context, host)
def service_create(context, values):
"""Create a service from the values dictionary."""
return IMPL.service_create(context, values)
def service_update(context, service_id, values):
"""Set the given properties on a service and update it.
Raises NotFound if service does not exist.
"""
return IMPL.service_update(context, service_id, values)
###################
def compute_node_get(context, compute_id):
"""Get a compute node by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Dictionary-like object containing properties of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_get(context, compute_id)
# TODO(edleafe): remove once the compute node resource provider migration is
# complete, and this distinction is no longer necessary.
def compute_node_get_model(context, compute_id):
"""Get a compute node sqlalchemy model object by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Sqlalchemy model object containing properties of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_get_model(context, compute_id)
def compute_nodes_get_by_service_id(context, service_id):
"""Get a list of compute nodes by their associated service id.
:param context: The security context
:param service_id: ID of the associated service
:returns: List of dictionary-like objects, each containing properties of
the compute node, including its corresponding service and
statistics
Raises ServiceNotFound if service with the given ID doesn't exist.
"""
return IMPL.compute_nodes_get_by_service_id(context, service_id)
def compute_node_get_by_host_and_nodename(context, host, nodename):
"""Get a compute node by its associated host and nodename.
:param context: The security context (admin)
:param host: Name of the host
:param nodename: Name of the node
:returns: Dictionary-like object containing properties of the compute node,
including its statistics
Raises ComputeHostNotFound if host with the given name doesn't exist.
"""
return IMPL.compute_node_get_by_host_and_nodename(context, host, nodename)
def compute_node_get_all(context):
"""Get all computeNodes.
:param context: The security context
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all(context)
def compute_node_get_all_by_host(context, host):
"""Get compute nodes by host name
:param context: The security context (admin)
:param host: Name of the host
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all_by_host(context, host)
def compute_node_search_by_hypervisor(context, hypervisor_match):
"""Get compute nodes by hypervisor hostname.
:param context: The security context
:param hypervisor_match: The hypervisor hostname
:returns: List of dictionary-like objects each containing compute node
properties
"""
return IMPL.compute_node_search_by_hypervisor(context, hypervisor_match)
def compute_node_create(context, values):
"""Create a compute node from the values dictionary.
:param context: The security context
:param values: Dictionary containing compute node properties
:returns: Dictionary-like object containing the properties of the created
node, including its corresponding service and statistics
"""
return IMPL.compute_node_create(context, values)
def compute_node_update(context, compute_id, values):
"""Set the given properties on a compute node and update it.
:param context: The security context
:param compute_id: ID of the compute node
:param values: Dictionary containing compute node properties to be updated
:returns: Dictionary-like object containing the properties of the updated
compute node, including its corresponding service and statistics
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_update(context, compute_id, values)
def compute_node_delete(context, compute_id):
"""Delete a compute node from the database.
:param context: The security context
:param compute_id: ID of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_delete(context, compute_id)
def compute_node_statistics(context):
"""Get aggregate statistics over all compute nodes.
:param context: The security context
:returns: Dictionary containing compute node characteristics summed up
over all the compute nodes, e.g. 'vcpus', 'free_ram_mb' etc.
"""
return IMPL.compute_node_statistics(context)
###################
def certificate_create(context, values):
"""Create a certificate from the values dictionary."""
return IMPL.certificate_create(context, values)
def certificate_get_all_by_project(context, project_id):
"""Get all certificates for a project."""
return IMPL.certificate_get_all_by_project(context, project_id)
def certificate_get_all_by_user(context, user_id):
"""Get all certificates for a user."""
return IMPL.certificate_get_all_by_user(context, user_id)
def certificate_get_all_by_user_and_project(context, user_id, project_id):
"""Get all certificates for a user and project."""
return IMPL.certificate_get_all_by_user_and_project(context,
user_id,
project_id)
###################
def floating_ip_get(context, id):
return IMPL.floating_ip_get(context, id)
def floating_ip_get_pools(context):
"""Returns a list of floating IP pools."""
return IMPL.floating_ip_get_pools(context)
def floating_ip_allocate_address(context, project_id, pool,
auto_assigned=False):
"""Allocate free floating IP from specified pool and return the address.
Raises if one is not available.
"""
return IMPL.floating_ip_allocate_address(context, project_id, pool,
auto_assigned)
def floating_ip_bulk_create(context, ips, want_result=True):
"""Create a lot of floating IPs from the values dictionary.
:param want_result: If set to True, return floating IPs inserted
"""
return IMPL.floating_ip_bulk_create(context, ips, want_result=want_result)
def floating_ip_bulk_destroy(context, ips):
"""Destroy a lot of floating IPs from the values dictionary."""
return IMPL.floating_ip_bulk_destroy(context, ips)
def floating_ip_create(context, values):
"""Create a floating IP from the values dictionary."""
return IMPL.floating_ip_create(context, values)
def floating_ip_deallocate(context, address):
"""Deallocate a floating IP by address."""
return IMPL.floating_ip_deallocate(context, address)
def floating_ip_destroy(context, address):
"""Destroy the floating_ip or raise if it does not exist."""
return IMPL.floating_ip_destroy(context, address)
def floating_ip_disassociate(context, address):
"""Disassociate a floating IP from a fixed IP by address.
    :returns: the fixed IP record joined to network record or None
              if the address was not associated with a fixed IP.
"""
return IMPL.floating_ip_disassociate(context, address)
def floating_ip_fixed_ip_associate(context, floating_address,
fixed_address, host):
"""Associate a floating IP to a fixed_ip by address.
    :returns: the fixed IP record joined to network record or None
              if the IP was already associated with the fixed IP.
"""
return IMPL.floating_ip_fixed_ip_associate(context,
floating_address,
fixed_address,
host)
def floating_ip_get_all(context):
"""Get all floating IPs."""
return IMPL.floating_ip_get_all(context)
def floating_ip_get_all_by_host(context, host):
"""Get all floating IPs by host."""
return IMPL.floating_ip_get_all_by_host(context, host)
def floating_ip_get_all_by_project(context, project_id):
"""Get all floating IPs by project."""
return IMPL.floating_ip_get_all_by_project(context, project_id)
def floating_ip_get_by_address(context, address):
"""Get a floating IP by address or raise if it doesn't exist."""
return IMPL.floating_ip_get_by_address(context, address)
def floating_ip_get_by_fixed_address(context, fixed_address):
"""Get a floating IPs by fixed address."""
return IMPL.floating_ip_get_by_fixed_address(context, fixed_address)
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id):
"""Get a floating IPs by fixed address."""
return IMPL.floating_ip_get_by_fixed_ip_id(context, fixed_ip_id)
def floating_ip_update(context, address, values):
"""Update a floating IP by address or raise if it doesn't exist."""
return IMPL.floating_ip_update(context, address, values)
def dnsdomain_get_all(context):
"""Get a list of all dnsdomains in our database."""
return IMPL.dnsdomain_get_all(context)
def dnsdomain_register_for_zone(context, fqdomain, zone):
"""Associated a DNS domain with an availability zone."""
return IMPL.dnsdomain_register_for_zone(context, fqdomain, zone)
def dnsdomain_register_for_project(context, fqdomain, project):
"""Associated a DNS domain with a project id."""
return IMPL.dnsdomain_register_for_project(context, fqdomain, project)
def dnsdomain_unregister(context, fqdomain):
"""Purge associations for the specified DNS zone."""
return IMPL.dnsdomain_unregister(context, fqdomain)
def dnsdomain_get(context, fqdomain):
"""Get the db record for the specified domain."""
return IMPL.dnsdomain_get(context, fqdomain)
####################
def migration_update(context, id, values):
"""Update a migration instance."""
return IMPL.migration_update(context, id, values)
def migration_create(context, values):
"""Create a migration record."""
return IMPL.migration_create(context, values)
def migration_get(context, migration_id):
"""Finds a migration by the id."""
return IMPL.migration_get(context, migration_id)
def migration_get_by_id_and_instance(context, migration_id, instance_uuid):
"""Finds a migration by the migration id and the instance uuid."""
return IMPL.migration_get_by_id_and_instance(context,
migration_id,
instance_uuid)
def migration_get_by_instance_and_status(context, instance_uuid, status):
"""Finds a migration by the instance uuid its migrating."""
return IMPL.migration_get_by_instance_and_status(context, instance_uuid,
status)
def migration_get_unconfirmed_by_dest_compute(context, confirm_window,
dest_compute):
"""Finds all unconfirmed migrations within the confirmation window for
a specific destination compute host.
"""
return IMPL.migration_get_unconfirmed_by_dest_compute(context,
confirm_window, dest_compute)
def migration_get_in_progress_by_host_and_node(context, host, node):
"""Finds all migrations for the given host + node that are not yet
confirmed or reverted.
"""
return IMPL.migration_get_in_progress_by_host_and_node(context, host, node)
def migration_get_all_by_filters(context, filters):
"""Finds all migrations in progress."""
return IMPL.migration_get_all_by_filters(context, filters)
def migration_get_in_progress_by_instance(context, instance_uuid,
migration_type=None):
"""Finds all migrations of an instance in progress."""
return IMPL.migration_get_in_progress_by_instance(context, instance_uuid,
migration_type)
####################
def fixed_ip_associate(context, address, instance_uuid, network_id=None,
reserved=False, virtual_interface_id=None):
"""Associate fixed IP to instance.
Raises if fixed IP is not available.
"""
return IMPL.fixed_ip_associate(context, address, instance_uuid, network_id,
reserved, virtual_interface_id)
def fixed_ip_associate_pool(context, network_id, instance_uuid=None,
host=None, virtual_interface_id=None):
"""Find free IP in network and associate it to instance or host.
Raises if one is not available.
"""
return IMPL.fixed_ip_associate_pool(context, network_id,
instance_uuid, host,
virtual_interface_id)
def fixed_ip_create(context, values):
"""Create a fixed IP from the values dictionary."""
return IMPL.fixed_ip_create(context, values)
def fixed_ip_bulk_create(context, ips):
"""Create a lot of fixed IPs from the values dictionary."""
return IMPL.fixed_ip_bulk_create(context, ips)
def fixed_ip_disassociate(context, address):
"""Disassociate a fixed IP from an instance by address."""
return IMPL.fixed_ip_disassociate(context, address)
def fixed_ip_disassociate_all_by_timeout(context, host, time):
"""Disassociate old fixed IPs from host."""
return IMPL.fixed_ip_disassociate_all_by_timeout(context, host, time)
def fixed_ip_get(context, id, get_network=False):
"""Get fixed IP by id or raise if it does not exist.
If get_network is true, also return the associated network.
"""
return IMPL.fixed_ip_get(context, id, get_network)
def fixed_ip_get_all(context):
"""Get all defined fixed IPs."""
return IMPL.fixed_ip_get_all(context)
def fixed_ip_get_by_address(context, address, columns_to_join=None):
"""Get a fixed IP by address or raise if it does not exist."""
return IMPL.fixed_ip_get_by_address(context, address,
columns_to_join=columns_to_join)
def fixed_ip_get_by_floating_address(context, floating_address):
"""Get a fixed IP by a floating address."""
return IMPL.fixed_ip_get_by_floating_address(context, floating_address)
def fixed_ip_get_by_instance(context, instance_uuid):
"""Get fixed IPs by instance or raise if none exist."""
return IMPL.fixed_ip_get_by_instance(context, instance_uuid)
def fixed_ip_get_by_host(context, host):
"""Get fixed IPs by compute host."""
return IMPL.fixed_ip_get_by_host(context, host)
def fixed_ip_get_by_network_host(context, network_uuid, host):
"""Get fixed IP for a host in a network."""
return IMPL.fixed_ip_get_by_network_host(context, network_uuid, host)
def fixed_ips_by_virtual_interface(context, vif_id):
"""Get fixed IPs by virtual interface or raise if none exist."""
return IMPL.fixed_ips_by_virtual_interface(context, vif_id)
def fixed_ip_update(context, address, values):
"""Create a fixed IP from the values dictionary."""
return IMPL.fixed_ip_update(context, address, values)
####################
def virtual_interface_create(context, values):
"""Create a virtual interface record in the database."""
return IMPL.virtual_interface_create(context, values)
def virtual_interface_get(context, vif_id):
"""Gets a virtual interface from the table."""
return IMPL.virtual_interface_get(context, vif_id)
def virtual_interface_get_by_address(context, address):
"""Gets a virtual interface from the table filtering on address."""
return IMPL.virtual_interface_get_by_address(context, address)
def virtual_interface_get_by_uuid(context, vif_uuid):
"""Gets a virtual interface from the table filtering on vif uuid."""
return IMPL.virtual_interface_get_by_uuid(context, vif_uuid)
def virtual_interface_get_by_instance(context, instance_id):
"""Gets all virtual_interfaces for instance."""
return IMPL.virtual_interface_get_by_instance(context, instance_id)
def virtual_interface_get_by_instance_and_network(context, instance_id,
network_id):
"""Gets all virtual interfaces for instance."""
return IMPL.virtual_interface_get_by_instance_and_network(context,
instance_id,
network_id)
def virtual_interface_delete_by_instance(context, instance_id):
"""Delete virtual interface records associated with instance."""
return IMPL.virtual_interface_delete_by_instance(context, instance_id)
def virtual_interface_get_all(context):
"""Gets all virtual interfaces from the table."""
return IMPL.virtual_interface_get_all(context)
####################
def instance_create(context, values):
"""Create an instance from the values dictionary."""
return IMPL.instance_create(context, values)
def instance_destroy(context, instance_uuid, constraint=None):
"""Destroy the instance or raise if it does not exist."""
return IMPL.instance_destroy(context, instance_uuid, constraint)
def instance_get_by_uuid(context, uuid, columns_to_join=None):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get_by_uuid(context, uuid, columns_to_join)
def instance_get(context, instance_id, columns_to_join=None):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get(context, instance_id,
columns_to_join=columns_to_join)
def instance_get_all(context, columns_to_join=None):
"""Get all instances."""
return IMPL.instance_get_all(context, columns_to_join=columns_to_join)
def instance_get_all_by_filters(context, filters, sort_key='created_at',
sort_dir='desc', limit=None, marker=None,
columns_to_join=None):
"""Get all instances that match all filters."""
# Note: This function exists for backwards compatibility since calls to
# the instance layer coming in over RPC may specify the single sort
# key/direction values; in this case, this function is invoked instead
# of the 'instance_get_all_by_filters_sort' function.
return IMPL.instance_get_all_by_filters(context, filters, sort_key,
sort_dir, limit=limit,
marker=marker,
columns_to_join=columns_to_join)
def instance_get_all_by_filters_sort(context, filters, limit=None,
marker=None, columns_to_join=None,
sort_keys=None, sort_dirs=None):
"""Get all instances that match all filters sorted by multiple keys.
sort_keys and sort_dirs must be a list of strings.
"""
return IMPL.instance_get_all_by_filters_sort(
context, filters, limit=limit, marker=marker,
columns_to_join=columns_to_join, sort_keys=sort_keys,
sort_dirs=sort_dirs)
def instance_get_active_by_window_joined(context, begin, end=None,
project_id=None, host=None,
columns_to_join=None):
"""Get instances and joins active during a certain time window.
Specifying a project_id will filter for a certain project.
Specifying a host will filter for instances on a given compute host.
"""
return IMPL.instance_get_active_by_window_joined(context, begin, end,
project_id, host,
columns_to_join=columns_to_join)
def instance_get_all_by_host(context, host, columns_to_join=None):
"""Get all instances belonging to a host."""
return IMPL.instance_get_all_by_host(context, host, columns_to_join)
def instance_get_all_by_host_and_node(context, host, node,
columns_to_join=None):
"""Get all instances belonging to a node."""
return IMPL.instance_get_all_by_host_and_node(
context, host, node, columns_to_join=columns_to_join)
def instance_get_all_by_host_and_not_type(context, host, type_id=None):
"""Get all instances belonging to a host with a different type_id."""
return IMPL.instance_get_all_by_host_and_not_type(context, host, type_id)
def instance_get_all_by_grantee_security_groups(context, group_ids):
"""Get instances with rules granted to them by a list of secgroups ids."""
return IMPL.instance_get_all_by_grantee_security_groups(context, group_ids)
def instance_floating_address_get_all(context, instance_uuid):
"""Get all floating IP addresses of an instance."""
return IMPL.instance_floating_address_get_all(context, instance_uuid)
# NOTE(hanlind): This method can be removed as conductor RPC API moves to v2.0.
def instance_get_all_hung_in_rebooting(context, reboot_window):
"""Get all instances stuck in a rebooting state."""
return IMPL.instance_get_all_hung_in_rebooting(context, reboot_window)
def instance_update(context, instance_uuid, values, expected=None):
"""Set the given properties on an instance and update it.
Raises NotFound if instance does not exist.
"""
return IMPL.instance_update(context, instance_uuid, values,
expected=expected)
def instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=None, expected=None):
"""Set the given properties on an instance and update it. Return
a shallow copy of the original instance reference, as well as the
updated one.
:param context: = request context object
:param instance_uuid: = instance id or uuid
:param values: = dict containing column values
:returns: a tuple of the form (old_instance_ref, new_instance_ref)
Raises NotFound if instance does not exist.
"""
rv = IMPL.instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=columns_to_join,
expected=expected)
return rv
def instance_add_security_group(context, instance_id, security_group_id):
"""Associate the given security group with the given instance."""
return IMPL.instance_add_security_group(context, instance_id,
security_group_id)
def instance_remove_security_group(context, instance_id, security_group_id):
"""Disassociate the given security group from the given instance."""
return IMPL.instance_remove_security_group(context, instance_id,
security_group_id)
####################
def instance_group_create(context, values, policies=None, members=None):
"""Create a new group.
Each group will receive a unique uuid. This will be used for access to the
group.
"""
return IMPL.instance_group_create(context, values, policies, members)
def instance_group_get(context, group_uuid):
"""Get a specific group by id."""
return IMPL.instance_group_get(context, group_uuid)
def instance_group_get_by_instance(context, instance_uuid):
"""Get the group an instance is a member of."""
return IMPL.instance_group_get_by_instance(context, instance_uuid)
def instance_group_update(context, group_uuid, values):
"""Update the attributes of an group."""
return IMPL.instance_group_update(context, group_uuid, values)
def instance_group_delete(context, group_uuid):
"""Delete an group."""
return IMPL.instance_group_delete(context, group_uuid)
def instance_group_get_all(context):
"""Get all groups."""
return IMPL.instance_group_get_all(context)
def instance_group_get_all_by_project_id(context, project_id):
"""Get all groups for a specific project_id."""
return IMPL.instance_group_get_all_by_project_id(context, project_id)
def instance_group_members_add(context, group_uuid, members,
set_delete=False):
"""Add members to the group."""
return IMPL.instance_group_members_add(context, group_uuid, members,
set_delete=set_delete)
def instance_group_member_delete(context, group_uuid, instance_id):
"""Delete a specific member from the group."""
return IMPL.instance_group_member_delete(context, group_uuid, instance_id)
def instance_group_members_get(context, group_uuid):
"""Get the members from the group."""
return IMPL.instance_group_members_get(context, group_uuid)
###################
def instance_info_cache_get(context, instance_uuid):
"""Gets an instance info cache from the table.
:param instance_uuid: = uuid of the info cache's instance
"""
return IMPL.instance_info_cache_get(context, instance_uuid)
def instance_info_cache_update(context, instance_uuid, values):
"""Update an instance info cache record in the table.
:param instance_uuid: = uuid of info cache's instance
:param values: = dict containing column values to update
"""
return IMPL.instance_info_cache_update(context, instance_uuid, values)
def instance_info_cache_delete(context, instance_uuid):
"""Deletes an existing instance_info_cache record
:param instance_uuid: = uuid of the instance tied to the cache record
"""
return IMPL.instance_info_cache_delete(context, instance_uuid)
###################
def instance_extra_get_by_instance_uuid(context, instance_uuid, columns=None):
"""Get the instance extra record
:param instance_uuid: = uuid of the instance tied to the topology record
:param columns: A list of the columns to load, or None for 'all of them'
"""
return IMPL.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=columns)
def instance_extra_update_by_uuid(context, instance_uuid, updates):
"""Update the instance extra record by instance uuid
:param instance_uuid: = uuid of the instance tied to the record
:param updates: A dict of updates to apply
"""
return IMPL.instance_extra_update_by_uuid(context, instance_uuid,
updates)
###################
def key_pair_create(context, values):
"""Create a key_pair from the values dictionary."""
return IMPL.key_pair_create(context, values)
def key_pair_destroy(context, user_id, name):
"""Destroy the key_pair or raise if it does not exist."""
return IMPL.key_pair_destroy(context, user_id, name)
def key_pair_get(context, user_id, name):
"""Get a key_pair or raise if it does not exist."""
return IMPL.key_pair_get(context, user_id, name)
def key_pair_get_all_by_user(context, user_id):
"""Get all key_pairs by user."""
return IMPL.key_pair_get_all_by_user(context, user_id)
def key_pair_count_by_user(context, user_id):
"""Count number of key pairs for the given user ID."""
return IMPL.key_pair_count_by_user(context, user_id)
####################
def network_associate(context, project_id, network_id=None, force=False):
"""Associate a free network to a project."""
return IMPL.network_associate(context, project_id, network_id, force)
def network_count_reserved_ips(context, network_id):
"""Return the number of reserved IPs in the network."""
return IMPL.network_count_reserved_ips(context, network_id)
def network_create_safe(context, values):
"""Create a network from the values dict.
The network is only returned if the create succeeds. If the create violates
constraints because the network already exists, no exception is raised.
"""
return IMPL.network_create_safe(context, values)
def network_delete_safe(context, network_id):
"""Delete network with key network_id.
    This method assumes that the network is not associated with any project.
"""
return IMPL.network_delete_safe(context, network_id)
def network_disassociate(context, network_id, disassociate_host=True,
disassociate_project=True):
"""Disassociate the network from project or host
Raises if it does not exist.
"""
return IMPL.network_disassociate(context, network_id, disassociate_host,
disassociate_project)
def network_get(context, network_id, project_only="allow_none"):
"""Get a network or raise if it does not exist."""
return IMPL.network_get(context, network_id, project_only=project_only)
def network_get_all(context, project_only="allow_none"):
"""Return all defined networks."""
return IMPL.network_get_all(context, project_only)
def network_get_all_by_uuids(context, network_uuids,
project_only="allow_none"):
"""Return networks by ids."""
return IMPL.network_get_all_by_uuids(context, network_uuids,
project_only=project_only)
def network_in_use_on_host(context, network_id, host=None):
"""Indicates if a network is currently in use on host."""
return IMPL.network_in_use_on_host(context, network_id, host)
def network_get_associated_fixed_ips(context, network_id, host=None):
"""Get all network's IPs that have been associated."""
return IMPL.network_get_associated_fixed_ips(context, network_id, host)
def network_get_by_uuid(context, uuid):
"""Get a network by uuid or raise if it does not exist."""
return IMPL.network_get_by_uuid(context, uuid)
def network_get_by_cidr(context, cidr):
"""Get a network by cidr or raise if it does not exist."""
return IMPL.network_get_by_cidr(context, cidr)
def network_get_all_by_host(context, host):
"""All networks for which the given host is the network host."""
return IMPL.network_get_all_by_host(context, host)
def network_set_host(context, network_id, host_id):
"""Safely set the host for network."""
return IMPL.network_set_host(context, network_id, host_id)
def network_update(context, network_id, values):
"""Set the given properties on a network and update it.
Raises NotFound if network does not exist.
"""
return IMPL.network_update(context, network_id, values)
###############
def quota_create(context, project_id, resource, limit, user_id=None):
"""Create a quota for the given project and resource."""
return IMPL.quota_create(context, project_id, resource, limit,
user_id=user_id)
def quota_get(context, project_id, resource, user_id=None):
"""Retrieve a quota or raise if it does not exist."""
return IMPL.quota_get(context, project_id, resource, user_id=user_id)
def quota_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all quotas associated with a given project and user."""
return IMPL.quota_get_all_by_project_and_user(context, project_id, user_id)
def quota_get_all_by_project(context, project_id):
"""Retrieve all quotas associated with a given project."""
return IMPL.quota_get_all_by_project(context, project_id)
def quota_get_all(context, project_id):
"""Retrieve all user quotas associated with a given project."""
return IMPL.quota_get_all(context, project_id)
def quota_update(context, project_id, resource, limit, user_id=None):
"""Update a quota or raise if it does not exist."""
return IMPL.quota_update(context, project_id, resource, limit,
user_id=user_id)
###################
def quota_class_create(context, class_name, resource, limit):
"""Create a quota class for the given name and resource."""
return IMPL.quota_class_create(context, class_name, resource, limit)
def quota_class_get(context, class_name, resource):
"""Retrieve a quota class or raise if it does not exist."""
return IMPL.quota_class_get(context, class_name, resource)
def quota_class_get_default(context):
"""Retrieve all default quotas."""
return IMPL.quota_class_get_default(context)
def quota_class_get_all_by_name(context, class_name):
"""Retrieve all quotas associated with a given quota class."""
return IMPL.quota_class_get_all_by_name(context, class_name)
def quota_class_update(context, class_name, resource, limit):
"""Update a quota class or raise if it does not exist."""
return IMPL.quota_class_update(context, class_name, resource, limit)
###################
def quota_usage_get(context, project_id, resource, user_id=None):
"""Retrieve a quota usage or raise if it does not exist."""
return IMPL.quota_usage_get(context, project_id, resource, user_id=user_id)
def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project_and_user(context,
project_id, user_id)
def quota_usage_get_all_by_project(context, project_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project(context, project_id)
def quota_usage_update(context, project_id, user_id, resource, **kwargs):
"""Update a quota usage or raise if it does not exist."""
return IMPL.quota_usage_update(context, project_id, user_id, resource,
**kwargs)
###################
def quota_reserve(context, resources, quotas, user_quotas, deltas, expire,
until_refresh, max_age, project_id=None, user_id=None):
"""Check quotas and create appropriate reservations."""
return IMPL.quota_reserve(context, resources, quotas, user_quotas, deltas,
expire, until_refresh, max_age,
project_id=project_id, user_id=user_id)
def reservation_commit(context, reservations, project_id=None, user_id=None):
"""Commit quota reservations."""
return IMPL.reservation_commit(context, reservations,
project_id=project_id,
user_id=user_id)
def reservation_rollback(context, reservations, project_id=None, user_id=None):
"""Roll back quota reservations."""
return IMPL.reservation_rollback(context, reservations,
project_id=project_id,
user_id=user_id)
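# A hedged sketch of the reserve/commit/rollback pattern these three calls
# form (hypothetical names; `resources`, `quotas`, `user_quotas`, `expire`,
# `until_refresh` and `max_age` are assumed to come from the quota engine):
#
#   reservations = quota_reserve(ctxt, resources, quotas, user_quotas,
#                                {'instances': 1}, expire, until_refresh,
#                                max_age, project_id=pid, user_id=uid)
#   try:
#       do_work()  # the operation the quota was reserved for
#       reservation_commit(ctxt, reservations, project_id=pid, user_id=uid)
#   except Exception:
#       reservation_rollback(ctxt, reservations, project_id=pid, user_id=uid)
#       raise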
def quota_destroy_all_by_project_and_user(context, project_id, user_id):
"""Destroy all quotas associated with a given project and user."""
return IMPL.quota_destroy_all_by_project_and_user(context,
project_id, user_id)
def quota_destroy_all_by_project(context, project_id):
"""Destroy all quotas associated with a given project."""
return IMPL.quota_destroy_all_by_project(context, project_id)
def reservation_expire(context):
"""Roll back any expired reservations."""
return IMPL.reservation_expire(context)
###################
def ec2_volume_create(context, volume_id, forced_id=None):
return IMPL.ec2_volume_create(context, volume_id, forced_id)
def ec2_volume_get_by_id(context, volume_id):
return IMPL.ec2_volume_get_by_id(context, volume_id)
def ec2_volume_get_by_uuid(context, volume_uuid):
return IMPL.ec2_volume_get_by_uuid(context, volume_uuid)
def ec2_snapshot_create(context, snapshot_id, forced_id=None):
return IMPL.ec2_snapshot_create(context, snapshot_id, forced_id)
def ec2_snapshot_get_by_ec2_id(context, ec2_id):
return IMPL.ec2_snapshot_get_by_ec2_id(context, ec2_id)
def ec2_snapshot_get_by_uuid(context, snapshot_uuid):
return IMPL.ec2_snapshot_get_by_uuid(context, snapshot_uuid)
####################
def block_device_mapping_create(context, values, legacy=True):
"""Create an entry of block device mapping."""
return IMPL.block_device_mapping_create(context, values, legacy)
def block_device_mapping_update(context, bdm_id, values, legacy=True):
"""Update an entry of block device mapping."""
return IMPL.block_device_mapping_update(context, bdm_id, values, legacy)
def block_device_mapping_update_or_create(context, values, legacy=True):
"""Update an entry of block device mapping.
    If the entry does not exist, create a new one.
"""
return IMPL.block_device_mapping_update_or_create(context, values, legacy)
def block_device_mapping_get_all_by_instance_uuids(context, instance_uuids):
"""Get all block device mapping belonging to a list of instances."""
return IMPL.block_device_mapping_get_all_by_instance_uuids(context,
instance_uuids)
def block_device_mapping_get_all_by_instance(context, instance_uuid):
"""Get all block device mapping belonging to an instance."""
return IMPL.block_device_mapping_get_all_by_instance(context,
instance_uuid)
def block_device_mapping_get_all_by_volume_id(context, volume_id,
columns_to_join=None):
"""Get block device mapping for a given volume."""
return IMPL.block_device_mapping_get_all_by_volume_id(context, volume_id,
columns_to_join)
def block_device_mapping_get_by_instance_and_volume_id(context, volume_id,
instance_uuid,
columns_to_join=None):
"""Get block device mapping for a given volume ID and instance UUID."""
return IMPL.block_device_mapping_get_by_instance_and_volume_id(
context, volume_id, instance_uuid, columns_to_join)
def block_device_mapping_destroy(context, bdm_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy(context, bdm_id)
def block_device_mapping_destroy_by_instance_and_device(context, instance_uuid,
device_name):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_device(
context, instance_uuid, device_name)
def block_device_mapping_destroy_by_instance_and_volume(context, instance_uuid,
volume_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_volume(
context, instance_uuid, volume_id)
####################
def security_group_get_all(context):
"""Get all security groups."""
return IMPL.security_group_get_all(context)
def security_group_get(context, security_group_id, columns_to_join=None):
"""Get security group by its id."""
return IMPL.security_group_get(context, security_group_id,
columns_to_join)
def security_group_get_by_name(context, project_id, group_name,
                               columns_to_join=None):
    """Returns a security group with the specified name from a project."""
    return IMPL.security_group_get_by_name(context, project_id, group_name,
                                           columns_to_join=columns_to_join)
def security_group_get_by_project(context, project_id):
"""Get all security groups belonging to a project."""
return IMPL.security_group_get_by_project(context, project_id)
def security_group_get_by_instance(context, instance_uuid):
"""Get security groups to which the instance is assigned."""
return IMPL.security_group_get_by_instance(context, instance_uuid)
def security_group_in_use(context, group_id):
"""Indicates if a security group is currently in use."""
return IMPL.security_group_in_use(context, group_id)
def security_group_create(context, values):
"""Create a new security group."""
return IMPL.security_group_create(context, values)
def security_group_update(context, security_group_id, values,
columns_to_join=None):
"""Update a security group."""
return IMPL.security_group_update(context, security_group_id, values,
columns_to_join=columns_to_join)
def security_group_ensure_default(context):
"""Ensure default security group exists for a project_id.
Returns a tuple with the first element being a bool indicating
if the default security group previously existed. Second
element is the dict used to create the default security group.
"""
return IMPL.security_group_ensure_default(context)
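# Hypothetical sketch of consuming the documented tuple return:
#
#   existed, group_values = security_group_ensure_default(ctxt)
#   if not existed:
#       pass  # the default group was just created from group_values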
def security_group_destroy(context, security_group_id):
"""Deletes a security group."""
return IMPL.security_group_destroy(context, security_group_id)
####################
def security_group_rule_create(context, values):
    """Create a new security group rule."""
    return IMPL.security_group_rule_create(context, values)
def security_group_rule_get_by_security_group(context, security_group_id,
columns_to_join=None):
"""Get all rules for a given security group."""
return IMPL.security_group_rule_get_by_security_group(
context, security_group_id, columns_to_join=columns_to_join)
def security_group_rule_get_by_instance(context, instance_uuid):
"""Get all rules for a given instance."""
return IMPL.security_group_rule_get_by_instance(context, instance_uuid)
def security_group_rule_destroy(context, security_group_rule_id):
"""Deletes a security group rule."""
return IMPL.security_group_rule_destroy(context, security_group_rule_id)
def security_group_rule_get(context, security_group_rule_id):
"""Gets a security group rule."""
return IMPL.security_group_rule_get(context, security_group_rule_id)
def security_group_rule_count_by_group(context, security_group_id):
"""Count rules in a given security group."""
return IMPL.security_group_rule_count_by_group(context, security_group_id)
###################
def security_group_default_rule_get(context, security_group_rule_default_id):
return IMPL.security_group_default_rule_get(context,
security_group_rule_default_id)
def security_group_default_rule_destroy(context,
security_group_rule_default_id):
return IMPL.security_group_default_rule_destroy(
context, security_group_rule_default_id)
def security_group_default_rule_create(context, values):
return IMPL.security_group_default_rule_create(context, values)
def security_group_default_rule_list(context):
return IMPL.security_group_default_rule_list(context)
###################
def provider_fw_rule_create(context, rule):
"""Add a firewall rule at the provider level (all hosts & instances)."""
return IMPL.provider_fw_rule_create(context, rule)
def provider_fw_rule_get_all(context):
"""Get all provider-level firewall rules."""
return IMPL.provider_fw_rule_get_all(context)
def provider_fw_rule_destroy(context, rule_id):
"""Delete a provider firewall rule from the database."""
return IMPL.provider_fw_rule_destroy(context, rule_id)
###################
def project_get_networks(context, project_id, associate=True):
"""Return the network associated with the project.
If associate is true, it will attempt to associate a new
network if one is not found, otherwise it returns None.
"""
return IMPL.project_get_networks(context, project_id, associate)
###################
def console_pool_create(context, values):
"""Create console pool."""
return IMPL.console_pool_create(context, values)
def console_pool_get_by_host_type(context, compute_host, proxy_host,
console_type):
"""Fetch a console pool for a given proxy host, compute host, and type."""
return IMPL.console_pool_get_by_host_type(context,
compute_host,
proxy_host,
console_type)
def console_pool_get_all_by_host_type(context, host, console_type):
"""Fetch all pools for given proxy host and type."""
return IMPL.console_pool_get_all_by_host_type(context,
host,
console_type)
def console_create(context, values):
"""Create a console."""
return IMPL.console_create(context, values)
def console_delete(context, console_id):
"""Delete a console."""
return IMPL.console_delete(context, console_id)
def console_get_by_pool_instance(context, pool_id, instance_uuid):
"""Get console entry for a given instance and pool."""
return IMPL.console_get_by_pool_instance(context, pool_id, instance_uuid)
def console_get_all_by_instance(context, instance_uuid, columns_to_join=None):
"""Get consoles for a given instance."""
return IMPL.console_get_all_by_instance(context, instance_uuid,
columns_to_join)
def console_get(context, console_id, instance_uuid=None):
"""Get a specific console (possibly on a given instance)."""
return IMPL.console_get(context, console_id, instance_uuid)
##################
def flavor_create(context, values, projects=None):
"""Create a new instance type."""
return IMPL.flavor_create(context, values, projects=projects)
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
sort_dir='asc', limit=None, marker=None):
"""Get all instance flavors."""
return IMPL.flavor_get_all(
context, inactive=inactive, filters=filters, sort_key=sort_key,
sort_dir=sort_dir, limit=limit, marker=marker)
def flavor_get(context, id):
"""Get instance type by id."""
return IMPL.flavor_get(context, id)
def flavor_get_by_name(context, name):
"""Get instance type by name."""
return IMPL.flavor_get_by_name(context, name)
def flavor_get_by_flavor_id(context, id, read_deleted=None):
"""Get instance type by flavor id."""
return IMPL.flavor_get_by_flavor_id(context, id, read_deleted)
def flavor_destroy(context, name):
"""Delete an instance type."""
return IMPL.flavor_destroy(context, name)
def flavor_access_get_by_flavor_id(context, flavor_id):
"""Get flavor access by flavor id."""
return IMPL.flavor_access_get_by_flavor_id(context, flavor_id)
def flavor_access_add(context, flavor_id, project_id):
"""Add flavor access for project."""
return IMPL.flavor_access_add(context, flavor_id, project_id)
def flavor_access_remove(context, flavor_id, project_id):
"""Remove flavor access for project."""
return IMPL.flavor_access_remove(context, flavor_id, project_id)
def flavor_extra_specs_get(context, flavor_id):
"""Get all extra specs for an instance type."""
return IMPL.flavor_extra_specs_get(context, flavor_id)
def flavor_extra_specs_delete(context, flavor_id, key):
"""Delete the given extra specs item."""
IMPL.flavor_extra_specs_delete(context, flavor_id, key)
def flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs):
"""Create or update instance type extra specs.
This adds or modifies the key/value pairs specified in the
extra specs dict argument
"""
IMPL.flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs)
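# A minimal sketch (the spec key below is illustrative only, not mandated by
# this API):
#
#   flavor_extra_specs_update_or_create(ctxt, flavor_id,
#                                       {'hw:cpu_policy': 'dedicated'})
#   specs = flavor_extra_specs_get(ctxt, flavor_id)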
####################
def pci_device_get_by_addr(context, node_id, dev_addr):
"""Get PCI device by address."""
return IMPL.pci_device_get_by_addr(context, node_id, dev_addr)
def pci_device_get_by_id(context, id):
"""Get PCI device by id."""
return IMPL.pci_device_get_by_id(context, id)
def pci_device_get_all_by_node(context, node_id):
"""Get all PCI devices for one host."""
return IMPL.pci_device_get_all_by_node(context, node_id)
def pci_device_get_all_by_instance_uuid(context, instance_uuid):
"""Get PCI devices allocated to instance."""
return IMPL.pci_device_get_all_by_instance_uuid(context, instance_uuid)
def pci_device_get_all_by_parent_addr(context, node_id, parent_addr):
"""Get all PCI devices by parent address."""
return IMPL.pci_device_get_all_by_parent_addr(context, node_id,
parent_addr)
def pci_device_destroy(context, node_id, address):
"""Delete a PCI device record."""
return IMPL.pci_device_destroy(context, node_id, address)
def pci_device_update(context, node_id, address, value):
"""Update a pci device."""
return IMPL.pci_device_update(context, node_id, address, value)
###################
def cell_create(context, values):
"""Create a new child Cell entry."""
return IMPL.cell_create(context, values)
def cell_update(context, cell_name, values):
"""Update a child Cell entry."""
return IMPL.cell_update(context, cell_name, values)
def cell_delete(context, cell_name):
"""Delete a child Cell."""
return IMPL.cell_delete(context, cell_name)
def cell_get(context, cell_name):
"""Get a specific child Cell."""
return IMPL.cell_get(context, cell_name)
def cell_get_all(context):
"""Get all child Cells."""
return IMPL.cell_get_all(context)
####################
def instance_metadata_get(context, instance_uuid):
"""Get all metadata for an instance."""
return IMPL.instance_metadata_get(context, instance_uuid)
def instance_metadata_delete(context, instance_uuid, key):
"""Delete the given metadata item."""
IMPL.instance_metadata_delete(context, instance_uuid, key)
def instance_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
return IMPL.instance_metadata_update(context, instance_uuid,
metadata, delete)
####################
def instance_system_metadata_get(context, instance_uuid):
"""Get all system metadata for an instance."""
return IMPL.instance_system_metadata_get(context, instance_uuid)
def instance_system_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
IMPL.instance_system_metadata_update(
context, instance_uuid, metadata, delete)
####################
def agent_build_create(context, values):
"""Create a new agent build entry."""
return IMPL.agent_build_create(context, values)
def agent_build_get_by_triple(context, hypervisor, os, architecture):
"""Get agent build by hypervisor/OS/architecture triple."""
return IMPL.agent_build_get_by_triple(context, hypervisor, os,
architecture)
def agent_build_get_all(context, hypervisor=None):
"""Get all agent builds."""
return IMPL.agent_build_get_all(context, hypervisor)
def agent_build_destroy(context, agent_update_id):
"""Destroy agent build entry."""
IMPL.agent_build_destroy(context, agent_update_id)
def agent_build_update(context, agent_build_id, values):
"""Update agent build entry."""
IMPL.agent_build_update(context, agent_build_id, values)
####################
def bw_usage_get(context, uuid, start_period, mac):
"""Return bw usage for instance and mac in a given audit period."""
return IMPL.bw_usage_get(context, uuid, start_period, mac)
def bw_usage_get_by_uuids(context, uuids, start_period):
"""Return bw usages for instance(s) in a given audit period."""
return IMPL.bw_usage_get_by_uuids(context, uuids, start_period)
def bw_usage_update(context, uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed=None,
update_cells=True):
"""Update cached bandwidth usage for an instance's network based on mac
address. Creates new record if needed.
"""
rv = IMPL.bw_usage_update(context, uuid, mac, start_period, bw_in,
bw_out, last_ctr_in, last_ctr_out, last_refreshed=last_refreshed)
if update_cells:
try:
cells_rpcapi.CellsAPI().bw_usage_update_at_top(context,
uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed)
except Exception:
LOG.exception(_LE("Failed to notify cells of bw_usage update"))
return rv
###################
def vol_get_usage_by_time(context, begin):
"""Return volumes usage that have been updated after a specified time."""
return IMPL.vol_get_usage_by_time(context, begin)
def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes,
instance_id, project_id, user_id, availability_zone,
update_totals=False):
"""Update cached volume usage for a volume
Creates new record if needed.
"""
return IMPL.vol_usage_update(context, id, rd_req, rd_bytes, wr_req,
wr_bytes, instance_id, project_id, user_id,
availability_zone,
update_totals=update_totals)
###################
def s3_image_get(context, image_id):
"""Find local s3 image represented by the provided id."""
return IMPL.s3_image_get(context, image_id)
def s3_image_get_by_uuid(context, image_uuid):
"""Find local s3 image represented by the provided uuid."""
return IMPL.s3_image_get_by_uuid(context, image_uuid)
def s3_image_create(context, image_uuid):
"""Create local s3 image represented by provided uuid."""
return IMPL.s3_image_create(context, image_uuid)
####################
def aggregate_create(context, values, metadata=None):
"""Create a new aggregate with metadata."""
return IMPL.aggregate_create(context, values, metadata)
def aggregate_get(context, aggregate_id):
"""Get a specific aggregate by id."""
return IMPL.aggregate_get(context, aggregate_id)
def aggregate_get_by_host(context, host, key=None):
"""Get a list of aggregates that host belongs to."""
return IMPL.aggregate_get_by_host(context, host, key)
def aggregate_metadata_get_by_host(context, host, key=None):
"""Get metadata for all aggregates that host belongs to.
    Returns a dictionary where each value is a set; this is to cover the case
    where two aggregates have different values for the same key.
    The optional key argument filters the result to that key.
"""
return IMPL.aggregate_metadata_get_by_host(context, host, key)
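# Hypothetical sketch of the documented dict-of-sets return shape:
#
#   md = aggregate_metadata_get_by_host(ctxt, 'compute1')
#   # e.g. {'availability_zone': set(['az1', 'az2'])} when the host sits in
#   # two aggregates that disagree on the same key.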
def aggregate_get_by_metadata_key(context, key):
return IMPL.aggregate_get_by_metadata_key(context, key)
def aggregate_update(context, aggregate_id, values):
"""Update the attributes of an aggregates.
If values contains a metadata key, it updates the aggregate metadata too.
"""
return IMPL.aggregate_update(context, aggregate_id, values)
def aggregate_delete(context, aggregate_id):
"""Delete an aggregate."""
return IMPL.aggregate_delete(context, aggregate_id)
def aggregate_get_all(context):
"""Get all aggregates."""
return IMPL.aggregate_get_all(context)
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
"""Add/update metadata. If set_delete=True, it adds only."""
IMPL.aggregate_metadata_add(context, aggregate_id, metadata, set_delete)
def aggregate_metadata_get(context, aggregate_id):
"""Get metadata for the specified aggregate."""
return IMPL.aggregate_metadata_get(context, aggregate_id)
def aggregate_metadata_delete(context, aggregate_id, key):
"""Delete the given metadata key."""
IMPL.aggregate_metadata_delete(context, aggregate_id, key)
def aggregate_host_add(context, aggregate_id, host):
"""Add host to the aggregate."""
IMPL.aggregate_host_add(context, aggregate_id, host)
def aggregate_host_get_all(context, aggregate_id):
"""Get hosts for the specified aggregate."""
return IMPL.aggregate_host_get_all(context, aggregate_id)
def aggregate_host_delete(context, aggregate_id, host):
"""Delete the given host from the aggregate."""
IMPL.aggregate_host_delete(context, aggregate_id, host)
####################
def instance_fault_create(context, values):
"""Create a new Instance Fault."""
return IMPL.instance_fault_create(context, values)
def instance_fault_get_by_instance_uuids(context, instance_uuids):
"""Get all instance faults for the provided instance_uuids."""
return IMPL.instance_fault_get_by_instance_uuids(context, instance_uuids)
####################
def action_start(context, values):
"""Start an action for an instance."""
return IMPL.action_start(context, values)
def action_finish(context, values):
"""Finish an action for an instance."""
return IMPL.action_finish(context, values)
def actions_get(context, uuid):
"""Get all instance actions for the provided instance."""
return IMPL.actions_get(context, uuid)
def action_get_by_request_id(context, uuid, request_id):
"""Get the action by request_id and given instance."""
return IMPL.action_get_by_request_id(context, uuid, request_id)
def action_event_start(context, values):
"""Start an event on an instance action."""
return IMPL.action_event_start(context, values)
def action_event_finish(context, values):
"""Finish an event on an instance action."""
return IMPL.action_event_finish(context, values)
def action_events_get(context, action_id):
"""Get the events by action id."""
return IMPL.action_events_get(context, action_id)
def action_event_get_by_id(context, action_id, event_id):
return IMPL.action_event_get_by_id(context, action_id, event_id)
####################
def get_instance_uuid_by_ec2_id(context, ec2_id):
"""Get uuid through ec2 id from instance_id_mappings table."""
return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)
def ec2_instance_create(context, instance_uuid, id=None):
"""Create the ec2 id to instance uuid mapping on demand."""
return IMPL.ec2_instance_create(context, instance_uuid, id)
def ec2_instance_get_by_uuid(context, instance_uuid):
return IMPL.ec2_instance_get_by_uuid(context, instance_uuid)
def ec2_instance_get_by_id(context, instance_id):
return IMPL.ec2_instance_get_by_id(context, instance_id)
####################
def task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message=None):
"""Mark a task as complete for a given host/time period."""
return IMPL.task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message)
def task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items=None,
message=None):
"""Mark a task as started for a given host/time period."""
return IMPL.task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items,
message)
def task_log_get_all(context, task_name, period_beginning,
period_ending, host=None, state=None):
return IMPL.task_log_get_all(context, task_name, period_beginning,
period_ending, host, state)
def task_log_get(context, task_name, period_beginning,
period_ending, host, state=None):
return IMPL.task_log_get(context, task_name, period_beginning,
period_ending, host, state)
####################
def archive_deleted_rows(max_rows=None):
"""Move up to max_rows rows from production tables to corresponding shadow
tables.
:returns: dict that maps table name to number of rows archived from that
table, for example:
::
{
'instances': 5,
'block_device_mapping': 5,
'pci_devices': 2,
}
"""
return IMPL.archive_deleted_rows(max_rows=max_rows)
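# A minimal usage sketch, assuming this runs from an admin/maintenance task:
#
#   archived = archive_deleted_rows(max_rows=1000)
#   for table, count in sorted(archived.items()):
#       print('%s: archived %d rows' % (table, count))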
def pcidevice_online_data_migration(context, max_count):
return IMPL.pcidevice_online_data_migration(context, max_count)
def aggregate_uuids_online_data_migration(context, max_count):
return IMPL.aggregate_uuids_online_data_migration(context, max_count)
def computenode_uuids_online_data_migration(context, max_count):
return IMPL.computenode_uuids_online_data_migration(context, max_count)
####################
def instance_tag_add(context, instance_uuid, tag):
"""Add tag to the instance."""
return IMPL.instance_tag_add(context, instance_uuid, tag)
def instance_tag_set(context, instance_uuid, tags):
"""Replace all of the instance tags with specified list of tags."""
return IMPL.instance_tag_set(context, instance_uuid, tags)
def instance_tag_get_by_instance_uuid(context, instance_uuid):
"""Get all tags for a given instance."""
return IMPL.instance_tag_get_by_instance_uuid(context, instance_uuid)
def instance_tag_delete(context, instance_uuid, tag):
"""Delete specified tag from the instance."""
return IMPL.instance_tag_delete(context, instance_uuid, tag)
def instance_tag_delete_all(context, instance_uuid):
"""Delete all tags from the instance."""
return IMPL.instance_tag_delete_all(context, instance_uuid)
def instance_tag_exists(context, instance_uuid, tag):
"""Check if specified tag exist on the instance."""
return IMPL.instance_tag_exists(context, instance_uuid, tag)
A stalk of corn waiting for harvest started as a seed; our community is the same. We are planting the seed of brighter tomorrows today. Our goal is for Lee County to sustain itself with its own produce and to limit how much it imports. Buying locally grown food is not only buying a fresher product; it is a celebration of good, hard, honest work: the work done by the hands of our neighbors. Come out and help us grow.
#!/usr/bin/env python
"""This is the manager for the various queues."""
import os
import random
import socket
import time
import logging
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
class Error(Exception):
"""Base class for errors in this module."""
class MoreDataException(Error):
"""Raised when there is more data available."""
class QueueManager(object):
"""This class manages the representation of the flow within the data store.
The workflow for client task scheduling is as follows:
1) Create a bunch of tasks (rdf_flows.GrrMessage()). Tasks must
be assigned to queues and contain arbitrary values.
2) Call QueueManager.Schedule(task) to add the tasks to their queues.
3) In another thread, call QueueManager.QueryAndOwn(queue) to
obtain a list of tasks leased for a particular time.
4) If the lease time expires, the tasks automatically become
available for consumption. When done with the task we can remove it
from the scheduler using QueueManager.Delete(tasks).
5) Tasks can be re-leased by calling QueueManager.Schedule(task)
repeatedly. Each call will extend the lease by the specified amount.
  An important QueueManager feature is the ability to freeze the timestamp
  used for time-limiting Resolve and Delete queries to the datastore. A "with"
  statement should be used to freeze the timestamp, like:
with queue_manager.QueueManager(token=self.token) as manager:
...
Another option is to use FreezeTimestamp()/UnfreezeTimestamp() methods:
queue_manager.FreezeTimestamp()
...
queue_manager.UnfreezeTimestamp()
"""
  # These attributes are related to a flow's internal data structures.
  # Requests are protobufs of type RequestState. They have a constant prefix
  # followed by the request number:
FLOW_REQUEST_PREFIX = "flow:request:"
FLOW_REQUEST_TEMPLATE = FLOW_REQUEST_PREFIX + "%08X"
# When a status message is received from the client, we write it with the
# request using the following template.
FLOW_STATUS_TEMPLATE = "flow:status:%08X"
FLOW_STATUS_REGEX = "flow:status:.*"
# This regex will return all the requests in order
FLOW_REQUEST_REGEX = FLOW_REQUEST_PREFIX + ".*"
# Each request may have any number of responses. Responses are kept in their
# own subject object. The subject name is derived from the session id.
FLOW_RESPONSE_PREFIX = "flow:response:%08X:"
FLOW_RESPONSE_TEMPLATE = FLOW_RESPONSE_PREFIX + "%08X"
# This regex will return all the responses in order
FLOW_RESPONSE_REGEX = "flow:response:.*"
TASK_PREDICATE_PREFIX = "task:%s"
NOTIFY_PREDICATE_PREFIX = "notify:%s"
STUCK_PRIORITY = "Flow stuck"
request_limit = 1000000
response_limit = 1000000
notification_shard_counters = {}
def __init__(self, store=None, sync=True, token=None):
self.sync = sync
self.token = token
if store is None:
store = data_store.DB
self.data_store = store
# We cache all these and write/delete in one operation.
self.to_write = {}
self.to_delete = {}
# A queue of client messages to remove. Keys are client ids, values are
# lists of task ids.
self.client_messages_to_delete = {}
self.new_client_messages = []
self.notifications = []
self.prev_frozen_timestamps = []
self.frozen_timestamp = None
self.num_notification_shards = config_lib.CONFIG["Worker.queue_shards"]
def GetNotificationShard(self, queue):
queue_name = str(queue)
QueueManager.notification_shard_counters.setdefault(queue_name, 0)
QueueManager.notification_shard_counters[queue_name] += 1
notification_shard_index = (
QueueManager.notification_shard_counters[queue_name] %
self.num_notification_shards)
if notification_shard_index > 0:
return queue.Add(str(notification_shard_index))
else:
return queue
def GetAllNotificationShards(self, queue):
result = [queue]
for i in range(1, self.num_notification_shards):
result.append(queue.Add(str(i)))
return result
def Copy(self):
"""Return a copy of the queue manager.
Returns:
Copy of the QueueManager object.
NOTE: pending writes/deletions are not copied. On the other hand, if the
original object has a frozen timestamp, a copy will have it as well.
"""
result = QueueManager(store=self.data_store, sync=self.sync,
token=self.token)
result.prev_frozen_timestamps = self.prev_frozen_timestamps
result.frozen_timestamp = self.frozen_timestamp
return result
def FreezeTimestamp(self):
"""Freezes the timestamp used for resolve/delete database queries.
Frozen timestamp is used to consistently limit the datastore resolve and
delete queries by time range: from 0 to self.frozen_timestamp. This is
done to avoid possible race conditions, like accidentally deleting
notifications that were written by another process while we were
processing requests.
"""
self.prev_frozen_timestamps.append(self.frozen_timestamp)
self.frozen_timestamp = rdfvalue.RDFDatetime().Now()
def UnfreezeTimestamp(self):
"""Unfreezes the timestamp used for resolve/delete database queries."""
if not self.prev_frozen_timestamps:
raise RuntimeError("Unbalanced UnfreezeTimestamp call.")
self.frozen_timestamp = self.prev_frozen_timestamps.pop()
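  # Hypothetical sketch: manual freeze/unfreeze calls must stay balanced; the
  # "with" form shown in the class docstring is the safer equivalent.
  #
  #   manager.FreezeTimestamp()
  #   try:
  #     Resolve()  # resolve/delete queries now share one upper time bound
  #   finally:
  #     manager.UnfreezeTimestamp()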
def __enter__(self):
"""Supports 'with' protocol."""
self.FreezeTimestamp()
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
"""Supports 'with' protocol."""
self.UnfreezeTimestamp()
self.Flush()
def GetFlowResponseSubject(self, session_id, request_id):
"""The subject used to carry all the responses for a specific request_id."""
return session_id.Add("state/request:%08X" % request_id)
def DeQueueClientRequest(self, client_id, task_id):
"""Remove the message from the client queue that this request forms."""
# Check this request was actually bound for a client.
if client_id:
client_id = rdf_client.ClientURN(client_id)
self.client_messages_to_delete.setdefault(client_id, []).append(task_id)
def MultiCheckStatus(self, messages):
"""Checks if there is a client status queued for a number of requests."""
subjects = [m.session_id.Add("state") for m in messages]
statuses_found = {}
for subject, result in self.data_store.MultiResolveRegex(
subjects, self.FLOW_STATUS_REGEX,
token=self.token):
for predicate, _, _ in result:
request_nr = int(predicate.split(":")[-1], 16)
statuses_found.setdefault(subject, set()).add(request_nr)
status_available = set()
for m in messages:
if m.request_id in statuses_found.get(m.session_id.Add("state"), set()):
status_available.add(m)
return status_available
def FetchCompletedRequests(self, session_id, timestamp=None):
"""Fetch all the requests with a status message queued for them."""
subject = session_id.Add("state")
requests = {}
status = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
for predicate, serialized, _ in self.data_store.ResolveRegex(
subject, [self.FLOW_REQUEST_REGEX, self.FLOW_STATUS_REGEX],
token=self.token, limit=self.request_limit, timestamp=timestamp):
parts = predicate.split(":", 3)
request_id = parts[2]
if parts[1] == "status":
status[request_id] = serialized
else:
requests[request_id] = serialized
for request_id, serialized in sorted(requests.items()):
if request_id in status:
yield (rdf_flows.RequestState(serialized),
rdf_flows.GrrMessage(status[request_id]))
def FetchCompletedResponses(self, session_id, timestamp=None, limit=10000):
"""Fetch only completed requests and responses up to a limit."""
response_subjects = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
total_size = 0
for request, status in self.FetchCompletedRequests(
session_id, timestamp=timestamp):
# Make sure at least one response is fetched.
response_subject = self.GetFlowResponseSubject(session_id, request.id)
response_subjects[response_subject] = request
# Quit if there are too many responses.
total_size += status.response_id
if total_size > limit:
break
response_data = dict(self.data_store.MultiResolveRegex(
response_subjects, self.FLOW_RESPONSE_REGEX, token=self.token,
timestamp=timestamp))
for response_urn, request in sorted(response_subjects.items()):
responses = []
for _, serialized, _ in response_data.get(response_urn, []):
responses.append(rdf_flows.GrrMessage(serialized))
yield (request, sorted(responses, key=lambda msg: msg.response_id))
# Indicate to the caller that there are more messages.
if total_size > limit:
raise MoreDataException()
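  # A minimal consumption sketch (names assumed): the generator raises
  # MoreDataException once `limit` is exceeded, so callers should process the
  # batch they got and fetch again later.
  #
  #   try:
  #     for request, responses in manager.FetchCompletedResponses(session_id):
  #       HandleCompleted(request, responses)
  #   except MoreDataException:
  #     pass  # more completed responses remain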
def FetchRequestsAndResponses(self, session_id, timestamp=None):
"""Fetches all outstanding requests and responses for this flow.
We first cache all requests and responses for this flow in memory to
prevent round trips.
Args:
session_id: The session_id to get the requests/responses for.
      timestamp: Tuple (start, end) with a time range. Fetched requests and
        responses will have timestamps in this range.
    Yields:
      A tuple (request protobuf, list of response messages) in ascending
      order of request ids.
Raises:
MoreDataException: When there is more data available than read by the
limited query.
"""
subject = session_id.Add("state")
requests = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
# Get some requests.
for predicate, serialized, _ in self.data_store.ResolveRegex(
subject, self.FLOW_REQUEST_REGEX, token=self.token,
limit=self.request_limit, timestamp=timestamp):
request_id = predicate.split(":", 1)[1]
requests[str(subject.Add(request_id))] = serialized
# And the responses for them.
response_data = dict(self.data_store.MultiResolveRegex(
requests.keys(), self.FLOW_RESPONSE_REGEX,
limit=self.response_limit, token=self.token,
timestamp=timestamp))
for urn, request_data in sorted(requests.items()):
request = rdf_flows.RequestState(request_data)
responses = []
for _, serialized, _ in response_data.get(urn, []):
responses.append(rdf_flows.GrrMessage(serialized))
yield (request, sorted(responses, key=lambda msg: msg.response_id))
if len(requests) >= self.request_limit:
raise MoreDataException()
def DeleteFlowRequestStates(self, session_id, request_state):
"""Deletes the request and all its responses from the flow state queue."""
queue = self.to_delete.setdefault(session_id.Add("state"), [])
queue.append(self.FLOW_REQUEST_TEMPLATE % request_state.id)
queue.append(self.FLOW_STATUS_TEMPLATE % request_state.id)
if request_state and request_state.HasField("request"):
self.DeQueueClientRequest(request_state.client_id,
request_state.request.task_id)
# Efficiently drop all responses to this request.
response_subject = self.GetFlowResponseSubject(session_id, request_state.id)
self.data_store.DeleteSubject(response_subject, token=self.token)
def DestroyFlowStates(self, session_id):
"""Deletes all states in this flow and dequeues all client messages."""
self.MultiDestroyFlowStates([session_id])
def MultiDestroyFlowStates(self, session_ids):
"""Deletes all states in multiple flows and dequeues all client messages."""
subjects = [session_id.Add("state") for session_id in session_ids]
to_delete = []
for subject, values in self.data_store.MultiResolveRegex(
subjects, self.FLOW_REQUEST_REGEX, token=self.token,
limit=self.request_limit):
for _, serialized, _ in values:
request = rdf_flows.RequestState(serialized)
# Drop all responses to this request.
response_subject = self.GetFlowResponseSubject(request.session_id,
request.id)
to_delete.append(response_subject)
if request.HasField("request"):
# Client request dequeueing is cached so we can call it directly.
self.DeQueueClientRequest(request.client_id, request.request.task_id)
# Mark the request itself for deletion.
to_delete.append(subject)
# Drop them all at once.
self.data_store.DeleteSubjects(to_delete, token=self.token)
def Flush(self):
"""Writes the changes in this object to the datastore."""
session_ids = set(self.to_write) | set(self.to_delete)
for session_id in session_ids:
try:
self.data_store.MultiSet(session_id, self.to_write.get(session_id, {}),
to_delete=self.to_delete.get(session_id, []),
sync=False, token=self.token)
except data_store.Error:
pass
for client_id, messages in self.client_messages_to_delete.iteritems():
self.Delete(client_id.Queue(), messages)
if self.new_client_messages:
for timestamp, messages in utils.GroupBy(
self.new_client_messages, lambda x: x[1]).iteritems():
self.Schedule([x[0] for x in messages], timestamp=timestamp)
# We need to make sure that notifications are written after the requests so
# we flush here and only notify afterwards.
if self.sync and session_ids:
self.data_store.Flush()
for notification, timestamp in self.notifications:
self.NotifyQueue(notification, timestamp=timestamp, sync=False)
if self.sync:
self.data_store.Flush()
self.to_write = {}
self.to_delete = {}
self.client_messages_to_delete = {}
self.notifications = []
self.new_client_messages = []
def QueueResponse(self, session_id, response, timestamp=None):
"""Queues the message on the flow's state."""
if timestamp is None:
timestamp = self.frozen_timestamp
# Status messages cause their requests to be marked as complete. This allows
# us to quickly enumerate all the completed requests - it is essentially an
# index for completed requests.
if response.type == rdf_flows.GrrMessage.Type.STATUS:
subject = session_id.Add("state")
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
self.FLOW_STATUS_TEMPLATE % response.request_id, []).append((
response.SerializeToString(), timestamp))
subject = self.GetFlowResponseSubject(session_id, response.request_id)
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
QueueManager.FLOW_RESPONSE_TEMPLATE % (
response.request_id, response.response_id),
[]).append((response.SerializeToString(), timestamp))
def QueueRequest(self, session_id, request_state, timestamp=None):
if timestamp is None:
timestamp = self.frozen_timestamp
subject = session_id.Add("state")
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
self.FLOW_REQUEST_TEMPLATE % request_state.id, []).append(
(request_state.SerializeToString(), timestamp))
def QueueClientMessage(self, msg, timestamp=None):
if timestamp is None:
timestamp = self.frozen_timestamp
self.new_client_messages.append((msg, timestamp))
def QueueNotification(self, notification=None, timestamp=None, **kw):
"""Queues a notification for a flow."""
if notification is None:
notification = rdf_flows.GrrNotification(**kw)
if notification.session_id:
if timestamp is None:
timestamp = self.frozen_timestamp
self.notifications.append((notification, timestamp))
def _TaskIdToColumn(self, task_id):
"""Return a predicate representing this task."""
return self.TASK_PREDICATE_PREFIX % ("%08d" % task_id)
def Delete(self, queue, tasks):
"""Removes the tasks from the queue.
Note that tasks can already have been removed. It is not an error
to re-delete an already deleted task.
Args:
queue: A queue to clear.
tasks: A list of tasks to remove. Tasks may be Task() instances
or integers representing the task_id.
"""
if queue:
predicates = []
for task in tasks:
try:
task_id = task.task_id
except AttributeError:
task_id = int(task)
predicates.append(self._TaskIdToColumn(task_id))
data_store.DB.DeleteAttributes(
queue, predicates, token=self.token, sync=False)
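  # Hypothetical sketch: Task objects and raw integer task ids can be mixed,
  # and re-deleting an already deleted task is harmless.
  #
  #   manager.Delete(client_urn.Queue(), [task_a, task_b.task_id])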
def Schedule(self, tasks, sync=False, timestamp=None):
"""Schedule a set of Task() instances."""
if timestamp is None:
timestamp = self.frozen_timestamp
for queue, queued_tasks in utils.GroupBy(
tasks, lambda x: x.queue).iteritems():
if queue:
to_schedule = dict(
[(self._TaskIdToColumn(task.task_id),
[task.SerializeToString()]) for task in queued_tasks])
self.data_store.MultiSet(
queue, to_schedule, timestamp=timestamp, sync=sync,
token=self.token)
def _SortByPriority(self, notifications, queue, output_dict=None):
"""Sort notifications by priority into output_dict."""
if not output_dict:
output_dict = {}
for notification in notifications:
priority = notification.priority
if notification.in_progress:
priority = self.STUCK_PRIORITY
output_dict.setdefault(priority, []).append(notification)
for priority in output_dict:
stats.STATS.SetGaugeValue("notification_queue_count",
len(output_dict[priority]),
fields=[queue.Basename(), str(priority)])
random.shuffle(output_dict[priority])
return output_dict
def GetNotificationsByPriority(self, queue):
"""Retrieves session ids for processing grouped by priority."""
# Check which sessions have new data.
# Read all the sessions that have notifications.
queue_shard = self.GetNotificationShard(queue)
return self._SortByPriority(
self._GetUnsortedNotifications(queue_shard).values(), queue)
def GetNotificationsByPriorityForAllShards(self, queue):
"""Same as GetNotificationsByPriority but for all shards.
Used by worker_test to cover all shards with a single worker.
Args:
queue: usually rdfvalue.RDFURN("aff4:/W")
Returns:
dict of notifications objects keyed by priority.
"""
output_dict = {}
for queue_shard in self.GetAllNotificationShards(queue):
output_dict = self._GetUnsortedNotifications(
queue_shard, notifications_by_session_id=output_dict)
output_dict = self._SortByPriority(output_dict.values(), queue)
return output_dict
def GetNotifications(self, queue):
"""Returns all queue notifications sorted by priority."""
queue_shard = self.GetNotificationShard(queue)
notifications = self._GetUnsortedNotifications(queue_shard).values()
notifications.sort(key=lambda notification: notification.priority,
reverse=True)
return notifications
def GetNotificationsForAllShards(self, queue):
"""Returns notifications for all shards of a queue at once.
Used by test_lib.MockWorker to cover all shards with a single worker.
Args:
queue: usually rdfvalue.RDFURN("aff4:/W")
Returns:
List of rdf_flows.GrrNotification objects
"""
notifications_by_session_id = {}
for queue_shard in self.GetAllNotificationShards(queue):
notifications_by_session_id = self._GetUnsortedNotifications(
queue_shard, notifications_by_session_id=notifications_by_session_id)
notifications = notifications_by_session_id.values()
notifications.sort(key=lambda notification: notification.priority,
reverse=True)
return notifications
def _GetUnsortedNotifications(self, queue_shard,
notifications_by_session_id=None):
"""Returns all the available notifications for a queue_shard.
Args:
queue_shard: urn of queue shard
notifications_by_session_id: store notifications in this dict rather than
creating a new one
Returns:
      Dict of notifications, keyed by session id.
"""
if not notifications_by_session_id:
notifications_by_session_id = {}
end_time = self.frozen_timestamp or rdfvalue.RDFDatetime().Now()
for predicate, serialized_notification, ts in data_store.DB.ResolveRegex(
queue_shard, self.NOTIFY_PREDICATE_PREFIX % ".*",
timestamp=(0, end_time),
token=self.token, limit=10000):
# Parse the notification.
try:
notification = rdf_flows.GrrNotification(serialized_notification)
except Exception: # pylint: disable=broad-except
logging.exception("Can't unserialize notification, deleting it: "
"predicate=%s, ts=%d", predicate, ts)
data_store.DB.DeleteAttributes(
queue_shard, [predicate], token=self.token,
# Make the time range narrow, but be sure to include the needed
# notification.
start=ts, end=ts, sync=True)
continue
# Strip the prefix from the predicate to get the session_id.
session_id = predicate[len(self.NOTIFY_PREDICATE_PREFIX % ""):]
notification.session_id = session_id
notification.timestamp = ts
existing = notifications_by_session_id.get(notification.session_id)
if existing:
# If we have a notification for this session_id already, we only store
# the one that was scheduled last.
if notification.first_queued > existing.first_queued:
notifications_by_session_id[notification.session_id] = notification
else:
notifications_by_session_id[notification.session_id] = notification
return notifications_by_session_id
def NotifyQueue(self, notification, **kwargs):
"""This signals that there are new messages available in a queue."""
self._MultiNotifyQueue(notification.session_id.Queue(), [notification],
**kwargs)
def MultiNotifyQueue(self, notifications, timestamp=None, sync=True):
"""This is the same as NotifyQueue but for several session_ids at once.
Args:
notifications: A list of notifications.
timestamp: An optional timestamp for this notification.
sync: If True, sync to the data_store immediately.
Raises:
RuntimeError: An invalid session_id was passed.
"""
extract_queue = lambda notification: notification.session_id.Queue()
for queue, notifications in utils.GroupBy(
notifications, extract_queue).iteritems():
self._MultiNotifyQueue(
queue, notifications, timestamp=timestamp, sync=sync)
def _MultiNotifyQueue(self, queue, notifications, timestamp=None, sync=True):
"""Does the actual queuing."""
serialized_notifications = {}
now = rdfvalue.RDFDatetime().Now()
expiry_time = config_lib.CONFIG["Worker.notification_expiry_time"]
for notification in notifications:
if not notification.first_queued:
notification.first_queued = (self.frozen_timestamp or
rdfvalue.RDFDatetime().Now())
else:
diff = now - notification.first_queued
if diff.seconds >= expiry_time:
          # This notification has been around for too long, so we drop it.
logging.debug("Dropping notification: %s", str(notification))
continue
session_id = notification.session_id
# Don't serialize session ids to save some bytes.
notification.session_id = None
notification.timestamp = None
serialized_notifications[session_id] = notification.SerializeToString()
data_store.DB.MultiSet(
self.GetNotificationShard(queue),
dict([(self.NOTIFY_PREDICATE_PREFIX % session_id,
[(data, timestamp)])
for session_id, data in serialized_notifications.iteritems()]),
sync=sync, replace=False, token=self.token)
def DeleteNotification(self, session_id, start=None, end=None):
"""This deletes the notification when all messages have been processed."""
if not isinstance(session_id, rdfvalue.SessionID):
raise RuntimeError(
"Can only delete notifications for rdfvalue.SessionIDs.")
if start is None:
start = 0
else:
start = int(start)
if end is None:
end = self.frozen_timestamp or rdfvalue.RDFDatetime().Now()
for queue_shard in self.GetAllNotificationShards(session_id.Queue()):
data_store.DB.DeleteAttributes(
queue_shard, [self.NOTIFY_PREDICATE_PREFIX % session_id],
token=self.token, start=start, end=end, sync=True)
def Query(self, queue, limit=1, task_id=None):
"""Retrieves tasks from a queue without leasing them.
    This is good for a read-only snapshot of the tasks.
Args:
queue: The task queue that this task belongs to, usually client.Queue()
where client is the ClientURN object you want to schedule msgs on.
limit: Number of values to fetch.
task_id: If an id is provided we only query for this id.
Returns:
A list of Task() objects.
"""
# This function is usually used for manual testing so we also accept client
# ids and get the queue from it.
if isinstance(queue, rdf_client.ClientURN):
queue = queue.Queue()
if task_id is None:
regex = self.TASK_PREDICATE_PREFIX % ".*"
else:
regex = utils.SmartStr(task_id)
all_tasks = []
for _, serialized, ts in self.data_store.ResolveRegex(
queue, regex, timestamp=self.data_store.ALL_TIMESTAMPS,
token=self.token):
task = rdf_flows.GrrMessage(serialized)
task.eta = ts
all_tasks.append(task)
# Sort the tasks in order of priority.
all_tasks.sort(key=lambda task: task.priority, reverse=True)
return all_tasks[:limit]
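  # A read-only snapshot sketch (hypothetical client URN; no leases taken):
  #
  #   for task in manager.Query(client_urn, limit=10):
  #     logging.info("pending task %s, eta %s", task.task_id, task.eta)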
def DropQueue(self, queue):
"""Deletes a queue - all tasks will be lost."""
data_store.DB.DeleteSubject(queue, token=self.token)
def QueryAndOwn(self, queue, lease_seconds=10, limit=1):
"""Returns a list of Tasks leased for a certain time.
Args:
queue: The queue to query from.
lease_seconds: The tasks will be leased for this long.
limit: Number of values to fetch.
Returns:
A list of GrrMessage() objects leased.
"""
user = ""
if self.token:
user = self.token.username
# Do the real work in a transaction
try:
res = self.data_store.RetryWrapper(
queue, self._QueryAndOwn, lease_seconds=lease_seconds, limit=limit,
token=self.token, user=user)
return res
except data_store.TransactionError:
      # This exception just means that we could not obtain the lock on the
      # queue, so we just return an empty list and let the worker sleep
      # before coming back to fetch more tasks.
return []
except data_store.Error as e:
logging.warning("Datastore exception: %s", e)
return []
def _QueryAndOwn(self, transaction, lease_seconds=100,
limit=1, user=""):
"""Does the real work of self.QueryAndOwn()."""
tasks = []
lease = long(lease_seconds * 1e6)
ttl_exceeded_count = 0
# Only grab attributes with timestamps in the past.
for predicate, task, timestamp in transaction.ResolveRegex(
self.TASK_PREDICATE_PREFIX % ".*",
timestamp=(0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())):
task = rdf_flows.GrrMessage(task)
task.eta = timestamp
task.last_lease = "%s@%s:%d" % (user,
socket.gethostname(),
os.getpid())
# Decrement the ttl
task.task_ttl -= 1
if task.task_ttl <= 0:
# Remove the task if ttl is exhausted.
transaction.DeleteAttribute(predicate)
ttl_exceeded_count += 1
stats.STATS.IncrementCounter("grr_task_ttl_expired_count")
else:
if task.task_ttl != rdf_flows.GrrMessage.max_ttl - 1:
stats.STATS.IncrementCounter("grr_task_retransmission_count")
# Update the timestamp on the value to be in the future
transaction.Set(predicate, task.SerializeToString(), replace=True,
timestamp=long(time.time() * 1e6) + lease)
tasks.append(task)
if len(tasks) >= limit:
break
if ttl_exceeded_count:
logging.info("TTL exceeded for %d messages on queue %s",
ttl_exceeded_count, transaction.subject)
return tasks
class WellKnownQueueManager(QueueManager):
"""A flow manager for well known flows."""
response_limit = 10000
def DeleteWellKnownFlowResponses(self, session_id, responses):
"""Deletes given responses from the flow state queue."""
subject = session_id.Add("state/request:00000000")
predicates = []
for response in responses:
predicates.append(QueueManager.FLOW_RESPONSE_TEMPLATE % (
response.request_id, response.response_id))
data_store.DB.DeleteAttributes(
subject, predicates, sync=True, start=0, token=self.token)
def FetchRequestsAndResponses(self, session_id):
"""Well known flows do not have real requests.
This manages retrieving all the responses without requiring corresponding
requests.
Args:
session_id: The session_id to get the requests/responses for.
Yields:
A tuple of request (None) and responses.
"""
subject = session_id.Add("state/request:00000000")
# Get some responses
for _, serialized, _ in sorted(self.data_store.ResolveRegex(
subject, self.FLOW_RESPONSE_REGEX, token=self.token,
limit=self.response_limit,
timestamp=(0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now()))):
# The predicate format is flow:response:REQUEST_ID:RESPONSE_ID. For well
# known flows both request_id and response_id are randomized.
response = rdf_flows.GrrMessage(serialized)
yield rdf_flows.RequestState(id=0), [response]
class QueueManagerInit(registry.InitHook):
"""Registers vars used by the QueueManager."""
pre = ["StatsInit"]
def Run(self):
# Counters used by the QueueManager.
stats.STATS.RegisterCounterMetric("grr_task_retransmission_count")
stats.STATS.RegisterCounterMetric("grr_task_ttl_expired_count")
stats.STATS.RegisterGaugeMetric("notification_queue_count", int,
fields=[("queue_name", str),
("priority", str)])
|
In May 2017 Cambridgeshire and Peterborough Combined Authority elected its first metro mayor: James Palmer, standing for the Conservatives. He has control over the whole combined authority area, working with existing city leaders to take forward the strategic plan. You can find out more about the metro mayors on our FAQ page.
This page brings together the key facts and figures about the area’s economy and asks what local residents see as the top priorities for the new metro mayor.
Understanding how the city-region performs on key areas such as productivity and employment helps the new metro mayor to assess what they should prioritise. The table below shows how Cambridgeshire and Peterborough compares to the national average on these indicators.
You can also find data on areas that the metro mayor has the most power over on the Cambridgeshire and Peterborough Metro Mayor Data Dashboard.
What powers does the Cambridgeshire and Peterborough metro mayor have?
Below is a table detailing the powers that the Cambridgeshire and Peterborough metro mayor has. The original East Anglia deal is available here.
What is the political landscape in Cambridgeshire and Peterborough?
The current metro mayor is Conservative James Palmer, who won with 57% of the vote after second preferences. Second was the Liberal Democrat candidate Rod Cantrill with 43.7%. The turnout was 33%.
Check ‘Getting ready for mayors’ for all the latest blogs, articles, events and research on the new mayors. |
import logging
import sys
import time
import pytest
from test_general import start_gather_thread, mock_db_open
import misc.queue_manager as queue_manager
from misc.constants import implemented_hardware, HARDWARE_DEFAULTS, SYSTEM_METRICS_TO_COMPS
DATA_TEST_TIMEOUT = 5
log = logging.getLogger("opserv.test")
log.setLevel(logging.DEBUG)
# Test system gathering more specifically, e.g. check results for fitting structure
def test_system_gathering():
mock_db_open()
with start_gather_thread() as t:
pass
return
def test_all_components():
'''
Tests all components, that don't require an argument
'''
mock_db_open()
with start_gather_thread() as t:
check_all_hardware()
return
def test_gathering_delete():
'''
Sets a gathering rate and then deletes it
'''
test_comp = "cpu"
test_metric = "usage"
mock_db_open()
with start_gather_thread() as t:
queue_manager.set_gathering_rate(test_comp, test_metric, 500)
time.sleep(1)
queue_manager.set_gathering_rate(test_comp, test_metric, 0)
# Add some extra sleep to ensure no function is still inserting data into the queue
time.sleep(0.5)
# Empty the whole queue
while queue_manager.read_measurement_from_queue(test_comp, test_metric) is not None:
pass
time.sleep(2)
# Queue Should still be empty
assert queue_manager.real_time_queue_empty(test_comp, test_metric)
return
def check_all_hardware():
'''
Sends a data request to the gathering backend and immediately checks for response
The response will be timedout after a certain period of time
'''
# For each hardware in the hardware list
for hw in implemented_hardware:
# For Each metric of that specific hardware
for met in implemented_hardware[hw]:
if HARDWARE_DEFAULTS[hw][0] and HARDWARE_DEFAULTS[hw][1] is not None:
queue_manager.request_data_queue.put({"component": hw, "metric": met,
"args": HARDWARE_DEFAULTS[hw][1]})
queue_manager.get_queue(hw, met,
HARDWARE_DEFAULTS[hw][1]).get(timeout=DATA_TEST_TIMEOUT)
elif not HARDWARE_DEFAULTS[hw][0]:
queue_manager.request_data_queue.put({"component": hw, "metric": met})
queue_manager.get_queue(hw, met).get(timeout=DATA_TEST_TIMEOUT)
# Check that system gathering is always a list
def test_system_is_list():
'''
Test that the system gathering data is always list type
'''
mock_db_open()
with start_gather_thread() as t:
for metric in implemented_hardware["system"]:
queue_manager.request_data("system", metric)
return_type = type(queue_manager.read_measurement_from_queue("system", metric,
blocking=True)["value"])
            assert return_type == list
return
@pytest.mark.skipif(sys.platform != 'win32',
reason="does not run on windows")
def test_ohm():
from gathering.measuring.ohm_source import OHMSource
ohm = OHMSource()
if ohm.can_measure("cpu", "temperature"):
newTemp = ohm.get_measurement("cpu", "temperature", "0")
ohm.deinit()
def test_advanced_all_components():
'''
    Similar test to test_all_components, but here all the arguments are gathered as well
    and are used to really test all the available hardware
'''
    # Get available args for each component
# RequestData for each comp/arg/metric only do one process
# Wait for a queue entry for each combo
SYSTEM_DATA_TIMEOUT = 6
mock_db_open()
with start_gather_thread() as t:
available_args = {}
for component in implemented_hardware["system"]:
queue_manager.request_data("system", component)
new_args = queue_manager.read_measurement_from_queue("system", component,
None, True, SYSTEM_DATA_TIMEOUT)
available_args[SYSTEM_METRICS_TO_COMPS[component]] = new_args["value"]
# Specifically add memory
available_args["memory"] = [None]
# For each component in the system
for comp in available_args:
# For each possible argument in the
for i, arg in enumerate(available_args[comp]):
                # only check one process: the third one in the list
if not (comp == "process" and i != 3):
for metric in implemented_hardware[comp]:
queue_manager.request_data(comp, metric, arg)
result = queue_manager.read_measurement_from_queue(comp, metric, arg, True,
SYSTEM_DATA_TIMEOUT)
log.info("result: %s", result)
def test_psutil_network():
'''
Tests the Psutil MeasuringSource directly
Currently only then network measures
'''
from gathering.measuring.psutil_source import PsUtilWrap
ps = PsUtilWrap()
all_netif = ps.get_measurement("system", "networks", None)
for netif in all_netif:
log.info(ps.get_measurement("network", "receivepersec", netif))
log.info(ps.get_measurement("network", "transmitpersec", netif))
log.info(ps.get_measurement("network", "info", netif))
time.sleep(0.5)
# TODO: get system data and test everything (advanced)
# TODO: test measuring a wrong metric, component, or argument
|
Wife Swap is finally making a return in 2019: What channel is it on? When will it air?
Stormzy’s role in BBC series Noughts and Crosses explained – does he have acting experience?
Who plays Callum in Noughts and Crosses? What else has the actor been in?
Noughts and Crosses Sephy casting confirmed – Who is the lead actress? What else has she featured in? |
import matplotlib.pyplot as plt
import time
import numpy as np
import quadcopter
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.animation as animation
#don't try to understand these imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
# NP.ARRAYS are NOT Matrices. #
# Always print your array operation results to #
# check result with expected dimensionality and #
# values. #
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
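# A concrete instance of the warning above (illustrative): with
#   a = np.array([[1, 2], [3, 4]])
# a * a multiplies elementwise -> [[1, 4], [9, 16]], whereas
# a.dot(a) is the true matrix product -> [[7, 10], [15, 22]].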
#create the figure object, this will hold subplots, which will hold (multiple) axes.
fig = plt.figure()
#add an axis to first subplot (111) of the fig-object
ax = fig.add_subplot(111, projection='3d')
#set limits. Refer to todo.md for a cautionary note on limits
#IF LIMITS SET, THEN PAN AND ZOOM FAIL.
ax.set_xlim3d(-1.3,1.3)
ax.set_ylim3d(-1.3,1.3)
ax.set_zlim3d(-1.3,1.3)
quad = quadcopter.copter(ax, False) #false => NO tracking
#make the animation object
quad_anim = animation.FuncAnimation(fig, quad.update_ends, interval=15, blit=True)
plt.show() |
Our Diamond Helen Woven Jute Tassel Area Rug will bring a natural print to your minimalist home décor. You'll love its organic, jute design with a bit of fringe for added flair!
import os
import re
import urllib
from django.db import models
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin
class DailymotionViewer(CMSPlugin):
"""
Plugin for embedding a Dailymotion video.
"""
video_src = models.URLField(_('video address'))
width = models.CharField(_('width'), max_length=6, default='480px', validators=[RegexValidator(r'\d+(px|\%)')], help_text=_('Width in pixels or percent'))
height = models.CharField(_('height'), max_length=6, default='270px', validators=[RegexValidator(r'\d+(px|\%)')], help_text=_('Height in pixels or percent'))
allow_fullscreen = models.BooleanField(_('allow fullscreen'), default=True)
start_at = models.PositiveIntegerField(_('start at'), blank=True, null=True, help_text=_('Start delay in seconds'))
auto_start = models.BooleanField(_('auto start'), blank=True, default=False)
@property
def src(self):
kwargs = dict()
if self.start_at:
kwargs['start'] = self.start_at
if self.auto_start:
kwargs['autoPlay'] = 1
base_url = self.get_base_url()
return '{0}{1}'.format(base_url, '?{0}'.format(urllib.urlencode(kwargs)) if kwargs else '')
def get_base_url(self):
short = re.findall(r'://dai\.ly/([a-zA-Z0-9]+)', self.video_src)
if short:
return 'http://www.dailymotion.com/embed/video/{0}'.format(short[0])
        classic = re.findall(r'dailymotion\.com/video/([a-zA-Z0-9]+)', self.video_src)
if classic:
return 'http://www.dailymotion.com/embed/video/{0}'.format(classic[0])
return self.video_src
def __unicode__(self):
return self.video_src
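# Illustrative behaviour of get_base_url (hypothetical video ids):
#   http://dai.ly/x2b3c4 -> http://www.dailymotion.com/embed/video/x2b3c4
#   http://www.dailymotion.com/video/x2b3c4_some-title -> http://www.dailymotion.com/embed/video/x2b3c4
# Any other address is returned unchanged.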
|
Google is tracking you to give you better results.
The problem with advancements in technology is the fact that the users’ privacy has a tendency to get in the way. Google launched Personalized Search on Tuesday and it now tracks where you go in order to give you better results in the future.
Of course, in order for Google to track someone, they must have a Google account and be logged in. So if you don’t want Google following you, don’t log in any Google services before searching.
Through a partnership with Kanoodle, TypePad Pro users can now take advantage of text-based advertising on their blogs. This feature can not only earn you extra cash, but can also pay for your TypePad fees.
You can set up the ads in one of two ways: Easy or Customized. Easy does everything for you, while Customized allows you to configure fonts and colors.
TypePad also gives tools for tracking earnings so you can view daily ad impressions and ad clicks.
As the conference progresses, links will be added to this page.
Fresh content is a distinct advantage for your search engine placement and for your site visitors. Repeat traffic from people and search engine spiders will increase in proportion to how fresh your content is. Producing fresh content takes time and resources. Tools like RSS Mix can help by automating a part of your content development.
RSS Mix allows you to combine any number of RSS feeds into one unique new feed. You can then use a parser to display a mix of stories from various sources on your website.
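As a rough illustration of the idea (this is not RSS Mix's own code), merging several feeds takes only a few lines of Python with the feedparser library; the feed URLs below are placeholders:

import feedparser

feeds = ["http://example.com/feed1.xml", "http://example.com/feed2.xml"]  # placeholders

entries = []
for url in feeds:
    entries.extend(feedparser.parse(url).entries)

# newest first; entries without a date sort last
entries.sort(key=lambda e: e.get("published_parsed") or (0,), reverse=True)

for e in entries[:10]:
    print(e.title + " - " + e.link)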
Tim Mayer has announced on the Yahoo Search Blog that there will be a major update to its index this evening.
Per a request from Danny Sullivan at Search Engine Strategies in San Jose last summer, Yahoo has started issuing “weather reports” of updates to its search engine.
Alas, I am unable to attend the two conferences I’ve been looking forward to for months: WebmasterWorld Search Conference in New Orleans starting today and Chris Pirillo’s sold out Gnomedex June 23-25 in Seattle. I had to go and turn 40 this week and my dear wife is taking me to some exotic locale (I don’t know where yet, it’s a surprise) for a week.
Posting from me will be light, but Thomas will be posting daily on search engine marketing news and insights.
Be sure to watch for RustyBrick’s coverage each day of the New Orleans WebmasterWorld conference.
Via iMediaConnection, eMarketer has published an excellent article on BtoB buyer’s search behavior and future trends for BtoB search marketing.
I just noticed an interview I did with a Minneapolis Star Tribune reporter on Yahoo 360 was published today, “Yahoo spreads its net on the Web “. At best I thought I’d get one little quote. He used at least three plus a mention of my SEO firm, TopRank Online Marketing. I guess everyone gets lucky once in a while.
The article touches on what Yahoo is doing with Yahoo 360 and also Microsoft’s Wallop. Here’s a post on Microsoft Wallop from earlier this month on Online Marketing Blog.
Incidentally, if you are interested in a Yahoo 360 or Wallop invite, post a comment.
Here’s a cool keyword research tool I discovered when looking at this site’s referrer logs.
NicheBOT offers keyword research options including: WordTracker Keywords, Overture Keywords, Thesaurus, Lateralus, Keyword Analysis and Google Ranking. |
import ocl
import camvtk
import time
if __name__ == "__main__":
p = ocl.Point()
p.x=7
p.y=8
p.z=-3
print p
q = ocl.Point(1,2,3)
r = p + q
t = ocl.Triangle(p,q,r)
print t
s= ocl.STLSurf()
print s
s.addTriangle(t)
s.addTriangle(t)
print s
print "end."
myscreen = camvtk.VTKScreen()
print "screen created"
stl = camvtk.STLSurf("../stl/sphere.stl")
print "STL surface read"
myscreen.addActor(stl)
b = stl.src.GetOutput()
print b
print "Verts:",b.GetNumberOfVerts()
print "Cells:",b.GetNumberOfCells()
print "Lines:",b.GetNumberOfLines()
print "Polys:",b.GetNumberOfPolys()
print "Strips:",b.GetNumberOfStrips()
c = b.GetCell(0)
print c
print "Points:",c.GetNumberOfPoints()
print "Edges:",c.GetNumberOfEdges()
print "Faces:",c.GetNumberOfFaces()
ps = c.GetPoints()
print ps
n=ps.GetNumberOfPoints()
print "Nr of Points:",n
for i in range(0, n):
    print i, "=",
    print ps.GetPoint(i)
    myscreen.addActor( camvtk.Sphere(radius=0.5, center=ps.GetPoint(i)) )
myscreen.render()
myscreen.iren.Start()
#raw_input("Press Enter to terminate")
|
Selkirk Powder Company gets reservation requests from all over the country. We get many repeat visitors, and plenty of new visitors alike. Here’s a list of our Top 12 to enhance your visit with us.
Bring your friends and family…the more the merrier!
Visit Schweitzer’s Activity Center for a schedule of activities and the Kinder Kamp for your little ones. |
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 14 08:28:34 2016
@author: South Eugene Robotics Team
ParcadeArcade gameElement
This is intended to encapsulate the general case of a game element.
"""
import grovepi
import lcd
import requests
import json
import mote
import socket
import fcntl
import struct
from flask import Flask, request
# create a mote object to track the device specific bit
mote = mote.Mote( "Name", "Description", "10.0.0.1" )
# create the device specific I/O
greenLed = 2
grovepi.pinMode( greenLed, "OUTPUT" )
button1 = 3
grovepi.pinMode( button1, "INPUT" )
# call game()
# create a web listener hook; app.run() happens at the bottom of the file,
# once the routes and helpers below are defined
app = Flask(__name__)
# configure this device by reading its config.ini and registering it (and its
# capabilities) with the central server; returns the id assigned by the server
def configure():
    mote.loadConfig()
    url = 'http://andrew.local:1337/add_listener'
    header = {'content-type': 'application/json'}
    foo = requests.post(url, params=mote.toDict(), headers=header)
    rslt = json.loads(foo.text)
    id = rslt["response"]["id"]
    mote.id = id
    for ob in mote.capabilities:
        ob.moteId = id
    # send a test signal
    #grovepi.digitalWrite( greenLed, 0 )
    addCapUrl = 'http://andrew.local:1337/add_capability'
    clist = [requests.post(addCapUrl, params=ob.toDict(), headers=header)
             for ob in mote.capabilities]
    print(mote.id)
    print(mote.name)
    print(mote.description)
    lcd.settext( mote.name )
    return id
@app.route("/set", methods=['POST'])
def respond():
port = request.args["port"]
value = request.args["value"]
ioType = request.args["ioType"]
print( 'port: ' + port )
print( 'value: ' + value )
print( 'ioType: ' + ioType )
grovepi.digitalWrite( int(port), int(value) )
# ToDo: validate that this capability exists
return "Success\n"
# hack to get this device's ip address
def get_ip_address(ifname):
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack('256s', ifname[:15])
    )[20:24])

# register with the server, then start serving requests
myId = configure()
app.run(host='0.0.0.0')
Red-cockaded Woodpeckers are (or were, until the subsequent news of the Ivory-billed Woodpecker returning from the dead) the most endangered woodpecker in the USA. This forest is one of the better locations to see them, since it contains numerous active clusters in a small area. We had 5 hours on the last morning before returning to the airport, so decided to give them a try. When we arrived, we headed straight for People’s Road, where we had been told of an active cluster. We gave this about an hour without any luck, apart from a single Downy Woodpecker next to the private ranch, along with a female Indigo Bunting and Carolina Chickadee.
We decided to find a member of the park staff for more information. The office was at first closed, but after a hapless search down the privately owned Jones Road we returned and found one of the staff. It transpired that one of the best clusters was directly behind the office on the loop trail; we also got the discouraging news that the birds were usually most active late in the day. Perhaps the one-day delay of the trip due to our missed connection in Paris, where we had planned a late-afternoon visit had we been on time, might now be proving costly. However, we started well around the office, with family parties of Eastern Bluebird and Brown-headed Nuthatch above, and quite a number of Pine Warblers singing. We then started on the loop, just after seeing a Red-headed Woodpecker from the office clearing, which was replaced by a pair of American Kestrels (the female with a lizard).
We walked some way along the loop, and added more Red-headed Woodpeckers and a singing Yellow-breasted Chat, but time was pressing on, so we started on the return leg. After a short way, our path was crossed by a stunning Coral Snake. We watched it from only about 15 feet, until it disappeared into the leaf litter. We decided that this would more than make up for our lack of rattlesnake, and even of Red-cockaded Woodpeckers if they continued to be elusive. That thought was sealed when, only 5 minutes later, in a pool to the left of the track, we found another snake, this one much larger and almost totally dark, which the park staff identified as a Water Moccasin. It stopped swimming and gave superb scope views.
Almost back at the office, we tried a slightly different track, where we found yet another pair of Red-headed Woodpeckers excavating a hole in a dead tree above, but this was only a prelude to finding our main quarry – a Red-cockaded Woodpecker using an active nest. We had seen the hole, but had been told that these trees were marked with a green band, which this one lacked. The bird then obliged by flying around us, regularly landing on nearby trees. Unbelievably, we found a second bird minutes later. This was the last species seen on the trip. Not a bad ending!
#
# Copyright 2012 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import binarygenerator
import parsetypes
import passes
import pltools
import pyast
import rewrites
import typeinference
import unifier
import utility
import visitor
import imp as _imp
import os as _os
import glob as _glob
_cur_dir, _cur_file = _os.path.split(__file__)
def _find_module(name):
_ext_poss = [ path for path in _glob.glob(_os.path.join(_cur_dir, name+'*')) if _os.path.splitext(path)[1] in ['.so', '.dll'] ]
if len(_ext_poss) != 1:
raise ImportError(name)
return _imp.load_dynamic(name, _ext_poss[0])
backendcompiler = _find_module('backendcompiler')
backendsyntax = _find_module('backendsyntax')
backendtypes = _find_module('backendtypes')
import conversions
|
Michelle Obama shows a St. Louis Cardinals team jersey presented to her during a ceremony in the White House in Washington, DC.
Jennifer Hudson promotes her new book ‘I Got This How I Changed My Ways and Lost What Weighed Me Down’ in Chicago, Illinois.
Dwayne Johnson arrives at Taoyuan Airport to promote his movie ‘Journey 2: The Mysterious Island’ in Taipei, Taiwan.
Oprah Winfrey preparing to leave Mumbai for Jaipur.
Ne-Yo spotted signing autographs for fans after taping a TV show in Hollywood, CA.
Ciara attends a Knicks game at Madison Square Garden in New York City.
Diddy seen leaving BOA steakhouse in Los Angeles, California. |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Eficent (<http://www.eficent.com/>)
# Jordi Ballester Alomar <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_order(osv.osv):
_inherit = "purchase.order"
def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id, group_id, context=None):
res = super(purchase_order, self)._prepare_order_line_move(
cr, uid, order, order_line, picking_id, group_id, context=context)
# res['analytic_account_id'] = order_line.account_analytic_id.id
return res
|
The NCSC Job Description Database provides descriptions for a wide range of state court jobs. These descriptions are compiled to serve as models for use by the court community, human resources officers and the resume-writing public, and are updated frequently to reflect the ever-changing features of state court jobs.
These job descriptions are for use as models only; they are NOT open positions being advertised.
You can find open job postings here. |
# Generated by Django 2.0.8 on 2018-12-18 07:24
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebTweet',
fields=[
('statusid', models.BigIntegerField(primary_key=True, serialize=False, unique=True)),
('conversationid', models.BigIntegerField()),
('userid', models.BigIntegerField()),
('username', models.CharField(max_length=15)),
('name', models.CharField(max_length=50)),
('time', models.PositiveIntegerField(help_text='time in seconds since epoch')),
('html', models.TextField()),
('text', models.TextField()),
('reply', models.PositiveIntegerField(null=True)),
('like', models.PositiveIntegerField(null=True)),
('retweet', models.PositiveIntegerField(null=True)),
('parentid', models.BigIntegerField(null=True)),
('rtl', models.BooleanField(help_text='right to left')),
('image0', models.URLField(null=True)),
('image1', models.URLField(null=True)),
('image2', models.URLField(null=True)),
('image3', models.URLField(null=True)),
('avatar_mini', models.URLField(null=True)),
('avatar_normal', models.URLField(null=True)),
('avatar_bigger', models.URLField(null=True)),
],
),
]
|
The icy waters of the Neumeyer Channel on the Antarctica Peninsula, and regions nearby, are home to several species of Antarctic icefish, animals that fit Darwin's phrase, a “wreck of nature.” Unique among vertebrates, icefish lack red blood cells and functional hemoglobin genes, have greatly reduced bone mineralization compared to related fish, and have lost the nearly-ubiquitous inducible heat shock response. Image courtesy of John Postlethwait, 2015 recipient of the Genetics Society of America's George W. Beadle Award.
The July issue of GENETICS is out now! Check out the Highlights below or the full Table of Contents here. And don’t miss the essays by winners of the 2015 GSA Honors and Awards!
Most genome-wide association studies (GWAS) reveal only regions of association, while the underlying causal variants remain unclear. Chen et al. show that two existing methods for identifying candidate causal variants can be unified in a general Bayesian framework, allowing application of an approximate Bayesian method that uses only the summary information. Through simulations and real data analysis, the authors show the proposed method identified candidate causal variants with high accuracy.
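To make the idea concrete, here is a minimal sketch (not Chen et al.’s actual code) of the kind of summary-statistic Bayesian calculation such methods build on: Wakefield-style approximate Bayes factors converted into per-variant posterior probabilities, assuming a single causal variant in the region. The effect sizes, standard errors, and prior variance W below are illustrative.

import numpy as np

def approx_bayes_factors(beta_hat, se, W=0.04):
    # Bayes factor for association (H1) relative to the null, per variant:
    # BF = sqrt(V/(V+W)) * exp(z^2 * W / (2*(V+W))), with V = se^2, z = beta_hat/se
    V = se ** 2
    z2 = (beta_hat / se) ** 2
    log_bf = 0.5 * np.log(V / (V + W)) + z2 * W / (2.0 * (V + W))
    return np.exp(log_bf)

# illustrative summary statistics for five variants in one associated region
beta_hat = np.array([0.02, 0.15, 0.03, 0.12, 0.01])
se = np.array([0.03, 0.03, 0.03, 0.03, 0.03])

bf = approx_bayes_factors(beta_hat, se)
posterior = bf / bf.sum()  # assumes exactly one causal variant in the region
print(posterior.round(3))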
Septins are a family of GTP-binding proteins that self-assemble into higher-order structures. In these companion papers, Finnigan et al. analyzed the function of Shs1 and Cdc11, the paralogous terminal subunits of the septin hetero-octamer. Comprehensive analyses revealed the importance of a C-terminal extension that optimizes recruitment of the protein Bni5, thereby ensuring efficient localization of the type II myosin of the actomyosin contractile ring.
Like the teeth of a bicycle sprocket-wheel, the “sprocket” arginine residues of histones insert into the minor groove of the DNA “chain.” Hodges et al. identify novel functions for histone sprocket arginine residues in gene expression, cryptic transcription, DNA repair, and histone occupancy. These findings reveal simple rules for how the biological function of each “sprocket” residue is influenced by the location and structural mode of DNA binding.
To test many hypotheses of molecular sequence evolution, the gene sequences of ancestral species must be inferred. However, the use of reconstructed ancestral sequences may produce spurious results because systematic biases emerge from using the single best reconstructions while ignoring the suboptimal ones, and from model violations. Matsumoto et al. developed methods to correct for such biases and used simulation to evaluate their performance when nucleotide substitution patterns are not constant. The authors suggest the new methods may be useful for studying complex patterns of nucleotide substitution in large genomic datasets.
Helicobacter pylori is a bacterium that inhabits the stomachs of half of all humans, but the vast majority of infections are asymptomatic. Montano et al. analyze the genomes of 60 strains from around the world and find H. pylori has been co-habiting with our species much longer than previously thought. The authors demonstrate that this long-term interaction has led to the evolution of differential local adaptation. They also outline the potential for future medical, functional, and evolutionary research on H. pylori, our oldest known commensal.
Thompson et al. report a reference genome sequence for the Hawaiian strain CB4856 of C. elegans, widely studied for its phenotypic differences from the N2 laboratory strain. This revealed 61 regions spanning 2.8 Mb that contain a disproportionate number of SNVs and an abundance of genes from large, rapidly evolving gene families. Because of their high divergence—up to 16% differences—these regions had largely escaped detection in prior studies. Comparison with other wild isolates suggests that these regions are maintained over long evolutionary periods by balancing selection.
In genome-wide association studies (GWAS) of metabolites, non-additive genetic effects might be especially important because metabolite phenotypes are closer to the underlying pathways than many other traits. However, most GWAS on metabolites assume additivity. Tsepilov et al. analyzed a panel of 151 metabolites and their ratios (22,801 in total) for non-additive genetic effects in a population based cohort and found that in fact most SNP effects were additive. This provides empirical support for the additivity of genetic control of metabolism.
The relative contribution of regulatory and coding changes to the evolution of phenotypic traits is an important, and highly debated, question in evolutionary biology. Wang et al. show that a single nucleotide change in the gene teosinte glume architecture1 (tga1) confers naked kernels in maize vs. encased kernels in the wild maize progenitor. This polymorphism causes an amino acid substitution in the TGA1 transcriptional regulator, and this affects dimer stability. These results show how morphological evolution can be driven by a simple nucleotide change that alters protein function.
“Genetic program” has become a deeply entrenched metaphor which compares organisms to computers executing programs for processes such as reproduction, development, differentiation, apoptosis, homeostasis, and behavior. Based on unpublished archives, Peluffo investigates the genesis of this influential metaphor of the “genetic program,” shows how it solved the problem of purpose in biology, and suggests that there was a shared origin for its independent introduction by two articles in 1961, one by French Nobel laureates Francois Jacob and Jacques Monod and one by prominent American evolutionary biologist Ernst Mayr.
What Use Is Population Genetics? |
from pyramid_restler.model import SQLAlchemyORMContext
from sqlalchemy import Table
from sqlalchemy.orm import mapper
from sqlalchemy.schema import Column
from sqlalchemy.types import *
from wscserver.model import Base, session
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
class Rede():
"""
    Class that defines the 'rede' table
"""
__tablename__ = 'rede'
id_rede = Column(Integer, primary_key=True, nullable=False)
id_local = Column(Integer)
id_servidor_autenticacao = Column(Integer)
te_ip_rede = Column(String(15), nullable=False)
nm_rede = Column(String(100))
te_observacao = Column(String(100))
nm_pessoa_contato1 = Column(String(50))
nm_pessoa_contato2 = Column(String(50))
nu_telefone1 = Column(String(11))
te_email_contato2 = Column(String(50))
nu_telefone2 = Column(String(11))
te_email_contato1 = Column(String(50))
te_serv_cacic = Column(String(60), nullable=False)
te_serv_updates = Column(String(60), nullable=False)
te_path_serv_updates = Column(String(255))
nm_usuario_login_serv_updates = Column(String(20))
te_senha_login_serv_updates = Column(String(20))
nu_porta_serv_updates = Column(String(4))
te_mascara_rede = Column(String(15))
dt_verifica_updates = Column(DateTime)
nm_usuario_login_serv_updates_gerente = Column(String(20))
te_senha_login_serv_updates_gerente = Column(String(20))
nu_limite_ftp = Column(Integer, nullable=False)
cs_permitir_desativar_srcacic = Column(String(1), nullable=False)
te_debugging = Column(String)
dt_debug = Column(String(8))
def __init__(self, id_rede, id_local, id_servidor_autenticacao,
te_ip_rede, nm_rede, te_observacao, nm_pessoa_contato1,
nm_pessoa_contato2, nu_telefone1, te_email_contato2,
nu_telefone2, te_email_contato1, te_serv_cacic,
te_serv_updates, te_path_serv_updates,
nm_usuario_login_serv_updates, te_senha_login_serv_updates,
nu_porta_serv_updates, te_mascara_rede, dt_verifica_updates,
nm_usuario_login_serv_updates_gerente,
te_senha_login_serv_updates_gerente,
nu_limite_ftp, cs_permitir_desativar_srcacic, te_debugging,
dt_debug):
"""
        Method that sets the column values
"""
self.id_rede = id_rede
self.id_local = id_local
self.id_servidor_autenticacao = id_servidor_autenticacao
self.te_ip_rede = te_ip_rede
self.nm_rede = nm_rede
self.te_observacao = te_observacao
self.nm_pessoa_contato1 = nm_pessoa_contato1
self.nm_pessoa_contato2 = nm_pessoa_contato2
self.nu_telefone1 = nu_telefone1
self.te_email_contato2 = te_email_contato2
self.nu_telefone2 = nu_telefone2
self.te_email_contato1 = te_email_contato1
self.te_serv_cacic = te_serv_cacic
self.te_serv_updates = te_serv_updates
self.te_path_serv_updates = te_path_serv_updates
self.nm_usuario_login_serv_updates = nm_usuario_login_serv_updates
self.te_senha_login_serv_updates = te_senha_login_serv_updates
self.nu_porta_serv_updates = nu_porta_serv_updates
self.te_mascara_rede = te_mascara_rede
self.dt_verifica_updates = dt_verifica_updates
self.nm_usuario_login_serv_updates_gerente = nm_usuario_login_serv_updates_gerente
self.te_senha_login_serv_updates_gerente = te_senha_login_serv_updates_gerente
self.nu_limite_ftp = nu_limite_ftp
self.cs_permitir_desativar_srcacic = cs_permitir_desativar_srcacic
self.te_debugging = te_debugging
self.dt_debug = dt_debug
def __repr__(self):
"""
        Method that returns the class parameter list
"""
return "<Rede('%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)>" %\
(self.id_rede,
self.id_local,
self.id_servidor_autenticacao,
self.te_ip_rede,
self.nm_rede,
self.te_observacao,
self.nm_pessoa_contato1,
self.nm_pessoa_contato2,
self.nu_telefone1,
self.te_email_contato2,
self.nu_telefone2,
self.te_email_contato1,
self.te_serv_cacic,
self.te_serv_updates,
self.te_path_serv_updates,
self.nm_usuario_login_serv_updates,
self.te_senha_login_serv_updates,
self.nu_porta_serv_updates,
self.te_mascara_rede,
self.dt_verifica_updates,
self.nm_usuario_login_serv_updates_gerente,
self.te_senha_login_serv_updates_gerente,
self.nu_limite_ftp,
self.cs_permitir_desativar_srcacic,
self.te_debugging,
self.dt_debug
)
class RedeContextFactory(SQLAlchemyORMContext):
entity = Rede
def session_factory(self):
return session
rede = Table('rede', Base.metadata,
Column('id_rede', Integer, primary_key=True, nullable=False),
Column('id_local', Integer),
Column('id_servidor_autenticacao', Integer),
Column('te_ip_rede', String(15), nullable=False),
Column('nm_rede', String(100)),
Column('te_observacao', String(100)),
Column('nm_pessoa_contato1', String(50)),
Column('nm_pessoa_contato2', String(50)),
             Column('nu_telefone1', String(11)),
             Column('te_email_contato2', String(50)),
Column('nu_telefone2', String(11)),
Column('te_email_contato1', String(50)),
Column('te_serv_cacic', String(60), nullable=False),
Column('te_serv_updates', String(60), nullable=False),
Column('te_path_serv_updates', String(255)),
Column('nm_usuario_login_serv_updates', String(20)),
Column('te_senha_login_serv_updates', String(20)),
Column('nu_porta_serv_updates', String(4)),
Column('te_mascara_rede', String(15)),
Column('dt_verifica_updates', DateTime),
Column('nm_usuario_login_serv_updates_gerente', String(20)),
Column('te_senha_login_serv_updates_gerente', String(20)),
Column('nu_limite_ftp', Integer, nullable=False),
Column('cs_permitir_desativar_srcacic', String(1),
nullable=False),
Column('te_debugging', String),
Column('dt_debug', String(8)),
extend_existing=True
)
mapper(Rede, rede)
|
Dr. W. William Immel received his undergraduate and medical degrees from Dartmouth College in Hanover, New Hampshire. His internship and residency were completed at Georgetown University Hospital. His Fellowship in Gastroenterology/Hepatology was completed at the Cleveland Clinic in 1986. Dr. Immel was recognized in the Washingtonian Magazine as one of the top 100 physicians in the Washington, D.C. area. Dr. Immel is board certified in internal medicine and gastroenterology/hepatology. He was with Virginia Medical Associates from 1986 to 1998 and has been with Virginia Medical Alliance, p.c. since October, 1998. |
# Copyright (c) 2011, Enthought, Ltd.
# Author: Pietro Berkes <[email protected]>
# License: Modified BSD license (2-clause)
"""Entry point for pyanno UI application.
At present, the application is based on the wx backend of the traitsui library.
It also supports 2 screen formats:
* for large displays (larger than 1300x850), the main window will be
1300 x 850 pixels large
* for small displays it will be 1024x768
"""
from traits.etsconfig.api import ETSConfig
ETSConfig.toolkit = 'wx'
import pyanno.ui.appbase.wx_utils as wx_utils
wx_utils.set_background_color_to_white()
from pyanno.ui.pyanno_ui_application import pyanno_application
import numpy
import logging
def main():
"""Create and start the application."""
# deactivate warnings for operations like log(0.) and log(-inf), which
# are handled correctly by pyanno
numpy.seterr(divide='ignore', invalid='ignore')
with pyanno_application(logging_level=logging.INFO) as app:
app.open()
if __name__ == '__main__':
main()
|
Trigger Point Therapy is also referred to as Neuromuscular Therapy. Trigger points are muscle fibers that have become irritated (a knot in the muscle) and are causing a pain loop in the nervous system. This can happen from normal everyday stress or from a trauma to the body. This technique is designed to identify and break up trigger points to eliminate the pain loop and relax your muscles and nervous system so your body can function more optimally.
A tight spot in your back may trigger a pain in your neck. That pain in the neck may cause headaches. Trigger Point Therapy is designed to find that one spot that triggers a “chain reaction” of relief and relaxation through the body. You and I will keep up a conversation throughout your session, finding those tight spots and adjusting the amount of pressure.
I have been formally trained in finding common trigger points and know many techniques for rooting them out. Book an appointment with me!
I found Barbara on Yelp and my BACK Thanks Me! She found the spots that were strained and made me almost fall asleep three times! She even keeps late hours for us people who get off at 6 pm. - Michael. |
#!/usr/bin/env python2
# IRPF90 is a Fortran90 preprocessor written in Python for programming using
# the Implicit Reference to Parameters (IRP) method.
# Copyright (C) 2009 Anthony SCEMAMA
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Anthony Scemama
# LCPQ - IRSAMC - CNRS
# Universite Paul Sabatier
# 118, route de Narbonne
# 31062 Toulouse Cedex 4
# [email protected]
import os
import util
import makefile
import irpf90_t
from command_line import command_line
initialized = False
def init():
global initialized
if initialized:
return
# Create directories
for dir in [ irpf90_t.irpdir, irpf90_t.mandir ]:
try:
wd = os.getcwd()
os.chdir(dir)
os.chdir(wd)
except OSError:
os.mkdir(dir)
for dir in command_line.include_dir:
dir = irpf90_t.irpdir+dir
try:
wd = os.getcwd()
os.chdir(dir)
os.chdir(wd)
except OSError:
os.mkdir(dir)
# Create makefile
makefile.create()
# Copy current files in the irpdir
for dir in ['./']+command_line.include_dir:
try:
os.stat(dir)
except:
print dir,'not in dir'
continue
for filename in os.listdir(dir):
filename = dir+filename
if not filename.startswith(".") and not os.path.isdir(filename):
try:
file = open(filename,"r")
except IOError:
if command_line.do_warnings:
print "Warning : Unable to read file %s."%(filename)
else:
buffer = file.read()
file.close()
if not util.same_file(irpf90_t.irpdir+filename,buffer):
file = open(irpf90_t.irpdir+filename,"w")
file.write(buffer)
file.close()
initialized = True
|
Preface: This article is very important for those looking for facts relating to the restaurant industry. If you intend to join this industry you must read this to take an informed decision.
Restaurant-industry sales are projected to reach a record $426.1 billion in 2003. The restaurant industry employs more than 11.7 million people in 870,000 locations. Restaurant-industry sales are forecast to advance 4.5 percent in 2003 and equal 4 percent of the U.S. gross domestic product.
The restaurant industry provides work for more than 9 percent of those employed in the United States. The average annual household expenditure for food away from home in 2000 was $2,137, or $855 per person.
Restaurant-industry employment will reach 13.3 million by 2012. At present the restaurant industry is the nation's largest employer outside of the government. The Bureau of Labor Statistics predicts the restaurant industry's growth will be 30 percent over the next two years. Industry professionals and owners indicate that the biggest challenge facing the industry is finding educated, competent, and well-trained management personnel to handle this growth.
Recruiting and retaining employees is the biggest challenge quick service operators expected to face in 2002, with roughly one-third of the survey respondents rating it their top challenge.
Spending on food, both at home and away from home, accounted for 13.6 percent of total average annual household expenditures in 1999, or $5,031 per household.
August is the most popular month to eat out, and Saturday is the most popular day of the week for dining out. |
# Time: O((|E| + |V|) * log|V|) = O(|E| * log|V|) by using binary heap,
# if we could further use a Fibonacci heap, it would be O(|E| + |V| * log|V|)
# Space: O(|E| + |V|) = O(|E|)
# There are N network nodes, labelled 1 to N.
#
# Given times, a list of travel times as directed edges times[i] = (u, v, w),
# where u is the source node, v is the target node,
# and w is the time it takes for a signal to travel from source to target.
#
# Now, we send a signal from a certain node K.
# How long will it take for all nodes to receive the signal? If it is impossible, return -1.
#
# Note:
# - N will be in the range [1, 100].
# - K will be in the range [1, N].
# - The length of times will be in the range [1, 6000].
# - All edges times[i] = (u, v, w) will have 1 <= u, v <= N and 1 <= w <= 100.
import collections
import heapq
# Dijkstra's algorithm
class Solution(object):
def networkDelayTime(self, times, N, K):
"""
:type times: List[List[int]]
:type N: int
:type K: int
:rtype: int
"""
adj = [[] for _ in xrange(N)]
for u, v, w in times:
adj[u-1].append((v-1, w))
result = 0
lookup = set()
best = collections.defaultdict(lambda: float("inf"))
min_heap = [(0, K-1)]
while min_heap and len(lookup) != N:
result, u = heapq.heappop(min_heap)
lookup.add(u)
if best[u] < result:
continue
for v, w in adj[u]:
if v in lookup: continue
if result+w < best[v]:
best[v] = result+w
heapq.heappush(min_heap, (result+w, v))
return result if len(lookup) == N else -1
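
# A quick sanity check with the classic example: from node 2 the signal
# reaches node 1 in 1, node 3 in 1, and node 4 in 2, so the answer is 2.
if __name__ == "__main__":
    times = [[2, 1, 1], [2, 3, 1], [3, 4, 1]]
    print(Solution().networkDelayTime(times, 4, 2))  # -> 2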
|
Access Control UL294 is the standard of safety for Access Control System units that all access control systems must meet. UL294B is a newer standard, specifically for the use of Power over Ethernet (PoE) components used within access control systems where PoE is the primary power source.
Government and state regulations are moving toward required access control UL294 compliance. Comtrol’s RocketLinx ACS7106 is the first UL294B-certified switch, enabling confidence in deployment and integration within access control systems.
Quick and effective service is absolutely essential for a hotel to bring customers back again and again. Comtrol’s Ethernet switches, device servers, and serial cards provide reliable connectivity service to the hospitality industry. Numerous hotel companies have integrated Comtrol’s Rocketport multi-port serial cards, DeviceMaster, and Rocketport USB serial hub products when installing their corporate and property management software, and equipment like phone systems, keyless locks, on-site POS systems, and more.
Whether device connections are serial, USB, wireless, Ethernet, or a combination of these, Comtrol’s products provide excellent connectivity solutions. From the DeviceMaster serial servers to the RocketPort cards and hubs to the RocketLinx Ethernet and Power over Ethernet switches, the comprehensive product lines that Comtrol has to offer overcome even some of the most complex connectivity challenges.
The Comtrol RocketLinx series of PoE industrial ethernet switches are designed to meet the performance and environmental demands for applications requiring extended operating temperatures, rugged design, and reliable power delivery to standard and high power devices.
Featuring both managed and unmanaged models, the RocketLinx PoE industrial Ethernet switches provide cost-effective networking solutions delivering the industry’s best Ethernet switch technologies.
MOBILE – What does this mean for your next IP surveillance project?
Putting a surveillance camera on or in a vehicle (car, bus, etc.) for safety, monitoring, or other purposes is becoming much more common. If you are a surveillance integrator, it’s very likely you have worked on a project like this; if not, you have almost certainly heard of it being done in the news lately.
In today’s increasingly connected world it seems that a systems integrator could dangle a Cat 5e or Cat 6 cable out of a cabinet and have dozens of Ethernet switches readily available for immediate connection. Each of these switches, while undoubtedly similar, have many differences… differences that may make or break a system. Given the requirement for a managed Ethernet switch, these small differences can easily get out of hand – requiring a professionally trained eye for each and every line of a product’s specification sheet. Managed Ethernet switches are complicated pieces of equipment, and using them to their full functionality is no small feat. |
from PyQt5.QtCore import QObject, pyqtSignal
from math import sqrt
from .ObjectStore import ObjectStore, ObjectStoreObject
from random import choice, uniform, gauss
class Race(ObjectStoreObject):
"""A race is a type of creature."""
def update(self, *,
id=None,
name="",
male_names=[],
female_names=[],
family_names=[],
attribute_modifiers={},
# Avg 95%
height=[ 65.0, 9.5], # inched
weight=[160.0, 85.0], # lbs
m_f_ratio=1.0
):
self.male_names = male_names
self.female_names = female_names
self.family_names = family_names
self.attribute_modifiers = attribute_modifiers
self.height = height
self.weight = weight
self.m_f_ratio = m_f_ratio
super(Race, self).update(id=id, name=name)
def data(self):
data = super(Race, self).data()
data.update({
"male_names": self.male_names,
"female_names": self.female_names,
"family_names": self.family_names,
"attribute_modifiers": self.attribute_modifiers,
"height": self.height,
"weight": self.weight,
"m_f_ratio": self.m_f_ratio
})
return data
def generate_name(self, *, male=False, female=False):
first_names = None
if male:
first_names = self.male_names
elif female:
first_names = self.female_names
else:
first_names = self.male_names + self.female_names
name = "{} {}".format(
self.choose_name(first_names),
self.choose_name(self.family_names))
return name.title()
@staticmethod
def choose_name(names):
if not names:
return "Fred"
# Sort the names
prefixes = []
suffixes = []
whole_names = []
for name in names:
if name.startswith('-'):
suffixes.append(name[1:])
elif name.endswith('-'):
prefixes.append(name[:-1])
else:
whole_names.append(name)
# How many of each?
combos = len(prefixes) * len(suffixes)
print("prefixes={}, suffixes={}, combos={}".format(
prefixes, suffixes, combos))
# Whole or composed names?
which = uniform(0, combos+len(whole_names))
print("which={}, combos={}, which > combos={}".format(
which,
combos,
which > combos))
if which > combos:
print("Whole")
return choice(whole_names)
else:
print("composed")
return choice(prefixes)+choice(suffixes)
def generate_height_weight(self,
gender='M',
attrs={},
height=0.5,
weight=0.5,
):
        # gauss() draws from a normal distribution (stands in for the
        # undefined normal()); missing attributes default to 0
        size_mod = pow(
            sqrt(4.0/3.0),
            attrs.get('strength', 0)
            - attrs.get('dexterity', 0))
        height = gauss(self.height[0], self.height[1]/2.0)*size_mod
        height_variance = height - self.height[0]
        weight = gauss(self.weight[0], self.weight[1]/2.0) \
            * height_variance \
            * height_variance
if gender.lower() in ('f', 'female'):
height = height/self.m_f_ratio
weight = weight/self.m_f_ratio
return (height, weight)
Races = ObjectStore(Race)
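# Illustrative use (hypothetical data; in the application, Race instances come
# from the ObjectStore plumbing above):
#   r = Race()
#   r.update(name="Dwarf", male_names=["dur-", "-in", "thorin"],
#            family_names=["ironfoot"])
#   r.generate_name(male=True)  # e.g. "Durin Ironfoot"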
|
Scale Model News: 1:35 SCALE TAKOM V-2 KIT: FINISHED AT LAST!
Mat Irvine: So far in SMN, I have covered the Takom V-2 rocket with launch pad (in German, abschussplattform or ‘firing table’) and the Hanomag tow vehicle. Now it’s time for me to take a close look at the Meillerwagen transporter/erector.
This is by far the most complex of the three builds, and Takom appears to have included absolutely every single bit of the Meillerwagen. You have six runners directly to build it, with some parts shared. There are also photo-etch brass components, a small decal sheet, and two lengths of chain.
The finished Takom trio (below) look excellent, the complexity of the Meillerwagen contrasting nicely with the simple V-2.
A handsome piece of art (below) decorates the biggish box.
The Meillerwagen does look rather daunting initially, but the instructions - at the beginning, at least - are very clear, and although many of the parts (below) are small and fiddly, they do go together in a logical manner. It is only later on, when there are options to consider, that some of the instructions get slightly hazy.
I have a warning for constructors: a decent razor saw is absolutely vital to remove parts from the runners, especially the smallest ones. Even snippers could snap these fragile items, and don’t even think of attempting to twist them off.
The runners are identified by letters. Here, you are mostly using the two C runners, plus D, E, F and G.
The centre-section of the road-going part of the Meillerwagen is assembled first, and although there are a lot of support struts, there are locating holes or pins on all parts, and some of these even have D-shaped locating pins, to ensure that everything is orientated correctly.
Complex though it is, the Meillerwagen does not allow you to move the V-2 into firing position. You have to decide fairly early on whether you are building the rocket being transported horizontally, or with the launch rail in the vertical position. However, you can delay this decision by building some parts separately, such as the rear support legs, and only make your final choice toward the end.
The front-wheel axles steer and swivel, and all wheels are designed to roll. However, the attachment points are very small, and we are back to the usual reasoning of whether moving wheels on a static model are useful or not.
Some parts are really fragile, such as the ladder that runs up the starboard side of the erector rails. My advice here is that using that razor saw is even more necessary. There is also a ladder section on the port side, but this is moulded as one part.
Location details (below) help with construction. Here, the rectangular outline positions a smaller assembly.
You also have to decide whether the rear support legs and pads are modelled folded into the side of the Meillerwagen, or extended outward with the pads lowered, as they would be when raising the erector section.
Then there are two sets of hydraulics for this process - contracted inside the main cylinders for transport and extended for raising the rails, and they aren’t interchangeable.
The elevated section also has two work platforms. For road travel they are folded flat, for raising the missile, they are moved through 90 degrees and have handrails fitted, which is when the supplied chain is used. You have to decide which way to fit them, as even with the complexity of this kit, the platforms don’t move through the right-angle.
The colour of most Meillerwagens was an overall camouflage green, but there are alternative schemes. For this build, I used Humbrol 163 Dark Green over an undercoat of Humbrol 30. The instructions don’t really indicate if the parts F8 and F9, which are protection plates fitted during travel, were removed during launch. Some photos of the real setup show them in place, others show them removed, or perhaps not there in the first place.
However, F8 and F9 slot in place, so are removable if you wish. The same applies to the mast that fits to the left-hand side. This is to support the V-2’s electrical connections when it is on the pad. But the Meillerwagen could be removed from the immediate vicinity for launch, so the mast is normally depicted being positioned by the launch pad. Oddly, this is one omission from this otherwise very detailed kit.
Most of the Meillerwagen has been constructed (below) though I have left the wheels and other modules off, to allow separate painting.
For the pix (below) I chose the black and white markings used for test flights. The pattern is detailed on my recommended V-2 website (bottom).
Simple masking off (below) is sufficient for the relatively easy to apply colour scheme. Here, I had already sprayed on the white, and left the masking until it was completely dry.
I used satin black spray (below) to complete the marking scheme.
No figures are included with this kit, but there are plenty of suitable ones available. To date though, no kit company has released support equipment such as fuel bowsers for A, B and T-stoff, the command wagon, or the long Magirus ladder used to reach the V-2 nose section. Most of these are available in 1:72 scale, but none so far as injection styrene items in 1:35 scale.
This Takom kit is the only currently available injection-styrene 1:35 scale V-2. Dragon did make one, and the same kit was also reboxed and reissued by Revell, but these have been unavailable for some time. The only real alternative is the Accurate Armour resin kit.
These all supplied a launch platform, but none supplied the Meillerwagen, so in 1:35 scale and in standard injection styrene, the Takom release is the only way you will get one.
The only previous 1:35 scale Meillerwagen was made in resin by PSP, which also made associated vehicles and equipment. But these were specialist resin kits, hard to find and expensive. PSP kits are now produced by Scalelink but only a few of the V-2 accessory kits are currently made. Both the V-2 (02075) and the Hanomag (02068) are available as separate kits from Takom.
Click here for the SMN Hanomag build.
Click here for the SMN V-2 build.
Click here for more info at the most comprehensive V-2 website. |
#
# Redis protocol (RESP) implementation
# Nikolay Mihaylov [email protected]
#
# For the Redis wire protocol see:
# https://redis.io/topics/protocol
import asynchat
import time
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class RedisHandler(asynchat.async_chat):
def __init__(self, sock, addr, processor):
#
# Constructs new Redis protocol handler
#
# @param sock : socket from asyncore
# @param addr : address from asyncore
# @param processor : processor class
#
asynchat.async_chat.__init__(self, sock=sock)
self.addr = addr
self.started = time.time()
self.lastping = time.time()
self.processor = processor
self.state_change("read_count")
def state_change(self, state, size = 0):
self.io = StringIO()
if state == "read_count":
self.state_params_count = 0
self.state_params_waiting = 0
self.state_params_temp = 0
self.state_params = []
self.state = state
self.set_terminator("\r\n")
return True
if state == "read_param":
self.state = state
self.set_terminator("\r\n")
return True
if state == "read_data":
# size == 0 is an error, but we will ignore it.
if size < 0:
return False
self.state = state
self.set_terminator(size + len("\r\n") )
return True
# Unknown state ?
return False
def cmd_parse(self):
self.lastping = time.time()
args = self.state_params
command = args[0].lower()
if command == "spop":
key = args[1]
x = self.processor.get(key)
if x is None:
# NULL response (nil bulk string)
self.push("$-1\r\n")
return
msg = "$%d\r\n%s\r\n" % (len(x), x)
self.push(msg)
return
if command == "del":
key = args[1]
x = self.processor.delete(key)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
# this is ADD. We do not implement SET
if command == "sadd":
key = args[1]
val = args[2]
x = self.processor.add(key, val)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
# Non standard command
if command == "scard":
key = args[1]
x = self.processor.len(key)
if x is None:
x = "0"
self.cmd_int(int(x))
return
if command == "sismember":
key = args[1]
val = args[2]
x = self.processor.contains(key, val)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
if command == "quit":
self.push("+OK\r\n")
self.close()
return
# error, not implemented
self.cmd_error("Not implemented")
return
def cmd_int(self, id):
self.push(":%d\r\n" % id)
def cmd_error(self, msg = None):
if msg is None:
s = "-ERR\r\n"
else:
s = "-ERR %s\r\n" % msg
self.push(s)
self.state_change("read_count")
def state_read_count(self):
x = self.io.getvalue()
if not x:
self.cmd_error()
return False
if x[0] != "*":
self.cmd_error("proceed with number of params")
return False
try:
self.state_params_count = int(x[1:])
self.state_params_waiting = self.state_params_count
        except ValueError:
self.cmd_error("wrong number of params")
return False
        if self.state_params_count == 0:
self.cmd_error("wrong number of params, *0 is not allowed")
return False
self.state_change("read_param")
return True
def state_read_param(self):
x = self.io.getvalue()
if not x:
self.cmd_error("proceed with size of param")
return False
if x[0] != "$":
self.cmd_error("proceed with size of param")
return False
try:
self.state_params_temp = int(x[1:])
        except ValueError:
self.cmd_error("wrong size of param")
return False
self.state_change("read_data", self.state_params_temp )
return True
def state_read_data(self):
x = self.io.getvalue()
if not self.state_params_temp:
self.state_params_temp = 0
x = x[0:self.state_params_temp]
self.state_params.append(x)
self.state_params_waiting -= 1
if self.state_params_waiting > 0:
self.state_change("read_param")
return True
# Proceed with request
self.cmd_parse()
self.state_change("read_count")
def found_terminator(self):
if self.state == "read_count":
# *2
return self.state_read_count()
if self.state == "read_param":
# $3
return self.state_read_param()
if self.state == "read_data":
# <data>
return self.state_read_data()
# Unknown state ?
return False
def collect_incoming_data(self, data):
self.io.write(data)
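# To exercise the handler, here is a minimal asyncore server sketch.
# Note: SetProcessor and the port number are illustrative assumptions,
# not part of the original module; the real processor class is supplied
# elsewhere and only needs the get/add/delete/len/contains calls used above.
import asyncore
import socket

class SetProcessor(object):
    """Toy in-memory backend exposing the calls the handler uses."""
    def __init__(self):
        self.sets = {}

    def add(self, key, val):
        self.sets.setdefault(key, set()).add(val)
        return True

    def get(self, key):
        # spop semantics: remove and return an arbitrary member, or None
        s = self.sets.get(key)
        return s.pop() if s else None

    def delete(self, key):
        return self.sets.pop(key, None) is not None

    def len(self, key):
        return len(self.sets.get(key, ()))

    def contains(self, key, val):
        return val in self.sets.get(key, ())

class RedisServer(asyncore.dispatcher):
    """Accepts connections and hands each one to a RedisHandler."""
    def __init__(self, port, processor):
        asyncore.dispatcher.__init__(self)
        self.processor = processor
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind(("127.0.0.1", port))
        self.listen(5)

    def handle_accept(self):
        pair = self.accept()
        if pair is not None:
            sock, addr = pair
            RedisHandler(sock, addr, self.processor)

# RedisServer(6380, SetProcessor())
# asyncore.loop()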
|
“We didn’t pay the rent this month because we’re moving,” a tenant reported over the phone.
We will get to law and terms of leases, but even without legal information, we can reason this out using simple fairness. For a contract, such as a rental agreement, to be legally binding, each party must give something and each must receive something.
In rental contracts, the landlord gives the use of his property, the apartment or house. He gets rent payments for providing use of the property. The tenant gives monthly rent payments. She gets a place to live and keep her belongings.
Part of the month of July had already gone by at the time the tenant spoke. She was still living in the apartment. She was getting a place to live, but the landlord was not getting rent payment. Terms of the contract were not being honored by one party.
The rental agreement may spell out at what point rent is considered overdue and what steps a landlord may take to enforce the agreement. If the agreement covers overdue rent and enforcement, the tenant will know where she stands regarding the final month rent payment.
If the agreement does not cover overdue rent, the landlord still has the right to go to court to enforce the contract and require the tenant to pay.
If the tenant and the landlord have agreed that her security deposit will apply to the final month’s rent, assuming no breakage or other expense that the security deposit would be needed to cover, and if she has given proper notice of her intent to move, she may not have to make additional payments out of pocket.
A security deposit can be used to cover failure to pay rent, damages to the property or expense incurred by the landlord due to a breach of the lease. State law requires the landlord to give a receipt for the security deposit.
A landlord may agree to prorate the rent on the month the tenant moves out, perhaps charging half the rent if she moves out by the 15th of the month. Terms such as a rent reduction should be included in the written lease.
But if there is no agreement to apply the security deposit toward rent or to reduce the charge for the final month, the landlord can file in court for payment of rent due, court costs and the right to repossess the property.
If the court enters a judgment in favor of the landlord, ruling that the tenant does in fact owe the rent she failed to pay, the tenant is required to vacate the apartment, usually within four days. If the tenant fails to move out within the required time, the landlord can ask the court for a warrant of restitution, allowing the landlord to repossess the property and move the tenant’s belongings out into the street under supervision of the sheriff’s department.
Donna Engle is a retired Westminster attorney. Reach her with questions or feedback at 410-840-2354 or [email protected]. Her column, which provides legal information but not legal advice, appears on the second and fourth Sunday each month in Life & Times. |
import requests
HOST = 'https://graph.facebook.com/v2.5'
USER_FIELDS = 'fields=id,first_name,email,tagged_places{place{name,place_type,place_topics,location{city}}},events{name,description},location'
def get_user(token):
data = get('me', USER_FIELDS, token)
if 'error' in data:
print data
raise ValueError
return map_details(data)
def map_details(data):
places = data.get('tagged_places',{}).get('data',[])
events = data.get('events',{}).get('data',[])
return {
'facebook_id':data['id'],
'name':data['first_name'],
'email':data['email'],
'city':map_place(data['location']) if 'location' in data else None,
'places':[map_place(p['place']) for p in places if p['place'].get('location')],
'events':[map_event(e) for e in events if 'description' in e]
}
def map_place(data):
place = {
'name':data['name'],
'city':data['location']['city'],
}
if data['place_type'] == 'PLACE' and data.get('place_topics',{}).get('data'):
place['topics'] = [t['name'] for t in data['place_topics']['data']]
return place
def map_event(data):
return {
'name':data['name'],
'description':data['description'],
}
def get(path, query, token):
uri = '{}/{}?{}&access_token={}'.format(HOST,path,query,token)
try:
response = requests.get(uri)
response.raise_for_status()
except requests.exceptions.HTTPError as error:
print error.response.status_code, error.response.content
return response.json()
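# Hedged usage sketch: the token below is a placeholder, not a real Graph API
# access token; the keys used here are guaranteed by map_details() above.
if __name__ == '__main__':
    details = get_user('EAAB-placeholder-token')
    print details['name'], details['city']
    for event in details['events']:
        print event['name']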
|
Does your child have problems with peer relationships, depression, or anxiety?
Are you worried and confused about what’s going on?
Are you unsure how to help?
The childhood years are challenging for parents and children alike. Children go through many developmental stages as they transform from small children to young adults. We work with children, teens, and their families as they navigate through these different stages of life. It can be a big help for children and teenagers to talk to a therapist who understands what they’re going through. If you are a parent who worries about your child, it can also help to work with someone who has a fresh perspective and practical solutions.
During your first session, one of our therapists will meet alone with you to discuss your concerns about your child and your goals for therapy. Then we will work with you to develop a plan to meet your specific needs. We use a wide variety of treatment methods and approaches, and we customize each plan for each child. Our office is equipped with a variety of play therapy games, toys, and art supplies. Therapy can be enjoyable and enriching for the entire family. |
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The broadcasts endpoint supports two types of search by location:
# - by latitude and longitude coordinates (location.origin)
# - by geoname place ID (location.place_id, see places endpoint for more details about geoname place ID)
# The `location.origin` parameter allows you to search for broadcasts
# happening in the county of the provided geopoint
# (a latitude and longitude coordinate string in the form {latitude},{longitude}).
# https://docs.predicthq.com/resources/broadcasts/#param-location.origin
for broadcast in phq.broadcasts.search(location__origin='45.5051,-122.6750'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.location.places[0].place_id)
# You can also specify a geoname place ID or a list of place IDs.
# The `location.place_id` suffix includes broadcasts being children
# or parent of the specified place ID.
# For the broadcasts happening in the US, the default location level
# is the county level. If you specify a city place ID, you'll get all
# the broadcasts for this city's county.
# https://docs.predicthq.com/resources/events/#param-place
for broadcast in phq.broadcasts.search(location__place_id='5746545'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.location.country,
broadcast.location.places[0].place_id)
|
Liberal brainwashing. That’s what conservative critics are likely to call this epic from the Wachowski siblings and Tom Twyker. And it’s true that every act of ideology will work to hide its own workings, as is often the case when it is juxtaposed with some good vs evil battle (as literally happens here). Likewise, parallels that are drawn in Cloud Atlas between historical, present and theoretical struggles are not nearly as intellectual as they could and should be to be truly conscientious.
With that said, a lot of what Cloud Atlas has to say is, in my worldview, fundamental truth. Sharing concerns with and probably inspired by the works of Alan Moore, the Wachowskis succeed here at what they tried to say with their V for Vendetta adaptation. It is more unified and skilfully told than the Matrix trilogy. Among them the directors have made some fine movies – this is the best. A stunning, bold, unconventional rarity; I haven't seen a more culturally valuable film since Sucker Punch. It is a work full of love, and I am full of love for it.
Intercut between different timelines that curiously involve iterations of the same people, I had doubts the conclusion would satisfy the mystery. I was wrong. One caveat to the masterwork is the karmic rebirth device, which seems to give characters a base consistency that sits rather uneasily with social constructivism. Better to think of the recurrences as representations of others who are (how shall we say it) connected. Or better yet as facets of the perennial protagonist, the hero of a thousand faces, who sits within our culture and, yes, our ideology.
In case you are interested, there is a Blu-ray rip doing the rounds.
from practical_sieve import primes_in_block
primes_under_100 = [2,3,5,7]+primes_in_block([3,5,7],10,100,5)
def next_ten_thousand_primes(primes):
L = primes[-1]+1
R = L + 10000
B = 1000
return primes_in_block(primes[1:],L,R,B)
def next_primes(primes):
"""
Square the size of your window.
Usually, this is too far to go.
If you start with just the primes under a
hundred and run this twice, you're looking
at checking congruence for tens of millions
of numbers, and this might take some
minutes already.
See: next_ten_thousand_primes(primes)
"""
L = primes[-1] + 1
# Choose the largest possible upper bound
R = L*L
# Choose an appropriate block size,
# which is rather arbitrary.
B = (R-L)/10
# Now adjust R so that B|R-L
R = L + 10*B
return primes_in_block(primes[1:],L,R,B)
def factor_with_primes(n, primes):
    # Work on a reversed copy so pop() yields primes in increasing order.
    ps = list(primes)
    ps.reverse()
pairs = []
while ps and n > 1:
p = ps.pop()
e = 0
while n % p == 0:
e = e + 1
n = n/p
if e>0:
pairs.append((p,e))
return n,pairs
def simple_factor(n):
"""
This is a demonstration of how to use
Algorithm 3.2.1 as well a as a practical
and simple way to factor numbers of modest
size. Of course, the near term goal will
be to explore the modern factoring
algorithms, but this will do as a sandbox
for testing and playing.
"""
primes = [2,3,5,7]+primes_in_block([3,5,7],10,100,10)
n, pairs = factor_with_primes(n, primes)
if n == 1:
return pairs
primes = next_primes(primes)
n, new_pairs = factor_with_primes(n, primes)
pairs = pairs + new_pairs
while n>1:
primes = next_ten_thousand_primes(primes)
n, new_pairs = factor_with_primes(n, primes)
pairs = pairs + new_pairs
return pairs
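# Quick sanity check (assumes the practical_sieve module imported above
# is available): simple_factor recovers the factorisation of 5040.
if __name__ == "__main__":
    # 5040 = 2**4 * 3**2 * 5 * 7
    print simple_factor(5040)  # expected: [(2, 4), (3, 2), (5, 1), (7, 1)]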
|
Christiana Figueres was appointed Executive Secretary of the United Nations Framework Convention on Climate Change (UNFCCC) in May 2010. With a long and distinguished career in the field, Ms. Figueres was a member of the Costa Rican climate change negotiating team 1995- 2009.
She represented Latin America and the Caribbean on the Executive Board of the Clean Development Mechanism in 2007, and was then elected Vice President of the Bureau 2008-2009. She conceived the new financial instrument "programmatic CDM" with four groundbreaking publications that marked global thinking on this novel concept.
She initiated her life of public service as Minister Counselor at the Embassy of Costa Rica in Bonn, Germany in 1982. She served as Director of International Cooperation in the Ministry of Planning in Costa Rica, and was then named Chief of Staff to the Minister of Agriculture. Moving to the USA, she was Director of Renewable Energy in the Americas (REIA) and in 1995 founded the non-profit Center for Sustainable Development of the Americas (CSDA) which she directed for eight years. She designed and helped to establish national climate change programs in Guatemala, Panama, Colombia, Argentina, Ecuador, Honduras, El Salvador and the Dominican Republic, becoming a prime promoter of Latin America's active participation in the Climate Change Convention. She served as high level advisor to both governments and private companies, including Endesa Latinoamérica. In 2001 she received the Hero for the Planet Award from the National Geographic Magazine.
Ms. Figueres has made important contributions to the analytic literature on the design of the climate regime, is one of the most widely published authors on the topic, and a frequent public speaker. She has a Masters Degree in Anthropology from the London School of Economics, and a certificate in Organizational Development from Georgetown University. She speaks Spanish, English and German. |
"""
Author: Valerio Maggio (@leriomaggio)
Mail: [email protected]
"""
from itertools import ifilter, product
from functools import wraps
from math import sqrt
from numpy import sum as np_sum
# --------------------------------------------------------------------------
# Node Similarities (Kernels on Nodes)
# --------------------------------------------------------------------------
def match(n1, n2):
""" Matching Function: determines wether two nodes are comparable. """
return n1.instruction_class == n2.instruction_class
def features_similarity(n1, n2):
"""
Feature Similarity: Computes a similarity value according to nodes attributes.
"""
if n1.is_leaf_node and n2.is_leaf_node:
return int(n1.instruction == n2.instruction and n1.label == n2.label)
return int(n1.instruction == n2.instruction)
def structural_similarity(n1, n2):
"""
Structural Similarity function (used to detect (without errors) up to Type 2 clones)
"""
if n1.instruction == n2.instruction:
return 1.0
return 0.0
#------------------------------------------------------------------------------
# 01. Iterative Contiguous Kernel (Partial Trees)
#------------------------------------------------------------------------------
# Supporting functions
def compute_pairs_similarities(node_pairs_list, similarity=features_similarity):
"""
Reminder: Improve using numpy.sum
"""
return np_sum([similarity(n1, n2) for n1, n2 in node_pairs_list])
def extract_contiguous_kernel_nodes(t1, t2):
"""
Extract all the possibile pairs of nodes that match
--- (Improved version using itertools - TO BE TESTED.) ---
    Note that ifilter returns a generator, rather than a list (this should be more
efficient in terms of memory consumption).
Nevertheless, the list could be trivially returned instead by removing
the "i" from `ifilter` :-)
(This will call the built-in Python `filter` function)
"""
# return [(n1, n2) for n1 in t1.children for n2 in t2.children if match(n1, n2)]
return ifilter(lambda p: match(p[0], p[1]), product(t1.children, t2.children))
# Memoization in Python with wraps - useful for normalization to avoid repeating calculations
# The memoization is exploited only in case of t1 == t2, i.e., we are computing
# normalization values.
# This is to avoid repeating useless calculations, while not wasting memory storing the
# computation of each pair.
def memo(func):
cache = {}
@wraps(func)
def wrap(t1, t2, node_sim_func):
if t1 == t2:
if t1 not in cache:
cache[t1] = func(t1, t2, node_sim_func)
return cache[t1]
return func(t1, t2, node_sim_func)
return wrap
def iterative_kernel_function(node_pairs_list, node_similarity=features_similarity):
"""
Iterative Tree Kernel Function
"""
if not node_pairs_list or not len(node_pairs_list):
return 0.0
k = 0.0
while len(node_pairs_list):
pair = node_pairs_list.pop(0)
k += compute_pairs_similarities([pair], similarity=node_similarity)
matching_subtrees = extract_contiguous_kernel_nodes(pair[0], pair[1])
node_pairs_list.extend(matching_subtrees)
return k
@memo
def iterative_tree_kernel(tree1, tree2, node_similarity=features_similarity):
'''
Iterative Tree Kernel
'''
if not match(tree1, tree2):
return 0.0
return iterative_kernel_function([(tree1, tree2)], node_similarity)
# --------------------------------------------------------------------------
# Normalized Tree Kernel function
# --------------------------------------------------------------------------
def contiguous_tree_kernel(t1, t2, node_similarity=features_similarity):
"""
Compute the Normalized version of the Contiguous Tree Kernel function
(Value that range from 0 to 1)
"""
kernel_sim = iterative_tree_kernel(t1, t2, node_similarity)
#Normalization
return float(kernel_sim) / sqrt(iterative_tree_kernel(t1, t1, node_similarity) *
iterative_tree_kernel(t2, t2, node_similarity)) |
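# Illustrative only: a tiny stand-in node type with the attributes the kernels
# above rely on (instruction, instruction_class, label, is_leaf_node, children).
# The real project supplies its own AST node class; this demo is an assumption.
class _DemoNode(object):
    def __init__(self, instruction, instruction_class, label=None, children=()):
        self.instruction = instruction
        self.instruction_class = instruction_class
        self.label = label
        self.children = list(children)

    @property
    def is_leaf_node(self):
        return not self.children

if __name__ == '__main__':
    t1 = _DemoNode('block', 'stmt',
                   children=[_DemoNode('assign', 'stmt'),
                             _DemoNode('x', 'expr', label='id')])
    t2 = _DemoNode('block', 'stmt',
                   children=[_DemoNode('assign', 'stmt'),
                             _DemoNode('y', 'expr', label='id')])
    # Identical trees score 1.0; here the differing leaf labels lower the score.
    print contiguous_tree_kernel(t1, t2)  # ~0.667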
Innovation Express – a common European approach for supporting the internationalisation of SMEs through cluster initiatives.
The funding instrument aimed to facilitate internationalisation, smart specialisation, and cross-border learning and competence development by developing transnational linkages between SME networks, clusters and other specialised research and innovation nodes – for the benefit of their members.
Innovation Express represents a joint call for proposals implemented within the framework of the BSR Stars programme – a flagship of the EU Strategy for the Baltic Sea Region which fosters macro-regional smart specialisation. The call is funded by national/regional funding agencies to initiate, develop or enhance transnational cooperation activities – leveraging cluster organisations (or similar) to develop proposals for their SME members.
Applications were evaluated based on their potential to create benefits for participating cluster initiatives and companies, along with criteria from regional/national funding organisations.
Innovation Express partners (see “funding, cooperation and extended partners for Innovation Express” below) supported proposals submitted by groups of SMEs and/or cluster governances located in their geographical area and addressing transnational cooperation activities that benefit their SME members.
A broad variety of innovation activities, including technology/knowledge transfer, training, feasibility studies, strategic analysis and mutual benchmarking are supported, as well as many different stages of international cooperation (from initial contact and networking through to the final set up of a long-term innovation project). Within the framework of BSR Stars, there is a particular focus on supporting linkages between clusters with complementary fields of expertise (cross-sectoral) in order to tackle shared challenges or pursue international market opportunities through collaborative efforts. A further particular focus in the 2017 call is supporting linkages between clusters with a comprehensive attention towards sustainability (environmental or social).
Please refer to the table of supported innovation activities for each funding partner (below), and get in touch with the relevant contact person for more detailed information.
Targeted cluster initiatives or SME networks can be located within the Baltic Sea Region, or elsewhere within or outside Europe. All activity sectors are eligible. However, Sweden has a particular focus on “the 6 constellations” (see more in the documents section below).
Submission of proposals possible from 2 June – 31 October 2017.
Innovation Express targets groups of SMEs and/or cluster initiatives and their partners (e.g. technology providers, large companies, universities, research centres, etc.). With their proximity to firms, cluster organisations (or similar) are considered as an effective vehicle for involving SMEs which have more difficulties to develop international cooperation. They can also help them identify relevant partners to initiate trust-building processes and generate market impact.
Eligible applicants are SME networks or cluster governances located within a geographical area where an Innovation Express funding or extended partner is present (see funding and extended partners below). Proposals must be submitted by the representative of the SME network/cluster initiative for the benefit of its SME members – in accordance with them and driven by their internationalisation needs.
The applicant is a legal entity representing an SME network, cluster initiative or other specialised research and innovation environment.
The applicant is located in a geographical area where an Innovation Express funding or extended partner is present.
Applications must target another cluster initiative or SME consortia in at least one other country (within the BSR or elsewhere internationally).
Other eligibility criteria may be defined directly by the funding organisations.
Application forms (in Word format) can be accessed in the Document Overview on the BSR Stars homepage (http://www.bsr-stars.eu/innovation-express/) for preparatory purposes.
Proposals must be in line with the supported innovation activities and other guidelines outlined by the funding agency in the applicant’s geography (to be found on funding agency’s own website). It is required that applicants contact their local BSR Innovation Express contact person (see above) in order to be informed about funding conditions.
Application forms were to be completed in English via an online application portal by 31 October 2017 at the latest.
Completion and submission of additional forms were also required (according to guidelines defined by Innovation Express funding/extended partners).
In addition to the funding partners (listed above), other regional and national organisations are involved as associated partners in the Innovation Express call. Both funding and associated partners agree to serve as a source of information regarding cluster initiatives in their geography (e.g. which exist, their areas of specialisation, their desired areas of collaboration, contact names, etc.), and will help to “open doors” and facilitate initial contacts between cluster initiatives.
Funding decisions were communicated mid December 2017.
Funding decisions were made by regional/national funding agencies and communicated/registered in the common database.
All submitted proposals were registered in a common database and distributed to the relevant funding agency.
In connection with the BSR Stars Innovation Express call, a Cluster Matchmaking Conference was hosted by the Baden Württemberg Ministry of Economic Affairs, Labour and Housing, targeted at cluster organisations and SME networks wishing to explore opportunities for international collaboration on behalf of their members. The programme focused on cluster to cluster matchmaking, workshops and other opportunities to establish cooperation. The matchmaking event was organised in close collaboration with Cluster Excellence Denmark.
For more information about the Innovation Express Instrument, you are welcome to contact Hans Henrik Lomholt ([email protected]) at the Danish Agency for Institutions and Educational Grants, coordinating project manager of the BSR Stars Innovation Express in 2016 and 2017. |
import json
import logging
import os
import re
import requests
import traceback
from datetime import datetime
from django.conf import settings
from django.contrib.auth.models import Permission
from django.http import HttpResponse
from django.utils import timezone
from django.utils.translation import trans_real
from translate.filters import checks
from translate.storage import base as storage_base
from translate.storage.placeables import base, general, parse
from translate.storage.placeables.interfaces import BasePlaceable
from translate.lang import data as lang_data
log = logging.getLogger('pontoon')
def add_can_localize(user):
email = user.email
log.debug(email)
# Grant permission to Mozilla localizers
url = "https://mozillians.org/api/v1/users/"
payload = {
"app_name": "pontoon",
"app_key": settings.MOZILLIANS_API_KEY,
"email": email,
"is_vouched": True,
"groups": "localization",
}
try:
response = requests.get(url, params=payload)
mozillians = response.json()["objects"]
if len(mozillians) > 0:
can_localize = Permission.objects.get(codename="can_localize")
user.user_permissions.add(can_localize)
log.debug("Permission can_localize set.")
# Fallback if profile does not allow accessing data
user.first_name = mozillians[0].get("full_name", email)
user.save()
except Exception as e:
log.debug(e)
log.debug("Is your MOZILLIANS_API_KEY set?")
user.save()
def get_project_locale_from_request(request, locales):
"""Get Pontoon locale from Accept-language request header."""
header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
accept = trans_real.parse_accept_lang_header(header)
for a in accept:
try:
return locales.get(code__iexact=a[0]).code
except:
continue
class NewlineEscapePlaceable(base.Ph):
"""Placeable handling newline escapes."""
istranslatable = False
regex = re.compile(r'\\n')
parse = classmethod(general.regex_parse)
class TabEscapePlaceable(base.Ph):
"""Placeable handling tab escapes."""
istranslatable = False
regex = re.compile(r'\t')
parse = classmethod(general.regex_parse)
class EscapePlaceable(base.Ph):
"""Placeable handling escapes."""
istranslatable = False
regex = re.compile(r'\\')
parse = classmethod(general.regex_parse)
class SpacesPlaceable(base.Ph):
"""Placeable handling spaces."""
istranslatable = False
regex = re.compile('^ +| +$|[\r\n\t] +| {2,}')
parse = classmethod(general.regex_parse)
def mark_placeables(text):
"""Wrap placeables to easily distinguish and manipulate them.
Source: http://bit.ly/1yQOC9B
"""
PARSERS = [
NewlineEscapePlaceable.parse,
TabEscapePlaceable.parse,
EscapePlaceable.parse,
# The spaces placeable can match '\n ' and mask the newline,
# so it has to come later.
SpacesPlaceable.parse,
general.XMLTagPlaceable.parse,
general.AltAttrPlaceable.parse,
general.XMLEntityPlaceable.parse,
general.PythonFormattingPlaceable.parse,
general.JavaMessageFormatPlaceable.parse,
general.FormattingPlaceable.parse,
# The Qt variables can consume the %1 in %1$s which will mask a printf
# placeable, so it has to come later.
general.QtFormattingPlaceable.parse,
general.UrlPlaceable.parse,
general.FilePlaceable.parse,
general.EmailPlaceable.parse,
general.CapsPlaceable.parse,
general.CamelCasePlaceable.parse,
general.OptionPlaceable.parse,
general.PunctuationPlaceable.parse,
general.NumberPlaceable.parse,
]
TITLES = {
'NewlineEscapePlaceable': "Escaped newline",
'TabEscapePlaceable': "Escaped tab",
'EscapePlaceable': "Escaped sequence",
'SpacesPlaceable': "Unusual space in string",
'AltAttrPlaceable': "'alt' attribute inside XML tag",
'NewlinePlaceable': "New-line",
'NumberPlaceable': "Number",
'QtFormattingPlaceable': "Qt string formatting variable",
'PythonFormattingPlaceable': "Python string formatting variable",
'JavaMessageFormatPlaceable': "Java Message formatting variable",
'FormattingPlaceable': "String formatting variable",
'UrlPlaceable': "URI",
'FilePlaceable': "File location",
'EmailPlaceable': "Email",
'PunctuationPlaceable': "Punctuation",
'XMLEntityPlaceable': "XML entity",
'CapsPlaceable': "Long all-caps string",
'CamelCasePlaceable': "Camel case string",
'XMLTagPlaceable': "XML tag",
'OptionPlaceable': "Command line option",
}
output = u""
# Get a flat list of placeables and StringElem instances
flat_items = parse(text, PARSERS).flatten()
for item in flat_items:
# Placeable: mark
if isinstance(item, BasePlaceable):
class_name = item.__class__.__name__
placeable = unicode(item)
# CSS class used to mark the placeable
css = {
'TabEscapePlaceable': "escape ",
'EscapePlaceable': "escape ",
'SpacesPlaceable': "space ",
'NewlinePlaceable': "escape ",
}.get(class_name, "")
title = TITLES.get(class_name, "Unknown placeable")
spaces = ' ' * len(placeable)
if not placeable.startswith(' '):
spaces = placeable[0] + ' ' * (len(placeable) - 1)
# Correctly render placeables in translation editor
content = {
'TabEscapePlaceable': u'\\t',
'EscapePlaceable': u'\\',
'SpacesPlaceable': spaces,
'NewlinePlaceable': {
u'\r\n': u'\\r\\n<br/>\n',
u'\r': u'\\r<br/>\n',
u'\n': u'\\n<br/>\n',
}.get(placeable),
'XMLEntityPlaceable': placeable.replace('&', '&'),
'XMLTagPlaceable':
placeable.replace('<', '<').replace('>', '>'),
}.get(class_name, placeable)
output += ('<mark class="%splaceable" title="%s">%s</mark>') \
% (css, title, content)
# Not a placeable: skip
else:
output += unicode(item).replace('<', '<').replace('>', '>')
return output
def quality_check(original, string, locale, ignore):
"""Check for obvious errors like blanks and missing interpunction."""
if not ignore:
original = lang_data.normalized_unicode(original)
string = lang_data.normalized_unicode(string)
unit = storage_base.TranslationUnit(original)
unit.target = string
checker = checks.StandardChecker(
checkerconfig=checks.CheckerConfig(targetlanguage=locale.code))
warnings = checker.run_filters(unit)
if warnings:
# https://github.com/translate/pootle/
check_names = {
'accelerators': 'Accelerators',
'acronyms': 'Acronyms',
'blank': 'Blank',
'brackets': 'Brackets',
'compendiumconflicts': 'Compendium conflict',
'credits': 'Translator credits',
'doublequoting': 'Double quotes',
'doublespacing': 'Double spaces',
'doublewords': 'Repeated word',
'emails': 'E-mail',
'endpunc': 'Ending punctuation',
'endwhitespace': 'Ending whitespace',
'escapes': 'Escapes',
'filepaths': 'File paths',
'functions': 'Functions',
'gconf': 'GConf values',
'kdecomments': 'Old KDE comment',
'long': 'Long',
'musttranslatewords': 'Must translate words',
'newlines': 'Newlines',
'nplurals': 'Number of plurals',
'notranslatewords': 'Don\'t translate words',
'numbers': 'Numbers',
'options': 'Options',
'printf': 'printf()',
'puncspacing': 'Punctuation spacing',
'purepunc': 'Pure punctuation',
'sentencecount': 'Number of sentences',
'short': 'Short',
'simplecaps': 'Simple capitalization',
'simpleplurals': 'Simple plural(s)',
'singlequoting': 'Single quotes',
'startcaps': 'Starting capitalization',
'startpunc': 'Starting punctuation',
'startwhitespace': 'Starting whitespace',
'tabs': 'Tabs',
'unchanged': 'Unchanged',
'untranslated': 'Untranslated',
'urls': 'URLs',
'validchars': 'Valid characters',
'variables': 'Placeholders',
'xmltags': 'XML tags',
}
warnings_array = []
for key in warnings.keys():
warning = check_names.get(key, key)
warnings_array.append(warning)
return HttpResponse(json.dumps({
'warnings': warnings_array,
}), content_type='application/json')
def req(method, project, resource, locale,
username, password, payload=False):
"""
Make request to Transifex server.
Args:
method: Request method
project: Transifex project name
resource: Transifex resource name
locale: Locale code
username: Transifex username
password: Transifex password
payload: Data to be sent to the server
Returns:
A server response or error message.
"""
url = os.path.join(
'https://www.transifex.com/api/2/project/', project,
'resource', resource, 'translation', locale, 'strings')
try:
if method == 'get':
r = requests.get(
url + '?details', auth=(username, password), timeout=10)
elif method == 'put':
r = requests.put(url, auth=(username, password), timeout=10,
data=json.dumps(payload),
headers={'content-type': 'application/json'})
log.debug(r.status_code)
if r.status_code == 401:
return "authenticate"
elif r.status_code != 200:
log.debug("Response not 200")
return "error"
return r
# Network problem (DNS failure, refused connection, etc.)
except requests.exceptions.ConnectionError as e:
log.debug('ConnectionError: ' + str(e))
return "error"
# Invalid HTTP response
except requests.exceptions.HTTPError as e:
log.debug('HTTPError: ' + str(e))
return "error"
# A valid URL is required
    except requests.exceptions.URLRequired as e:
log.debug('URLRequired: ' + str(e))
return "error"
# Request times out
except requests.exceptions.Timeout as e:
log.debug('Timeout: ' + str(e))
return "error"
# Request exceeds the number of maximum redirections
except requests.exceptions.TooManyRedirects as e:
log.debug('TooManyRedirects: ' + str(e))
return "error"
    # Ambiguous exception occurs
except requests.exceptions.RequestException as e:
log.debug('RequestException: ' + str(e))
return "error"
except Exception:
log.debug('Generic exception: ' + traceback.format_exc())
return "error"
def first(collection, test, default=None):
"""
Return the first item that, when passed to the given test function,
returns True. If no item passes the test, return the default value.
"""
return next((c for c in collection if test(c)), default)
def match_attr(collection, **attributes):
"""
Return the first item that has matching values for the given
attributes, or None if no item is found to match.
"""
return first(
collection,
lambda i: all(getattr(i, attrib) == value
for attrib, value in attributes.items()),
default=None
)
def aware_datetime(*args, **kwargs):
"""Return an aware datetime using Django's configured timezone."""
return timezone.make_aware(datetime(*args, **kwargs))
def extension_in(filename, extensions):
"""
Check if the extension for the given filename is in the list of
allowed extensions. Uses os.path.splitext rules for getting the
extension.
"""
filename, extension = os.path.splitext(filename)
if extension and extension[1:] in extensions:
return True
else:
return False
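# A few quick illustrations of these helpers; the values are invented for
# the example, not taken from Pontoon's test suite.
if __name__ == '__main__':
    class _Obj(object):
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

    assert first([1, 4, 6], lambda x: x % 2 == 0) == 4
    assert first([1, 3, 5], lambda x: x % 2 == 0, default=-1) == -1
    assert match_attr([_Obj(code='de'), _Obj(code='sl')], code='sl').code == 'sl'
    assert extension_in('strings.properties', ['dtd', 'properties'])
    assert not extension_in('README', ['dtd', 'properties'])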
|
When talking about the features of a pretty girl, it always suggests a long and glossy mane, big eyes, nice pink lips, and a sharp nose. Unquestionably, a sharp nose immediately catches people's attention to your face. It is also often seen that girls with sharp noses carry off makeup more easily than others. Many women these days spend a lot on cosmetic surgeries so as to get a perfectly shaped nose. On the other hand, there are some who don't want to take any of the risks that these surgeries can involve.
In addition to giving a narrower shape to your nose, this exercise is also good for those suffering from sinus trouble or migraine. Massage every area of your nose, i.e., the bridge, the tip and the sides, for about five minutes daily. Do this exercise gently in a circular motion, and repeat it regularly over a period of a month.
The lines around your nose, commonly referred to as smile lines, get deeper with age and don't look nice. To get rid of these, do this simple exercise daily. First, fill your mouth with air and close it tightly. Then move that air in all four directions within your mouth. Hold the air each time for five seconds and finally release it.
It is a wonderful way to strengthen and shape your nose. Block the nostril on one side and inhale for four seconds through the free one. Then reverse the block and exhale through the open one for eight seconds.
This exercise is more for building the muscles of your nose than reshaping it. To do it, wiggle your nose briskly while taking care that your face stays completely still throughout.
This one is for women who aren't happy with the shape of their nose. With this exercise for your nose, there is a chance that you can change its shape and sculpt it into the form you want. Press the bottom sides of your nose with the help of your index fingers while flaring the nostrils. This exercise also prevents your nose from drooping.
Your smile is one of the finest gifts that nature has given you. It surely has worked wonders for you many times when you were stressed. You'd be glad to know that it can also help you get a straighter nose. To do this exercise, smile and push your nose upwards. This helps build the muscles on the sides of your nose. Do it daily, twenty to thirty times, to get a straighter nose.
With age, there are many changes that happen in the body, especially in bones and muscles. Practising this exercise over a long period can help you slow the decay of the cartilage in your nose. In this exercise, place a finger on the tip of your nose and press it gently. Now push that finger down by exerting downward pressure with your nose. You can do this daily as many times as you like.
import os, json, boto3
from flask import url_for
from .models import EditableHTML, SiteAttribute
from werkzeug.utils import secure_filename
from uuid import uuid4
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
@app.context_processor
def inject_name():
return dict(site_name=SiteAttribute.get_value("ORG_NAME"),
logo_url=SiteAttribute.get_value("SITE_LOGO"),
style_timestamp=SiteAttribute.get_value("STYLE_TIME"),
style_sheet=SiteAttribute.get_value("STYLE_SHEET"),
site_color=SiteAttribute.get_value("SITE_COLOR"))
app.add_template_global(index_for_role)
@app.template_filter('pages')
def inject_pages(s):
pages = EditableHTML.query.order_by(EditableHTML.page_name)
pages_list = [p.__dict__ for p in pages]
return pages_list
def index_for_role(role):
return url_for(role.index)
def s3_upload(source_file, acl='public-read'):
# Load necessary information into the application
S3_KEY = os.environ.get('S3_KEY')
S3_SECRET = os.environ.get('S3_SECRET')
S3_BUCKET = os.environ.get('S3_BUCKET')
S3_REGION = os.environ.get('S3_REGION')
TARGET_FOLDER = ''
source_filename = secure_filename(source_file.data.filename)
source_extension = os.path.splitext(source_filename)[1]
destination_filename = uuid4().hex + source_extension
# Connect to S3 and upload file.
s3 = boto3.client(
's3',
aws_access_key_id=S3_KEY,
aws_secret_access_key=S3_SECRET,
)
    try:
        s3.upload_fileobj(
            source_file.data,
            S3_BUCKET,
            destination_filename,  # store under the generated unique name
            ExtraArgs={
                "ACL": acl
            }
        )
    except Exception as e:
        print("Error: ", e)
        return e
    return destination_filename
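# Hedged sketch of a view using s3_upload: PhotoForm, the route and the
# secret key are assumptions for illustration, not part of this module.
# s3_upload() reads source_file.data.filename, so it expects a WTForms
# file field (e.g. from Flask-WTF).
from flask import Flask
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileRequired

demo_app = Flask(__name__)
demo_app.config['SECRET_KEY'] = 'change-me'  # assumed; required by Flask-WTF

class PhotoForm(FlaskForm):
    photo = FileField(validators=[FileRequired()])

@demo_app.route('/upload', methods=['POST'])
def upload():
    form = PhotoForm()
    if form.validate_on_submit():
        # Returns the uuid-based key the file is stored under on S3
        return s3_upload(form.photo)
    return 'invalid upload', 400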
|
# -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from messente.api.sms.api import api
from messente.api.sms.api import utils
from messente.api.sms.api.error import ERROR_CODES
from messente.api.sms.api.response import Response
error_map = ERROR_CODES.copy()
error_map.update({
"ERROR 109": "PIN code field is missing in the template value.",
"ERROR 110": "Verification Session with following ID was not found.",
"ERROR 111": " ".join([
"Sender parameter 'from' is invalid."
"You have not activated this sender name from Messente.com"
]),
})
class NumberVerificationResponse(Response):
_VERIFIED = "VERIFIED"
_INVALID = "INVALID"
_EXPIRED = "EXPIRED"
_THROTTLED = "THROTTLED"
def __init__(self, *args, **kwargs):
self._verification_id = kwargs.pop("verification_id", "")
Response.__init__(self, *args, **kwargs)
def _get_error_map(self):
return error_map
def _parse(self):
custom_statuses = [
self._VERIFIED, self._INVALID, self._EXPIRED, self._THROTTLED
]
stripped = self.raw_response.text.strip()
if stripped in custom_statuses:
self.status = stripped
else:
Response._parse(self)
self._verification_id = self.status_text
def is_ok(self):
return (
self.is_replied() and
self.status in ["OK", self._VERIFIED]
)
def get_verification_id(self):
return self._verification_id
def is_verified(self):
return (self.status == self._VERIFIED)
def is_invalid(self):
return (self.status == self._INVALID)
def is_expired(self):
return (self.status == self._EXPIRED)
def is_throttled(self):
return (self.status == self._THROTTLED)
class NumberVerificationAPI(api.API):
"""
Api supporting PIN code based user verification.
Documentation:
http://messente.com/documentation/number-verification/number-verification-api
"""
def __init__(self, **kwargs):
api.API.__init__(self, "number-verification", **kwargs)
def send_pin(self, data, **kwargs):
self.adapt(data)
if kwargs.get("validate", True):
self.validate(data, mode="send_pin", fatal=True)
r = NumberVerificationResponse(
self.call_api(
"verify/start",
data
),
)
self.log_response(r)
return r
def verify_pin(self, data, **kwargs):
if kwargs.pop("validate", True):
self.validate(data, mode="verify_pin", fatal=True)
r = NumberVerificationResponse(
self.call_api(
"verify/pin",
data
)
)
self.log_response(r)
return r
def adapt(self, data):
data["to"] = utils.adapt_phone_number(data.get("to", ""))
return data
def _validate(self, data, **kwargs):
self.adapt(data)
errors = {}
if kwargs.get("mode", "") == "send_pin":
to = data.get("to", "")
if not to:
self.set_error_required(errors, "to")
elif not utils.is_phone_number_valid(to):
self.set_error(errors, "to")
template = data.get("template", None)
if template is not None and "<PIN>" not in str(template):
self.set_error(errors, "template")
max_tries = data.get("max_tries", None)
if max_tries is not None:
if not str(data["max_tries"]).isdigit() or max_tries < 1:
self.set_error(errors, "max_tries")
retry_delay = data.get("retry_delay", None)
if retry_delay is not None and not str(retry_delay).isdigit():
self.set_error(errors, "retry_delay")
validity = data.get("validity", None)
if validity is not None:
is_positive_int = str(data["validity"]).isdigit()
if not is_positive_int or validity > 1800:
self.set_error(errors, "validity")
elif kwargs.get("mode", "") == "verify_pin":
pin = data.get("pin", "")
if not pin:
self.set_error_required(errors, "pin")
else:
if not str(pin).isdigit() or not int(pin):
self.set_error(errors, "pin")
verification_id = data.get("verification_id", None)
if not verification_id:
self.set_error_required(errors, "verification_id")
elif not isinstance(verification_id, str):
self.set_error(errors, "verification_id")
return (not len(errors), errors)
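# Hedged usage sketch: the credentials and phone number are placeholders,
# and the constructor keyword arguments are assumed to be consumed by the
# api.API base class.
if __name__ == "__main__":
    client = NumberVerificationAPI(username="api-user", password="api-password")
    # The template must contain the <PIN> placeholder (see _validate above).
    response = client.send_pin({
        "to": "+37251234567",
        "template": "Your code: <PIN>",
    })
    if response.is_ok():
        verification_id = response.get_verification_id()
        # Later, check the PIN the user typed in:
        result = client.verify_pin({
            "verification_id": verification_id,
            "pin": "1234",
        })
        print(result.is_verified())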
|
Christmas: BTS and the Courthouses will close at noon on Friday, December 22nd, and will be closed on December 25th. Courthouses will also be closed on Tuesday, December 26th.
New Year's: BTS and the Courthouses will be closed on January 1st. Courthouses will also be closed on Tuesday, January 2nd.
The entire Bankers Title Shenandoah Team wishes you a pleasant Holiday Season spent creating wonderful memories with family and friends.
Click the photo to watch the video.
# -*- coding: utf-8 -*-
import inspect
from django.utils import six
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from muser.utils import get_object
class ExtensionsMixin(object):
"""Extension mixin class to include all content type extension. """
@classmethod
def register_extensions(cls, *extensions):
"""
Register all extensions passed as arguments.
Extensions should be specified as a string to the python module
containing the extension.
"""
if not hasattr(cls, '_extensions'):
cls._extensions = []
cls._extensions_seen = []
for ext in extensions:
if ext in cls._extensions:
continue
extension = None
# check if ext is a Class
if inspect.isclass(ext) and issubclass(ext, Extension):
extension = ext
elif isinstance(ext, six.string_types):
try:
extension = get_object(ext)
except (AttributeError, ImportError, ValueError):
                    raise ImproperlyConfigured(
                        '%s is not a valid extension for %s' % (
                            ext, cls.__name__))
if hasattr(extension, 'Extension'):
extension = extension.Extension
elif hasattr(extension, 'register'):
extension = extension.register
elif hasattr(extension, '__call__'):
pass
else:
raise ImproperlyConfigured(
'%s is not a valid extension for %s' % (
ext, cls.__name__))
if extension in cls._extensions_seen:
continue
cls._extensions_seen.append(extension)
if hasattr(extension, 'handle_model'):
cls._extensions.append(extension(cls))
else:
raise ImproperlyConfigured(
'%r is an invalid extension.' % extension)
class Extension(object):
"""Handle a extension object for content type. """
def __init__(self, model, **kwargs):
self.model = model
for key, value in kwargs.items():
if not hasattr(self, key):
raise TypeError('%s() received an invalid keyword %r' % (
self.__class__.__name__, key))
setattr(self, key, value)
self.handle_model()
def handle_model(self):
raise NotImplementedError
def handle_modeladmin(self, modeladmin):
pass
class ExtensionModelAdmin(admin.ModelAdmin):
def __init__(self, *args, **kwargs):
super(ExtensionModelAdmin, self).__init__(*args, **kwargs)
self.initialize_extensions()
def initialize_extensions(self):
if not hasattr(self, '_extensions_initialized'):
self._extensions_initialized = True
for extension in getattr(self.model, '_extensions', []):
extension.handle_modeladmin(self)
def add_extension_options(self, *f):
if self.fieldsets is None:
return
if isinstance(f[-1], dict): # called with a fieldset
self.fieldsets.insert(self.fieldset_insertion_index, f)
f[1]['classes'] = list(f[1].get('classes', []))
f[1]['classes'].append('collapse')
elif f: # assume called with "other" fields
try:
self.fieldsets[1][1]['fields'].extend(f)
except IndexError:
# Fall back to first fieldset if second does not exist
# XXX This is really messy.
self.fieldsets[0][1]['fields'].extend(f)
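# For illustration, a minimal concrete extension and its registration;
# 'publication_date' and the Page model are invented names, not part of
# this module.
from django.db import models

class DatePublisherExtension(Extension):
    def handle_model(self):
        # Add a field to the extended model class.
        self.model.add_to_class(
            'publication_date',
            models.DateTimeField('publication date', blank=True, null=True))

    def handle_modeladmin(self, modeladmin):
        # Expose the new field in the admin via the helper above.
        modeladmin.add_extension_options('publication_date')

# On a model that mixes in ExtensionsMixin:
# class Page(ExtensionsMixin, models.Model): ...
# Page.register_extensions(DatePublisherExtension)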
|
Among the many painting ideas for wooden outdoor furniture in today's homes, designs with monochrome concepts are in great demand because they look neat, and a monochrome scheme also makes a room feel broader, especially in a minimalist house. In the world of fashion, mismatched clothes make your appearance look untidy and less attractive, and the same applies to interiors: if the painted outdoor furniture does not suit your house, the appearance of the home becomes less attractive too. Choosing furniture and room accessories carelessly can also leave a house looking crowded, poorly ventilated, and uncomfortable. So what painted wooden outdoor furniture does your home need? Before deciding to shop for accessories or home furniture, do some research first. Accessories are everything that can add to the aesthetics of your space; even if a piece has no practical function, it doesn't matter, as long as it adds to the look of the room. Providing variation in shape and size will create a more attractive room.
from markdown import markdownFromFile, markdown
from bs4 import BeautifulSoup
class TreeOfContents:
"""Tree abstraction for markdown source"""
source_type = BeautifulSoup
valid_tags = ('a', 'abbr', 'address', 'area', 'article', 'aside', 'audio',
'b', 'base', 'bdi', 'bdo', 'blockquote', 'body', 'br', 'button',
'canvas', 'caption', 'cite', 'code', 'col', 'colgroup', 'data',
'datalist', 'dd', 'del', 'details', 'dfn', 'dialog', 'div', 'dl', 'dt',
'em', 'embed', 'fieldset', 'figcaption', 'figure', 'footer', 'form',
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'header', 'hgroup', 'hr',
'html', 'i', 'iframe', 'img', 'input', 'ins', 'kbd', 'keygen', 'label',
'legend', 'li', 'link', 'main', 'map', 'mark', 'menu', 'menuitem',
'meta', 'meter', 'nav', 'noscript', 'object', 'ol', 'optgroup',
'option', 'output', 'p', 'param', 'picture', 'pre', 'progress', 'q',
'rp', 'rt', 'ruby', 's', 'samp', 'script', 'section', 'select', 'small',
'source', 'span', 'strong', 'style', 'sub', 'summary', 'sup', 'table',
'tbody', 'td', 'template', 'textarea', 'tfoot', 'th', 'thead', 'time',
'title', 'tr', 'track', 'u', 'ul', 'var', 'video', 'wbr')
allowed_attrs = ('string', 'name')
def __init__(self, root, branches=(), descendants=(), source=None,
depth=None):
"""
Construct TreeOfContents object using source
:param SourceType source: parsed source
:param list TreeOfContents branches: list of direct children
:param list SourceType descendants: all descendants
"""
assert source is not None, 'NoneType source passed into TreeOfContents'
self.source = source
self.depth = depth or self.parseTopDepth()
self.descendants = descendants or self.expandDescendants(branches)
self.branches = branches or self.parseBranches(descendants)
@staticmethod
def getHeadingLevel(bs):
"""
>>> bsify = lambda html: BeautifulSoup(html, 'html.parser')
>>> bs = bsify('<h1>Hello</h1>').h1
>>> TOC.getHeadingLevel(bs)
1
>>> bs2 = bsify('<p>Hello</p>').p
>>> TOC.getHeadingLevel(bs2)
>>> bs3 = bsify('<article>Hello</article>').article
>>> TOC.getHeadingLevel(bs3)
"""
try:
return int(bs.name[1])
except (ValueError, IndexError, TypeError):
return None
def parseTopDepth(self):
"""
Parse highest heading in markdown
>>> TOC.fromHTML('<h2>haha</h2><h1>hoho</h1>').parseTopDepth()
1
>>> TOC.fromHTML('<h3>haha</h3><h2>hoho</h2>').parseTopDepth()
2
"""
for i in range(1, 7):
if getattr(self.source, 'h%d' % i):
return i
def expandDescendants(self, branches):
"""
Expand descendants from list of branches
:param list branches: list of immediate children as TreeOfContents objs
:return: list of all descendants
"""
        return sum([b.descendants for b in branches], []) + \
            [b.source for b in branches]
def parseBranches(self, descendants):
"""
Parse top level of markdown
:param list elements: list of source objects
:return: list of filtered TreeOfContents objects
"""
parsed, parent, cond = [], False, lambda b: (b.string or '').strip()
for branch in filter(cond, descendants):
if self.getHeadingLevel(branch) == self.depth:
parsed.append({'root':branch.string, 'source':branch})
parent = True
elif not parent:
parsed.append({'root':branch.string, 'source':branch})
else:
parsed[-1].setdefault('descendants', []).append(branch)
return [TOC(depth=self.depth+1, **kwargs) for kwargs in parsed]
def __getattr__(self, attr, *default):
"""Check source for attributes"""
tag = attr[:-1]
if attr in self.allowed_attrs:
return getattr(self.source, attr, *default)
if attr in self.valid_tags:
return next(filter(lambda t: t.name == attr, self.branches), None)
if len(default):
return default[0]
if attr[-1] == 's' and tag in self.valid_tags:
condition = lambda t: t.name == tag
return filter(condition, self.branches)
raise AttributeError("'TreeOfContents' object has no attribute '%s'" % attr)
def __repr__(self):
"""Display contents"""
return str(self)
def __str__(self):
"""Display contents"""
return self.string or ''
def __iter__(self):
"""Iterator over children"""
return iter(self.branches)
def __getitem__(self, i):
return self.branches[i]
@staticmethod
def fromMarkdown(md, *args, **kwargs):
"""
        Creates abstraction using markdown source
        :param str md: markdown source text
:return: TreeOfContents object
"""
return TOC.fromHTML(markdown(md, *args, **kwargs))
@staticmethod
def fromHTML(html, *args, **kwargs):
"""
Creates abstraction using HTML
:param str html: HTML
:return: TreeOfContents object
"""
source = BeautifulSoup(html, 'html.parser', *args, **kwargs)
return TOC('[document]',
source=source,
descendants=source.children)
TOC = TreeOfContents
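# Small usage sketch; behaviour follows the doctests above.
if __name__ == '__main__':
    html = '<h1>Intro</h1><p>hello</p><h1>Usage</h1><p>world</p>'
    toc = TOC.fromHTML(html)
    for section in toc:
        print(section)  # -> Intro, then Usage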
|
Today on the Friends of a Feather Podcast, my guest is Jamie Ivey. Jamie is wife to Aaron, mom to four kids, and a podcaster. Jamie and I chat about her family, how she started her podcast, and her new book coming out next week, If You Only Knew.
I enjoyed sharing with her what her podcast has meant to me and she was kind enough to share parts of her story with me. You will not want to miss this episode!
Share with a friend and be encouraged!!
We are friends of a feather; let's stick together!!
Preorder Jamie's book HERE on Amazon, or HERE!
Today, my guests on the podcast are Trisha and Julie! They are my coworker-turned-friends and I cannot wait for you to hear their stories. I love these special friends whom I have known for the past eleven years. A few years ago, they found out they both had breast cancer within months of each other. I love the support and candidness that they show during our conversation and in life. I also love their sense of humor, too!
On this episode, we talk about where they were in their lives when they were diagnosed with breast cancer, advice they give when someone close to you is diagnosed with cancer, what they learned through each of their journeys, and a couple of funny stories mixed in there!
Thanks for listening! Feel free to share this with a friend who is walking this same road.
We are friends of a feather; let's stick together! Be encouraged and share with a friend!!
Welcome to the Friends of a Feather Podcast!!!
I'm Wren and I'm so glad you are here! Each week, I get to chat with a friend who has a story to tell, a passion to share, or a dream that she is pursuing.
This week, my guest is Melanie Redd. Melanie is a wife, mom to two college-age kids, blogger, writer, speaker, social media consultant, and a whole lot more!! On this episode, Melanie and I chat about a lot!! We chat about how we met, how hard it is not to promote ourselves or our businesses in the social media world we live in, and how God has guided her life-walk and ministry one step at a time, and how it's been and continues to be a fun adventure!!!
What I love about Melanie is how encouraging she is and how she points women to the word of God. She gives such great practical advice to young moms who have ministry in their hearts and for women no matter what stage of life you are in right now!
You will be encouraged and will want to grab a pen to write down the quotable wisdom she gives!!!
We are all friends of a feather!! Let's stick together!!
Share the episode with a friend and be encouraged today!!
Where to find Melanie Redd online! |
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Config file handler (management tool)
#
import sys
import difflib
from spacewalk.common.rhnLog import log_debug
from spacewalk.common.usix import raise_with_tb, next
from spacewalk.common.rhnException import rhnFault
from spacewalk.server import rhnSQL, configFilesHandler
from spacewalk.common.fileutils import f_date, ostr_to_sym
class ConfigManagement(configFilesHandler.ConfigFilesHandler):
def __init__(self):
log_debug(3)
configFilesHandler.ConfigFilesHandler.__init__(self)
self.functions.update({
'management.get_file': 'management_get_file',
'management.list_config_channels': 'management_list_channels',
'management.create_config_channel': 'management_create_channel',
'management.remove_config_channel': 'management_remove_channel',
'management.list_file_revisions': 'management_list_file_revisions',
'management.list_files': 'management_list_files',
'management.has_file': 'management_has_file',
'management.put_file': 'management_put_file',
'management.remove_file': 'management_remove_file',
'management.diff': 'management_diff',
'management.get_default_delimiters': 'management_get_delimiters',
'management.get_maximum_file_size': 'management_get_maximum_file_size',
})
self.user = None
self.default_delimiter = '@'
_query_list_config_channels = rhnSQL.Statement("""
select cc.name,
cc.label,
cct.label channel_type
from rhnConfigChannelType cct,
rhnConfigChannel cc
where cc.org_id = :org_id
and cc.confchan_type_id = cct.id
and cct.label = 'normal'
order by cc.label, cc.name
""")
def _get_and_validate_session(self, dict):
session = dict.get('session')
self._validate_session(session)
def management_list_channels(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
return [x['label'] for x in rhnSQL.fetchall_dict(self._query_list_config_channels,
org_id=self.org_id) or []]
_query_lookup_config_channel = rhnSQL.Statement("""
select id
from rhnConfigChannel
where org_id = :org_id
and label = :config_channel
""")
def management_create_channel(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
config_channel_name = dict.get('config_channel_name') or config_channel
config_channel_description = dict.get('description') or config_channel
row = rhnSQL.fetchone_dict(self._query_lookup_config_channel,
org_id=self.org_id, config_channel=config_channel)
if row:
raise rhnFault(4010, "Configuration channel %s already exists" %
config_channel, explain=0)
insert_call = rhnSQL.Function('rhn_config.insert_channel',
rhnSQL.types.NUMBER())
config_channel_id = insert_call(self.org_id,
'normal',
config_channel_name,
config_channel,
config_channel_description)
rhnSQL.commit()
return {}
_query_config_channel_by_label = rhnSQL.Statement("""
select id
from rhnConfigChannel
where org_id = :org_id
and label = :label
""")
def management_remove_channel(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
row = rhnSQL.fetchone_dict(self._query_config_channel_by_label,
org_id=self.org_id, label=config_channel)
if not row:
raise rhnFault(4009, "Channel not found")
delete_call = rhnSQL.Procedure('rhn_config.delete_channel')
try:
delete_call(row['id'])
except rhnSQL.SQLError:
e = sys.exc_info()[1]
errno = e.args[0]
if errno == 2292:
raise_with_tb(rhnFault(4005, "Cannot remove non-empty channel %s" %
config_channel, explain=0), sys.exc_info()[2])
raise
log_debug(5, "Removed:", config_channel)
rhnSQL.commit()
return ""
_query_management_list_files = rhnSQL.Statement("""
select cc.label config_channel,
cfn.path
from rhnConfigFileName cfn,
rhnConfigFileState cfs,
rhnConfigFile cf,
rhnConfigChannel cc
where cc.org_id = :org_id
and cc.label = :config_channel
and cc.id = cf.config_channel_id
and cf.state_id = cfs.id
and cfs.label = 'alive'
and cf.config_file_name_id = cfn.id
""")
def management_list_files(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the config channel
log_debug(3, "Org id", self.org_id, "Config channel", config_channel)
h = rhnSQL.prepare(self._query_management_list_files)
h.execute(org_id=self.org_id, config_channel=config_channel)
retval = []
while 1:
row = h.fetchone_dict()
if not row:
break
val = {}
# Only copy a subset of the keys
for f in ['config_channel', 'path']:
val[f] = row[f]
retval.append(val)
log_debug(4, "pre sort", retval)
        retval.sort(key=lambda x: x['path'])
log_debug(4, "Return value", retval)
return retval
def management_get_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
revision = dict.get('revision')
row = self._get_file(config_channel, path, revision=revision)
if not row:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
return self._format_file_results(row)
_query_list_file_revisions = rhnSQL.Statement("""
select cr.revision
from rhnConfigChannel cc,
rhnConfigRevision cr,
rhnConfigFile cf
where cf.config_channel_id = cc.id
and cc.label = :config_channel
and cc.org_id = :org_id
and cf.config_file_name_id = lookup_config_filename(:path)
and cr.config_file_id = cf.id
order by revision desc
""")
def management_list_file_revisions(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
retval = [x['revision'] for x in rhnSQL.fetchall_dict(self._query_list_file_revisions,
org_id=self.org_id, config_channel=config_channel, path=path) or []]
if not retval:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
return retval
def management_has_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
row = self._get_file(config_channel, path)
if not row:
return {}
return {
'revision': row['revision'],
}
_query_get_file = """
select :path path,
cc.label config_channel,
ccont.contents file_contents,
ccont.is_binary,
c.checksum_type,
c.checksum,
ccont.delim_start, ccont.delim_end,
cr.revision,
cf.modified,
ci.username,
ci.groupname,
ci.filemode,
cft.label,
ci.selinux_ctx,
case
when cft.label='symlink' then (select path from rhnConfigFileName where id = ci.SYMLINK_TARGET_FILENAME_ID)
else ''
end as symlink
from rhnConfigChannel cc,
rhnConfigInfo ci,
rhnConfigRevision cr
left join rhnConfigContent ccont
on cr.config_content_id = ccont.id
left join rhnChecksumView c
on ccont.checksum_id = c.id,
rhnConfigFile cf,
rhnConfigFileType cft
where cf.config_channel_id = cc.id
and cc.label = :config_channel
and cc.org_id = :org_id
and cf.config_file_name_id = lookup_config_filename(:path)
and cr.config_file_id = cf.id
and cr.config_info_id = ci.id
and cr.config_file_type_id = cft.id
"""
_query_get_file_latest = rhnSQL.Statement(_query_get_file + """
and cf.latest_config_revision_id = cr.id
""")
_query_get_file_revision = rhnSQL.Statement(_query_get_file + """
and cr.revision = :revision
""")
def _get_file(self, config_channel, path, revision=None):
log_debug(2, config_channel, path)
params = {
'org_id': self.org_id,
'config_channel': config_channel,
'path': path,
}
if revision is None:
# Fetch the latest
q = self._query_get_file_latest
else:
params['revision'] = revision
q = self._query_get_file_revision
log_debug(4, params)
return rhnSQL.fetchone_dict(q, **params)
_query_lookup_config_file_by_channel = rhnSQL.Statement("""
select cf.id,
cf.state_id
from rhnConfigFile cf,
rhnConfigChannel cc
where cc.org_id = :org_id
and cf.config_channel_id = cc.id
and cc.label = :config_channel
and cf.config_file_name_id = lookup_config_filename(:path)
""")
def management_remove_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
org_id=self.org_id, config_channel=config_channel, path=path)
if not row:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
config_file_id = row['id']
delete_call = rhnSQL.Procedure("rhn_config.delete_file")
delete_call(config_file_id)
rhnSQL.commit()
return {}
_query_update_file_state = rhnSQL.Statement("""
update rhnConfigFile
set state_id = :state_id
where id = :config_file_id
""")
def management_disable_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
t = rhnSQL.Table('rhnConfigFileState', 'label')
state_id_dead = t['dead']['id']
        row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
                                   org_id=self.org_id, config_channel=config_channel, path=path)
if not row or row['state_id'] == state_id_dead:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
rhnSQL.execute(self._query_update_file_state,
config_file_id=row['id'], state_id=state_id_dead)
rhnSQL.commit()
return {}
def management_put_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
row = self.lookup_org_config_channel_by_name(config_channel)
conf_channel_id = row['id']
file_path = dict.get('path')
result = self.push_file(conf_channel_id, dict)
file_too_large = result.get('file_too_large')
if file_too_large:
raise rhnFault(4003, "File %s is too large (%s bytes)" %
(dict['path'], dict['size']), explain=0)
rhnSQL.commit()
return {}
def management_get_delimiters(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
return self._get_delimiters()
def management_get_maximum_file_size(self, dict={}):
log_debug(1)
self._get_and_validate_session(dict)
return self._get_maximum_file_size()
def __attributes_differ(self, fsrc, fdst):
""" Returns true if acl, ownership, type or selinux context differ. """
return (fsrc['filemode'] != fdst['filemode']) or (fsrc['label'] != fdst['label']) or \
(fsrc['username'] != fdst['username']) or (fsrc['groupname'] != fdst['groupname']) or \
(fsrc['selinux_ctx'] != fdst['selinux_ctx'])
def __header(self, path, fsrc, config_channel_src, fdst, config_channel_dst):
""" Returns diff like header for this two files. """
template = "--- %s\t%s\tattributes: %s %s %s %s\tconfig channel: %s\trevision: %s"
first_row = template % (path, f_date(fsrc['modified']), ostr_to_sym(fsrc['filemode'], fsrc['label']),
fsrc['username'], fsrc['groupname'], fsrc['selinux_ctx'], config_channel_src,
fsrc['revision'],
)
second_row = template % (path, f_date(fdst['modified']), ostr_to_sym(fdst['filemode'], fdst['label']),
fdst['username'], fdst['groupname'], fdst['selinux_ctx'], config_channel_dst,
fdst['revision'],
)
return (first_row, second_row)
def management_diff(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
param_names = ['config_channel_src', 'revision_src', 'path', ]
for p in param_names:
val = dict.get(p)
if val is None:
raise rhnFault(4007, "No content sent for `%s'" % p)
log_debug(4, "Params sent", dict)
path = dict['path']
config_channel_src = dict['config_channel_src']
revision_src = dict.get('revision_src')
fsrc = self._get_file_revision(config_channel_src, revision_src, path)
config_channel_dst = dict.get('config_channel_dst')
if config_channel_dst is None:
config_channel_dst = config_channel_src
revision_dst = dict.get('revision_dst')
fdst = self._get_file_revision(config_channel_dst, revision_dst, path)
if fsrc['label'] != fdst['label']:
raise rhnFault(4017,
"Path %s is a %s in channel %s while it is a %s in channel %s"
% (path, fsrc['label'],
config_channel_src, fdst['label'], config_channel_dst),
explain=0)
if fsrc['label'] == 'symlink':
if (fsrc["symlink"] != fdst['symlink']) or self.__attributes_differ(fsrc, fdst):
(first_row, second_row) = self.__header(path, fsrc, config_channel_src, fdst, config_channel_dst)
first_row += ' target: %s' % fsrc["symlink"]
second_row += ' target: %s' % fdst["symlink"]
return first_row + "\n" + second_row + "\n"
return ""
diff = difflib.unified_diff(
fsrc['file_content'], fdst['file_content'], path, path, fsrc['modified'], fdst['modified'], lineterm='')
try:
first_row = next(diff)
except StopIteration:
return ""
if not first_row.startswith('---'):
# Hmm, weird
return first_row + '\n'.join(list(diff))
try:
second_row = next(diff)
except StopIteration:
second_row = ''
if not second_row.startswith('+++'):
# Hmm, weird
return second_row + '\n'.join(list(diff))
(first_row, second_row) = self.__header(path, fsrc, config_channel_src, fdst, config_channel_dst)
return first_row + "\n" + second_row + '\n' + '\n'.join(list(diff))
def _get_file_revision(self, config_channel, revision, path):
if revision and not revision.isdigit():
raise rhnFault(4016, "Invalid revision number '%s' specified for path %s "
"in channel %s" % (revision, path, config_channel),
explain=0)
f = self._get_file(config_channel, path, revision=revision)
if not f:
raise rhnFault(4011, "File %s (revision %s) does not exist "
"in channel %s" % (path, revision, config_channel),
explain=0)
if f['label'] == 'file' and f['is_binary'] == 'Y':
raise rhnFault(4004, "File %s (revision %s) seems to contain "
"binary data" % (path, revision),
explain=0)
# We have to read the contents of the first file here, because the LOB
# object is tied to a cursor; if we re-execute the cursor, the LOB
# seems to be invalid (bug 151220)
# Empty files or directories may have NULL instead of lobs
fc_lob = f.get('file_contents')
if fc_lob:
f['file_content'] = rhnSQL.read_lob(fc_lob).splitlines()
else:
f['file_content'] = ''
return f
# Helper functions
_query_org_config_channels = rhnSQL.Statement("""
select cc.id, cc.label, cc.name, cct.label channel_type
from rhnConfigChannelType cct, rhnConfigChannel cc
where cc.label = :config_channel
and cc.org_id = :org_id
and cc.confchan_type_id = cct.id
""")
def lookup_org_config_channel_by_name(self, config_channel):
row = rhnSQL.fetchone_dict(self._query_org_config_channels,
config_channel=config_channel, org_id=self.org_id)
if not row:
raise rhnFault(4009, "Configuration channel %s does not exist" %
config_channel, explain=0)
return row
def _check_user_role(self):
user_roles = self.user.get_roles()
if 'config_admin' in user_roles or 'org_admin' in user_roles:
# All good
return
raise rhnFault(4006,
"User is not a allowed to manage config files")
|
We obsess over Fashion Week for many reasons. Obviously, we anticipate the biggest designers' latest and greatest with bated breath, and the street style (oh, the street style!) might just be the best in the world. But we have to admit we get just as excited to catch a glimpse of that coveted front row and see our favorite celebs all decked out in their Fashion Week best. And this year's Paris shows were no different.
From newbies like Lupita Nyong'o and the girls of Haim to industry vets like Rihanna, Olivia Palermo, and Elle Fanning, Tinseltown's hottest It girls were all in attendance, and in fine form. Whether they were taking selfies at the Miu Miu show (Lupita and Elizabeth, we're looking at you!), supporting bestie Cara Delevingne at Chanel (Rihanna!), or just looking fabulous in two-tone burgundy (well done, Emma Roberts), these lovely ladies gave the runway gals a run for their money.
Click through to catch our top 12 looks, and don't forget to tell us your favorite in the comments below—if you can bring yourself to choose, that is. |
"""MySensors constants for version 1.4 of MySensors."""
from enum import IntEnum
class MessageType(IntEnum):
"""MySensors message types."""
# pylint: disable=too-few-public-methods
presentation = 0 # sent by a node when presenting attached sensors
set = 1 # sent from/to sensor when value should be updated
req = 2 # requests a variable value
internal = 3 # internal message
stream = 4 # OTA firmware updates
class Presentation(IntEnum):
"""MySensors presentation sub-types."""
# pylint: disable=too-few-public-methods
S_DOOR = 0 # Door and window sensors
S_MOTION = 1 # Motion sensors
S_SMOKE = 2 # Smoke sensor
S_LIGHT = 3 # Light Actuator (on/off)
S_DIMMER = 4 # Dimmable device of some kind
S_COVER = 5 # Window covers or shades
S_TEMP = 6 # Temperature sensor
S_HUM = 7 # Humidity sensor
S_BARO = 8 # Barometer sensor (Pressure)
S_WIND = 9 # Wind sensor
S_RAIN = 10 # Rain sensor
S_UV = 11 # UV sensor
S_WEIGHT = 12 # Weight sensor for scales etc.
S_POWER = 13 # Power measuring device, like power meters
S_HEATER = 14 # Heater device
S_DISTANCE = 15 # Distance sensor
S_LIGHT_LEVEL = 16 # Light sensor
S_ARDUINO_NODE = 17 # Arduino node device
S_ARDUINO_RELAY = 18 # Arduino repeating node device
S_LOCK = 19 # Lock device
S_IR = 20 # Ir sender/receiver device
S_WATER = 21 # Water meter
S_AIR_QUALITY = 22 # Air quality sensor e.g. MQ-2
S_CUSTOM = 23 # Use this for custom sensors
S_DUST = 24 # Dust level sensor
S_SCENE_CONTROLLER = 25 # Scene controller device
class SetReq(IntEnum):
"""MySensors set/req sub-types."""
# pylint: disable=too-few-public-methods
V_TEMP = 0 # Temperature
V_HUM = 1 # Humidity
V_LIGHT = 2 # Light status. 0=off 1=on
V_DIMMER = 3 # Dimmer value. 0-100%
V_PRESSURE = 4 # Atmospheric Pressure
# Weather forecast. One of "stable", "sunny", "cloudy", "unstable",
# "thunderstorm" or "unknown"
V_FORECAST = 5
V_RAIN = 6 # Amount of rain
V_RAINRATE = 7 # Rate of rain
V_WIND = 8 # Windspeed
V_GUST = 9 # Gust
V_DIRECTION = 10 # Wind direction
V_UV = 11 # UV light level
V_WEIGHT = 12 # Weight (for scales etc)
V_DISTANCE = 13 # Distance
V_IMPEDANCE = 14 # Impedance value
# Armed status of a security sensor. 1=Armed, 0=Bypassed
V_ARMED = 15
# Tripped status of a security sensor. 1=Tripped, 0=Untripped
V_TRIPPED = 16
V_WATT = 17 # Watt value for power meters
V_KWH = 18 # Accumulated number of KWH for a power meter
V_SCENE_ON = 19 # Turn on a scene
V_SCENE_OFF = 20 # Turn off a scene
# Mode of heater. One of "Off", "HeatOn", "CoolOn", or "AutoChangeOver"
V_HEATER = 21
V_HEATER_SW = 22 # Heater switch power. 1=On, 0=Off
V_LIGHT_LEVEL = 23 # Light level. 0-100%
V_VAR1 = 24 # Custom value
V_VAR2 = 25 # Custom value
V_VAR3 = 26 # Custom value
V_VAR4 = 27 # Custom value
V_VAR5 = 28 # Custom value
V_UP = 29 # Window covering. Up.
V_DOWN = 30 # Window covering. Down.
V_STOP = 31 # Window covering. Stop.
V_IR_SEND = 32 # Send out an IR-command
V_IR_RECEIVE = 33 # This message contains a received IR-command
V_FLOW = 34 # Flow of water (in meter)
V_VOLUME = 35 # Water volume
V_LOCK_STATUS = 36 # Set or get lock status. 1=Locked, 0=Unlocked
V_DUST_LEVEL = 37 # Dust level
V_VOLTAGE = 38 # Voltage level
V_CURRENT = 39 # Current level
class Internal(IntEnum):
"""MySensors internal sub-types."""
# pylint: disable=too-few-public-methods
# Use this to report the battery level (in percent 0-100).
I_BATTERY_LEVEL = 0
# Sensors can request the current time from the Controller using this
# message. The time will be reported as the seconds since 1970
I_TIME = 1
# Sensors report their library version at startup using this message type
I_VERSION = 2
# Use this to request a unique node id from the controller.
I_ID_REQUEST = 3
# Id response back to sensor. Payload contains sensor id.
I_ID_RESPONSE = 4
# Start/stop inclusion mode of the Controller (1=start, 0=stop).
I_INCLUSION_MODE = 5
    # Config request from node. Reply with (M)etric or (I)mperial back to sensor
I_CONFIG = 6
    # When a sensor starts up, it broadcasts a search request to all neighbor
    # nodes. They reply with an I_FIND_PARENT_RESPONSE.
I_FIND_PARENT = 7
# Reply message type to I_FIND_PARENT request.
I_FIND_PARENT_RESPONSE = 8
# Sent by the gateway to the Controller to trace-log a message
I_LOG_MESSAGE = 9
# A message that can be used to transfer child sensors
# (from EEPROM routing table) of a repeating node.
I_CHILDREN = 10
# Optional sketch name that can be used to identify sensor in the
# Controller GUI
I_SKETCH_NAME = 11
# Optional sketch version that can be reported to keep track of the version
# of sensor in the Controller GUI.
I_SKETCH_VERSION = 12
# Used by OTA firmware updates. Request for node to reboot.
I_REBOOT = 13
    # Sent by gateway to controller when startup is complete
I_GATEWAY_READY = 14
class Stream(IntEnum):
"""MySensors stream sub-types."""
# Request new FW, payload contains current FW details
ST_FIRMWARE_CONFIG_REQUEST = 0
# New FW details to initiate OTA FW update
ST_FIRMWARE_CONFIG_RESPONSE = 1
ST_FIRMWARE_REQUEST = 2 # Request FW block
ST_FIRMWARE_RESPONSE = 3 # Response FW block
ST_SOUND = 4 # Sound
ST_IMAGE = 5 # Image
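# These enums map directly onto the MySensors serial API, where each line has
# the form node-id;child-sensor-id;message-type;ack;sub-type;payload. A
# minimal decoding sketch using the classes above (the sample line below is
# illustrative):
def decode_line(line):
    """Decode one MySensors 1.4 serial message into typed fields."""
    node_id, child_id, msg_type, ack, sub_type, payload = \
        line.strip().split(';', 5)
    msg_type = MessageType(int(msg_type))
    # Which sub-type enum applies depends on the message type.
    sub_enum = {MessageType.presentation: Presentation,
                MessageType.set: SetReq,
                MessageType.req: SetReq,
                MessageType.internal: Internal,
                MessageType.stream: Stream}[msg_type]
    return int(node_id), int(child_id), msg_type, sub_enum(int(sub_type)), payload

# decode_line('1;0;1;0;0;22.5')
# -> (1, 0, <MessageType.set: 1>, <SetReq.V_TEMP: 0>, '22.5')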
|
The Nightmare Before Halloween returns for an 8th show! C'mon down to Crypticon, Minnesota's #1 Horror Convention! Come meet celebrity guests, watch classic and indie horror films, and shop for cool horror collectibles in our huge vendor room! Get a tattoo! Live bands! Come party in our fan-created horror party rooms, or throw your own horror party room!
#!/usr/bin/env python
########################################################
#
# Stack Trace Decoder
# Author: Slavey Karadzhov <[email protected]>
#
########################################################
import shlex
import subprocess
import sys
import re
def usage():
print("Usage: \n\t%s <file.elf> [<error-stack.log>]" % sys.argv[0])
def extractAddresses(data):
    m = re.findall(r"(40[0-2](\d|[a-f]){5})", data)
if len(m) == 0:
return m
addresses = []
for item in m:
addresses.append(item[0])
return addresses
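# Quick sanity check of extractAddresses on a fabricated ESP8266 stack dump
# fragment (addresses are illustrative): only 8-hex-digit words in the
# 0x40000000-0x402fffff instruction range match the pattern above.
#
#     >>> extractAddresses("3ffefa50: 40201a8c 3ffefb80 00000000 40100544")
#     ['40201a8c', '40100544']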
if __name__ == "__main__":
    if len(sys.argv) not in (2, 3):
usage()
sys.exit(1)
command = "xtensa-lx106-elf-addr2line -aipfC -e '%s' " % sys.argv[1]
pipe = subprocess.Popen(shlex.split(command), bufsize=1, stdin=subprocess.PIPE)
if len(sys.argv) > 2:
data = open(sys.argv[2]).read()
pipe.communicate("\n".join(extractAddresses(data)).encode('ascii'))
else:
while True:
data = sys.stdin.readline()
addresses = extractAddresses(data)
if len(addresses) == 0:
continue
# print ( "[",addresses,"]" )
line = "\r\n".join(addresses)+"\r\n"
# line = line.ljust(125," ")
pipe.stdin.write(line.encode('ascii'))
pipe.stdin.flush()
|
Do you have homemade recipes? What are your best beauty tips?
Find homemade skin care, hair care or scalp remedies!
Do you know how to get rid of blackheads? Relieve dry itchy skin? Soothe itchy scalp? Foot fungus? Acne? Discolorations? Dandruff? Eczema? Etc... What can you say about your skin, hair or scalp conditions?
Share your story, tips and recipe! Your contribution may be a page on this website, for others to view around the world! YOU can make a difference! Get started, now! Read the contributions of others, too!
Do You Have a Homemade Recipe?
Have you used a homemade recipe to improve your skin, hair or scalp conditions? We'd love to hear from you! Share YOUR best beauty tips!
Tell us your story AND include your recipe. People really want to know!
Enter your story and homemade recipe. It's easy to do. Just type!
If approved, your entry will appear as a web page on this website. Since most people scan web pages, include your best thoughts in your first paragraph. |
import numpy as np
#x must be a np array
def lnbin(x, BinNum):
"""
Logarithmically bins a numpy array, returns (midpoints, Freq)
This function take the input of a data vector x, which is to be binned;
it also takes in the amount bins one would like the data binned into. The
output is two vectors, one containing the normalised frequency of each bin
(Freq), the other, the midpoint of each bin (midpts).
    Added an error estimate for the binned frequency: eFreq (as of June 30 2010).
    If this output is not required, just call the function without the third
    output; i.e.: [midpts, Freq] = lnbin(x, BinNum).
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
            print('Improper input format!')
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) |
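# Usage sketch (illustrative): log-bin samples drawn from a heavy-tailed
# distribution and print the non-empty bins.
if __name__ == '__main__':
    np.random.seed(0)
    x = np.random.pareto(2.0, 10000) + 1.0
    midpts, freq = lnbin(x, 20)
    for m, f in zip(midpts, freq):
        if f > 0:
            print('%g\t%g' % (m, f))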
There are very few true waterfront motorcoach properties in Southwest Florida, which is why you'll definitely want to check out 93 Pelican Circle. You will look out onto the attractive waterfront boardwalks built in a natural mangrove habitat and right on the Myakka River. This 3706-square-foot waterfront site features landscaping on both sides of the 1500-square-foot brick-paved pad, along with two 50-amp electric outlets at the pedestal. Also included are the hook-ups for city water and sewer, along with wi-fi, which is accessible throughout the resort. The plated square footage of this site allows room for the future construction of a 370-square-foot casita or outdoor entertaining space. Grab a glass of wine and enjoy one of Myakka's spectacular sunsets right from your patio!
import unittest
from collections import OrderedDict
import six
from mock import MagicMock
from openformats.formats.yaml.utils import YamlGenerator
from openformats.formats.yaml.yaml_representee_classes import (BlockList,
FlowList,
FlowStyleOrderedDict,
double_quoted_unicode,
single_quoted_unicode)
class YamlGeneratorTestCase(unittest.TestCase):
def test_insert_translation_in_dict_empty_parent(self):
keys = ["one", "two", "[0]"]
flags = "block:block:'".split(':')
translation_string = "test"
result = OrderedDict()
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'two', BlockList([
# single_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], OrderedDict)
self.assertIsInstance(result['one']['two'], BlockList)
self.assertIsInstance(result['one']['two'][0], single_quoted_unicode)
def test_insert_translation_in_dict_non_empty_parent(self):
result = OrderedDict()
result['one'] = OrderedDict()
result['one']['three'] = 'a string'
keys = ["one", "two", "[0]"]
flags = "block:block:'".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'three', 'a string'),
# (u'two', BlockList([
# single_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertListEqual(list(six.iterkeys(result['one'])),
['three', 'two'])
self.assertIsInstance(result['one']['two'], BlockList)
self.assertIsInstance(result['one']['two'][0], single_quoted_unicode)
def test_insert_translation_in_dict_flow_list(self):
result = OrderedDict()
keys = ["one", "two", "[0]"]
flags = "block:flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'two', FlowList([
# double_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], OrderedDict)
self.assertIsInstance(result['one']['two'], FlowList)
self.assertIsInstance(result['one']['two'][0], double_quoted_unicode)
def test_insert_translation_in_dict_flow_dict(self):
result = OrderedDict()
keys = ["one", "two"]
flags = "flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', FlowStyleOrderedDict([
# (u'two', double_quoted_unicode(u'test'))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], FlowStyleOrderedDict)
self.assertIsInstance(result['one']['two'], double_quoted_unicode)
def test_insert_translation_in_dict_list_of_dicts(self):
result = OrderedDict()
keys = ["one", "[0]", "two"]
flags = "block:flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', BlockList([
        #         FlowStyleOrderedDict([
# (u'two', double_quoted_unicode(u'test'))
# ])
# ]))
# ])
        self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], BlockList)
self.assertIsInstance(result['one'][0], FlowStyleOrderedDict)
self.assertIsInstance(result['one'][0]['two'], double_quoted_unicode)
|
You can override the default SparkFun Mega Pro Mini 3.3V settings per build environment using the board_*** option, where *** is a JSON object path from the board manifest sparkfun_megamini.json. For example, board_build.mcu, board_build.f_cpu, etc.
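For example, a minimal platformio.ini environment with two such overrides might look like this (the override values below are illustrative, not the board defaults):

[env:sparkfun_megamini]
platform = atmelavr
board = sparkfun_megamini
framework = arduino

; override entries from sparkfun_megamini.json
board_build.mcu = atmega2560
board_build.f_cpu = 8000000L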
PIO Unified Debugger currently does not support SparkFun Mega Pro Mini 3.3V board. |
from collections import namedtuple
Device = namedtuple('Device', 'addr_str addr last_heard')
bluesync_uuid = [0x92, 0xb8, 0xb4, 0xf7, 0xd9,
0x96, 0xb3, 0xaf, 0x24, 0x48,
0x03, 0x20, 0x35, 0x74, 0x67,
0x86]
timestamp_uuid = [0x6e, 0x16, 0x71, 0x8f, 0xb9,
0xde, 0x2b, 0x92, 0xa0, 0x4b,
0x9d, 0x92, 0xaa, 0x49, 0xd4,
0x63]
trigger_scanning_uuid = [0x04, 0x48, 0x2f, 0x9a, 0x2e,
0x35, 0xc7, 0xa8, 0xcd, 0x4c,
0x4b, 0x90, 0x9a, 0xcb, 0xec,
0xe8]
reference_time_uuid = [0x1d, 0x4f, 0xc4, 0xeb, 0xf5,
0x2c, 0x94, 0xb9, 0xfc, 0x42,
0xca, 0x9e, 0x4a, 0x3b, 0xd9,
0x33]
bluesync_slave_adv_data = [
0x02, 0x01, 0x06, 0x02, 0x0A, 0x03,
0x06, 0xFF, 0xFF, 0xFF, 0xBE, 0xEF, 0xFE
]
bluesync_master_adv_data_prefix = [
0x02, 0x01, 0x06,
0x0A, 0xFF, 0xFF, 0xFF, 0xBE, 0xEF, 0xEF
]
sequence_number_uuid = [
0x4e, 0x14, 0x6c, 0xa0, 0x98,
0xa2, 0xd7, 0x83, 0x81, 0x4f,
0xc1, 0x48, 0xd2, 0x10, 0x9c,
0xaa
]
standard_flag_uuid = [
0x0b, 0xd1, 0x67, 0xa6, 0xfb,
0xbb, 0xe5, 0x9a, 0x64, 0x46,
0x0d, 0x3d, 0xf2, 0x73, 0xc9,
0xe7
]
def array_to_integer(array):
bytes = len(array)
if bytes > 4:
bytes = 4
return sum([array[i] << (8*i) for i in range(0,bytes)])
def integer_to_array(integer):
return [int((integer & (0xFF << 8*i)) >> 8*i) for i in range (0,4)] |
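# Round-trip sanity check for the little-endian helpers above (illustrative):
# 0xDEADBEEF <-> [0xEF, 0xBE, 0xAD, 0xDE].
if __name__ == '__main__':
    assert integer_to_array(0xDEADBEEF) == [0xEF, 0xBE, 0xAD, 0xDE]
    assert array_to_integer([0xEF, 0xBE, 0xAD, 0xDE]) == 0xDEADBEEF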
Wasn't <:ctyp< $id$ >> used for something else already?
I thought that id is used for regular types, according to the above.
I tried to see what variant type defs expand to (below).
Could someone help me to figure this out?
This is matched by $id:uid$, correct?
Is there a "shortcut" for TyOr and TyVrn?
I'm afraid I'm still lost here, thus my asking for more information. |
from utils import LXMLMixin
import re
import datetime as dt
from openstates.scrape import Scraper, Event
import pytz
class TXEventScraper(Scraper, LXMLMixin):
_tz = pytz.timezone("US/Central")
def scrape(self, session=None, chamber=None):
if not session:
session = self.latest_session()
self.info("No session specified; using %s", session)
if chamber:
yield from self.scrape_committee_upcoming(session, chamber)
else:
yield from self.scrape_committee_upcoming(session, "upper")
yield from self.scrape_committee_upcoming(session, "lower")
def scrape_event_page(self, session, chamber, url, datetime):
page = self.lxmlize(url)
info = page.xpath("//p")
metainfo = {}
plaintext = ""
for p in info:
content = re.sub(r"\s+", " ", p.text_content())
plaintext += content + "\n"
if ":" in content:
key, val = content.split(":", 1)
metainfo[key.strip()] = val.strip()
committee = metainfo["COMMITTEE"]
where = metainfo["PLACE"]
if "CHAIR" in where:
where, chair = where.split("CHAIR:")
metainfo["PLACE"] = where.strip()
metainfo["CHAIR"] = chair.strip()
chair = None
if "CHAIR" in metainfo:
chair = metainfo["CHAIR"]
plaintext = re.sub(r"\s+", " ", plaintext).strip()
regexp = r"(S|J|H)(B|M|R) (\d+)"
bills = re.findall(regexp, plaintext)
event = Event(
name=committee, start_date=self._tz.localize(datetime), location_name=where
)
event.add_source(url)
event.add_participant(committee, type="committee", note="host")
if chair is not None:
event.add_participant(chair, type="legislator", note="chair")
for bill in bills:
chamber, type, number = bill
bill_id = "%s%s %s" % (chamber, type, number)
item = event.add_agenda_item("Bill up for discussion")
item.add_bill(bill_id)
event.add_agenda_item(plaintext)
yield event
def scrape_page(self, session, chamber, url):
page = self.lxmlize(url)
events = page.xpath("//a[contains(@href, 'schedules/html')]")
for event in events:
peers = event.getparent().getparent().xpath("./*")
date = peers[0].text_content()
time = peers[1].text_content()
tad = "%s %s" % (date, time)
tad = re.sub(r"(PM|AM).*", r"\1", tad)
tad_fmt = "%m/%d/%Y %I:%M %p"
if "AM" not in tad and "PM" not in tad:
tad_fmt = "%m/%d/%Y"
tad = date
# Time expressed as 9:00 AM, Thursday, May 17, 2012
datetime = dt.datetime.strptime(tad, tad_fmt)
yield from self.scrape_event_page(
session, chamber, event.attrib["href"], datetime
)
def scrape_upcoming_page(self, session, chamber, url):
page = self.lxmlize(url)
date = None
time = None
for row in page.xpath(".//tr"):
title = row.xpath(".//div[@class='sectionTitle']")
if len(title) > 0:
date = title[0].text_content()
time_elem = row.xpath(".//td/strong")
if time_elem:
time = time_elem[0].text_content()
events = row.xpath(".//a[contains(@href, 'schedules/html')]")
for event in events:
# Ignore text after the datetime proper (ie, after "AM" or "PM")
datetime = "{} {}".format(date, time)
datetime = re.search(r"(?i)(.+?[ap]m).+", datetime)
if not datetime:
self.warning("invalid datetime: %s %s", date, time)
continue
datetime = datetime.group(1)
datetime = dt.datetime.strptime(datetime, "%A, %B %d, %Y %I:%M %p")
yield from self.scrape_event_page(
session, chamber, event.attrib["href"], datetime
)
def scrape_committee_upcoming(self, session, chamber):
chid = {"upper": "S", "lower": "H", "other": "J"}[chamber]
url = (
"https://capitol.texas.gov/Committees/Committees.aspx" + "?Chamber=" + chid
)
page = self.lxmlize(url)
refs = page.xpath("//div[@id='content']//a")
for ref in refs:
yield from self.scrape_page(session, chamber, ref.attrib["href"])
url = (
"http://capitol.texas.gov/Committees/MeetingsUpcoming.aspx"
+ "?Chamber="
+ chid
)
yield from self.scrape_upcoming_page(session, chamber, url)
|
Is your phone at risk from cyber-criminals?
While malicious software for smartphones is on the increase, it could still be human error that creates the easiest opportunities for cyber-crime.
Ever since phones became "smart", there has been concern that they could become riddled with malicious, self-replicating viruses and worms just like their less portable PC relatives.
So far however, the expected deluge has not happened. Cyber-criminals are not flooding smartphones with malware.
"The organisations or bad guys are looking for money," says Tony Osborne of online security firm Symantec.
"I think as we see mobile phones used more as a method of purchasing or creating financial transactions, then we're going to see far more attacks."
But the sector is growing quickly. Market analyst Juniper Research says that over 200 million people worldwide will have used banking services on their phone by the end of 2010, doubling to 400 million by the end of 2013.
While this makes for an increasingly enticing prospect for hackers, the current problems faced by users are often a little less hi-tech.
Over two-thirds of smartphone users are leaving themselves vulnerable to opportunistic identity fraudsters by leaving their phones without a PIN or password, according to the government-supported GetSafeOnline.org.
And even then, the phone is still at risk.
"It isn't too difficult to break into a phone, you can just try every number from 0000 to 9999," says William Buchanan, professor of computing at Edinburgh Napier University.
"I think one of the major problems is that people don't realise how much information is on the device."
And it is not just what you have on your phone, but how ease of use has often superseded the need for security.
"This means that if someone else had your handset, they can access and use your profile without needing to know your password. In addition, if you synchronise your handset with a PC at home, they'll be able to access all of that information too."
But what about those criminals trying to get to your handset remotely?
"Attackers already have the tools to write the malicious code they need," says Mr Osborne.
"The new generation of smartphones are all geared towards downloading apps which are written by other users, using software development kits."
In August, BBC technology journalist Mark Ward decided to find out if it would be possible to use these tools to write an application which posed as a simple game but in the background silently stole the phone's contact list and e-mailed it to a predetermined address.
"I think the big surprise was how straightforward it was to put the spyware together," he says.
"We were expecting to really sweat over the nasty bits, but all the bits we used are standard parts of all the applications you get on your phone. So there was no part of the phone that was cut off from those basic standard bits of coding."
Mark never made the application available but some malicious programs have been found in the wild.
Even for those not using their phones for full-on finance, there are ways for cyber-criminals to make money directly from malware.
A malicious, self-replicating virus called Commwarrior, which targets the Symbian operating system on Nokia handsets, arrives as a multimedia message.
If you click on it, you will run malicious code which scans your contact list, and sends a copy of itself to everyone it finds. Discovered in 2005, while worrying for experts, it failed to have the widespread impact that was once feared.
Also, an application for Google's Android operating system targeted users in Russia who thought they were downloading an adult video player.
Although it seemed to do nothing once installed, an examination of the source code revealed it was actually designed to silently send text messages to premium rate numbers owned by the bad guys. Users would be charged and the criminals would take the profits.
So how can you be sure that an app you download isn't doing something untoward in the background?
"I think the problem is that you can't assume that every application that wants access to your contacts is suspicious," says Mark Ward.
"Take games for example. If you want to play a multi-player game via your phone, it needs to know your location, it needs to know your friends' locations and it needs to be able to bring you together to play with those friends - that's not suspicious."
The mobile operators advise not to download apps directly from the web but stick to the official application stores provided by the five different platforms - these only contain applications which have been pre-vetted.
So far, there have been no reports of malware getting through this vetting procedure but with thousands of apps to check and source code for each running into thousands of lines, no operator can absolutely guarantee that their vetting procedure will always be foolproof.
About 25 new pieces of smartphone malware are being discovered each week. While this is minuscule compared to the PC malware landscape, some companies have already launched antivirus products which scan and remove bad apps from the phone.
However, one of the most profitable and effective mobile phone scams is not a virus or even a malicious download.
It is a spam text message which asks the recipient to call a premium-rate phone number.
But the biggest threat to your smartphone is its portability and tendency to go missing.
There are apps however that can help you remotely wipe its memory or find it, so all may not be lost after all. |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from optionaldict import optionaldict
from teambition.api.base import TeambitionAPI
class Works(TeambitionAPI):
def get(self, id=None, parent_id=None, page=None, count=None, all=None):
"""
        Get file information.
        For details see
        http://docs.teambition.com/wiki/works#works-get
        :param id: optional, file ID
        :param parent_id: optional, parent folder ID
        :param page: optional, current page, defaults to 1
        :param count: optional, items per page, defaults to 30
        :param all: optional, if given, return all results
        :return: the JSON payload of the response
"""
assert id or parent_id
params = optionaldict(
page=page,
count=count,
all=all
)
if id:
endpoint = 'api/works/{0}'.format(id)
elif parent_id:
endpoint = 'api/works'
params['_parentId'] = parent_id
return self._get(endpoint, params=params)
def create(self, parent_id, file_name, file_size, file_type, file_category,
file_key, image_width=None, image_height=None,
involve_members=None):
"""
        Create a file.
        For details see
        http://docs.teambition.com/wiki/works#works-create
        :param parent_id: ID of the folder the file belongs to
        :param file_name: file name
        :param file_size: file size
        :param file_type: file type
        :param file_category: file category
        :param file_key: obtained after uploading via the striker service
        :param image_width: optional, image width
        :param image_height: optional, image height
        :param involve_members: optional
        :return: the JSON payload of the response
"""
data = optionaldict(
_parentId=parent_id,
fileName=file_name,
fileSize=file_size,
fileType=file_type,
fileCategory=file_category,
fileKey=file_key,
imageWidth=image_width,
imageHeight=image_height,
involveMembers=involve_members
)
return self._post(
'api/works',
data=data
)
def like(self, id):
"""
        Like a file.
        For details see
        http://docs.teambition.com/wiki/works#works-like
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._post('api/works/{0}/like'.format(id))
def update(self, id, file_name, description=None):
"""
        Update a file.
        For details see
        http://docs.teambition.com/wiki/works#works-update
        :param id: file ID
        :param file_name: file name
        :param description: optional, description
        :return: the JSON payload of the response
"""
data = optionaldict(
fileName=file_name,
description=description
)
return self._put(
'api/works/{0}'.format(id),
data=data
)
def move(self, id, parent_id):
"""
        Move a file.
        For details see
        http://docs.teambition.com/wiki/works#works-move
        :param id: file ID
        :param parent_id: ID of the new folder
        :return: the JSON payload of the response
"""
return self._put(
'api/works/{0}'.format(id),
data={
'_parentId': parent_id
}
)
def delete(self, id):
"""
        Delete a file.
        For details see
        http://docs.teambition.com/wiki/works#works-delete
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._delete('api/works/{0}'.format(id))
def update_members(self, id, members):
"""
        Update the participants of a file.
        For details see
        http://docs.teambition.com/wiki/works#works-update-involvemembers
        :param id: file ID
        :param members: list of participant IDs
        :return: the JSON payload of the response
"""
return self._put(
'api/works/{0}/involveMembers'.format(id),
data={
'involveMembers': members
}
)
def get_tags(self, id):
"""
        Get the list of tags of a file.
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get('api/works/{0}/tags'.format(id))
def remove_tag(self, id, tag_id):
"""
        Remove a tag.
        :param id: file ID
        :param tag_id: tag ID
        :return: the JSON payload of the response
"""
return self._delete('api/works/{0}/tags/{1}'.format(id, tag_id))
def add_tag(self, id, tag_id):
"""
        Attach a tag.
        :param id: file ID
        :param tag_id: tag ID
        :return: the JSON payload of the response
"""
return self._put('api/works/{0}/tags/{1}'.format(id, tag_id))
def get_objectlinks(self, id):
"""
        Get the list of objectlinks associated with a file.
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get('api/works/{0}/objectlinks'.format(id))
def create_objectlink(self, id, linked_id, linked_type):
"""
        Link an object to the file.
        :param id: file ID
        :param linked_id: ID of the linked object
        :param linked_type: type of the linked object
        :return: the JSON payload of the response
"""
return self._post(
'api/objectlinks',
data={
'_parentId': id,
'parentType': 'work',
'_linkedId': linked_id,
'linkedType': linked_type
}
)
def get_versions(self, id):
"""
        Get the version history of a file.
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-list
        :param id: file ID
        :return: list of historical versions
"""
return self._get('api/works/{0}/versions'.format(id))
def get_version(self, id, version_id):
"""
        Get a single historical version.
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-get
        :param id: file ID
        :param version_id: version ID
        :return: version information
"""
return self._get('api/works/{0}/versions/{1}'.format(id, version_id))
def update_version(self, id, version_id, file_name=None, description=None):
"""
        Update a single historical version.
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-update
        :param id: file ID
        :param version_id: version ID
        :param file_name: optional, file name
        :param description: optional, description
        :return: the JSON payload of the response
"""
data = optionaldict(fileName=file_name, description=description)
return self._put(
'api/works/{0}/versions/{1}'.format(id, version_id),
data=data
)
def delete_version(self, id, version_id):
"""
        Delete a single historical version.
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-delete
        :param id: file ID
        :param version_id: version ID
        :return: the JSON payload of the response
"""
return self._delete(
'api/works/{0}/versions/{1}'.format(id, version_id)
)
def create_version(self, id, file_name, file_size, file_type,
file_category, file_key, image_width=None,
image_height=None, involve_members=None):
"""
        Create a new version of a file.
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-post
        :param id: file ID
        :param file_name: file name
        :param file_size: file size
        :param file_type: file type
        :param file_category: file category
        :param file_key: obtained after uploading via the striker service
        :param image_width: optional, image width
        :param image_height: optional, image height
        :param involve_members: optional
        :return: the JSON payload of the response
"""
data = optionaldict(
fileName=file_name,
fileSize=file_size,
fileType=file_type,
fileCategory=file_category,
fileKey=file_key,
imageWidth=image_width,
imageHeight=image_height,
involveMembers=involve_members
)
return self._post(
'api/works/{0}/versions'.format(id),
data=data
)
def link_task(self, id, linked_id):
"""
        Link a task.
        :param id: file ID
        :param linked_id: ID of the task to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'task')
def link_post(self, id, linked_id):
"""
        Link a post.
        :param id: file ID
        :param linked_id: ID of the post to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'post')
def link_event(self, id, linked_id):
"""
        Link an event.
        :param id: file ID
        :param linked_id: ID of the event to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'event')
def link_work(self, id, linked_id):
"""
        Link another file.
        :param id: file ID
        :param linked_id: ID of the file to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'work')
def get_activities(self, id):
"""
        Get the activity feed of a file.
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get(
'api/activities',
params={'_boundToObjectId': id}
)
|
This reference to Jesus as “a light for revelation to the Gentiles” leads the Church to also celebrate Candlemas on February 2nd. It is customary on this day to bless the candles we will use during prayer, celebrations and liturgy throughout the coming year. On Thursday, February 2nd, the 8:00 a.m. Mass at St. Mark’s will include the blessing of candles, and all are invited to bring a candle or candles from home to be blessed.