repo_name (string) | path (string) | copies (string) | size (string) | content (string) | license (string) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) |
---|---|---|---|---|---|---|---|---|---|---|
rwl/PyCIM | CIM14/IEC61968/Metering/Reading.py | 1 | 5442 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Meas.MeasurementValue import MeasurementValue
class Reading(MeasurementValue):
"""Specific value measured by a meter or other asset. Each Reading is associated with a specific ReadingType.
"""
def __init__(self, value=0.0, ReadingType=None, ReadingQualities=None, EndDeviceAsset=None, MeterReadings=None, *args, **kw_args):
"""Initialises a new 'Reading' instance.
@param value: Value of this reading.
@param ReadingType: Type information for this reading value.
@param ReadingQualities: Used only if quality of this reading value is different than 'Good'.
@param EndDeviceAsset:
@param MeterReadings: All meter readings (sets of values) containing this reading value.
"""
#: Value of this reading.
self.value = value
self._ReadingType = None
self.ReadingType = ReadingType
self._ReadingQualities = []
self.ReadingQualities = [] if ReadingQualities is None else ReadingQualities
self._EndDeviceAsset = None
self.EndDeviceAsset = EndDeviceAsset
self._MeterReadings = []
self.MeterReadings = [] if MeterReadings is None else MeterReadings
super(Reading, self).__init__(*args, **kw_args)
_attrs = ["value"]
_attr_types = {"value": float}
_defaults = {"value": 0.0}
_enums = {}
_refs = ["ReadingType", "ReadingQualities", "EndDeviceAsset", "MeterReadings"]
_many_refs = ["ReadingQualities", "MeterReadings"]
def getReadingType(self):
"""Type information for this reading value.
"""
return self._ReadingType
def setReadingType(self, value):
if self._ReadingType is not None:
filtered = [x for x in self.ReadingType.Readings if x != self]
self._ReadingType._Readings = filtered
self._ReadingType = value
if self._ReadingType is not None:
if self not in self._ReadingType._Readings:
self._ReadingType._Readings.append(self)
ReadingType = property(getReadingType, setReadingType)
def getReadingQualities(self):
"""Used only if quality of this reading value is different than 'Good'.
"""
return self._ReadingQualities
def setReadingQualities(self, value):
for x in self._ReadingQualities:
x.Reading = None
for y in value:
y._Reading = self
self._ReadingQualities = value
ReadingQualities = property(getReadingQualities, setReadingQualities)
def addReadingQualities(self, *ReadingQualities):
for obj in ReadingQualities:
obj.Reading = self
def removeReadingQualities(self, *ReadingQualities):
for obj in ReadingQualities:
obj.Reading = None
def getEndDeviceAsset(self):
return self._EndDeviceAsset
def setEndDeviceAsset(self, value):
if self._EndDeviceAsset is not None:
filtered = [x for x in self.EndDeviceAsset.Readings if x != self]
self._EndDeviceAsset._Readings = filtered
self._EndDeviceAsset = value
if self._EndDeviceAsset is not None:
if self not in self._EndDeviceAsset._Readings:
self._EndDeviceAsset._Readings.append(self)
EndDeviceAsset = property(getEndDeviceAsset, setEndDeviceAsset)
def getMeterReadings(self):
"""All meter readings (sets of values) containing this reading value.
"""
return self._MeterReadings
def setMeterReadings(self, value):
for p in self._MeterReadings:
filtered = [q for q in p.Readings if q != self]
p._Readings = filtered  # drop self from each previously linked MeterReading
for r in value:
if self not in r._Readings:
r._Readings.append(self)
self._MeterReadings = value
MeterReadings = property(getMeterReadings, setMeterReadings)
def addMeterReadings(self, *MeterReadings):
for obj in MeterReadings:
if self not in obj._Readings:
obj._Readings.append(self)
self._MeterReadings.append(obj)
def removeMeterReadings(self, *MeterReadings):
for obj in MeterReadings:
if self in obj._Readings:
obj._Readings.remove(self)
self._MeterReadings.remove(obj)
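# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated CIM profile): shows how the
# ReadingType property keeps the reverse Readings list in sync. The
# _StubReadingType class below is a hypothetical stand-in for
# CIM14.IEC61968.Metering.ReadingType, used only to keep the example
# self-contained.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    class _StubReadingType(object):
        def __init__(self):
            self._Readings = []

        @property
        def Readings(self):
            return self._Readings

    rt = _StubReadingType()
    reading = Reading(value=42.5)
    reading.ReadingType = rt        # setter appends reading to rt._Readings
    assert reading in rt.Readings
    reading.ReadingType = None      # setter removes reading from the old type
    assert reading not in rt.Readings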
| mit | -2,014,066,142,019,675,400 | 37.323944 | 134 | 0.668688 | false |
radiac/sermin | sermin/utils.py | 1 | 1923 | """
Util functions
"""
import os
import shlex
from subprocess import Popen, PIPE
from six import string_types
from .exceptions import ShellError
from . import report
class ShellOutput(str):
def __new__(cls, stdout, stderr):
# Store raw
stdout = stdout.strip() if stdout else ''
stderr = stderr.strip() if stderr else ''
# Join stdout and stderr
value = stdout
if stderr:
if stdout:
value += '\n'
value += stderr
self = super(ShellOutput, cls).__new__(cls, value)
self.stdout = stdout
self.stderr = stderr
return self
def shell(cmd, cd=None, stdin=None, expect_errors=False):
"""
Perform a shell command
Arguments:
cmd Shell command to execute
Returns
out Output string
out.stdout The stdout
out.stderr The stderr
out.return_code The return code
"""
cmd_display = cmd
if not isinstance(cmd, string_types):
cmd = map(str, cmd)
cmd_display = ' '.join(cmd)
if isinstance(cmd, string_types):
cmd = shlex.split(cmd)
old_dir = os.getcwd()
if cd:
report.info('$ cd {}'.format(cd))
os.chdir(cd)
report.info('$ {}'.format(cmd_display), label='shell')
process = Popen(cmd, shell=False, stdout=PIPE, stderr=PIPE, stdin=PIPE)
if stdin:
process.stdin.write(stdin)
stdout, stderr = process.communicate()
out = ShellOutput(stdout, stderr)
out.cmd = cmd
out.return_code = process.returncode
report.info(out, label='shell')
if cd:
os.chdir(old_dir)
if not expect_errors and out.return_code != 0:
msg = 'Unexpected return code {code} from {cmd}: {out}'
raise ShellError(msg.format(
code=out.return_code,
cmd=cmd_display,
out=out,
))
return out
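# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; the commands are arbitrary):
#
#   out = shell('echo hello', cd='/tmp')
#   out                # combined stdout/stderr -> 'hello'
#   out.return_code    # 0 on success
#
#   # expect_errors=True suppresses the ShellError raised on non-zero exit
#   failed = shell(['ls', 'no-such-file'], expect_errors=True)
#   failed.stderr      # captured error output
# ---------------------------------------------------------------------------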
| bsd-3-clause | -398,740,528,859,610,240 | 23.0375 | 75 | 0.577223 | false |
danakj/chromium | build/android/gyp/proguard.py | 1 | 2974 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import sys
from util import build_utils
from util import proguard_util
_DANGEROUS_OPTIMIZATIONS = [
# See crbug.com/625992
"code/allocation/variable",
# See crbug.com/625994
"field/propagation/value",
"method/propagation/parameter",
"method/propagation/returnvalue",
]
def _ParseOptions(args):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--proguard-path',
help='Path to the proguard executable.')
parser.add_option('--input-paths',
help='Paths to the .jar files proguard should run on.')
parser.add_option('--output-path', help='Path to the generated .jar file.')
parser.add_option('--proguard-configs',
help='Paths to proguard configuration files.')
parser.add_option('--mapping', help='Path to proguard mapping to apply.')
parser.add_option('--is-test', action='store_true',
help='If true, extra proguard options for instrumentation tests will be '
'added.')
parser.add_option('--tested-apk-info', help='Path to the proguard .info file '
'for the tested apk')
parser.add_option('--classpath', action='append',
help='Classpath for proguard.')
parser.add_option('--stamp', help='Path to touch on success.')
parser.add_option('--enable-dangerous-optimizations', action='store_true',
help='Enable optimizations which are known to have issues.')
parser.add_option('--verbose', '-v', action='store_true',
help='Print all proguard output')
options, _ = parser.parse_args(args)
classpath = []
for arg in options.classpath:
classpath += build_utils.ParseGypList(arg)
options.classpath = classpath
return options
def main(args):
args = build_utils.ExpandFileArgs(args)
options = _ParseOptions(args)
proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
proguard.injars(build_utils.ParseGypList(options.input_paths))
proguard.configs(build_utils.ParseGypList(options.proguard_configs))
proguard.outjar(options.output_path)
if options.mapping:
proguard.mapping(options.mapping)
if options.tested_apk_info:
proguard.tested_apk_info(options.tested_apk_info)
classpath = list(set(options.classpath))
proguard.libraryjars(classpath)
proguard.verbose(options.verbose)
if not options.enable_dangerous_optimizations:
proguard.disable_optimizations(_DANGEROUS_OPTIMIZATIONS)
input_paths = proguard.GetInputs()
build_utils.CallAndWriteDepfileIfStale(
proguard.CheckOutput,
options,
input_paths=input_paths,
input_strings=proguard.build(),
output_paths=[options.output_path])
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
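# Example invocation (all paths below are hypothetical; in a real build the
# arguments are generated by the GN/GYP rules that call this script):
#
#   python proguard.py \
#       --proguard-path third_party/proguard/bin/proguard.sh \
#       --input-paths obj/libfoo.jar \
#       --output-path obj/libfoo.proguard.jar \
#       --proguard-configs foo.flags \
#       --classpath third_party/android_tools/sdk/platforms/android-23/android.jar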
| bsd-3-clause | 2,036,293,348,165,897,700 | 32.044444 | 80 | 0.696369 | false |
Primary-Traxex/odooalipay | models/func.py | 1 | 3484 | # coding: utf-8
"""
* 支付宝接口公用函数 python版本
* 详细:该类是请求、通知返回两个文件所调用的公用函数核心处理文件
* 版本:1.0
* 日期:2012-07-19(官方)本接口写于2017年新春
* 说明:
"""
import json
import sys
import types
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA
from base64 import b64encode,b64decode
from urllib import quote_plus
def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
if strings_only and isinstance(s, (types.NoneType, int)):
return s
if not isinstance(s, basestring):
try:
return str(s)
except UnicodeEncodeError:
if isinstance(s, Exception):
return ' '.join([smart_str(arg, encoding, strings_only,
errors) for arg in s])
return unicode(s).encode(encoding, errors)
elif isinstance(s, unicode):
return s.encode(encoding, errors)
elif s and encoding != 'utf-8':
return s.decode('utf-8', errors).encode(encoding, errors)
else:
return s
"""
* 除去数组中的空值和签名参数
* @param 签名参数组
* return 去掉空值与签名参数后的新签名参数组
"""
def params_filter(params):
ks = params.keys()
ks.sort()
newparams = {}
prestr = ''
for k in ks:
v = params[k]
k = smart_str(k)
if k not in ('sign','sign_type') and v != '':
newparams[k] = smart_str(v)
prestr += '%s=%s&' % (k, newparams[k])
prestr = prestr[:-1]
return newparams, prestr
"""
* 把数组所有元素,按照“参数=参数值”的模式用“&”字符拼接成字符串
* @param $para 需要拼接的数组
* return 拼接完成以后的字符串
函数没有用到,先放着
"""
def createLinkstring(values):
res = ""
for k,v in values.iteritems():
res += k + "=" + v + "&"
res = res[:-1]
return res
"""
* 把数组所有元素,按照“参数=参数值”的模式用“&”字符拼接成字符串,并对字符串做urlencode编码
* @param $para 需要拼接的数组
* return 拼接完成以后的字符串
"""
def createLinkstringUrlencode(values):
res = ""
for k,v in values.iteritems():
res += k+"="+urlencode(v)+"&"
res = res[:-1]
return res
"""
* RSA签名
* @param data 待签名数据
* @param private_key 商户私钥字符串
* return 签名结果
"""
def rsaSign(data,private_key):
key = RSA.importKey(private_key)
hash_obj = SHA.new(data)
signer = PKCS1_v1_5.new(key)
d = b64encode(signer.sign(hash_obj))
return d
"""
*生成签名结果
*param $para_sort 已排序要签名的数组
* return 签名结果字符串
"""
def buildRequestMysign(values,private_key):
# Join all elements in the form "key=value" with "&" after filtering out empty values and signature params
params,prestr = params_filter(values)
mysign = rsaSign(prestr,private_key) or ''
return params,mysign
"""
* RSA验签
* @param $data 待签名数据
* @param $public_key 支付宝的公钥字符串
* @param $sign 要校对的的签名结果
* return 验证结果
"""
def rsaVerify(data,public_key,sign):
rsakey = RSA.importKey(public_key)
res = SHA.new(data)
verifier = PKCS1_v1_5.new(rsakey)
return verifier.verify(res,b64decode(sign))
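# ---------------------------------------------------------------------------
# Usage sketch (parameter names and the generated key are illustrative only;
# a real integration uses the merchant key issued for the Alipay account):
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    # Generate a throwaway RSA key pair so the example is self-contained.
    demo_key = RSA.generate(1024)
    private_key = demo_key.exportKey()
    public_key = demo_key.publickey().exportKey()

    params = {
        'service': 'create_direct_pay_by_user',
        'partner': '2088000000000000',   # hypothetical partner id
        'out_trade_no': 'SO0001',
        'total_fee': '0.01',
        'sign_type': 'RSA',              # stripped by params_filter
        'memo': '',                      # empty values are stripped too
    }
    signed_params, sign = buildRequestMysign(params, private_key)
    # params_filter drops empty values and sign/sign_type, sorts the keys and
    # builds the "k=v&k=v" string that rsaSign signs (base64-encoded RSA-SHA1).
    _, prestr = params_filter(params)
    print(rsaVerify(prestr, public_key, sign))   # -> True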
| mit | 2,567,390,790,461,478,000 | 22.816667 | 74 | 0.607418 | false |
makhan/spaghetti-basic | translator.py | 1 | 1798 | """
A module that simply bolts on simple loops
by translating them into interpretable code in Spaghetti.
This module will probably disappear in the future.
This is just a quick hack some hours before the GCJ 2010 Quals
to add FOR loops for solving problems during the contest
Warning: Extremely Buggy and hardly works
A proper loop structure will have to be implemented at the parser level
"""
from tokenizer import tokenize
header="""
DIM __call_stack(1000000) AS INTEGER
DIM __loop_stack(1000000) AS INTEGER
__call_sp=0
__loop_sp=0
"""
DEBUG=False
def translate(lines):
#output=tokenize(header)
if DEBUG:
for line in lines:
print line
output=[]
cur_line=900000
last_loop=[]
loop_stack=[]
for line in lines:
if len(line)==0:
continue
if line[0]=='FOR':
var,start,end=line[1],line[3],line[5]
if len(line)>6:
step=line[7]
else:
step=+1
if DEBUG:
print "found var=%s, start=%s, end=%s, step=%s"%(var,start,end,step)
output.append([str(cur_line),'LET',var,'=',start])
cur_line+=1
output.append([str(cur_line), 'IF',var,'=',end,'THEN','GOTO'])
loop_stack.append(step)
loop_stack.append(var)
loop_stack.append(cur_line)
last_loop.append(len(output)-1)
cur_line+=1
elif line[0]=='NEXT':
try:
return_point=last_loop.pop()
except IndexError:
print "Unmatched NEXT"
return []
return_line=loop_stack.pop()
var=loop_stack.pop()
step=loop_stack.pop()
output.append(['LET',var,'=',var,'+',str(step)])
output.append(['GOTO', str(return_line)])
#line after NEXT
cur_line+=1
output[return_point].append(str(cur_line))
output.append([str(cur_line),'REM','Line after for loop'])
cur_line+=1
else:
output.append(line)
if DEBUG:
for line in output:
print line
return output
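# ---------------------------------------------------------------------------
# Illustrative sketch (the token lists below mimic tokenizer output): a FOR
# line is rewritten into a LET plus a conditional GOTO past the loop body,
# and NEXT becomes an increment followed by a GOTO back to the test line.
# Generated line numbers start at 900000, as set by cur_line above.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    program = [
        ['FOR', 'I', '=', '1', 'TO', '10'],
        ['PRINT', 'I'],
        ['NEXT'],
    ]
    for tokens in translate(program):
        print tokens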
| mit | -601,688,768,264,575,000 | 22.657895 | 72 | 0.669633 | false |
hyatzh/redis-py | redis/client.py | 1 | 72902 | from __future__ import with_statement
from itertools import chain, starmap
import datetime
import sys
import warnings
import time as mod_time
from redis._compat import (b, izip, imap, iteritems, dictkeys, dictvalues,
basestring, long, nativestr, urlparse, bytes)
from redis.connection import ConnectionPool, UnixDomainSocketConnection
from redis.exceptions import (
ConnectionError,
DataError,
RedisError,
ResponseError,
WatchError,
NoScriptError,
ExecAbortError,
)
SYM_EMPTY = b('')
def list_or_args(keys, args):
# returns a single list combining keys and args
try:
iter(keys)
# a string or bytes instance can be iterated, but indicates
# keys wasn't passed as a list
if isinstance(keys, (basestring, bytes)):
keys = [keys]
except TypeError:
keys = [keys]
if args:
keys.extend(args)
return keys
def timestamp_to_datetime(response):
"Converts a unix timestamp to a Python datetime object"
if not response:
return None
try:
response = int(response)
except ValueError:
return None
return datetime.datetime.fromtimestamp(response)
def string_keys_to_dict(key_string, callback):
return dict.fromkeys(key_string.split(), callback)
def dict_merge(*dicts):
merged = {}
[merged.update(d) for d in dicts]
return merged
def parse_debug_object(response):
"Parse the results of Redis's DEBUG OBJECT command into a Python dict"
# The 'type' of the object is the first item in the response, but isn't
# prefixed with a name
response = nativestr(response)
response = 'type:' + response
response = dict([kv.split(':') for kv in response.split()])
# parse some expected int values from the string response
# note: this cmd isn't spec'd so these may not appear in all redis versions
int_fields = ('refcount', 'serializedlength', 'lru', 'lru_seconds_idle')
for field in int_fields:
if field in response:
response[field] = int(response[field])
return response
def parse_object(response, infotype):
"Parse the results of an OBJECT command"
if infotype in ('idletime', 'refcount'):
return int(response)
return response
def parse_info(response):
"Parse the result of Redis's INFO command into a Python dict"
info = {}
response = nativestr(response)
def get_value(value):
if ',' not in value or '=' not in value:
try:
if '.' in value:
return float(value)
else:
return int(value)
except ValueError:
return value
else:
sub_dict = {}
for item in value.split(','):
k, v = item.rsplit('=', 1)
sub_dict[k] = get_value(v)
return sub_dict
for line in response.splitlines():
if line and not line.startswith('#'):
key, value = line.split(':')
info[key] = get_value(value)
return info
def pairs_to_dict(response):
"Create a dict given a list of key/value pairs"
it = iter(response)
return dict(izip(it, it))
def zset_score_pairs(response, **options):
"""
If ``withscores`` is specified in the options, return the response as
a list of (value, score) pairs
"""
if not response or not options['withscores']:
return response
score_cast_func = options.get('score_cast_func', float)
it = iter(response)
return list(izip(it, imap(score_cast_func, it)))
def int_or_none(response):
if response is None:
return None
return int(response)
def float_or_none(response):
if response is None:
return None
return float(response)
def parse_client(response, **options):
parse = options['parse']
if parse == 'LIST':
clients = []
for c in nativestr(response).splitlines():
clients.append(dict([pair.split('=') for pair in c.split(' ')]))
return clients
elif parse == 'KILL':
return bool(response)
def parse_config(response, **options):
if options['parse'] == 'GET':
response = [nativestr(i) if i is not None else None for i in response]
return response and pairs_to_dict(response) or {}
return nativestr(response) == 'OK'
def parse_script(response, **options):
parse = options['parse']
if parse in ('FLUSH', 'KILL'):
return response == 'OK'
if parse == 'EXISTS':
return list(imap(bool, response))
return response
class StrictRedis(object):
"""
Implementation of the Redis protocol.
This abstract class provides a Python interface to all Redis commands
and an implementation of the Redis protocol.
Connection and Pipeline derive from this, implementing how
the commands are sent and received to the Redis server
"""
RESPONSE_CALLBACKS = dict_merge(
string_keys_to_dict(
'AUTH DEL EXISTS EXPIRE EXPIREAT HDEL HEXISTS HMSET MOVE MSETNX '
'PERSIST RENAMENX SISMEMBER SMOVE SETEX SETNX SREM ZREM',
bool
),
string_keys_to_dict(
'BITCOUNT DECRBY GETBIT HLEN INCRBY LINSERT LLEN LPUSHX RPUSHX '
'SADD SCARD SDIFFSTORE SETBIT SETRANGE SINTERSTORE STRLEN '
'SUNIONSTORE ZADD ZCARD ZREMRANGEBYRANK ZREMRANGEBYSCORE',
int
),
string_keys_to_dict('INCRBYFLOAT HINCRBYFLOAT', float),
string_keys_to_dict(
# these return OK, or int if redis-server is >=1.3.4
'LPUSH RPUSH',
lambda r: isinstance(r, long) and r or nativestr(r) == 'OK'
),
string_keys_to_dict('ZSCORE ZINCRBY', float_or_none),
string_keys_to_dict(
'FLUSHALL FLUSHDB LSET LTRIM MSET RENAME '
'SAVE SELECT SET SHUTDOWN SLAVEOF WATCH UNWATCH',
lambda r: nativestr(r) == 'OK'
),
string_keys_to_dict('BLPOP BRPOP', lambda r: r and tuple(r) or None),
string_keys_to_dict(
'SDIFF SINTER SMEMBERS SUNION',
lambda r: r and set(r) or set()
),
string_keys_to_dict(
'ZRANGE ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE',
zset_score_pairs
),
string_keys_to_dict('ZRANK ZREVRANK', int_or_none),
{
'BGREWRITEAOF': (
lambda r: r == 'Background rewriting of AOF file started'
),
'BGSAVE': lambda r: r == 'Background saving started',
'BRPOPLPUSH': lambda r: r and r or None,
'CLIENT': parse_client,
'CONFIG': parse_config,
'DEBUG': parse_debug_object,
'HGETALL': lambda r: r and pairs_to_dict(r) or {},
'INFO': parse_info,
'LASTSAVE': timestamp_to_datetime,
'OBJECT': parse_object,
'PING': lambda r: nativestr(r) == 'PONG',
'RANDOMKEY': lambda r: r and r or None,
'SCRIPT': parse_script,
'TIME': lambda x: (int(x[0]), int(x[1]))
}
)
@classmethod
def from_url(cls, url, db=None, **kwargs):
"""
Return a Redis client object configured from the given URL.
For example::
redis://username:password@localhost:6379/0
If ``db`` is None, this method will attempt to extract the database ID
from the URL path component.
Any additional keyword arguments will be passed along to the Redis
class's initializer.
"""
url = urlparse(url)
# We only support redis:// schemes.
assert url.scheme == 'redis' or not url.scheme
# Extract the database ID from the path component if hasn't been given.
if db is None:
try:
db = int(url.path.replace('/', ''))
except (AttributeError, ValueError):
db = 0
return cls(host=url.hostname, port=url.port, db=db,
password=url.password, **kwargs)
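    # Usage sketch (URL and password are made up):
    #
    #   client = StrictRedis.from_url('redis://:s3cret@localhost:6379/2')
    #   client.set('greeting', 'hello')
    #
    # The trailing '/2' selects database 2 when ``db`` is not given explicitly.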
def __init__(self, host='localhost', port=6379,
db=0, password=None, socket_timeout=None,
connection_pool=None, charset='utf-8',
errors='strict', decode_responses=False,
unix_socket_path=None):
if not connection_pool:
kwargs = {
'db': db,
'password': password,
'socket_timeout': socket_timeout,
'encoding': charset,
'encoding_errors': errors,
'decode_responses': decode_responses,
}
# based on input, setup appropriate connection args
if unix_socket_path:
kwargs.update({
'path': unix_socket_path,
'connection_class': UnixDomainSocketConnection
})
else:
kwargs.update({
'host': host,
'port': port
})
connection_pool = ConnectionPool(**kwargs)
self.connection_pool = connection_pool
self.response_callbacks = self.__class__.RESPONSE_CALLBACKS.copy()
def set_response_callback(self, command, callback):
"Set a custom Response Callback"
self.response_callbacks[command] = callback
def pipeline(self, transaction=True, shard_hint=None):
"""
Return a new pipeline object that can queue multiple commands for
later execution. ``transaction`` indicates whether all commands
should be executed atomically. Apart from making a group of operations
atomic, pipelines are useful for reducing the back-and-forth overhead
between the client and server.
"""
return StrictPipeline(
self.connection_pool,
self.response_callbacks,
transaction,
shard_hint)
def transaction(self, func, *watches, **kwargs):
"""
Convenience method for executing the callable `func` as a transaction
while watching all keys specified in `watches`. The 'func' callable
should expect a single argument which is a Pipeline object.
"""
shard_hint = kwargs.pop('shard_hint', None)
with self.pipeline(True, shard_hint) as pipe:
while 1:
try:
if watches:
pipe.watch(*watches)
func(pipe)
return pipe.execute()
except WatchError:
continue
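    # Usage sketch (key name is illustrative; ``r`` is an existing client):
    # atomically increment a counter while watching it for concurrent changes.
    #
    #   def change(pipe):
    #       current = pipe.get('visits') or 0
    #       pipe.multi()
    #       pipe.set('visits', int(current) + 1)
    #
    #   r.transaction(change, 'visits')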
def lock(self, name, timeout=None, sleep=0.1):
"""
Return a new Lock object using key ``name`` that mimics
the behavior of threading.Lock.
If specified, ``timeout`` indicates a maximum life for the lock.
By default, it will remain locked until release() is called.
``sleep`` indicates the amount of time to sleep per loop iteration
when the lock is in blocking mode and another client is currently
holding the lock.
"""
return Lock(self, name, timeout=timeout, sleep=sleep)
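    # Usage sketch (lock name, timeout and the guarded call are illustrative;
    # ``r`` is an existing client):
    #
    #   with r.lock('my-resource', timeout=10):
    #       do_exclusive_work()
    #
    # The returned Lock supports the context-manager protocol, blocking in
    # acquire() until the lock is obtained and releasing it on exit.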
def pubsub(self, shard_hint=None):
"""
Return a Publish/Subscribe object. With this object, you can
subscribe to channels and listen for messages that get published to
them.
"""
return PubSub(self.connection_pool, shard_hint)
#### COMMAND EXECUTION AND PROTOCOL PARSING ####
def execute_command(self, *args, **options):
"Execute a command and return a parsed response"
pool = self.connection_pool
command_name = args[0]
connection = pool.get_connection(command_name, **options)
try:
connection.send_command(*args)
return self.parse_response(connection, command_name, **options)
except ConnectionError:
connection.disconnect()
connection.send_command(*args)
return self.parse_response(connection, command_name, **options)
finally:
pool.release(connection)
def parse_response(self, connection, command_name, **options):
"Parses a response from the Redis server"
response = connection.read_response()
if command_name in self.response_callbacks:
return self.response_callbacks[command_name](response, **options)
return response
#### SERVER INFORMATION ####
def bgrewriteaof(self):
"Tell the Redis server to rewrite the AOF file from data in memory."
return self.execute_command('BGREWRITEAOF')
def bgsave(self):
"""
Tell the Redis server to save its data to disk. Unlike save(),
this method is asynchronous and returns immediately.
"""
return self.execute_command('BGSAVE')
def client_kill(self, address):
"Disconnects the client at ``address`` (ip:port)"
return self.execute_command('CLIENT', 'KILL', address, parse='KILL')
def client_list(self):
"Returns a list of currently connected clients"
return self.execute_command('CLIENT', 'LIST', parse='LIST')
def config_get(self, pattern="*"):
"Return a dictionary of configuration based on the ``pattern``"
return self.execute_command('CONFIG', 'GET', pattern, parse='GET')
def config_set(self, name, value):
"Set config item ``name`` with ``value``"
return self.execute_command('CONFIG', 'SET', name, value, parse='SET')
def dbsize(self):
"Returns the number of keys in the current database"
return self.execute_command('DBSIZE')
def time(self):
"""
Returns the server time as a 2-item tuple of ints:
(seconds since epoch, microseconds into this second).
"""
return self.execute_command('TIME')
def debug_object(self, key):
"Returns version specific metainformation about a give key"
return self.execute_command('DEBUG', 'OBJECT', key)
def delete(self, *names):
"Delete one or more keys specified by ``names``"
return self.execute_command('DEL', *names)
__delitem__ = delete
def echo(self, value):
"Echo the string back from the server"
return self.execute_command('ECHO', value)
def flushall(self):
"Delete all keys in all databases on the current host"
return self.execute_command('FLUSHALL')
def flushdb(self):
"Delete all keys in the current database"
return self.execute_command('FLUSHDB')
def info(self, section=None):
"""
Returns a dictionary containing information about the Redis server
The ``section`` option can be used to select a specific section
of information
The section option is not supported by older versions of Redis Server,
and will generate ResponseError
"""
if section is None:
return self.execute_command('INFO')
else:
return self.execute_command('INFO', section)
def lastsave(self):
"""
Return a Python datetime object representing the last time the
Redis database was saved to disk
"""
return self.execute_command('LASTSAVE')
def object(self, infotype, key):
"Return the encoding, idletime, or refcount about the key"
return self.execute_command('OBJECT', infotype, key, infotype=infotype)
def ping(self):
"Ping the Redis server"
return self.execute_command('PING')
def save(self):
"""
Tell the Redis server to save its data to disk,
blocking until the save is complete
"""
return self.execute_command('SAVE')
def shutdown(self):
"Shutdown the server"
try:
self.execute_command('SHUTDOWN')
except ConnectionError:
# a ConnectionError here is expected
return
raise RedisError("SHUTDOWN seems to have failed.")
def slaveof(self, host=None, port=None):
"""
Set the server to be a replicated slave of the instance identified
by the ``host`` and ``port``. If called without arguments, the
instance is promoted to a master instead.
"""
if host is None and port is None:
return self.execute_command("SLAVEOF", "NO", "ONE")
return self.execute_command("SLAVEOF", host, port)
#### BASIC KEY COMMANDS ####
def append(self, key, value):
"""
Appends the string ``value`` to the value at ``key``. If ``key``
doesn't already exist, create it with a value of ``value``.
Returns the new length of the value at ``key``.
"""
return self.execute_command('APPEND', key, value)
def getrange(self, key, start, end):
"""
Returns the substring of the string value stored at ``key``,
determined by the offsets ``start`` and ``end`` (both are inclusive)
"""
return self.execute_command('GETRANGE', key, start, end)
def bitcount(self, key, start=None, end=None):
"""
Returns the count of set bits in the value of ``key``. Optional
``start`` and ``end`` paramaters indicate which bytes to consider
"""
params = [key]
if start is not None and end is not None:
params.append(start)
params.append(end)
elif (start is not None and end is None) or \
(end is not None and start is None):
raise RedisError("Both start and end must be specified")
return self.execute_command('BITCOUNT', *params)
def bitop(self, operation, dest, *keys):
"""
Perform a bitwise operation using ``operation`` between ``keys`` and
store the result in ``dest``.
"""
return self.execute_command('BITOP', operation, dest, *keys)
def decr(self, name, amount=1):
"""
Decrements the value of ``key`` by ``amount``. If no key exists,
the value will be initialized as 0 - ``amount``
"""
return self.execute_command('DECRBY', name, amount)
def exists(self, name):
"Returns a boolean indicating whether key ``name`` exists"
return self.execute_command('EXISTS', name)
__contains__ = exists
def expire(self, name, time):
"""
Set an expire flag on key ``name`` for ``time`` seconds. ``time``
can be represented by an integer or a Python timedelta object.
"""
if isinstance(time, datetime.timedelta):
time = time.seconds + time.days * 24 * 3600
return self.execute_command('EXPIRE', name, time)
def expireat(self, name, when):
"""
Set an expire flag on key ``name``. ``when`` can be represented
as an integer indicating unix time or a Python datetime object.
"""
if isinstance(when, datetime.datetime):
when = int(mod_time.mktime(when.timetuple()))
return self.execute_command('EXPIREAT', name, when)
def get(self, name):
"""
Return the value at key ``name``, or None if the key doesn't exist
"""
return self.execute_command('GET', name)
def __getitem__(self, name):
"""
Return the value at key ``name``, raises a KeyError if the key
doesn't exist.
"""
value = self.get(name)
if value:
return value
raise KeyError(name)
def getbit(self, name, offset):
"Returns a boolean indicating the value of ``offset`` in ``name``"
return self.execute_command('GETBIT', name, offset)
def getset(self, name, value):
"""
Set the value at key ``name`` to ``value`` if key doesn't exist
Return the value at key ``name`` atomically
"""
return self.execute_command('GETSET', name, value)
def incr(self, name, amount=1):
"""
Increments the value of ``key`` by ``amount``. If no key exists,
the value will be initialized as ``amount``
"""
return self.execute_command('INCRBY', name, amount)
def incrbyfloat(self, name, amount=1.0):
"""
Increments the value at key ``name`` by floating ``amount``.
If no key exists, the value will be initialized as ``amount``
"""
return self.execute_command('INCRBYFLOAT', name, amount)
def keys(self, pattern='*'):
"Returns a list of keys matching ``pattern``"
return self.execute_command('KEYS', pattern)
def mget(self, keys, *args):
"""
Returns a list of values ordered identically to ``keys``
"""
args = list_or_args(keys, args)
return self.execute_command('MGET', *args)
def mset(self, mapping):
"Sets each key in the ``mapping`` dict to its corresponding value"
items = []
for pair in iteritems(mapping):
items.extend(pair)
return self.execute_command('MSET', *items)
def msetnx(self, mapping):
"""
Sets each key in the ``mapping`` dict to its corresponding value if
none of the keys are already set
"""
items = []
for pair in iteritems(mapping):
items.extend(pair)
return self.execute_command('MSETNX', *items)
def move(self, name, db):
"Moves the key ``name`` to a different Redis database ``db``"
return self.execute_command('MOVE', name, db)
def persist(self, name):
"Removes an expiration on ``name``"
return self.execute_command('PERSIST', name)
def pexpire(self, name, time):
"""
Set an expire flag on key ``name`` for ``time`` milliseconds.
``time`` can be represented by an integer or a Python timedelta
object.
"""
if isinstance(time, datetime.timedelta):
ms = int(time.microseconds / 1000)
time = time.seconds + time.days * 24 * 3600 * 1000 + ms
return self.execute_command('PEXPIRE', name, time)
def pexpireat(self, name, when):
"""
Set an expire flag on key ``name``. ``when`` can be represented
as an integer representing unix time in milliseconds (unix time * 1000)
or a Python datetime object.
"""
if isinstance(when, datetime.datetime):
ms = int(when.microsecond / 1000)
when = int(mod_time.mktime(when.timetuple())) * 1000 + ms
return self.execute_command('PEXPIREAT', name, when)
def pttl(self, name):
"Returns the number of milliseconds until the key ``name`` will expire"
return self.execute_command('PTTL', name)
def randomkey(self):
"Returns the name of a random key"
return self.execute_command('RANDOMKEY')
def rename(self, src, dst):
"""
Rename key ``src`` to ``dst``
"""
return self.execute_command('RENAME', src, dst)
def renamenx(self, src, dst):
"Rename key ``src`` to ``dst`` if ``dst`` doesn't already exist"
return self.execute_command('RENAMENX', src, dst)
def set(self, name, value):
"Set the value at key ``name`` to ``value``"
return self.execute_command('SET', name, value)
__setitem__ = set
def setbit(self, name, offset, value):
"""
Flag the ``offset`` in ``name`` as ``value``. Returns a boolean
indicating the previous value of ``offset``.
"""
value = value and 1 or 0
return self.execute_command('SETBIT', name, offset, value)
def setex(self, name, time, value):
"""
Set the value of key ``name`` to ``value`` that expires in ``time``
seconds. ``time`` can be represented by an integer or a Python
timedelta object.
"""
if isinstance(time, datetime.timedelta):
time = time.seconds + time.days * 24 * 3600
return self.execute_command('SETEX', name, time, value)
def setnx(self, name, value):
"Set the value of key ``name`` to ``value`` if key doesn't exist"
return self.execute_command('SETNX', name, value)
def setrange(self, name, offset, value):
"""
Overwrite bytes in the value of ``name`` starting at ``offset`` with
``value``. If ``offset`` plus the length of ``value`` exceeds the
length of the original value, the new value will be larger than before.
If ``offset`` exceeds the length of the original value, null bytes
will be used to pad between the end of the previous value and the start
of what's being injected.
Returns the length of the new string.
"""
return self.execute_command('SETRANGE', name, offset, value)
def strlen(self, name):
"Return the number of bytes stored in the value of ``name``"
return self.execute_command('STRLEN', name)
def substr(self, name, start, end=-1):
"""
Return a substring of the string at key ``name``. ``start`` and ``end``
are 0-based integers specifying the portion of the string to return.
"""
return self.execute_command('SUBSTR', name, start, end)
def ttl(self, name):
"Returns the number of seconds until the key ``name`` will expire"
return self.execute_command('TTL', name)
def type(self, name):
"Returns the type of key ``name``"
return self.execute_command('TYPE', name)
def watch(self, *names):
"""
Watches the values at keys ``names``, or None if the key doesn't exist
"""
warnings.warn(DeprecationWarning('Call WATCH from a Pipeline object'))
def unwatch(self):
"""
Unwatches all previously watched keys for a transaction
"""
warnings.warn(
DeprecationWarning('Call UNWATCH from a Pipeline object'))
#### LIST COMMANDS ####
def blpop(self, keys, timeout=0):
"""
LPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to LPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely.
"""
if timeout is None:
timeout = 0
if isinstance(keys, basestring):
keys = [keys]
else:
keys = list(keys)
keys.append(timeout)
return self.execute_command('BLPOP', *keys)
def brpop(self, keys, timeout=0):
"""
RPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to LPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely.
"""
if timeout is None:
timeout = 0
if isinstance(keys, basestring):
keys = [keys]
else:
keys = list(keys)
keys.append(timeout)
return self.execute_command('BRPOP', *keys)
def brpoplpush(self, src, dst, timeout=0):
"""
Pop a value off the tail of ``src``, push it on the head of ``dst``
and then return it.
This command blocks until a value is in ``src`` or until ``timeout``
seconds elapse, whichever is first. A ``timeout`` value of 0 blocks
forever.
"""
if timeout is None:
timeout = 0
return self.execute_command('BRPOPLPUSH', src, dst, timeout)
def lindex(self, name, index):
"""
Return the item from list ``name`` at position ``index``
Negative indexes are supported and will return an item at the
end of the list
"""
return self.execute_command('LINDEX', name, index)
def linsert(self, name, where, refvalue, value):
"""
Insert ``value`` in list ``name`` either immediately before or after
[``where``] ``refvalue``
Returns the new length of the list on success or -1 if ``refvalue``
is not in the list.
"""
return self.execute_command('LINSERT', name, where, refvalue, value)
def llen(self, name):
"Return the length of the list ``name``"
return self.execute_command('LLEN', name)
def lpop(self, name):
"Remove and return the first item of the list ``name``"
return self.execute_command('LPOP', name)
def lpush(self, name, *values):
"Push ``values`` onto the head of the list ``name``"
return self.execute_command('LPUSH', name, *values)
def lpushx(self, name, value):
"Push ``value`` onto the head of the list ``name`` if ``name`` exists"
return self.execute_command('LPUSHX', name, value)
def lrange(self, name, start, end):
"""
Return a slice of the list ``name`` between
position ``start`` and ``end``
``start`` and ``end`` can be negative numbers just like
Python slicing notation
"""
return self.execute_command('LRANGE', name, start, end)
def lrem(self, name, count, value):
"""
Remove the first ``count`` occurrences of elements equal to ``value``
from the list stored at ``name``.
The count argument influences the operation in the following ways:
count > 0: Remove elements equal to value moving from head to tail.
count < 0: Remove elements equal to value moving from tail to head.
count = 0: Remove all elements equal to value.
"""
return self.execute_command('LREM', name, count, value)
def lset(self, name, index, value):
"Set ``position`` of list ``name`` to ``value``"
return self.execute_command('LSET', name, index, value)
def ltrim(self, name, start, end):
"""
Trim the list ``name``, removing all values not within the slice
between ``start`` and ``end``
``start`` and ``end`` can be negative numbers just like
Python slicing notation
"""
return self.execute_command('LTRIM', name, start, end)
def rpop(self, name):
"Remove and return the last item of the list ``name``"
return self.execute_command('RPOP', name)
def rpoplpush(self, src, dst):
"""
RPOP a value off of the ``src`` list and atomically LPUSH it
on to the ``dst`` list. Returns the value.
"""
return self.execute_command('RPOPLPUSH', src, dst)
def rpush(self, name, *values):
"Push ``values`` onto the tail of the list ``name``"
return self.execute_command('RPUSH', name, *values)
def rpushx(self, name, value):
"Push ``value`` onto the tail of the list ``name`` if ``name`` exists"
return self.execute_command('RPUSHX', name, value)
def sort(self, name, start=None, num=None, by=None, get=None,
desc=False, alpha=False, store=None):
"""
Sort and return the list, set or sorted set at ``name``.
``start`` and ``num`` allow for paging through the sorted data
``by`` allows using an external key to weight and sort the items.
Use an "*" to indicate where in the key the item value is located
``get`` allows for returning items from external keys rather than the
sorted data itself. Use an "*" to indicate where in the key
the item value is located
``desc`` allows for reversing the sort
``alpha`` allows for sorting lexicographically rather than numerically
``store`` allows for storing the result of the sort into
the key ``store``
"""
if (start is not None and num is None) or \
(num is not None and start is None):
raise RedisError("``start`` and ``num`` must both be specified")
pieces = [name]
if by is not None:
pieces.append('BY')
pieces.append(by)
if start is not None and num is not None:
pieces.append('LIMIT')
pieces.append(start)
pieces.append(num)
if get is not None:
# If get is a string assume we want to get a single value.
# Otherwise assume it's an iterable and we want to get multiple
# values. We can't just iterate blindly because strings are
# iterable.
if isinstance(get, basestring):
pieces.append('GET')
pieces.append(get)
else:
for g in get:
pieces.append('GET')
pieces.append(g)
if desc:
pieces.append('DESC')
if alpha:
pieces.append('ALPHA')
if store is not None:
pieces.append('STORE')
pieces.append(store)
return self.execute_command('SORT', *pieces)
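    # Usage sketch (key names are illustrative; ``r`` is an existing client):
    # sort the ids stored in 'mylist' by external weight keys and fetch the
    # matching data keys.
    #
    #   r.rpush('mylist', 1, 2, 3)
    #   r.sort('mylist', by='weight_*', get='data_*', desc=True)
    #
    # 'weight_*' and 'data_*' are expanded per element, e.g. 'weight_1'.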
#### SET COMMANDS ####
def sadd(self, name, *values):
"Add ``value(s)`` to set ``name``"
return self.execute_command('SADD', name, *values)
def scard(self, name):
"Return the number of elements in set ``name``"
return self.execute_command('SCARD', name)
def sdiff(self, keys, *args):
"Return the difference of sets specified by ``keys``"
args = list_or_args(keys, args)
return self.execute_command('SDIFF', *args)
def sdiffstore(self, dest, keys, *args):
"""
Store the difference of sets specified by ``keys`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(keys, args)
return self.execute_command('SDIFFSTORE', dest, *args)
def sinter(self, keys, *args):
"Return the intersection of sets specified by ``keys``"
args = list_or_args(keys, args)
return self.execute_command('SINTER', *args)
def sinterstore(self, dest, keys, *args):
"""
Store the intersection of sets specified by ``keys`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(keys, args)
return self.execute_command('SINTERSTORE', dest, *args)
def sismember(self, name, value):
"Return a boolean indicating if ``value`` is a member of set ``name``"
return self.execute_command('SISMEMBER', name, value)
def smembers(self, name):
"Return all members of the set ``name``"
return self.execute_command('SMEMBERS', name)
def smove(self, src, dst, value):
"Move ``value`` from set ``src`` to set ``dst`` atomically"
return self.execute_command('SMOVE', src, dst, value)
def spop(self, name):
"Remove and return a random member of set ``name``"
return self.execute_command('SPOP', name)
def srandmember(self, name, number=None):
"""
If ``number`` is None, returns a random member of set ``name``.
If ``number`` is supplied, returns a list of ``number`` random
members of set ``name``. Note this is only available when running
Redis 2.6+.
"""
args = number and [number] or []
return self.execute_command('SRANDMEMBER', name, *args)
def srem(self, name, *values):
"Remove ``values`` from set ``name``"
return self.execute_command('SREM', name, *values)
def sunion(self, keys, *args):
"Return the union of sets specifiued by ``keys``"
args = list_or_args(keys, args)
return self.execute_command('SUNION', *args)
def sunionstore(self, dest, keys, *args):
"""
Store the union of sets specified by ``keys`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(keys, args)
return self.execute_command('SUNIONSTORE', dest, *args)
#### SORTED SET COMMANDS ####
def zadd(self, name, *args, **kwargs):
"""
Set any number of score, element-name pairs to the key ``name``. Pairs
can be specified in two ways:
As *args, in the form of: score1, name1, score2, name2, ...
or as **kwargs, in the form of: name1=score1, name2=score2, ...
The following example would add four values to the 'my-key' key:
redis.zadd('my-key', 1.1, 'name1', 2.2, 'name2', name3=3.3, name4=4.4)
"""
pieces = []
if args:
if len(args) % 2 != 0:
raise RedisError("ZADD requires an equal number of "
"values and scores")
pieces.extend(args)
for pair in iteritems(kwargs):
pieces.append(pair[1])
pieces.append(pair[0])
return self.execute_command('ZADD', name, *pieces)
def zcard(self, name):
"Return the number of elements in the sorted set ``name``"
return self.execute_command('ZCARD', name)
def zcount(self, name, min, max):
return self.execute_command('ZCOUNT', name, min, max)
def zincrby(self, name, value, amount=1):
"Increment the score of ``value`` in sorted set ``name`` by ``amount``"
return self.execute_command('ZINCRBY', name, amount, value)
def zinterstore(self, dest, keys, aggregate=None):
"""
Intersect multiple sorted sets specified by ``keys`` into
a new sorted set, ``dest``. Scores in the destination will be
aggregated based on the ``aggregate``, or SUM if none is provided.
"""
return self._zaggregate('ZINTERSTORE', dest, keys, aggregate)
def zrange(self, name, start, end, desc=False, withscores=False,
score_cast_func=float):
"""
Return a range of values from sorted set ``name`` between
``start`` and ``end`` sorted in ascending order.
``start`` and ``end`` can be negative, indicating the end of the range.
``desc`` a boolean indicating whether to sort the results in descending order
``withscores`` indicates to return the scores along with the values.
The return type is a list of (value, score) pairs
``score_cast_func`` a callable used to cast the score return value
"""
if desc:
return self.zrevrange(name, start, end, withscores,
score_cast_func)
pieces = ['ZRANGE', name, start, end]
if withscores:
pieces.append('withscores')
options = {
'withscores': withscores, 'score_cast_func': score_cast_func}
return self.execute_command(*pieces, **options)
def zrangebyscore(self, name, min, max, start=None, num=None,
withscores=False, score_cast_func=float):
"""
Return a range of values from the sorted set ``name`` with scores
between ``min`` and ``max``.
If ``start`` and ``num`` are specified, then return a slice
of the range.
``withscores`` indicates to return the scores along with the values.
The return type is a list of (value, score) pairs
``score_cast_func`` a callable used to cast the score return value
"""
if (start is not None and num is None) or \
(num is not None and start is None):
raise RedisError("``start`` and ``num`` must both be specified")
pieces = ['ZRANGEBYSCORE', name, min, max]
if start is not None and num is not None:
pieces.extend(['LIMIT', start, num])
if withscores:
pieces.append('withscores')
options = {
'withscores': withscores, 'score_cast_func': score_cast_func}
return self.execute_command(*pieces, **options)
def zrank(self, name, value):
"""
Returns a 0-based value indicating the rank of ``value`` in sorted set
``name``
"""
return self.execute_command('ZRANK', name, value)
def zrem(self, name, *values):
"Remove member ``values`` from sorted set ``name``"
return self.execute_command('ZREM', name, *values)
def zremrangebyrank(self, name, min, max):
"""
Remove all elements in the sorted set ``name`` with ranks between
``min`` and ``max``. Values are 0-based, ordered from smallest score
to largest. Values can be negative indicating the highest scores.
Returns the number of elements removed
"""
return self.execute_command('ZREMRANGEBYRANK', name, min, max)
def zremrangebyscore(self, name, min, max):
"""
Remove all elements in the sorted set ``name`` with scores
between ``min`` and ``max``. Returns the number of elements removed.
"""
return self.execute_command('ZREMRANGEBYSCORE', name, min, max)
def zrevrange(self, name, start, num, withscores=False,
score_cast_func=float):
"""
Return a range of values from sorted set ``name`` between
``start`` and ``num`` sorted in descending order.
``start`` and ``num`` can be negative, indicating the end of the range.
``withscores`` indicates to return the scores along with the values
The return type is a list of (value, score) pairs
``score_cast_func`` a callable used to cast the score return value
"""
pieces = ['ZREVRANGE', name, start, num]
if withscores:
pieces.append('withscores')
options = {
'withscores': withscores, 'score_cast_func': score_cast_func}
return self.execute_command(*pieces, **options)
def zrevrangebyscore(self, name, max, min, start=None, num=None,
withscores=False, score_cast_func=float):
"""
Return a range of values from the sorted set ``name`` with scores
between ``min`` and ``max`` in descending order.
If ``start`` and ``num`` are specified, then return a slice
of the range.
``withscores`` indicates to return the scores along with the values.
The return type is a list of (value, score) pairs
``score_cast_func`` a callable used to cast the score return value
"""
if (start is not None and num is None) or \
(num is not None and start is None):
raise RedisError("``start`` and ``num`` must both be specified")
pieces = ['ZREVRANGEBYSCORE', name, max, min]
if start is not None and num is not None:
pieces.extend(['LIMIT', start, num])
if withscores:
pieces.append('withscores')
options = {
'withscores': withscores, 'score_cast_func': score_cast_func}
return self.execute_command(*pieces, **options)
def zrevrank(self, name, value):
"""
Returns a 0-based value indicating the descending rank of
``value`` in sorted set ``name``
"""
return self.execute_command('ZREVRANK', name, value)
def zscore(self, name, value):
"Return the score of element ``value`` in sorted set ``name``"
return self.execute_command('ZSCORE', name, value)
def zunionstore(self, dest, keys, aggregate=None):
"""
Union multiple sorted sets specified by ``keys`` into
a new sorted set, ``dest``. Scores in the destination will be
aggregated based on the ``aggregate``, or SUM if none is provided.
"""
return self._zaggregate('ZUNIONSTORE', dest, keys, aggregate)
def _zaggregate(self, command, dest, keys, aggregate=None):
pieces = [command, dest, len(keys)]
if isinstance(keys, dict):
keys, weights = dictkeys(keys), dictvalues(keys)
else:
weights = None
pieces.extend(keys)
if weights:
pieces.append('WEIGHTS')
pieces.extend(weights)
if aggregate:
pieces.append('AGGREGATE')
pieces.append(aggregate)
return self.execute_command(*pieces)
#### HASH COMMANDS ####
def hdel(self, name, *keys):
"Delete ``keys`` from hash ``name``"
return self.execute_command('HDEL', name, *keys)
def hexists(self, name, key):
"Returns a boolean indicating if ``key`` exists within hash ``name``"
return self.execute_command('HEXISTS', name, key)
def hget(self, name, key):
"Return the value of ``key`` within the hash ``name``"
return self.execute_command('HGET', name, key)
def hgetall(self, name):
"Return a Python dict of the hash's name/value pairs"
return self.execute_command('HGETALL', name)
def hincrby(self, name, key, amount=1):
"Increment the value of ``key`` in hash ``name`` by ``amount``"
return self.execute_command('HINCRBY', name, key, amount)
def hincrbyfloat(self, name, key, amount=1.0):
"""
Increment the value of ``key`` in hash ``name`` by floating ``amount``
"""
return self.execute_command('HINCRBYFLOAT', name, key, amount)
def hkeys(self, name):
"Return the list of keys within hash ``name``"
return self.execute_command('HKEYS', name)
def hlen(self, name):
"Return the number of elements in hash ``name``"
return self.execute_command('HLEN', name)
def hset(self, name, key, value):
"""
Set ``key`` to ``value`` within hash ``name``
Returns 1 if HSET created a new field, otherwise 0
"""
return self.execute_command('HSET', name, key, value)
def hsetnx(self, name, key, value):
"""
Set ``key`` to ``value`` within hash ``name`` if ``key`` does not
exist. Returns 1 if HSETNX created a field, otherwise 0.
"""
return self.execute_command("HSETNX", name, key, value)
def hmset(self, name, mapping):
"""
Sets each key in the ``mapping`` dict to its corresponding value
in the hash ``name``
"""
if not mapping:
raise DataError("'hmset' with 'mapping' of length 0")
items = []
for pair in iteritems(mapping):
items.extend(pair)
return self.execute_command('HMSET', name, *items)
def hmget(self, name, keys, *args):
"Returns a list of values ordered identically to ``keys``"
args = list_or_args(keys, args)
return self.execute_command('HMGET', name, *args)
def hvals(self, name):
"Return the list of values within hash ``name``"
return self.execute_command('HVALS', name)
def publish(self, channel, message):
"""
Publish ``message`` on ``channel``.
Returns the number of subscribers the message was delivered to.
"""
return self.execute_command('PUBLISH', channel, message)
def eval(self, script, numkeys, *keys_and_args):
"""
Execute the LUA ``script``, specifying the ``numkeys`` the script
will touch and the key names and argument values in ``keys_and_args``.
Returns the result of the script.
In practice, use the object returned by ``register_script``. This
function exists purely for Redis API completion.
"""
return self.execute_command('EVAL', script, numkeys, *keys_and_args)
def evalsha(self, sha, numkeys, *keys_and_args):
"""
Use the ``sha`` to execute a LUA script already registered via EVAL
or SCRIPT LOAD. Specify the ``numkeys`` the script will touch and the
key names and argument values in ``keys_and_args``. Returns the result
of the script.
In practice, use the object returned by ``register_script``. This
function exists purely for Redis API completion.
"""
return self.execute_command('EVALSHA', sha, numkeys, *keys_and_args)
def script_exists(self, *args):
"""
Check if a script exists in the script cache by specifying the SHAs of
each script as ``args``. Returns a list of boolean values indicating
whether each script already exists in the cache.
"""
options = {'parse': 'EXISTS'}
return self.execute_command('SCRIPT', 'EXISTS', *args, **options)
def script_flush(self):
"Flush all scripts from the script cache"
options = {'parse': 'FLUSH'}
return self.execute_command('SCRIPT', 'FLUSH', **options)
def script_kill(self):
"Kill the currently executing LUA script"
options = {'parse': 'KILL'}
return self.execute_command('SCRIPT', 'KILL', **options)
def script_load(self, script):
"Load a LUA ``script`` into the script cache. Returns the SHA."
options = {'parse': 'LOAD'}
return self.execute_command('SCRIPT', 'LOAD', script, **options)
def register_script(self, script):
"""
Register a LUA ``script`` specifying the ``keys`` it will touch.
Returns a Script object that is callable and hides the complexity of
dealing with scripts, keys, and shas. This is the preferred way to work
with LUA scripts.
"""
return Script(self, script)
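    # Usage sketch (``r`` is an existing client): the Script object returned
    # by register_script is callable and re-registers the script if its SHA
    # is not cached on the server.
    #
    #   multiply = r.register_script("return redis.call('GET', KEYS[1]) * ARGV[1]")
    #   r.set('foo', 2)
    #   multiply(keys=['foo'], args=[5])   # -> 10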
class Redis(StrictRedis):
"""
Provides backwards compatibility with older versions of redis-py that
changed arguments to some commands to be more Pythonic, sane, or by
accident.
"""
# Overridden callbacks
RESPONSE_CALLBACKS = dict_merge(
StrictRedis.RESPONSE_CALLBACKS,
{
'TTL': lambda r: r != -1 and r or None,
'PTTL': lambda r: r != -1 and r or None,
}
)
def pipeline(self, transaction=True, shard_hint=None):
"""
Return a new pipeline object that can queue multiple commands for
later execution. ``transaction`` indicates whether all commands
should be executed atomically. Apart from making a group of operations
atomic, pipelines are useful for reducing the back-and-forth overhead
between the client and server.
"""
return Pipeline(
self.connection_pool,
self.response_callbacks,
transaction,
shard_hint)
def setex(self, name, value, time):
"""
Set the value of key ``name`` to ``value`` that expires in ``time``
seconds. ``time`` can be represented by an integer or a Python
timedelta object.
"""
if isinstance(time, datetime.timedelta):
time = time.seconds + time.days * 24 * 3600
return self.execute_command('SETEX', name, time, value)
def lrem(self, name, value, num=0):
"""
Remove the first ``num`` occurrences of elements equal to ``value``
from the list stored at ``name``.
The ``num`` argument influences the operation in the following ways:
num > 0: Remove elements equal to value moving from head to tail.
num < 0: Remove elements equal to value moving from tail to head.
num = 0: Remove all elements equal to value.
"""
return self.execute_command('LREM', name, num, value)
def zadd(self, name, *args, **kwargs):
"""
NOTE: The order of arguments differs from that of the official ZADD
command. For backwards compatability, this method accepts arguments
in the form of name1, score1, name2, score2, while the official Redis
documents expects score1, name1, score2, name2.
If you're looking to use the standard syntax, consider using the
StrictRedis class. See the API Reference section of the docs for more
information.
Set any number of element-name, score pairs to the key ``name``. Pairs
can be specified in two ways:
As *args, in the form of: name1, score1, name2, score2, ...
or as **kwargs, in the form of: name1=score1, name2=score2, ...
The following example would add four values to the 'my-key' key:
redis.zadd('my-key', 'name1', 1.1, 'name2', 2.2, name3=3.3, name4=4.4)
"""
pieces = []
if args:
if len(args) % 2 != 0:
raise RedisError("ZADD requires an equal number of "
"values and scores")
pieces.extend(reversed(args))
for pair in iteritems(kwargs):
pieces.append(pair[1])
pieces.append(pair[0])
return self.execute_command('ZADD', name, *pieces)
class PubSub(object):
"""
PubSub provides publish, subscribe and listen support to Redis channels.
After subscribing to one or more channels, the listen() method will block
until a message arrives on one of the subscribed channels. That message
will be returned and it's safe to start listening again.
"""
def __init__(self, connection_pool, shard_hint=None):
self.connection_pool = connection_pool
self.shard_hint = shard_hint
self.connection = None
self.channels = set()
self.patterns = set()
self.subscription_count = 0
self.subscribe_commands = set(
('subscribe', 'psubscribe', 'unsubscribe', 'punsubscribe')
)
def __del__(self):
try:
# if this object went out of scope prior to shutting down
# subscriptions, close the connection manually before
# returning it to the connection pool
if self.connection and (self.channels or self.patterns):
self.connection.disconnect()
self.reset()
except:
pass
def reset(self):
if self.connection:
self.connection.disconnect()
self.connection_pool.release(self.connection)
self.connection = None
def close(self):
self.reset()
def execute_command(self, *args, **kwargs):
"Execute a publish/subscribe command"
# NOTE: don't parse the response in this function. it could pull a
        # legitimate message off the stack if the connection is already
# subscribed to one or more channels
if self.connection is None:
self.connection = self.connection_pool.get_connection(
'pubsub',
self.shard_hint
)
connection = self.connection
try:
connection.send_command(*args)
except ConnectionError:
connection.disconnect()
# Connect manually here. If the Redis server is down, this will
# fail and raise a ConnectionError as desired.
connection.connect()
# resubscribe to all channels and patterns before
# resending the current command
for channel in self.channels:
self.subscribe(channel)
for pattern in self.patterns:
self.psubscribe(pattern)
connection.send_command(*args)
def parse_response(self):
"Parse the response from a publish/subscribe command"
response = self.connection.read_response()
if nativestr(response[0]) in self.subscribe_commands:
self.subscription_count = response[2]
# if we've just unsubscribed from the remaining channels,
# release the connection back to the pool
if not self.subscription_count:
self.reset()
return response
def psubscribe(self, patterns):
"Subscribe to all channels matching any pattern in ``patterns``"
if isinstance(patterns, basestring):
patterns = [patterns]
for pattern in patterns:
self.patterns.add(pattern)
return self.execute_command('PSUBSCRIBE', *patterns)
def punsubscribe(self, patterns=[]):
"""
Unsubscribe from any channel matching any pattern in ``patterns``.
If empty, unsubscribe from all channels.
"""
if isinstance(patterns, basestring):
patterns = [patterns]
for pattern in patterns:
try:
self.patterns.remove(pattern)
except KeyError:
pass
return self.execute_command('PUNSUBSCRIBE', *patterns)
def subscribe(self, channels):
"Subscribe to ``channels``, waiting for messages to be published"
if isinstance(channels, basestring):
channels = [channels]
for channel in channels:
self.channels.add(channel)
return self.execute_command('SUBSCRIBE', *channels)
def unsubscribe(self, channels=[]):
"""
Unsubscribe from ``channels``. If empty, unsubscribe
from all channels
"""
if isinstance(channels, basestring):
channels = [channels]
for channel in channels:
try:
self.channels.remove(channel)
except KeyError:
pass
return self.execute_command('UNSUBSCRIBE', *channels)
def listen(self):
"Listen for messages on channels this client has been subscribed to"
while self.subscription_count or self.channels or self.patterns:
r = self.parse_response()
msg_type = nativestr(r[0])
if msg_type == 'pmessage':
msg = {
'type': msg_type,
'pattern': nativestr(r[1]),
'channel': nativestr(r[2]),
'data': r[3]
}
else:
msg = {
'type': msg_type,
'pattern': None,
'channel': nativestr(r[1]),
'data': r[2]
}
yield msg
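    # A minimal usage sketch (hypothetical names, assuming a configured
    # connection pool): subscribe first, then iterate over listen().
    #
    #   pubsub = PubSub(connection_pool)
    #   pubsub.subscribe('notifications')
    #   for message in pubsub.listen():
    #       print(message['channel'], message['data'])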
class BasePipeline(object):
"""
Pipelines provide a way to transmit multiple commands to the Redis server
in one transmission. This is convenient for batch processing, such as
saving all the values in a list to Redis.
All commands executed within a pipeline are wrapped with MULTI and EXEC
calls. This guarantees all commands executed in the pipeline will be
executed atomically.
Any command raising an exception does *not* halt the execution of
subsequent commands in the pipeline. Instead, the exception is caught
and its instance is placed into the response list returned by execute().
Code iterating over the response list should be able to deal with an
instance of an exception as a potential value. In general, these will be
ResponseError exceptions, such as those raised when issuing a command
on a key of a different datatype.
"""
UNWATCH_COMMANDS = set(('DISCARD', 'EXEC', 'UNWATCH'))
def __init__(self, connection_pool, response_callbacks, transaction,
shard_hint):
self.connection_pool = connection_pool
self.connection = None
self.response_callbacks = response_callbacks
self.transaction = transaction
self.shard_hint = shard_hint
self.watching = False
self.reset()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.reset()
def __del__(self):
try:
self.reset()
except:
pass
def reset(self):
self.command_stack = []
self.scripts = set()
# make sure to reset the connection state in the event that we were
# watching something
if self.watching and self.connection:
try:
# call this manually since our unwatch or
# immediate_execute_command methods can call reset()
self.connection.send_command('UNWATCH')
self.connection.read_response()
except ConnectionError:
# disconnect will also remove any previous WATCHes
self.connection.disconnect()
# clean up the other instance attributes
self.watching = False
self.explicit_transaction = False
# we can safely return the connection to the pool here since we're
# sure we're no longer WATCHing anything
if self.connection:
self.connection_pool.release(self.connection)
self.connection = None
def multi(self):
"""
Start a transactional block of the pipeline after WATCH commands
are issued. End the transactional block with `execute`.
"""
if self.explicit_transaction:
raise RedisError('Cannot issue nested calls to MULTI')
if self.command_stack:
raise RedisError('Commands without an initial WATCH have already '
'been issued')
self.explicit_transaction = True
def execute_command(self, *args, **kwargs):
if (self.watching or args[0] == 'WATCH') and \
not self.explicit_transaction:
return self.immediate_execute_command(*args, **kwargs)
return self.pipeline_execute_command(*args, **kwargs)
def immediate_execute_command(self, *args, **options):
"""
Execute a command immediately, but don't auto-retry on a
ConnectionError if we're already WATCHing a variable. Used when
issuing WATCH or subsequent commands retrieving their values but before
MULTI is called.
"""
command_name = args[0]
conn = self.connection
# if this is the first call, we need a connection
if not conn:
conn = self.connection_pool.get_connection(command_name,
self.shard_hint)
self.connection = conn
try:
conn.send_command(*args)
return self.parse_response(conn, command_name, **options)
except ConnectionError:
conn.disconnect()
# if we're not already watching, we can safely retry the command
# assuming it was a connection timeout
if not self.watching:
conn.send_command(*args)
return self.parse_response(conn, command_name, **options)
self.reset()
raise
def pipeline_execute_command(self, *args, **options):
"""
Stage a command to be executed when execute() is next called
Returns the current Pipeline object back so commands can be
chained together, such as:
pipe = pipe.set('foo', 'bar').incr('baz').decr('bang')
At some other point, you can then run: pipe.execute(),
which will execute all commands queued in the pipe.
"""
self.command_stack.append((args, options))
return self
def _execute_transaction(self, connection, commands, raise_on_error):
cmds = chain([(('MULTI', ), {})], commands, [(('EXEC', ), {})])
all_cmds = SYM_EMPTY.join(
starmap(connection.pack_command,
[args for args, options in cmds]))
connection.send_packed_command(all_cmds)
# parse off the response for MULTI
self.parse_response(connection, '_')
# and all the other commands
errors = []
for i, _ in enumerate(commands):
try:
self.parse_response(connection, '_')
except ResponseError:
errors.append((i, sys.exc_info()[1]))
# parse the EXEC.
try:
response = self.parse_response(connection, '_')
except ExecAbortError:
self.immediate_execute_command('DISCARD')
if errors:
raise errors[0][1]
raise sys.exc_info()[1]
if response is None:
raise WatchError("Watched variable changed.")
# put any parse errors into the response
for i, e in errors:
response.insert(i, e)
if len(response) != len(commands):
raise ResponseError("Wrong number of response items from "
"pipeline execution")
# find any errors in the response and raise if necessary
if raise_on_error:
self.raise_first_error(response)
# We have to run response callbacks manually
data = []
for r, cmd in izip(response, commands):
if not isinstance(r, Exception):
args, options = cmd
command_name = args[0]
if command_name in self.response_callbacks:
r = self.response_callbacks[command_name](r, **options)
data.append(r)
return data
def _execute_pipeline(self, connection, commands, raise_on_error):
# build up all commands into a single request to increase network perf
all_cmds = SYM_EMPTY.join(
starmap(connection.pack_command,
[args for args, options in commands]))
connection.send_packed_command(all_cmds)
response = [self.parse_response(connection, args[0], **options)
for args, options in commands]
if raise_on_error:
self.raise_first_error(response)
return response
def raise_first_error(self, response):
for r in response:
if isinstance(r, ResponseError):
raise r
def parse_response(self, connection, command_name, **options):
result = StrictRedis.parse_response(
self, connection, command_name, **options)
if command_name in self.UNWATCH_COMMANDS:
self.watching = False
elif command_name == 'WATCH':
self.watching = True
return result
def load_scripts(self):
# make sure all scripts that are about to be run on this pipeline exist
scripts = list(self.scripts)
immediate = self.immediate_execute_command
shas = [s.sha for s in scripts]
exists = immediate('SCRIPT', 'EXISTS', *shas, **{'parse': 'EXISTS'})
if not all(exists):
for s, exist in izip(scripts, exists):
if not exist:
immediate('SCRIPT', 'LOAD', s.script, **{'parse': 'LOAD'})
def execute(self, raise_on_error=True):
"Execute all the commands in the current pipeline"
if self.scripts:
self.load_scripts()
stack = self.command_stack
if self.transaction or self.explicit_transaction:
execute = self._execute_transaction
else:
execute = self._execute_pipeline
conn = self.connection
if not conn:
conn = self.connection_pool.get_connection('MULTI',
self.shard_hint)
# assign to self.connection so reset() releases the connection
# back to the pool after we're done
self.connection = conn
try:
return execute(conn, stack, raise_on_error)
except ConnectionError:
conn.disconnect()
# if we were watching a variable, the watch is no longer valid
# since this connection has died. raise a WatchError, which
            # indicates the user should retry the transaction. If this is more
            # than a temporary failure, the next WATCH the user issues
            # will fail, propagating the real ConnectionError
if self.watching:
raise WatchError("A ConnectionError occured on while watching "
"one or more keys")
# otherwise, it's safe to retry since the transaction isn't
# predicated on any state
return execute(conn, stack, raise_on_error)
finally:
self.reset()
def watch(self, *names):
"Watches the values at keys ``names``"
if self.explicit_transaction:
raise RedisError('Cannot issue a WATCH after a MULTI')
return self.execute_command('WATCH', *names)
def unwatch(self):
"Unwatches all previously specified keys"
return self.watching and self.execute_command('UNWATCH') or True
def script_load_for_pipeline(self, script):
"Make sure scripts are loaded prior to pipeline execution"
self.scripts.add(script)
class StrictPipeline(BasePipeline, StrictRedis):
"Pipeline for the StrictRedis class"
pass
class Pipeline(BasePipeline, Redis):
"Pipeline for the Redis class"
pass
class Script(object):
"An executable LUA script object returned by ``register_script``"
def __init__(self, registered_client, script):
self.registered_client = registered_client
self.script = script
self.sha = registered_client.script_load(script)
def __call__(self, keys=[], args=[], client=None):
"Execute the script, passing any required ``args``"
client = client or self.registered_client
args = tuple(keys) + tuple(args)
# make sure the Redis server knows about the script
if isinstance(client, BasePipeline):
# make sure this script is good to go on pipeline
client.script_load_for_pipeline(self)
try:
return client.evalsha(self.sha, len(keys), *args)
except NoScriptError:
            # Maybe the client is pointed to a different server than the client
# that created this instance?
self.sha = client.script_load(self.script)
return client.evalsha(self.sha, len(keys), *args)
class LockError(RedisError):
"Errors thrown from the Lock"
pass
class Lock(object):
"""
A shared, distributed Lock. Using Redis for locking allows the Lock
to be shared across processes and/or machines.
It's left to the user to resolve deadlock issues and make sure
multiple clients play nicely together.
"""
LOCK_FOREVER = float(2 ** 31 + 1) # 1 past max unix time
def __init__(self, redis, name, timeout=None, sleep=0.1):
"""
        Create a new Lock instance named ``name`` using the Redis client
supplied by ``redis``.
``timeout`` indicates a maximum life for the lock.
By default, it will remain locked until release() is called.
``sleep`` indicates the amount of time to sleep per loop iteration
when the lock is in blocking mode and another client is currently
holding the lock.
Note: If using ``timeout``, you should make sure all the hosts
that are running clients have their time synchronized with a network
time service like ntp.
"""
self.redis = redis
self.name = name
self.acquired_until = None
self.timeout = timeout
self.sleep = sleep
if self.timeout and self.sleep > self.timeout:
raise LockError("'sleep' must be less than 'timeout'")
def __enter__(self):
return self.acquire()
def __exit__(self, exc_type, exc_value, traceback):
self.release()
def acquire(self, blocking=True):
"""
Use Redis to hold a shared, distributed lock named ``name``.
Returns True once the lock is acquired.
If ``blocking`` is False, always return immediately. If the lock
was acquired, return True, otherwise return False.
"""
sleep = self.sleep
timeout = self.timeout
while 1:
unixtime = int(mod_time.time())
if timeout:
timeout_at = unixtime + timeout
else:
timeout_at = Lock.LOCK_FOREVER
timeout_at = float(timeout_at)
if self.redis.setnx(self.name, timeout_at):
self.acquired_until = timeout_at
return True
# We want blocking, but didn't acquire the lock
# check to see if the current lock is expired
existing = float(self.redis.get(self.name) or 1)
if existing < unixtime:
# the previous lock is expired, attempt to overwrite it
existing = float(self.redis.getset(self.name, timeout_at) or 1)
if existing < unixtime:
# we successfully acquired the lock
self.acquired_until = timeout_at
return True
if not blocking:
return False
mod_time.sleep(sleep)
def release(self):
"Releases the already acquired lock"
if self.acquired_until is None:
raise ValueError("Cannot release an unlocked lock")
existing = float(self.redis.get(self.name) or 1)
# if the lock time is in the future, delete the lock
if existing >= self.acquired_until:
self.redis.delete(self.name)
self.acquired_until = None
| mit | 8,373,793,123,824,788,000 | 35.96856 | 79 | 0.59115 | false |
jptomo/rpython-lang-scheme | rpython/rtyper/rnone.py | 1 | 2579 | from rpython.flowspace.model import Constant
from rpython.annotator.model import SomeNone
from rpython.rtyper.rmodel import Repr, TyperError, inputconst
from rpython.rtyper.lltypesystem.lltype import Void, Bool, Ptr, Char
from rpython.rtyper.lltypesystem.llmemory import Address
from rpython.rtyper.rpbc import SmallFunctionSetPBCRepr
from rpython.rtyper.annlowlevel import llstr
from rpython.tool.pairtype import pairtype
class NoneRepr(Repr):
lowleveltype = Void
def rtype_bool(self, hop):
return Constant(False, Bool)
def none_call(self, hop):
raise TyperError("attempt to call constant None")
def ll_str(self, none):
return llstr("None")
def get_ll_eq_function(self):
return None
def get_ll_hash_function(self):
return ll_none_hash
rtype_simple_call = none_call
rtype_call_args = none_call
none_repr = NoneRepr()
class __extend__(SomeNone):
def rtyper_makerepr(self, rtyper):
return none_repr
def rtyper_makekey(self):
return self.__class__,
def ll_none_hash(_):
return 0
class __extend__(pairtype(Repr, NoneRepr)):
def convert_from_to((r_from, _), v, llops):
return inputconst(Void, None)
def rtype_is_((robj1, rnone2), hop):
if hop.s_result.is_constant():
return hop.inputconst(Bool, hop.s_result.const)
return rtype_is_None(robj1, rnone2, hop)
class __extend__(pairtype(NoneRepr, Repr)):
def convert_from_to((_, r_to), v, llops):
return inputconst(r_to, None)
def rtype_is_((rnone1, robj2), hop):
if hop.s_result.is_constant():
return hop.inputconst(Bool, hop.s_result.const)
return rtype_is_None(robj2, rnone1, hop, pos=1)
def rtype_is_None(robj1, rnone2, hop, pos=0):
if isinstance(robj1.lowleveltype, Ptr):
v1 = hop.inputarg(robj1, pos)
return hop.genop('ptr_iszero', [v1], resulttype=Bool)
elif robj1.lowleveltype == Address:
v1 = hop.inputarg(robj1, pos)
cnull = hop.inputconst(Address, robj1.null_instance())
return hop.genop('adr_eq', [v1, cnull], resulttype=Bool)
elif robj1 == none_repr:
return hop.inputconst(Bool, True)
elif isinstance(robj1, SmallFunctionSetPBCRepr):
if robj1.s_pbc.can_be_None:
v1 = hop.inputarg(robj1, pos)
return hop.genop('char_eq', [v1, inputconst(Char, '\000')],
resulttype=Bool)
else:
return inputconst(Bool, False)
else:
raise TyperError('rtype_is_None of %r' % (robj1))
| mit | -3,959,287,251,146,390,500 | 30.45122 | 71 | 0.647926 | false |
peterjanes/dosage | dosagelib/plugins/r.py | 1 | 5376 | # -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2020 Tobias Gruetzmacher
# Copyright (C) 2019-2020 Daniel Ring
from __future__ import absolute_import, division, print_function
from re import compile
from six.moves.urllib.parse import urljoin
from ..helpers import bounceStarter, xpath_class
from ..scraper import _BasicScraper, _ParserScraper
from ..util import tagre
from .common import _WordPressScraper, _WPWebcomic
class RalfTheDestroyer(_WordPressScraper):
url = 'http://ralfthedestroyer.com/'
class RaynaOnTheRiver(_WordPressScraper):
url = 'http://www.catomix.com/rayna/'
firstStripUrl = url + 'archives/comic/teaser-poster'
class RealLife(_WordPressScraper):
url = 'https://reallifecomics.com/'
stripUrl = url + 'comic.php?comic=%s'
firstStripUrl = stripUrl % 'title-1'
help = 'Index format: monthname-dd-yyyy'
def namer(self, imageUrl, pageUrl):
        # Fix inconsistent filenames
filename = imageUrl.rsplit('/', 1)[-1]
if pageUrl.rsplit('=', 1)[-1] == 'may-27-2014':
filename = filename.replace('20140219_3121', '20140527')
filename = filename.replace('5-Finished', '20140623_3161')
filename = filename.replace('520140722', '20140722')
filename = filename.replace('520140724', '20140724')
return filename
def getPrevUrl(self, url, data):
# "Parse" JavaScript
prevtag = data.find_class('comic-nav-previous')
if not prevtag:
return None
target = prevtag[0].get('onclick').split("'")[1]
return urljoin(url, target)
class RealmOfAtland(_BasicScraper):
url = 'http://www.realmofatland.com/'
stripUrl = url + '?p=%s'
firstStripUrl = stripUrl % '1'
prevSearch = compile(tagre("a", "href", r'(\?p=\d+)', after="cg_back"))
imageSearch = compile(tagre("img", "src", r'(images/strips/atland\d+.[^"]+)'))
help = 'Index format: nnn'
class RedMeat(_ParserScraper):
url = 'http://www.redmeat.com/max-cannon/FreshMeat'
imageSearch = '//div[@class="comicStrip"]//img'
prevSearch = '//a[@class="prev"]'
def namer(self, image_url, page_url):
parts = image_url.rsplit('/', 2)
return '_'.join(parts[1:3])
class Replay(_ParserScraper):
url = 'http://replaycomic.com/'
stripUrl = url + 'comic/%s/'
url = stripUrl % 'trying-it-out'
firstStripUrl = stripUrl % 'red-desert'
imageSearch = '//div[@id="comic"]//img'
prevSearch = '//a[contains(@class, "comic-nav-previous")]'
nextSearch = '//a[contains(@class, "comic-nav-next")]'
def starter(self):
# Retrieve archive page to identify chapters
archivePage = self.getPage(self.url + 'archive')
archive = archivePage.xpath('//div[@class="comic-archive-chapter-wrap"]')
self.chapter = len(archive) - 1
self.startOfChapter = []
for archiveChapter in archive:
self.startOfChapter.append(archiveChapter.xpath('.//a')[0].get('href'))
return bounceStarter(self)
def namer(self, imageUrl, pageUrl):
# Name pages based on chapter, index, and post title
name = pageUrl.rstrip('/').rsplit('/', 1)[-1]
page = imageUrl.rsplit('/', 1)[-1].rsplit('.', 1)
# Fix inconsistent page number formatting
if page[0].isdigit() and len(page[0]) > 2 and self.chapter == 1 and name != 'through-the-woods':
page[0] = page[0][:2] + '-' + page[0][2:]
name = '%d-%s-%s.%s' % (self.chapter, page[0], name, page[1])
if pageUrl in self.startOfChapter:
self.chapter -= 1
return name
class RiversideExtras(_WPWebcomic):
url = 'https://riversidecomics.com/'
class RomanticallyApocalyptic(_ParserScraper):
url = 'http://romanticallyapocalyptic.com/'
stripUrl = url + '%s'
firstStripUrl = stripUrl % '0'
imageSearch = '//div[%s]/center//img' % xpath_class('comicpanel')
prevSearch = '//a[@accesskey="p"]'
help = 'Index format: n'
adult = True
class Roza(_ParserScraper):
url = 'http://www.junglestudio.com/roza/index.php'
stripUrl = url + '?date=%s'
firstStripUrl = stripUrl % '2007-05-01'
imageSearch = '//img[contains(@src, "pages/")]'
prevSearch = '//a[img[contains(@src, "navtable_01.gif")]]'
help = 'Index format: yyyy-mm-dd'
class Ruthe(_BasicScraper):
url = 'http://ruthe.de/'
stripUrl = url + 'cartoon/%s/datum/asc/'
firstStripUrl = stripUrl % '1'
lang = 'de'
imageSearch = compile(tagre("img", "src", r'(/?cartoons/strip_\d+[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(/cartoon/\d+/datum/asc/)') +
'vorheriger')
help = 'Index format: number'
class Ryugou(_WPWebcomic):
url = 'http://ryugou.swashbuckledcomics.com/'
stripUrl = url + 'comic/%s/'
firstStripUrl = 'ryugou-chapter-1-cover'
starter = bounceStarter
def namer(self, imageUrl, pageUrl):
title = pageUrl.rstrip('/').rsplit('/', 1)[-1]
ext = imageUrl.rsplit('.', 1)[-1]
return title + '.' + ext
def fetchUrls(self, url, data, urlSearch):
imageUrls = super(Ryugou, self).fetchUrls(url, data, urlSearch)
if url == self.stripUrl % '1-3':
imageUrls = [imageUrls[1]]
return imageUrls
| mit | -1,011,755,485,602,224,600 | 33.909091 | 104 | 0.620722 | false |
SanaMobile/sana.mds | src/mds/api/contrib/openmrslib/openmrs19.py | 1 | 17366 | """ Classes and utilities for talking to an OpenMRS server version 1.9
:Authors: Sana Dev Team
:Version: 2.0
"""
import urllib
import cookielib
import logging
import urllib2
import cjson
import time
import base64
from django.conf import settings
from . import openers
from mds.api.responses import succeed, fail
__all__ = ['OpenMRS']
SESSION_STATUS = "authenticated"
SESSION_INVALID = u"Invalid auth data"
SESSION_CONTENT = "sessionId"
LIST_CONTENT = "results"
ERROR_CONTENT = "error"
ERROR_MESSAGE = "message"
ERROR_SOURCE = "code"
def error_reader(response, all_unicode=False):
message = response[ERROR_CONTENT][ERROR_MESSAGE]
return fail(message)
def resultlist_reader(response, all_unicode=False):
""" Returns a list
"""
links = []
def get_self(links):
for link in links:
if link['rel'] == "self":
return link['uri']
for result in response:
links.append(get_self(result['links']))
return links
def item_reader(response, all_unicode=False):
pass
def rest_reader(response, all_unicode=False):
msg = cjson.decode(response.read(), all_unicode=all_unicode)
if ERROR_CONTENT in msg.keys():
return error_reader(msg)
elif LIST_CONTENT in msg.keys():
return resultlist_reader(msg[LIST_CONTENT])
elif SESSION_CONTENT in msg.keys():
return session_reader(response)
else:
# single item
return succeed(msg)
def session_reader(response, all_unicode=False):
""" Returns a succeed or fail response dict with the message content set to
the session id or an error message.
"""
msg = response
if msg.get(SESSION_STATUS, False):
return succeed(msg.get(SESSION_CONTENT))
else:
return fail(SESSION_INVALID)
def login_form(host, username, password):
return {"uname": username,
"pw": password,
"redirect": "/openmrs",
"refererURL": host+"index.htm"
}
def patient_form(first_name, last_name, patient_id, gender, birthdate):
"""OpenMRS Short patient form for creating a new patient.
Parameters OpenMRS form field Note
first_name personName.givenName N/A
last_name personName.familyName N/A
patient_id identifiers[0].identifier N/A
gender patient.gender M or F
birthdate patient.birthdate single digits must be padded
N/A identifiers[0].identifierType use "2"
N/A identifiers[0].location use "1"
"""
data = {"personName.givenName": first_name,
"personName.familyName": last_name,
"identifiers[0].identifier": patient_id,
"identifiers[0].identifierType": 2,
"identifiers[0].location": 1,
"patient.gender": gender,
"patient.birthdate": birthdate,}
return data
def patient_reader(response, all_unicode=False):
msg = cjson.decode(response.read(), all_unicode=all_unicode)
if ERROR_CONTENT in msg.keys():
return error_reader(msg)
else:
result = []
for p in msg["results"]:
logging.debug("patient: %s " % p)
name = p["preferredName"]
firstname = name["givenName"]
logging.debug("...: %s " % firstname)
lastname = name["familyName"]
logging.debug("...: %s " % lastname)
gender = p["gender"]
birthdate = p["birthdate"]
uuid = p["uuid"]
patient = "%s%s%s%s%s%s" % (firstname.lower(),
birthdate[0:4],
birthdate[5:7],
birthdate[8:10],
lastname.lower(),
gender.lower())
logging.debug("patient: %s " % patient)
result.append(patient)
return "".join(result)
def person_form(**data):
pass
def encounter_queue_form(patient_id, phone_id,
procedure_title, saved_procedure_id,
responses):
description = {'phoneId': str(phone_id),
'procedureDate': time.strftime(settings.OPENMRS_DATE_FMT),
'patientId': str(patient_id),
'procedureTitle': str(procedure_title),
'caseIdentifier': str(saved_procedure_id),
'questions': responses}
return description
def queue_form(encounter):
pass
class OpenMRS(openers.OpenMRSOpener):
""" Utility class for remote communication with OpenMRS version 1.9
Notes for the OpenMRS Webservices.REST API;
1. 'There is a filter defined on the module that intercepts all calls and
authenticates the given request.
Currently only BASIC authentication is supported. Header arguments
values of __ and __ are expected.
Alternatively, a session token can be used.
GET /openmrs/ws/rest/$VERSION/session
with the BASIC credentials will return the current token value. This
token should be passed with all subsequent calls as a cookie named
"jsessionid=token".'
Sana developer comments:
For BASIC Credentials the following snippet will open a resource:
# url, username, and password as appropriate
cookies = cookielib.CookieJar()
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, url, username, password)
auth_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
opener = urllib2.build_opener(auth_handler,
urllib2.HTTPCookieProcessor(cookies),)
urllib2.install_opener(opener)
# Build a request and set the headers
req = urllib2.Request(url)
basic64 = lambda x,y: base64.encodestring('%s:%s' % (x, y))[:-1]
req.add_header("Authorization", "Basic %s" % basic64(username, password))
opener.open(req)
For session access:
A successful GET sent to the session resource path:
/openmrs/ws/rest/$VERSION/session ($VERSION is 'v1' for OpenMRS 1.8-1.9)
will return:
{"sessionId":"E77D1DEACFEAF53282D9453603005A3D","authenticated":true}
You will need to set the
# This will add
authenticated = session.get("authenticated",False)
jsessionid = session.get("sessionId")
req = urllib2.Request(url)
req.add_header("jsessionid", jsessionid)
return opener.open(req)
2. REST Resources
See:
https://wiki.openmrs.org/display/docs/REST+Web+Service+Resources
"""
paths = {"sessions": "ws/rest/v1/session/",
"session": "ws/rest/v1/session/{uuid}",
"concept" : "ws/rest/v1/concept/(<?Puuid>)",
"concepts" : "ws/rest/v1/concept/",
"subject" : "ws/rest/v1/patient/(<?Puuid>)",
"subjects-create" : "ws/rest/v1/patient/",
"subjects" : "ws/rest/v1/patient/",
"sana-encounters" : "moduleServlet/sana/uploadServlet",
"encounters" : "ws/rest/v1/encounter/",
"encounter" : "ws/rest/v1/encounter/(<?Puuid>)",
"encounter_observations": "ws/rest/v1/encounter/(<?Puuid>)/observation/",
"patient": "admin/patients/shortPatientForm.form",
"login": "loginServlet"}
forms = {"patient": patient_form,
"login": login_form }
def open(self, url, username=None, password=None, use_json=False, **kwargs):
#session_path = self.build_url("sessions",query=auth)
opener, session = self.open_session(username, password)
if not session["authenticated"]:
logging.info("username and password combination incorrect!")
raise Exception(u"username and password combination incorrect!")
# short circuit here
if url == self.build_url("sessions"):
logging.info("username and password validated!")
return u"username and password validated!"
jsessionid = session.get("sessionId")
logging.info("Autenthicated?: %s, Session ID: %s" % (session["authenticated"],jsessionid))
req = urllib2.Request(url)
req.add_header("Cookie", "jsessionid=%s"%jsessionid)
if kwargs:
data = cjson.encode(kwargs) if use_json else urllib.urlencode(kwargs)
req.add_data(data)
logging.debug("Request: %s" % req.get_full_url())
logging.debug("...headers: %s" % req.header_items())
logging.debug("...method: %s" % req.get_method())
return opener.open(req)
def open_session(self, username=None, password=None):
logging.debug("Opening session")
url = self.build_url("sessions")
logging.info("URL: %s"% url)
#opener = self.build_opener(url, username, password)
cookies = cookielib.CookieJar()
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, url, username, password)
auth_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
opener = urllib2.build_opener(auth_handler,
urllib2.HTTPCookieProcessor(cookies),)
urllib2.install_opener(opener)
req = urllib2.Request(url)
basic64 = lambda x,y: base64.encodestring('%s:%s' % (x, y))[:-1]
logging.info("Base 64 encoded: %s" % basic64(username,password))
print basic64(username, password), username, password
if username and password:
req.add_header("Authorization", "Basic %s" % basic64(username, password))
#session = urllib2.urlopen(req)
session = cjson.decode(opener.open(req).read())
logging.info("Before returning data")
return opener, session
def getPatient(self,username, password, patientid):
""" Retrieves a patient by id from OpenMRS through the Webservices.REST
module.
Backwards compatibility method.
"""
wsname = 'subjects'
pargs={"q":patientid,
"v":"full" }
auth = {"username": username,
"password": password }
logging.debug("query" % pargs)
response = self.wsdispatch(wsname, query=pargs, response_handler=patient_reader, auth=auth)
logging.debug("response: %s" % response)
content = response
return content
def getAllPatients(self,username, password, query=None):
"""Retrieves all patients from OpenMRS through the REST module.
Backwards compatibility method.
"""
wsname = 'subjects'
auth = {"username": username,
"password": password }
logging.debug("Before doing the wsdispatch request")
response = self.wsdispatch(wsname, query=query, response_handler=patient_reader, auth=auth)
logging.debug(response)
content = []
for uri in response:
person = cjson.decode(self.open(uri+"?v=full", username, password).read())
print person
patient = {}
name = person["names"]
patient['first_name'] = name["givenName"]
patient['family_name'] = name["family_name"]
patient['gender'] = person["gender"]
patient['birthdate'] = person["birthdate"]
patient['uuid'] = person["uuid"]
content.append(patient)
return content
def create_patient(self, patient_id, first_name, last_name, gender,
birthdate, auth=None):
"""Sends a post request to OpenMRS patient service to create patient.
"""
try:
            data = patient_form(first_name, last_name, patient_id, gender,
                                birthdate)
pargs={"uuid":"" }
self.wsdispatch("login", pargs, data=auth)
response = self.wsdispatch("patient", pargs, auth=auth, data=data)
content = response.read()
return content
except Exception, e:
logging.info("Exception trying to create patient: %s" % str(e))
def _login(self, username=None, password=None):
data = login_form(self.host, username, password)
try:
self.opener.open("%sloginServlet" % self.host, data)
logging.debug("Success: Validating with OpenMRS loginServlet")
result = True
except Exception, e:
logging.debug("Error logging into OpenMRS: %s" % e)
result = False
return result
def upload_procedure(self, patient_id, phone_id,
procedure_title, saved_procedure_id,
responses, files, username=None, password=None):
"""Posts an encounter to the OPenMRS encounter service through the Sana
module
OpenMRS url: <host> + moduleServlet/moca/uploadServlet
OpenMRS Form Fields: ::
Parameter OpenMRS form field Note
phone_id phoneId
procedureDate mm/dd/yyyy
patient_id patientId
procedure_title procedureTitle
saved_procedure_id caseIdentifier
responses questions
Note: Above parameters are then encoded and posted to OpenMRS as the
'description' field value.
Binaries are attached as one parameter per binary with field name
given as 'medImageFile-<element-id>-<index> where index correlates
to the position in the csv 'answer' attribute of the particular
procedure element
Parameters:
phone_id
client telephone number.
patient_id
The patient identifier.
procedure_title
                The procedure title.
saved_procedure_id
Saved procedure id.
responses
Encounter text data as JSON encoded text.
"""
hasPermissions = False
result = False
message = ""
encounter = None
response = None
try:
#if len(self.cookies) == 0:
#self._login()
# opener, logged_in = self._login()
logging.debug("Validating permissions to manage sana queue")
opener, session = self.open_session(username, password)
if not session["authenticated"]:
raise Exception(u"username and password combination incorrect!")
url = "%smoduleServlet/sana/permissionsServlet" % self.host
response = opener.open(url).read()
logging.debug("Got result %s" % response)
resp_msg = cjson.decode(response,True)
message = resp_msg['message']
hasPermissions = True if resp_msg['status'] == 'OK' else False
if not hasPermissions:
return result, message
logging.debug("Uploading procedure")
# NOTE: Check version format in settings matches OpenMRS version
description = encounter_queue_form(patient_id, phone_id,
procedure_title, saved_procedure_id,
responses)
description = cjson.encode(description)
post = {'description': str(description)}
logging.debug("Encoded parameters, checking files.")
# Attach a file
for elt in responses:
etype = elt.get('type', None)
eid = elt.get('id', None)
if eid in files:
logging.info("Checking for files associated with %s" % eid)
for i,path in enumerate(files[eid]):
logging.info('medImageFile-%s-%d -> %s'
% (eid, i, path))
post['medImageFile-%s-%d' % (eid, i)] = open(path, "rb")
url = "%smoduleServlet/sana/uploadServlet" % self.host
logging.debug("About to post to " + url)
response = self.open(url,
username=username,
password=password,
use_json=False, **post).read()
logging.debug("Got result %s" % response)
resp_msg = cjson.decode(response,True)
message = resp_msg.get('message', '')
result = True if resp_msg['status'] == 'OK' else False
encounter = resp_msg.get('encounter', None)
logging.debug("Done with upload")
except Exception as e:
print e
logging.error("Exception in uploading procedure: %s"
% saved_procedure_id)
raise e
return result, message, encounter
| bsd-3-clause | 1,089,484,495,699,072,800 | 37.939462 | 99 | 0.563803 | false |
bingweichen/GOKU | backend/server/database/model.py | 1 | 11139 | """
@author: Bingwei Chen
@time: 8/4/17
@desc: models
For a change to any field to take effect, the table must be dropped and re-created.
"""
import json
from datetime import datetime
from peewee import *
from server.database.db import database
from server.utility.constant.basic_constant import \
DELIVERY, APPOINTMENT_STATUS
from server.utility.constant.const_db import Const
class JSONField(TextField):
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
if value is not None:
return json.loads(value)
class BaseModel(Model):
# def __str__(self):
# r = {}
# for k in self._data.keys():
# try:
# r[k] = str(getattr(self, k))
# except:
# r[k] = json.dumps(getattr(self, k))
# return str(r)
class Meta:
database = database
# class Const(BaseModel):
# key = CharField(primary_key=True)
# value = JSONField()
# label = CharField()
class Store(BaseModel):
name = CharField(unique=True, primary_key=True)
address = CharField(unique=True)
class School(BaseModel):
name = CharField(unique=True, primary_key=True)
address = CharField(unique=True)
store = ForeignKeyField(Store, related_name="schools")
# status defaults to "empty"; when the e-bike is in use it changes to "full"
class User(BaseModel):
username = CharField(primary_key=True)
password = CharField() # change
name = CharField()
school = ForeignKeyField(School, related_name='users')
    student_id = CharField(db_column='student_id')  # student number
    phone = BigIntegerField(unique=True, null=True)
    identify_number = CharField(unique=True)  # national ID number
    we_chat_id = CharField(verbose_name='微信号', null=True, default=None)  # WeChat ID
    # we_chat_id = CharField(unique=True, verbose_name='微信号',
    #                        null=True, default=None)  # WeChat ID
    account = CharField(verbose_name='账号', null=True, default=None)  # refund account
    account_type = CharField(verbose_name='账号类型', null=True,
                             default=None)  # account type
    status = CharField(default="empty")  # rental status
admin = BooleanField(default=False)
# def __unicode__(self):
# return self.username
class VirtualCard(BaseModel):
card_no = ForeignKeyField(
User, primary_key=True, related_name="virtual_cards")
deposit = FloatField(default=0.0)
balance = FloatField(default=0.0)
situation = CharField(default="正常") # 冻结
real_name_authentication = CharField(
default="未认证", choices=["已认证", "未认证"])
    out_trade_no = CharField(default=None, null=True)  # merchant payment order number
class ConsumeRecord(BaseModel):
card = ForeignKeyField(VirtualCard, related_name="consume_record")
consume_event = CharField()
consume_date_time = DateTimeField()
consume_fee = FloatField()
balance = FloatField(default=0.0)
    out_trade_no = CharField(default=None, null=True)  # merchant payment order number
# from playhouse.postgres_ext import ArrayField
# colors should be an array
class EBikeModel(BaseModel):
    # e-bike model
    name = CharField(primary_key=True)
    category = CharField()  # e-bike category, e.g. 小龟 / 酷车 / 闪车 / MINI租
    type = CharField()  # e-bike type: purchase or rental
    price = JSONField()  # price
    colors = JSONField()  # e.g. [red, blue, green, ...]
    distance = CharField()  # range per charge
    configure = CharField()  # configuration
    battery = CharField()  # battery specification
    image_urls = JSONField(default=None, null=True)  # carousel images
    introduction_image_urls = JSONField(default=None, null=True)  # introduction images
    introduction = CharField(default="物品简介")  # text introduction (default: "item description")
    num_sold = IntegerField(default=0)  # units sold
    num_view = IntegerField(default=0)  # view count
# newly added inventory table
class Storage(BaseModel):
    # which model this stock belongs to
model = ForeignKeyField(rel_model=EBikeModel,
related_name="storage")
color = CharField(max_length=5)
    # stock quantity
num = IntegerField()
class Meta:
primary_key = CompositeKey('model', 'color', )
# 1. Bikes are not entered manually; an EBike record is created when an order is generated
# 2. Registering a vehicle
class EBike(BaseModel):
    # license plate number
    plate_no = CharField(primary_key=True)
    # which model it belongs to
    model = ForeignKeyField(EBikeModel, related_name='e_bikes')
    # which user it belongs to
    user = ForeignKeyField(User, related_name='e_bikes', null=True)
    color = CharField()
    # e-bike status: 空闲 (idle) or 被租用 (rented)
    status = CharField(default="空闲")
# an appointment (order) targets a specific model and color
class Appointment(BaseModel):
    user = ForeignKeyField(User, related_name='appointments')
    e_bike_model = ForeignKeyField(EBikeModel, related_name='appointments')
    color = CharField(max_length=5)
    category = CharField()  # e-bike category, e.g. 小龟 / 酷车 / 闪车 / MINI租
    type = CharField()  # e-bike type: purchase or rental
    note = CharField(null=True)  # remark
    date = DateTimeField()  # creation date
    expired_date_time = DateTimeField()  # expiry time
    serial_number = CharField(null=True)  # vehicle serial number
    price = FloatField()  # final price
    reduced_price = FloatField(null=True)  # discounted price
    rent_time_period = CharField(default="无", null=True)  # rental period: semester or year
    end_time = DateTimeField(default=None, null=True)  # rental end date
    appointment_fee = FloatField(default=0)  # appointment fee
    rent_deposit = FloatField(default=0)  # rental deposit
    appointment_fee_needed = \
        FloatField(default=Const.get(
            key="DEFAULT_APPOINTMENT_FEE").value)  # required appointment fee
    rent_deposit_needed = \
        FloatField(default=Const.get(
            key="RENT_DEPOSIT").value)  # required deposit
delivery = CharField(default=DELIVERY["0"])
status = CharField(default=APPOINTMENT_STATUS["0"])
# fast-charging battery, for rent
class Battery(BaseModel):
    serial_number = CharField(primary_key=True)
    # battery info, e.g. voltage and current
    desc = CharField(null=True)
    # whether it is currently on loan
    on_loan = BooleanField(default=False)
    # renter
user = ForeignKeyField(User, related_name='battery', null=True)
# fast-charging battery rental record
class BatteryRecord(BaseModel):
user = ForeignKeyField(User)
battery = ForeignKeyField(Battery, related_name="battery_records")
rent_date = DateTimeField()
return_date = DateTimeField(default=None, null=True)
price = FloatField(default=0)
situation = CharField(default="借用中") # 借用中,已归还
# fast-charging battery repair report record
class BatteryReport(BaseModel):
    # battery id
    battery = ForeignKeyField(
        Battery, related_name='battery_report', null=True)
    # current user
    current_owner = ForeignKeyField(
        User, related_name='battery_report', null=True)
    report_time = DateTimeField()  # time the repair report was created
# reporter = ForeignKeyField(User, related_name=)
# coupon template
class CouponTemplate(BaseModel):
    # coupon description
    desc = CharField()
    # usage condition (minimum spend)
    situation = FloatField(null=True, default=0)
    # face value
    value = FloatField()
    # validity period
duration = IntegerField(null=True)
# coupon
class Coupon(BaseModel):
    # coupon description
    desc = CharField()
    # user
    user = ForeignKeyField(User, related_name='coupon', null=True)
    # usage condition (minimum spend)
    situation = FloatField(null=True, default=0)
    # face value
    value = FloatField()
    # expiry date
    expired = DateTimeField(null=True)
    # status: 可用 (available), 已使用 (used), 过期 (expired)
    status = CharField(default="可用")
    # template number
template_no = ForeignKeyField(CouponTemplate, related_name='coupon',
null=True)
# serial number
class SerialNumber(BaseModel):
code = CharField(primary_key=True)
store = ForeignKeyField(Store)
store_code = CharField()
category_code = CharField(default=None, null=True)
available = BooleanField(default=True)
appointment = ForeignKeyField(Appointment, null=True)
battery = ForeignKeyField(Battery, null=True)
# refund table
class RefundTable(BaseModel):
    user = ForeignKeyField(User)
    out_trade_no = CharField()  # merchant payment order number
    type = CharField()  # refund type: appointment fee refund or virtual card deposit refund
    value = FloatField()  # refund amount
    date = DateTimeField()  # date
    comment = CharField(default=None, null=True)  # remark
    status = CharField(default="未处理")  # status, e.g. 未处理 (pending) / 已退款 (refunded)
    # account = CharField()  # refund account
    # account_type = CharField()  # account type
# repair report table (an e-bike repair request)
class ReportTable(BaseModel):
    appointment = ForeignKeyField(Appointment)
    user = ForeignKeyField(User)
    address = CharField()  # repair address
comment = CharField()
phone = BigIntegerField(null=True)
date = DateTimeField()
class WxInfo(BaseModel):
key = CharField()
value = CharField()
date = DateTimeField(null=True)
expires_in = IntegerField(null=True)
class WxUser(BaseModel):
open_id = CharField()
access_token = CharField(default=None, null=True)
refresh_token = CharField(default=None, null=True)
date = DateTimeField(null=True)
expires_in = IntegerField(null=True)
# specific columns to be added later
class Logs(BaseModel):
start = DateTimeField()
end = DateTimeField()
request = JSONField()
response = JSONField()
    category = CharField()  # which category it belongs to, e.g. payment
status = CharField()
extra_info = JSONField()
class WxPayment(BaseModel):
out_trade_no = CharField(unique=True, max_length=32)
total_fee = IntegerField()
    status = CharField(default='NOTPAY')  # order status
appointment = ForeignKeyField(Appointment, related_name="wx_payment",
null=True)
openid = CharField()
attach = JSONField()
code = CharField()
table_list = [Const, Store, School, User, VirtualCard, EBikeModel,
Storage, EBike, Appointment, Battery, BatteryRecord,
BatteryReport, CouponTemplate, Coupon, SerialNumber,
RefundTable, ReportTable, WxInfo]
table_temp = [WxPayment]
def create_tables():
"""
:return: None
:rtype:
"""
return database.create_tables(table_temp, safe=True)
def drop_tables():
"""
:return: None
:rtype:
"""
return database.drop_tables(table_temp, safe=True)
def create_table(model):
"""
:param model:
:type model:
:return: <pymysql.cursors.Cursor object at 0x108d00828>
:rtype:
"""
return database.create_table(model)
def drop_table(model):
"""
:param model:
:type model:
:return: the cursor of the drop model
:rtype:
"""
return database.drop_table(model)
def recreate_tables():
drop_tables()
create_tables()
if __name__ == '__main__':
pass
# create_table(Storage)
# create_table(Appointment)
print(recreate_tables())
#
| apache-2.0 | -5,023,248,486,933,650,000 | 24.85422 | 78 | 0.645464 | false |
tmolteno/python-necpp | PyNEC/example/context_clean.py | 1 | 8139 | # Note: explicit zeroes are blanks. All other values should be specified symbolically.
# Currently these contain only the subset of cards that I needed
class Range(object):
def __init__(self, start, stop, count=None, delta=None):
self.start = start
self.stop = stop
if count is not None:
self.count = count
self.delta = (stop - start) / count
else:
            self.count = (stop - start) / delta
self.delta = delta
# Setting do_debug to True will dump all the cards generated with context_clean, so you can verify the output more easily in a text editor (and debug that file manually)
do_debug = False
def debug(card, *args):
if do_debug:
stringified = " , ".join([str(a) for a in args])
print("%s %s" % (card, stringified))
class context_clean(object):
def __init__(self, context):
self.context = context
def remove_all_loads(self):
ld_short_all_loads = -1
self.context.ld_card(ld_short_all_loads, 0, 0, 0, 0, 0, 0)
def set_wire_conductivity(self, conductivity, wire_tag=None):
""" The conductivity is specified in mhos/meter. Currently all segments of a wire are set. If wire_tag is None, all wire_tags are set (i.e., a tag of 0 is used). """
if wire_tag is None:
wire_tag = 0
debug("LD", 5, wire_tag, 0, 0, conductivity, 0, 0)
self.context.ld_card(5, wire_tag, 0, 0, conductivity, 0, 0)
def set_all_wires_conductivity(self, conductivity):
self.set_wire_conductivity(conductivity)
# TODO: multiplicative
def set_frequencies_linear(self, start_frequency, stop_frequency, count=None, step_size=None):
""" If start_frequency does not equal stop_frequency, either count or step should be specified. The other parameter will be automatically deduced """
if start_frequency == stop_frequency:
step_size = 0
count = 1
else:
# TODO: add some asserts
if count is not None:
step_size = (stop_frequency - start_frequency) / count
else:
count = (stop_frequency - start_frequency) / step_size
# TODO, what if we don't have nice divisibility here
count = int(count)
ifrq_linear_step = 0
debug("FR", ifrq_linear_step, count, start_frequency, step_size, 0, 0, 0)
self.context.fr_card(ifrq_linear_step, count, start_frequency, step_size)
def set_frequency(self, frequency):
self.set_frequencies_linear(frequency, frequency)
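    # A minimal usage sketch (hypothetical values; `nec_context` is assumed to come
    # from PyNEC): sweep a band in 20 linear steps, or pin a single frequency.
    #
    #   context = context_clean(nec_context())
    #   context.set_frequencies_linear(134.0, 174.0, count=20)
    #   context.set_frequency(146.0)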
def clear_ground(self):
gn_nullify_ground = -1
self.context.gn_card(gn_nullify_ground, 0, 0, 0, 0, 0, 0, 0)
# TODO: I could probably make a ground class, would probably be cleaner to group some of the options and different functions there (like combining ground screen etc)
# TODO: gn card is iffy, check!
def set_finite_ground(self, ground_dielectric, ground_conductivity):
gn_finite_ground = 0
no_ground_screen = 0
self.context.gn_card(gn_finite_ground, no_ground_screen, ground_dielectric, ground_conductivity, 0, 0, 0, 0)
def set_perfect_ground(self):
gn_perfectly_conducting = 1
no_ground_screen = 0
debug("GN", gn_perfectly_conducting, no_ground_screen, 0, 0, 0, 0, 0, 0)
self.context.gn_card(gn_perfectly_conducting, no_ground_screen, 0, 0, 0, 0, 0, 0)
# TODO: i1 = 5 is also a voltage excitation
def voltage_excitation(self, wire_tag, segment_nr, voltage):
ex_voltage_excitation = 0
no_action = 0 # TODO configurable
option_i3i4 = 10*no_action + no_action
debug("EX", ex_voltage_excitation, wire_tag, segment_nr, option_i3i4, voltage.real, voltage.imag, 0, 0, 0, 0)
self.context.ex_card(ex_voltage_excitation, wire_tag, segment_nr, option_i3i4, voltage.real, voltage.imag, 0, 0, 0, 0)
def get_geometry(self):
#return geometry_clean(self.context.get_geometry()) # TODO
return self.context.get_geometry()
def set_extended_thin_wire_kernel(self, enable):
if enable:
debug ("EK", 0)
self.context.set_extended_thin_wire_kernel(True)
else:
debug ("EK", -1)
self.context.set_extended_thin_wire_kernel(False)
def geometry_complete(self, ground_plane, current_expansion=True):
no_ground_plane = 0
ground_plane_current_expansion = 1
ground_plane_no_current_expansion = -1
if not ground_plane:
debug("GE", no_ground_plane)
self.context.geometry_complete(no_ground_plane)
else:
if current_expansion:
debug("GE", ground_plane_current_expansion)
self.context.geometry_complete(ground_plane_current_expansion)
else:
debug("GE", ground_plane_no_current_expansion)
self.context.geometry_complete(ground_plane_no_current_expansion)
output_major_minor = 0
output_vertical_horizontal = 1
normalization_none = 0
normalization_major = 1
normalization_minor = 2
normalization_vertical = 3
normalization_horizontal = 4
normalization_totalgain = 5
power_gain = 0
directive_gain = 1
average_none = 0
average_gain = 1
average_todo = 2
# TODO: this should be different for surface_wave_mode (1), because then thetas = z
def radiation_pattern(self, thetas, phis, output_mode=output_vertical_horizontal, normalization=normalization_none, gain=power_gain, average=average_todo):
""" thetas and phis should be Range(-like) objects """
normal_mode = 0 # TODO other modes
# the rp_card already has XNDA as separate arguments
radial_distance = 0 # TODO
gnornamize_maximum = 0 # TODO
xnda = average + 10*gain+100*normalization+1000*output_mode
debug("RP", normal_mode, thetas.count, phis.count, xnda, thetas.start, phis.start, thetas.delta, phis.delta, radial_distance, gnornamize_maximum)
self.context.rp_card(normal_mode, thetas.count, phis.count, output_mode, normalization, gain, average, thetas.start, phis.start, thetas.delta, phis.delta, radial_distance, gnornamize_maximum)
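    # A minimal usage sketch (hypothetical angles), using the Range helper defined
    # at the top of this file:
    #
    #   thetas = Range(0, 90, count=10)
    #   phis = Range(0, 360, count=36)
    #   context.radiation_pattern(thetas, phis)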
# TODO: shunt admittances, length of transmission line if not straight-line distance
def transmission_line(self, src, dst, impedance, crossed_line=False, length=None, shunt_admittance_src=0, shunt_admittance_dst=0):
""" src and dst are (tag_nr, segment_nr) pairs """
if crossed_line:
impedance *= -1
if length is None:
length = 0
shunt_admittance_src = complex(shunt_admittance_src)
shunt_admittance_dst = complex(shunt_admittance_dst)
debug("TL", src[0], src[1], dst[0], dst[1], impedance, length, shunt_admittance_src.real, shunt_admittance_src.imag, shunt_admittance_dst.real, shunt_admittance_dst.imag)
self.context.tl_card(src[0], src[1], dst[0], dst[1], impedance, length, shunt_admittance_src.real, shunt_admittance_src.imag, shunt_admittance_dst.real, shunt_admittance_dst.imag)
# Some simple wrappers for context...
# TODO: this should be simpler, can't this be auto-generated, or implicitly defined? The best solution is of course to do this in the C++ code,
# and then the wrappers are immediately correct and nice
def xq_card(self, *args):
return self.context.xq_card(*args)
def get_input_parameters(self, *args):
return self.context.get_input_parameters(*args)
class geometry_clean(object):
def __init__(self, geometry):
self.geometry = geometry
def wire(self, tag_id, nr_segments, src, dst, radius, length_ratio=1.0, radius_ratio=1.0):
""" radius is in meter. length_ratio can be set to have non-uniform segment lengths, radius_ratio can be used for tapered wires """
debug("GW", tag_id, nr_segments, src[0], src[1], src[2], dst[0], dst[1], dst[2], radius) # TODO
self.geometry.wire(tag_id, nr_segments, src[0], src[1], src[2], dst[0], dst[1], dst[2], radius, length_ratio, radius_ratio)
| gpl-2.0 | 5,148,461,539,862,816,000 | 43.233696 | 199 | 0.646885 | false |
dorotapalicova/GoldDigger | gold_digger/data_providers/yahoo.py | 1 | 2754 | # -*- coding: utf-8 -*-
from datetime import date
from ._provider import Provider
class Yahoo(Provider):
"""
Real-time service with no known limits offers only latest exchange rates.
Implicit base currency is USD.
"""
BASE_URL = "http://query.yahooapis.com/v1/public/yql"
PREPARED_YQL = "SELECT * FROM yahoo.finance.xchange WHERE pair IN ('{pairs}')"
params = {
"env": "store://datatables.org/alltableswithkeys",
"format": "json"
}
name = "yahoo"
def get_by_date(self, date_of_exchange, currency):
date_str = date_of_exchange.strftime(format="%Y-%m-%d")
self.logger.debug("Requesting Yahoo for %s (%s)", currency, date_str, extra={"currency": currency, "date": date_str})
if date_of_exchange == date.today():
return self._get_latest(currency)
def get_all_by_date(self, date_of_exchange, currencies):
if date_of_exchange == date.today():
return self._get_all_latest(currencies)
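    # A minimal usage sketch (hypothetical wiring; the real constructor arguments
    # come from the surrounding application via the Provider base class):
    #
    #   provider = Yahoo(...)
    #   rate = provider.get_by_date(date.today(), "EUR")            # Decimal or None
    #   rates = provider.get_all_by_date(date.today(), ["EUR", "GBP"])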
def _get_latest(self, currency, base_currency="USD"):
self.params["q"] = self.PREPARED_YQL.format(pairs="%s,%s" % (base_currency, currency))
response = self._post(self.BASE_URL, params=self.params)
if response:
rates = self._get_rates_from_response(response)
if len(rates) > 0:
return self._to_decimal(rates[0].get("Rate", ""), currency)
def _get_all_latest(self, currencies):
day_rates = {}
self.params["q"] = self.PREPARED_YQL.format(pairs=",".join(currencies))
response = self._post(self.BASE_URL, params=self.params)
for record in self._get_rates_from_response(response):
currency = record.get("id", "")
currency = currency[:3] if currency else currency
decimal_value = self._to_decimal(record.get("Rate", ""), currency)
if currency and decimal_value:
day_rates[currency] = decimal_value
return day_rates
def _get_rates_from_response(self, response):
"""
:returns: record with following structure
{
'Ask': '0.7579',
'Bid': '0.7579',
'Date': '9/14/2016',
'Name': 'USD/GBP',
'Rate': '0.7579',
'Time': '8:59am',
'id': 'GBP=X'
}
"""
if response:
try:
results = response.json()["query"]["results"]
return results["rate"] if results else []
except KeyError as e:
self.logger.error("%s - Accessing records failed: %s" % (self, e))
return []
def get_historical(self, origin_date, currencies):
return {}
def __str__(self):
return self.name
| apache-2.0 | -5,179,105,512,653,488,000 | 35.72 | 125 | 0.564996 | false |
reeshupatel/demo | keystone/tests/test_v3_os_revoke.py | 1 | 4841 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
import six
from keystone.common import dependency
from keystone.contrib.revoke import model
from keystone.openstack.common import timeutils
from keystone.tests import test_v3
from keystone.token import provider
def _future_time_string():
expire_delta = datetime.timedelta(seconds=1000)
future_time = timeutils.utcnow() + expire_delta
return timeutils.isotime(future_time)
@dependency.requires('revoke_api')
class OSRevokeTests(test_v3.RestfulTestCase):
EXTENSION_NAME = 'revoke'
EXTENSION_TO_ADD = 'revoke_extension'
def test_get_empty_list(self):
resp = self.get('/OS-REVOKE/events')
self.assertEqual([], resp.json_body['events'])
def _blank_event(self):
return {}
# The two values will be the same with the exception of
# 'issued_before' which is set when the event is recorded.
def assertReportedEventMatchesRecorded(self, event, sample, before_time):
after_time = timeutils.utcnow()
event_issued_before = timeutils.normalize_time(
timeutils.parse_isotime(event['issued_before']))
self.assertTrue(
before_time <= event_issued_before,
'invalid event issued_before time; %s is not later than %s.' % (
timeutils.isotime(event_issued_before, subsecond=True),
timeutils.isotime(before_time, subsecond=True)))
self.assertTrue(
event_issued_before <= after_time,
'invalid event issued_before time; %s is not earlier than %s.' % (
timeutils.isotime(event_issued_before, subsecond=True),
timeutils.isotime(after_time, subsecond=True)))
del (event['issued_before'])
self.assertEqual(sample, event)
def test_revoked_token_in_list(self):
user_id = uuid.uuid4().hex
expires_at = provider.default_expire_time()
sample = self._blank_event()
sample['user_id'] = six.text_type(user_id)
sample['expires_at'] = six.text_type(timeutils.isotime(expires_at,
subsecond=True))
before_time = timeutils.utcnow()
self.revoke_api.revoke_by_expiration(user_id, expires_at)
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
self.assertEqual(len(events), 1)
self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
def test_disabled_project_in_list(self):
project_id = uuid.uuid4().hex
sample = dict()
sample['project_id'] = six.text_type(project_id)
before_time = timeutils.utcnow()
self.revoke_api.revoke(
model.RevokeEvent(project_id=project_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
self.assertEqual(len(events), 1)
self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
def test_disabled_domain_in_list(self):
domain_id = uuid.uuid4().hex
sample = dict()
sample['domain_id'] = six.text_type(domain_id)
before_time = timeutils.utcnow()
self.revoke_api.revoke(
model.RevokeEvent(domain_id=domain_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
self.assertEqual(len(events), 1)
self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
def test_list_since_invalid(self):
self.get('/OS-REVOKE/events?since=blah', expected_status=400)
def test_list_since_valid(self):
resp = self.get('/OS-REVOKE/events?since=2013-02-27T18:30:59.999999Z')
events = resp.json_body['events']
self.assertEqual(len(events), 0)
def test_since_future_time_no_events(self):
domain_id = uuid.uuid4().hex
sample = dict()
sample['domain_id'] = six.text_type(domain_id)
self.revoke_api.revoke(
model.RevokeEvent(domain_id=domain_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
self.assertEqual(len(events), 1)
resp = self.get('/OS-REVOKE/events?since=%s' % _future_time_string())
events = resp.json_body['events']
self.assertEqual([], events)
| apache-2.0 | -3,278,505,571,250,286,000 | 38.040323 | 79 | 0.646767 | false |
mantasurbonas/angis-poc-sudoku | src/spausdintojas.py | 1 | 1058 | iš sudoku_lenta reikalinga SudokuLenta
iš taisykles reikalingas Sprendėjas
klasė Spausdintojas:
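    # Spausdintojas ("Printer"): renders a Sudoku board, solver statistics, or any
    # other value, depending on the argument type passed to spausdink() ("print").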
@staticmethod
tai spausdinkSudokuLentą(lenta):
res = ""
imk eilnr iš intervalo (0, 9):
imk stulpnr iš intervalo (0, 9):
l = lenta.langeliai[eilnr, stulpnr]
jeigu l.arIšspręstas():
res += " " + str(l.sprendimas) + " "
kituatveju:
res += " (" +str(len(l.kandidatai)) +") "
res += "\n"
rašyk(res)
@staticmethod
tai spausdinkStatistiką(stat):
rašyk ("išspręsta per", stat.iteracija, "iteracijų")
rašyk ("truko", stat.trukmė * 1000, "msec")
@staticmethod
tai spausdink(kažkas):
jeigu isinstance(kažkas, SudokuLenta ):
Spausdintojas.spausdinkSudokuLentą(kažkas)
ojeigu isinstance(kažkas, Sprendėjas.Statistika ):
Spausdintojas.spausdinkStatistiką(kažkas)
kituatveju:
rašyk(kažkas)
| gpl-2.0 | 8,243,370,010,085,592,000 | 26.864865 | 61 | 0.5645 | false |
brandonlee503/Algorithms-Assignments | Practice Assignment 2/HW2Q3.py | 1 | 1088 | def algorithm3(arr):
maxLength = 0
if len(arr) <= 0:
return 0
if len(arr) == 1:
return 1
for i in arr:
print "i is: " + str(i)
temp = [arr[i-1]]
for j in range(len(arr) - i):
spliceArrayJ = arr[j+1:]
print "reversed array: "
reversedArray = spliceArrayJ[::-1]
print reversedArray
print "This is temp: " + str(temp)
print "This is tempSize: " + str(len(temp))
print "if temp in reversedArray is: " + str(isSublist(temp, reversedArray))
print "if len(temp) > maxLength is: " + str(len(temp) > maxLength)
if isSublist(temp, reversedArray) and len(temp) > maxLength: #temp in reversedArray
maxLength = len(temp)
#maxLength = reversedArray.count(temp)
print "DAAAAA MATX LENGTH: " + str(maxLength)
temp.append(arr[i + j])
print "maxLength is: " + str(maxLength)
return maxLength
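# isSublist(a, b): True when list a occurs as a contiguous run inside list b,
# checked recursively by comparing a against successive prefix slices of b.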
def isSublist(a, b):
if a == []: return True
if b == []: return False
return b[:len(a)] == a or isSublist(a, b[1:])
#algorithm3([1,2,3,4,5,6,7,8,9])
#algorithm3([1,2,1,4,5,1,2,1])
#algorithm3([1,3,4,1,1])
#print isSublist([1,2],[1,2,3]) | mit | -7,528,026,928,750,685,000 | 26.225 | 86 | 0.624081 | false |
gjo/botocore_paste | setup.py | 1 | 1263 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import find_packages, setup
description = 'Adapt botocore to paste'
here = os.path.abspath(os.path.dirname(__file__))
try:
readme = open(os.path.join(here, 'README.rst')).read()
changes = open(os.path.join(here, 'CHANGES.txt')).read()
long_description = '\n\n'.join([readme, changes])
except:
long_description = description
install_requires = [
'botocore',
]
tests_require = [
]
setup(
name='botocore_paste',
version='0.2',
description=description,
long_description=long_description,
author='OCHIAI, Gouji',
author_email='[email protected]',
url='https://github.com/gjo/botocore_paste',
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
test_suite='botocore_paste',
tests_require=tests_require,
extras_require={
'testing': tests_require,
},
classifiers=[
'Framework :: Paste',
'Framework :: Pyramid',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
],
)
| bsd-3-clause | 60,953,616,441,704,910 | 22.830189 | 60 | 0.62787 | false |
JioCloud/oslo-incubator | pypi/setup.py | 1 | 1560 | #!/usr/bin/python
# -*- encoding: utf-8 -*-
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
name='oslo',
version='1',
description="Namespace for common components for Openstack",
long_description="Namespace for common components for Openstack",
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Environment :: No Input/Output (Daemon)',
'Environment :: OpenStack',
],
keywords='openstack',
author='OpenStack',
author_email='[email protected]',
url='http://www.openstack.org/',
license='Apache Software License',
zip_safe=True,
packages=setuptools.find_packages(exclude=['ez_setup',
'examples', 'tests']),
namespace_packages=['oslo'],
)
| apache-2.0 | 9,090,687,576,045,347,000 | 35.27907 | 69 | 0.663462 | false |
olitheolix/qtmacs | doc/source/code_snippets/tut_3_7.py | 1 | 1962 | """
Tutorial 3: Applet- and macro signatures.
"""
# Import the module with the global variables and the macro base class.
import qtmacs.qte_global as qte_global
from qtmacs.base_macro import QtmacsMacro
from qtmacs.base_applet import QtmacsApplet
from PyQt4 import QtGui
# Get a reference to the main instance of Qtmacs.
qteMain = qte_global.qteMain
class TutorialMulti(QtmacsApplet):
"""
An applet with multiple widgets.
"""
def __init__(self, appletID):
# Initialise the base class.
super().__init__(appletID)
# Instantiate three QLineEdit objects.
line1 = QtGui.QLineEdit(self)
line2 = QtGui.QLineEdit(self)
line3 = QtGui.QLineEdit(self)
# Register them with Qtmacs.
self.qteLine1 = self.qteAddWidget(line1)
self.qteLine2 = self.qteAddWidget(line2, autoBind=False)
self.qteLine3 = self.qteAddWidget(line3, widgetSignature='custom')
        # Register the macro used with the line edit widgets below.
self.macroName = qteMain.qteRegisterMacro(DemoMacroLineEdit)
# Bind the 'e' key to all widgets.
qteMain.qteBindKeyWidget('e', self.macroName, self.qteLine1)
qteMain.qteBindKeyWidget('e', self.macroName, self.qteLine2)
qteMain.qteBindKeyWidget('e', self.macroName, self.qteLine3)
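        # Note: the macro below declares a 'QLineEdit' widget signature, so the
        # 'e' binding is presumably only honoured for widgets registered with a
        # matching signature; qteLine3 was registered with the 'custom'
        # signature and is therefore not expected to trigger it.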
class DemoMacroLineEdit(QtmacsMacro):
"""
    Insert the string '|LineEdit|' into a QLineEdit.
|Signature|
* *applet*: '*'
* *widget*: 'QLineEdit'
"""
def __init__(self):
super().__init__()
self.qteSetAppletSignature('*')
self.qteSetWidgetSignature('QLineEdit')
def qteRun(self):
self.qteWidget.insert('|LineEdit|')
# Register the applet with Qtmacs, create an instance of it,
# and make it active immediately.
app_name = qteMain.qteRegisterApplet(TutorialMulti)
app_obj = qteMain.qteNewApplet(app_name)
qteMain.qteMakeAppletActive(app_obj)
| gpl-3.0 | 5,176,266,747,123,214,000 | 29.184615 | 74 | 0.67788 | false |
steven-cutting/latinpigsay | data/text/samples.py | 1 | 6123 | # -*- coding: utf-8 -*-
__title__ = 'latinpigsay'
__license__ = 'MIT'
__author__ = 'Steven Cutting'
__author_email__ = '[email protected]'
__created_on__ = '12/3/2014'
"""
Created on Wed Dec 3 17:36:17 2014
@author: steven_c
"""
acidtest = """Can you talk piglatin to piglatin.
"""
quotes = """A Tale of Two Cities LITE(tm)
-- by Charles Dickens
A lawyer who looks like a French Nobleman is executed in his place.
The Metamorphosis LITE(tm)
-- by Franz Kafka
A man turns into a bug and his family gets annoyed.
Lord of the Rings LITE(tm)
-- by J. R. R. Tolkien
Some guys take a long vacation to throw a ring into a volcano.
Hamlet LITE(tm)
-- by Wm. Shakespeare
A college student on vacation with family problems, a screwy
girl-friend and a mother who won't act her age.
"""
paragraphs = """For many people (myself among them), the Python language is easy to fall in love with.
Since its first appearance in 1991, Python has become one of the most popular dynamic,
programming languages, along with Perl, Ruby, and others. Python and Ruby have
become especially popular in recent years for building websites using their numerous
web frameworks, like Rails (Ruby) and Django (Python). Such languages are often
called scripting languages as they can be used to write quick-and-dirty small programs,
or scripts. I don’t like the term “scripting language” as it carries a connotation that they
cannot be used for building mission-critical software. Among interpreted languages
Python is distinguished by its large and active scientific computing community.
Adoption of Python for scientific computing in both industry applications and academic
research has increased significantly since the early 2000s.
For data analysis and interactive, exploratory computing and data visualization, Python
will inevitably draw comparisons with the many other domain-specific open source
and commercial programming languages and tools in wide use, such as R, MATLAB,
SAS, Stata, and others. In recent years, Python’s improved library support (primarily
pandas) has made it a strong alternative for data manipulation tasks. Combined with
Python’s strength in general purpose programming, it is an excellent choice as a single
language for building data-centric applications.
"""
simplepgs = """Simple test.
Paragraphs. test.
Derp, derp a.
Simple test.
Let's sentence ma'am let's full of ain't contractions I'm i'm couldn't've I'd.
Fred's stapler.
Fred's going to the movie.
O'clock o'clock.
Paragraphs. test.
Derp, derp.
"""
contsentence = "Let's sentence ma'am let's full of ain't contractions I'm i'm couldn't've I'd."
sentence = 'If capture groups are used, then the matched text is also included in the result.'
listofwords = ['Pig Latin',
'hello',
'switch',
'glove',
'fruit smoothie',
'egg',
'ultimate',
'I',
'yellow',
'my',
'rhythm',
'436',
'5',
]
txt = """
The Gettysburg Address
Four score and seven years ago our fathers brought forth on this continent,
a new nation, conceived in Liberty, and dedicated to the proposition that all
men are created equal.
Now we are engaged in a great civil war, testing whether that nation, or any
nation so conceived and so dedicated, can long endure. We are met on a great
battlefield of that war. We have come to dedicate a portion of that field, as
a final resting place for those who here gave their lives that that nation
might live. It is altogether fitting and proper that we should do this.
But, in a larger sense, we cannot dedicate - we cannot consecrate - we cannot
hallow - this ground. The brave men, living and dead, who struggled here, have
consecrated it, far above our poor power to add or detract. The world will
little note, nor long remember what we say here, but it can never forget what
they did here. It is for us the living, rather, to be dedicated here to the
unfinished work which they who fought here have thus far so nobly advanced.
It is rather for us to be here dedicated to the great task remaining before
us - that from these honored dead we take increased devotion to that cause for
which they gave the last full measure of devotion - that we here highly resolve
that these dead shall not have died in vain - that this nation, under God,
shall have a new birth of freedom - and that government of the people, by
the people, for the people, shall not perish from the earth.
"""
paragraphs_og = """For many people (myself among them), the Python language is easy to fall in love with.
Since its first appearance in 1991, Python has become one of the most popular dynamic,
programming languages, along with Perl, Ruby, and others. Python and Ruby have
become especially popular in recent years for building websites using their numerous
web frameworks, like Rails (Ruby) and Django (Python). Such languages are often
called scripting languages as they can be used to write quick-and-dirty small programs,
or scripts. I don’t like the term “scripting language” as it carries a connotation that they
cannot be used for building mission-critical software. Among interpreted languages
Python is distinguished by its large and active scientific computing community. Adop-
tion of Python for scientific computing in both industry applications and academic
research has increased significantly since the early 2000s.
For data analysis and interactive, exploratory computing and data visualization, Python
will inevitably draw comparisons with the many other domain-specific open source
and commercial programming languages and tools in wide use, such as R, MATLAB,
SAS, Stata, and others. In recent years, Python’s improved library support (primarily
pandas) has made it a strong alternative for data manipulation tasks. Combined with
Python’s strength in general purpose programming, it is an excellent choice as a single
language for building data-centric applications.
"""
| mit | -1,094,194,312,728,751,500 | 42.283688 | 105 | 0.74406 | false |
aronparsons/spacewalk | backend/satellite_tools/satsync.py | 1 | 101969 | #
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# Spacewalk Incremental Synchronization Tool
# main function.
# __lang. imports__
import datetime
import os
import sys
import stat
import time
import types
import exceptions
import Queue
import threading
from optparse import Option, OptionParser
from rhn.connections import idn_ascii_to_puny, idn_puny_to_unicode
import gettext
translation = gettext.translation('spacewalk-backend-server', fallback=True)
_ = translation.ugettext
# __rhn imports__
from spacewalk.common import rhnMail, rhnLib, rhnFlags
from spacewalk.common.rhnLog import initLOG
from spacewalk.common.rhnConfig import CFG, initCFG, PRODUCT_NAME
from spacewalk.common.rhnTB import exitWithTraceback
sys.path.append("/usr/share/rhn")
from up2date_client import config
from spacewalk.common.checksum import getFileChecksum
from spacewalk.server import rhnSQL
from spacewalk.server.rhnSQL import SQLError, SQLSchemaError, SQLConnectError
from spacewalk.server.rhnServer import satellite_cert
from spacewalk.server.rhnLib import get_package_path
from spacewalk.common import fileutils
initCFG('server.satellite')
initLOG(CFG.LOG_FILE, CFG.DEBUG)
# __rhn sync/import imports__
import xmlWireSource
import xmlDiskSource
from progress_bar import ProgressBar
from xmlSource import FatalParseException, ParseException
from diskImportLib import rpmsPath
from syncLib import log, log2, log2disk, log2stderr, log2email
from syncLib import RhnSyncException, RpmManip, ReprocessingNeeded
from syncLib import initEMAIL_LOG, dumpEMAIL_LOG
from syncLib import FileCreationError, FileManip
from SequenceServer import SequenceServer
from spacewalk.server.importlib.errataCache import schedule_errata_cache_update
from spacewalk.server.importlib.importLib import InvalidChannelFamilyError
from spacewalk.server.importlib.importLib import MissingParentChannelError
from spacewalk.server.importlib.importLib import get_nevra, get_nevra_dict
import satCerts
import req_channels
import messages
import sync_handlers
import constants
_DEFAULT_SYSTEMID_PATH = '/etc/sysconfig/rhn/systemid'
_DEFAULT_RHN_ENTITLEMENT_CERT_BACKUP = '/etc/sysconfig/rhn/rhn-entitlement-cert.xml'
DEFAULT_ORG = 1
# the option object is used everywhere in this module... make it a
# global so we don't have to pass it to everyone.
OPTIONS = None
# pylint: disable=W0212
class Runner:
step_precedence = {
'packages': ['download-packages'],
'source-packages': ['download-source-packages'],
'errata': ['download-errata'],
'kickstarts': ['download-kickstarts'],
'rpms': [''],
'srpms': [''],
'channels': ['channel-families'],
'channel-families': [''],
'short': [''],
'download-errata': ['errata'],
'download-packages': [''],
'download-source-packages': [''],
'download-kickstarts': [''],
'arches': [''], # 5/26/05 wregglej 156079 Added arches to precedence list.
'orgs': [''],
}
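    # step_precedence (above) maps each step to the dependent steps that become
    # pointless when it is skipped; _turn_off_dependents() consults it to
    # disable those dependents.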
# The step hierarchy. We need access to it both for command line
# processing and for the actions themselves
step_hierarchy = [
'orgs',
'channel-families',
'arches',
'channels',
'short',
'download-packages',
'rpms',
'packages',
'srpms',
'download-source-packages',
'download-errata',
'download-kickstarts',
'source-packages',
'errata',
'kickstarts',
]
def __init__(self):
self.syncer = None
self.packages_report = None
self._xml_file_dir_error_message = ''
self._affected_channels = None
self._packages_report = None
self._actions = None
    # 5/24/05 wregglej - 156079 turn off a step's dependents if the step is turned off.
def _handle_step_dependents(self, actionDict, step):
ad = actionDict
if ad.has_key(step):
# if the step is turned off, then the steps that are dependent on it have to be turned
# off as well.
if ad[step] == 0:
ad = self._turn_off_dependents(ad, step)
        # if the step isn't in the actionDict, then its dependent actions must be turned off.
else:
ad = self._turn_off_dependents(ad, step)
return ad
# 5/24/05 wregglej - 156079 actually turns off the dependent steps, which are listed in the step_precedence
# dictionary.
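    # Example: with actionDict['errata'] == 0, step_precedence['errata'] lists
    # 'download-errata', so that entry is forced to 0 as well.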
def _turn_off_dependents(self, actionDict, step):
ad = actionDict
for dependent in self.step_precedence[step]:
if ad.has_key(dependent):
ad[dependent] = 0
return ad
def main(self):
"""Main routine: commandline processing, etc..."""
# let's time the whole process
timeStart = time.time()
actionDict, channels = processCommandline()
# 5/24/05 wregglej - 156079 turn off an step's dependent steps if it's turned off.
# look at self.step_precedence for a listing of how the steps are dependent on each other.
for st in self.step_hierarchy:
actionDict = self._handle_step_dependents(actionDict, st)
self._actions = actionDict
# 5/26/05 wregglej - 156079 have to handle the list-channels special case.
if actionDict.has_key('list-channels'):
if actionDict['list-channels'] == 1:
actionDict['channels'] = 1
actionDict['arches'] = 0
actionDict['channel-families'] = 1
channels = []
# create and set permissions for package repository mountpoint.
_verifyPkgRepMountPoint()
if OPTIONS.email:
initEMAIL_LOG()
# init the synchronization processor
self.syncer = Syncer(channels, actionDict['list-channels'], actionDict['rpms'],
forceAllErrata=actionDict['force-all-errata'])
try:
self.syncer.initialize()
except (KeyboardInterrupt, SystemExit):
raise
except xmlWireSource.rpclib.xmlrpclib.Fault, e:
if CFG.ISS_PARENT:
if CFG.PRODUCT_NAME == 'Spacewalk':
log(-1, ['', messages.sw_iss_not_available % e.faultString], )
else:
log(-1, ['', messages.sat_iss_not_available % e.faultString], )
sys.exit(26)
else:
log(-1, ['', messages.syncer_error % e.faultString], )
sys.exit(9)
except Exception, e: # pylint: disable=E0012, W0703
log(-1, ['', messages.syncer_error % e], )
sys.exit(10)
log(1, ' db: %s/<password>@%s' % (CFG.DB_USER, CFG.DB_NAME))
selected = [action for action in actionDict.keys() if actionDict[action]]
log2(-1, 3, "Action list/commandline toggles: %s" % repr(selected),
stream=sys.stderr)
if OPTIONS.mount_point:
self._xml_file_dir_error_message = messages.file_dir_error % \
OPTIONS.mount_point
for _try in range(2):
try:
for step in self.step_hierarchy:
if not actionDict[step]:
continue
method_name = '_step_' + step.replace('-', '_')
if not hasattr(self, method_name):
log(-1, _("No handler for step %s") % step)
continue
method = getattr(self, method_name)
ret = method()
if ret:
sys.exit(ret)
else: # for
# Everything went fine
break
except ReprocessingNeeded:
# Try one more time - this time it should be faster since
# everything should be cached
log(1, _('Environment changed, trying again...'))
continue
except RhnSyncException:
rhnSQL.rollback()
raise
else:
log(1, _('Repeated failures'))
timeEnd = time.time()
delta_str = self._get_elapsed_time(timeEnd - timeStart)
log(1, _("""\
Import complete:
Begin time: %s
End time: %s
Elapsed: %s
""") % (formatDateTime(dt=time.localtime(timeStart)),
formatDateTime(dt=time.localtime(timeEnd)),
delta_str),
cleanYN=1)
# mail out that log if appropriate
sendMail()
return 0
@staticmethod
def _get_elapsed_time(elapsed):
elapsed = int(elapsed)
hours = elapsed / 60 / 60
mins = elapsed / 60 - hours * 60
secs = elapsed - mins * 60 - hours * 60 * 60
delta_list = [[hours, _("hours")], [mins, _("minutes")], [secs, _("seconds")]]
delta_str = ", ".join(["%s %s" % (l[0], l[1]) for l in delta_list])
return delta_str
def _run_syncer_step(self, function, step_name):
""" Runs a function, and catches the most common error cases """
try:
ret = function()
except (xmlDiskSource.MissingXmlDiskSourceDirError,
xmlDiskSource.MissingXmlDiskSourceFileError), e:
log(-1, self._xml_file_dir_error_message +
'\n Error message: %s\n' % e)
return 1
except (KeyboardInterrupt, SystemExit):
raise
except xmlWireSource.rpclib.xmlrpclib.Fault, e:
log(-1, messages.failed_step % (step_name, e.faultString))
return 1
except Exception, e: # pylint: disable=E0012, W0703
log(-1, messages.failed_step % (step_name, e))
return 1
return ret
def _step_arches(self):
self.syncer.processArches()
def _step_channel_families(self):
self.syncer.processChannelFamilies()
# Sync the certificate (and update channel family permissions)
if not CFG.ISS_PARENT:
self.syncer.syncCert()
def _step_channels(self):
try:
self.syncer.process_channels()
except MissingParentChannelError, e:
msg = messages.parent_channel_error % repr(e.channel)
log(-1, msg)
# log2email(-1, msg) # redundant
sendMail()
return 1
def _step_short(self):
try:
return self.syncer.processShortPackages()
except xmlDiskSource.MissingXmlDiskSourceFileError:
msg = _("ERROR: The dump is missing package data, "
+ "use --no-rpms to skip this step or fix the content to include package data.")
log2disk(-1, msg)
log2stderr(-1, msg, cleanYN=1)
sys.exit(25)
def _step_download_packages(self):
return self.syncer.download_package_metadata()
def _step_download_source_packages(self):
return self.syncer.download_source_package_metadata()
def _step_rpms(self):
self._packages_report = self.syncer.download_rpms()
return None
# def _step_srpms(self):
# return self.syncer.download_srpms()
def _step_download_errata(self):
return self.syncer.download_errata()
def _step_download_kickstarts(self):
return self.syncer.download_kickstarts()
def _step_packages(self):
self._affected_channels = self.syncer.import_packages()
# def _step_source_packages(self):
# self.syncer.import_packages(sources=1)
def _step_errata(self):
self.syncer.import_errata()
# Now that errata have been populated, schedule an errata cache
# refresh
schedule_errata_cache_update(self._affected_channels)
def _step_kickstarts(self):
self.syncer.import_kickstarts()
def _step_orgs(self):
try:
self.syncer.import_orgs()
except (RhnSyncException, xmlDiskSource.MissingXmlDiskSourceFileError,
xmlDiskSource.MissingXmlDiskSourceDirError):
# the orgs() method doesn't exist; that's fine we just
# won't sync the orgs
log(1, [_("The upstream Satellite does not support syncing orgs data."), _("Skipping...")])
def sendMail(forceEmail=0):
""" Send email summary """
if forceEmail or (OPTIONS is not None and OPTIONS.email):
body = dumpEMAIL_LOG()
if body:
print _("+++ sending log as an email +++")
headers = {
'Subject': _('RHN Management Satellite sync. report from %s') % idn_puny_to_unicode(os.uname()[1]),
}
sndr = "root@%s" % idn_puny_to_unicode(os.uname()[1])
rhnMail.send(headers, body, sender=sndr)
else:
print _("+++ email requested, but there is nothing to send +++")
# mail was sent. Let's not allow it to be sent twice...
OPTIONS.email = None
class Syncer:
""" high-level sychronization/import class
NOTE: there should *ONLY* be one instance of this.
"""
def __init__(self, channels, listChannelsYN, check_rpms, forceAllErrata=False):
""" Base initialization. Most work done in self.initialize() which
needs to be called soon after instantiation.
"""
self._requested_channels = channels
self.mountpoint = OPTIONS.mount_point
self.rhn_cert = OPTIONS.rhn_cert
self.listChannelsYN = listChannelsYN
self.forceAllErrata = forceAllErrata
self.sslYN = not OPTIONS.no_ssl
self._systemidPath = OPTIONS.systemid or _DEFAULT_SYSTEMID_PATH
self._batch_size = OPTIONS.batch_size
self.master_label = OPTIONS.master
#self.create_orgs = OPTIONS.create_missing_orgs
self.xml_dump_version = OPTIONS.dump_version or str(constants.PROTOCOL_VERSION)
self.check_rpms = check_rpms
self.keep_rpms = OPTIONS.keep_rpms
# Object to help with channel math
self._channel_req = None
self._channel_collection = sync_handlers.ChannelCollection()
self.containerHandler = sync_handlers.ContainerHandler(
self.master_label)
# instantiated in self.initialize()
self.xmlDataServer = None
self.systemid = None
# self._*_full hold list of all ids for appropriate channel while
# non-full self._* contain every id only once (in first channel it appeared)
self._channel_packages = {}
self._channel_packages_full = {}
self._avail_channel_packages = {}
self._missing_channel_packages = None
self._missing_fs_packages = None
self._failed_fs_packages = Queue.Queue()
self._extinct_packages = Queue.Queue()
self._channel_errata = {}
self._missing_channel_errata = {}
self._channel_source_packages = {}
self._channel_source_packages_full = {}
self._channel_kickstarts = {}
self._avail_channel_source_packages = None
self._missing_channel_src_packages = None
self._missing_fs_source_packages = None
def initialize(self):
"""Initialization that requires IO, etc."""
# Sync from filesystem:
if self.mountpoint:
log(1, [_(PRODUCT_NAME + ' - file-system synchronization'),
' mp: %s' % self.mountpoint])
self.xmlDataServer = xmlDiskSource.MetadataDiskSource(self.mountpoint)
# Sync across the wire:
else:
self.xmlDataServer = xmlWireSource.MetadataWireSource(self.systemid,
self.sslYN, self.xml_dump_version)
if CFG.ISS_PARENT:
sync_parent = CFG.ISS_PARENT
self.systemid = 'N/A' # systemid is not used in ISS auth process
is_iss = 1
elif not CFG.DISCONNECTED:
sync_parent = CFG.RHN_PARENT
is_iss = 0
else:
log(1, _(PRODUCT_NAME + ' - live synchronization'))
log(-1, _("ERROR: Can't use live synchronization in disconnected mode."), stream=sys.stderr)
sys.exit(1)
url = self.xmlDataServer.schemeAndUrl(sync_parent)
log(1, [_(PRODUCT_NAME + ' - live synchronization'),
_(' url: %s') % url,
_(' debug/output level: %s') % CFG.DEBUG])
self.xmlDataServer.setServerHandler(isIss=is_iss)
if not self.systemid:
# check and fetch systemid (NOTE: systemid kept in memory... may or may not
# be better to do it this way).
if (os.path.exists(self._systemidPath)
and os.access(self._systemidPath, os.R_OK)):
self.systemid = open(self._systemidPath, 'rb').read()
else:
raise RhnSyncException, _('ERROR: this server must be registered with RHN.'), sys.exc_info()[2]
# authorization check of the satellite
auth = xmlWireSource.AuthWireSource(self.systemid, self.sslYN,
self.xml_dump_version)
auth.checkAuth()
def __del__(self):
self.containerHandler.close()
def _process_simple(self, remote_function_name, step_name):
""" Wrapper function that can process metadata that is relatively
simple. This does the decoding of data (over the wire or from
disk).
step_name is just for pretty printing the actual --step name to
the console.
The remote function is passed by name (as a string), to mimic the
lazy behaviour of the if block
"""
log(1, ["", _("Retrieving / parsing %s data") % step_name])
# get XML stream
stream = None
method = getattr(self.xmlDataServer, remote_function_name)
stream = method()
# parse/process XML stream
try:
self.containerHandler.process(stream)
except KeyboardInterrupt:
log(-1, _('*** SYSTEM INTERRUPT CALLED ***'), stream=sys.stderr)
raise
except (FatalParseException, ParseException, Exception), e: # pylint: disable=E0012, W0703
# nuke the container batch upon error!
self.containerHandler.clear()
msg = ''
if isinstance(e, FatalParseException):
msg = (_('ERROR: fatal parser exception occurred ') +
_('(line: %s, col: %s msg: %s)') % (
e.getLineNumber(), e.getColumnNumber(),
e._msg))
elif isinstance(e, ParseException):
msg = (_('ERROR: parser exception occurred: %s') % (e))
elif isinstance(e, exceptions.SystemExit):
log(-1, _('*** SYSTEM INTERRUPT CALLED ***'), stream=sys.stderr)
raise
else:
msg = _('ERROR: exception (during parse) occurred: ')
log2stderr(-1, _(' Encountered some errors with %s data '
+ '(see logs (%s) for more information)') % (step_name, CFG.LOG_FILE))
log2(-1, 3, [_(' Encountered some errors with %s data:') % step_name,
_(' ------- %s PARSE/IMPORT ERROR -------') % step_name,
' %s' % msg,
_(' ---------------------------------------')], stream=sys.stderr)
exitWithTraceback(e, '', 11)
self.containerHandler.reset()
log(1, _("%s data complete") % step_name)
def processArches(self):
self._process_simple("getArchesXmlStream", "arches")
self._process_simple("getArchesExtraXmlStream", "additional arches")
def import_orgs(self):
self._process_simple("getOrgsXmlStream", "orgs")
def syncCert(self):
"sync the Red Hat Satellite cert if applicable (to local DB & filesystem)"
store_cert = True
if self.mountpoint:
if self.rhn_cert:
# Certificate was presented on the command line
try:
cert = open(self.rhn_cert).read()
except IOError, e:
raise RhnSyncException(_("Unable to open file %s: %s") % (
self.rhn_cert, e)), None, sys.exc_info()[2]
cert = cert.strip()
else:
# Try to retrieve the certificate from the database
row = satCerts.retrieve_db_cert()
if row is None:
raise RhnSyncException(_("No certificate found. "
"Please use --rhn-cert"))
cert = row['cert']
store_cert = False
else:
log2(1, 3, ["", _("RHN Entitlement Certificate sync")])
certSource = xmlWireSource.CertWireSource(self.systemid, self.sslYN,
self.xml_dump_version)
cert = certSource.download().strip()
return self._process_cert(cert, store_cert)
@staticmethod
def _process_cert(cert, store_cert=1):
"""Give the cert a check - if it's broken xml we'd better find it out
now
"""
log2(1, 4, _(" - parsing for sanity"))
sat_cert = satellite_cert.SatelliteCert()
try:
sat_cert.load(cert)
except satellite_cert.ParseException:
# XXX figure out what to do
raise RhnSyncException(_("Error parsing the satellite cert")), None, sys.exc_info()[2]
# pylint: disable=E1101
# Compare certificate generation - should match the stream's
generation = rhnFlags.get('stream-generation')
if sat_cert.generation != generation:
raise RhnSyncException(_("""\
Unable to import certificate:
channel dump generation %s incompatible with cert generation %s.
Please contact your RHN representative""") % (generation, sat_cert.generation))
satCerts.set_slots_from_cert(sat_cert, testonly=True)
# push it into the database
log2(1, 4, _(" - syncing to local database"))
# possible place for bug 146395
# Populate channel family permissions
sync_handlers.populate_channel_family_permissions(sat_cert)
# Get rid of the extra channel families
sync_handlers.purge_extra_channel_families()
if store_cert:
# store it! (does a commit)
# XXX bug 146395
satCerts.storeRhnCert(cert)
# Fix the channel family counts now
sync_handlers.update_channel_family_counts()
if store_cert:
# save it to disk
log2(1, 4, _(" - syncing to disk %s") %
_DEFAULT_RHN_ENTITLEMENT_CERT_BACKUP)
fileutils.rotateFile(_DEFAULT_RHN_ENTITLEMENT_CERT_BACKUP, depth=5)
open(_DEFAULT_RHN_ENTITLEMENT_CERT_BACKUP, 'wb').write(cert)
log2(1, 3, _("RHN Entitlement Certificate sync complete"))
def processChannelFamilies(self):
self._process_simple("getChannelFamilyXmlStream", "channel-families")
# pylint: disable=W0703
try:
self._process_simple("getProductNamesXmlStream", "product names")
except Exception:
pass
def _process_comps(self, backend, label, timestamp):
comps_path = 'rhn/comps/%s/comps-%s.xml' % (label, timestamp)
full_path = os.path.join(CFG.MOUNT_POINT, comps_path)
if not os.path.exists(full_path):
if self.mountpoint or CFG.ISS_PARENT:
stream = self.xmlDataServer.getComps(label)
else:
rpmServer = xmlWireSource.RPCGetWireSource(self.systemid, self.sslYN, self.xml_dump_version)
stream = rpmServer.getCompsFileStream(label)
f = FileManip(comps_path, timestamp, None)
f.write_file(stream)
data = {label: None}
backend.lookupChannels(data)
rhnSQL.Procedure('rhn_channel.set_comps')(data[label]['id'], comps_path, timestamp)
def process_channels(self):
""" push channels, channel-family and dist. map information
as well upon parsing.
"""
log(1, ["", _("Retrieving / parsing channel data")])
h = sync_handlers.get_channel_handler()
# get channel XML stream
stream = self.xmlDataServer.getChannelXmlStream()
if self.mountpoint:
for substream in stream:
h.process(substream)
doEOSYN = 0
else:
h.process(stream)
doEOSYN = 1
h.close()
# clean up the channel request and populate self._channel_request
# This essentially determines which channels are to be imported
self._compute_channel_request()
# print out the relevant channel tree
# 3/6/06 wregglej 183213 Don't print out the end-of-service message if
# satellite-sync is running with the --mount-point (-m) option. If it
# did, it would incorrectly list channels as end-of-service if they had been
# synced already but aren't in the channel dump.
self._printChannelTree(doEOSYN=doEOSYN)
if self.listChannelsYN:
# We're done here
return
requested_channels = self._channel_req.get_requested_channels()
try:
importer = sync_handlers.import_channels(requested_channels,
orgid=OPTIONS.orgid or None,
master=OPTIONS.master or None)
for label in requested_channels:
timestamp = self._channel_collection.get_channel_timestamp(label)
ch = self._channel_collection.get_channel(label, timestamp)
if ch.has_key('comps_last_modified') and ch['comps_last_modified'] is not None:
self._process_comps(importer.backend, label, sync_handlers._to_timestamp(ch['comps_last_modified']))
except InvalidChannelFamilyError:
raise RhnSyncException(messages.invalid_channel_family_error %
''.join(requested_channels)), None, sys.exc_info()[2]
except MissingParentChannelError:
raise
rhnSQL.commit()
log(1, _("Channel data complete"))
@staticmethod
def _formatChannelExportType(channel):
"""returns pretty formated text with type of channel export"""
if 'export-type' not in channel or channel['export-type'] is None:
return ''
else:
export_type = channel['export-type']
if 'export-start-date' in channel and channel['export-start-date'] is not None:
start_date = channel['export-start-date']
else:
start_date = ''
if 'export-end-date' in channel and channel['export-end-date'] is not None:
end_date = channel['export-end-date']
else:
end_date = ''
if end_date and not start_date:
return _("%10s import from %s") % (export_type,
formatDateTime(end_date))
elif end_date and start_date:
return _("%10s import from %s - %s") % (export_type,
formatDateTime(start_date),
formatDateTime(end_date))
else:
return _("%10s") % export_type
def _printChannel(self, label, channel_object, log_format, is_imported):
assert channel_object is not None
all_pkgs = channel_object['all-packages'] or channel_object['packages']
pkgs_count = len(all_pkgs)
if is_imported:
status = _('p')
else:
status = _('.')
log(1, log_format % (status, label, pkgs_count, self._formatChannelExportType(channel_object)))
def _printChannelTree(self, doEOSYN=1, doTyposYN=1):
"pretty prints a tree of channel information"
log(1, _(' p = previously imported/synced channel'))
log(1, _(' . = channel not yet imported/synced'))
ch_end_of_service = self._channel_req.get_end_of_service()
ch_typos = self._channel_req.get_typos()
ch_requested_imported = self._channel_req.get_requested_imported()
relevant = self._channel_req.get_requested_channels()
if doEOSYN and ch_end_of_service:
log(1, _(' e = channel no longer supported (end-of-service)'))
if doTyposYN and ch_typos:
log(1, _(' ? = channel label invalid --- typo?'))
pc_labels = sorted(self._channel_collection.get_parent_channel_labels())
t_format = _(' %s:')
p_format = _(' %s %-40s %4s %s')
log(1, t_format % _('base-channels'))
# Relevant parent channels
no_base_channel = True
for plabel in pc_labels:
if plabel not in relevant:
continue
no_base_channel = False
timestamp = self._channel_collection.get_channel_timestamp(plabel)
channel_object = self._channel_collection.get_channel(plabel,
timestamp)
self._printChannel(plabel, channel_object, p_format, (plabel in ch_requested_imported))
if no_base_channel:
log(1, p_format % (' ', _('NONE RELEVANT'), '', ''))
# Relevant parent channels
for plabel in pc_labels:
cchannels = self._channel_collection.get_child_channels(plabel)
# chns has only the channels we are interested in
# (and that's all the channels if we list them)
chns = []
for clabel, ctimestamp in cchannels:
if clabel in relevant:
chns.append((clabel, ctimestamp))
if not chns:
# No child channels, skip
continue
log(1, t_format % plabel)
for clabel, ctimestamp in sorted(chns):
channel_object = self._channel_collection.get_channel(clabel,
ctimestamp)
self._printChannel(clabel, channel_object, p_format, (clabel in ch_requested_imported))
log(2, '')
if doEOSYN and ch_end_of_service:
log(1, t_format % _('end-of-service'))
status = _('e')
for chn in ch_end_of_service:
log(1, p_format % (status, chn, '', ''))
log(2, '')
if doTyposYN and ch_typos:
log(1, _(' typos:'))
status = _('?')
for chn in ch_typos:
log(1, p_format % (status, chn, '', ''))
log(2, '')
log(1, '')
def _compute_channel_request(self):
""" channels request is verify and categorized.
NOTE: self.channel_req *will be* initialized by this method
"""
# channels already imported, and all channels
importedChannels = _getImportedChannels()
availableChannels = self._channel_collection.get_channel_labels()
log(6, _('XXX: imported channels: %s') % importedChannels, 1)
log(6, _('XXX: cached channels: %s') % availableChannels, 1)
# if requested a channel list, we are requesting all channels
if self.listChannelsYN:
requested_channels = availableChannels
log(6, _('XXX: list channels called'), 1)
else:
requested_channels = self._requested_channels
rc = req_channels.RequestedChannels(requested_channels)
rc.set_available(availableChannels)
rc.set_imported(importedChannels)
# rc does all the logic of doing intersections and stuff
rc.compute()
typos = rc.get_typos()
if typos:
log(-1, _("ERROR: these channels either do not exist or "
"are not available:"))
for chn in typos:
log(-1, " %s" % chn)
log(-1, _(" (to see a list of channel labels: %s --list-channels)") % sys.argv[0])
sys.exit(12)
self._channel_req = rc
return rc
def _get_channel_timestamp(self, channel):
try:
timestamp = self._channel_collection.get_channel_timestamp(channel)
except KeyError:
# XXX Do something with this exception
raise
return timestamp
def _compute_unique_packages(self):
""" process package metadata for one channel at a time """
relevant = sorted(self._channel_req.get_requested_channels())
self._channel_packages = {}
self._channel_packages_full = {}
self._avail_channel_packages = {}
already_seen_ids = set()
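        # Track package ids already attributed to an earlier channel so each id
        # is processed only once; _channel_packages_full still keeps the
        # complete per-channel lists.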
for chn in relevant:
timestamp = self._get_channel_timestamp(chn)
channel_obj = self._channel_collection.get_channel(chn, timestamp)
avail_package_ids = sorted(set(channel_obj['packages'] or []))
package_full_ids = sorted(set(channel_obj['all-packages'] or [])) or avail_package_ids
package_ids = sorted(set(package_full_ids) - already_seen_ids)
self._channel_packages[chn] = package_ids
self._channel_packages_full[chn] = package_full_ids
self._avail_channel_packages[chn] = avail_package_ids
already_seen_ids.update(package_ids)
def processShortPackages(self):
log(1, ["", "Retrieving short package metadata (used for indexing)"])
# Compute the unique packages and populate self._channel_packages
self._compute_unique_packages()
stream_loader = StreamProducer(
sync_handlers.get_short_package_handler(),
self.xmlDataServer, 'getChannelShortPackagesXmlStream')
sorted_channels = sorted(self._channel_packages.items(), key=lambda x: x[0]) # sort by channel_label
for channel_label, package_ids in sorted_channels:
log(1, _(" Retrieving / parsing short package metadata: %s (%s)") %
(channel_label, len(package_ids)))
if package_ids:
lm = self._channel_collection.get_channel_timestamp(channel_label)
channel_last_modified = int(rhnLib.timestamp(lm))
stream_loader.set_args(channel_label, channel_last_modified)
stream_loader.process(package_ids)
stream_loader.close()
self._diff_packages()
_query_compare_packages = """
select p.id, c.checksum_type, c.checksum, p.path, p.package_size,
TO_CHAR(p.last_modified, 'YYYYMMDDHH24MISS') last_modified
from rhnPackage p, rhnChecksumView c
where p.name_id = lookup_package_name(:name)
and p.evr_id = lookup_evr(:epoch, :version, :release)
and p.package_arch_id = lookup_package_arch(:arch)
and (p.org_id = :org_id or
(p.org_id is null and :org_id is null))
and p.checksum_id = c.id
"""
def _diff_packages_process(self, chunk, channel_label):
package_collection = sync_handlers.ShortPackageCollection()
h = rhnSQL.prepare(self._query_compare_packages)
for pid in chunk:
package = package_collection.get_package(pid)
assert package is not None
l_timestamp = rhnLib.timestamp(package['last_modified'])
if package['org_id'] is not None:
package['org_id'] = OPTIONS.orgid or DEFAULT_ORG
nevra = get_nevra_dict(package)
nevra['org_id'] = package['org_id']
h.execute(**nevra)
row = None
for r in (h.fetchall_dict() or []):
# let's check which checksum we have in database
if (r['checksum_type'] in package['checksums']
and package['checksums'][r['checksum_type']] == r['checksum']):
row = r
break
self._process_package(pid, package, l_timestamp, row,
self._missing_channel_packages[channel_label],
self._missing_fs_packages[channel_label],
check_rpms=self.check_rpms)
# XXX the "is null" condition will have to change in multiorg satellites
def _diff_packages(self):
self._missing_channel_packages = {}
self._missing_fs_packages = {}
sorted_channels = sorted(self._channel_packages.items(), key=lambda x: x[0]) # sort by channel_label
for channel_label, upids in sorted_channels:
log(1, _("Diffing package metadata (what's missing locally?): %s") %
channel_label)
self._missing_channel_packages[channel_label] = []
self._missing_fs_packages[channel_label] = []
self._process_batch(channel_label, upids[:], None,
self._diff_packages_process,
_('Diffing: '),
[channel_label])
self._verify_missing_channel_packages(self._missing_channel_packages)
def _verify_missing_channel_packages(self, missing_channel_packages, sources=0):
"""Verify if all the missing packages are actually available somehow.
In an incremental approach, one may request packages that are actually
not available in the current dump, probably because of applying an
incremental to the wrong base"""
for channel_label, pids in missing_channel_packages.items():
if sources:
avail_pids = [x[0] for x in self._avail_channel_source_packages[channel_label]]
else:
avail_pids = self._avail_channel_packages[channel_label]
if set(pids or []) > set(avail_pids or []):
raise RhnSyncException, _('ERROR: incremental dump skipped')
@staticmethod
def _get_rel_package_path(nevra, org_id, source, checksum_type, checksum):
return get_package_path(nevra, org_id, prepend=CFG.PREPENDED_DIR,
source=source, checksum_type=checksum_type, checksum=checksum)
@staticmethod
def _verify_file(path, mtime, size, checksum_type, checksum):
"""
        Verify that the file exists on the filesystem and matches the given
        mtime/size or checksum. Computing the checksum is costly, which is why
        the cheaper mtime comparison is tried first.
        Returns errcode:
            0 - file is ok: it has the specified mtime and size,
                or its checksum matches (in which case the mtime is updated)
1 - file does not exist at all
2 - file has a different checksum
"""
if not path:
return 1
abs_path = os.path.join(CFG.MOUNT_POINT, path)
try:
stat_info = os.stat(abs_path)
except OSError:
# File is missing completely
return 1
l_mtime = stat_info[stat.ST_MTIME]
l_size = stat_info[stat.ST_SIZE]
if l_mtime == mtime and l_size == size:
# Same mtime, and size, assume identity
return 0
# Have to check checksum
l_checksum = getFileChecksum(checksum_type, filename=abs_path)
if l_checksum != checksum:
return 2
# Set the mtime
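        # so that the cheap mtime/size comparison above succeeds on subsequent
        # runs without recomputing the checksum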
os.utime(abs_path, (mtime, mtime))
return 0
def _process_package(self, package_id, package, l_timestamp, row,
m_channel_packages, m_fs_packages, check_rpms=1):
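        # Decide what needs re-importing: channel_package collects ids whose DB
        # metadata is missing or stale, fs_package collects ids whose file is
        # absent from the filesystem or fails verification.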
path = None
channel_package = None
fs_package = None
if row:
# package found in the DB
checksum_type = row['checksum_type']
if checksum_type in package['checksums']:
checksum = package['checksums'][row['checksum_type']]
package_size = package['package_size']
db_timestamp = int(rhnLib.timestamp(row['last_modified']))
db_checksum = row['checksum']
db_package_size = row['package_size']
db_path = row['path']
if not (l_timestamp <= db_timestamp and
checksum == db_checksum and
package_size == db_package_size):
# package doesn't match
channel_package = package_id
if check_rpms:
if db_path:
# check the filesystem
errcode = self._verify_file(db_path, l_timestamp,
package_size, checksum_type, checksum)
if errcode:
# file doesn't match
fs_package = package_id
channel_package = package_id
path = db_path
else:
# upload package and reimport metadata
channel_package = package_id
fs_package = package_id
else:
# package is missing from the DB
channel_package = package_id
fs_package = package_id
if channel_package:
m_channel_packages.append(channel_package)
if fs_package:
m_fs_packages.append((fs_package, path))
return
def download_rpms(self):
log(1, ["", _("Downloading rpm packages")])
        # Let's go fetch the packages and push them to their proper location:
sorted_channels = sorted(self._missing_fs_packages.items(), key=lambda x: x[0]) # sort by channel
for channel, missing_fs_packages in sorted_channels:
missing_packages_count = len(missing_fs_packages)
log(1, _(" Fetching any missing RPMs: %s (%s)") %
(channel, missing_packages_count or _('NONE MISSING')))
if missing_packages_count == 0:
continue
# Fetch all RPMs whose meta-data is marked for need to be imported
# (ie. high chance of not being there)
self._fetch_packages(channel, missing_fs_packages)
continue
log(1, _("Processing rpm packages complete"))
def _missing_not_cached_packages(self):
missing_packages = {}
# First, determine what has to be downloaded
short_package_collection = sync_handlers.ShortPackageCollection()
package_collection = sync_handlers.PackageCollection()
for channel, pids in self._missing_channel_packages.items():
missing_packages[channel] = mp = []
if not pids:
# Nothing to see here
continue
for pid in pids:
# XXX Catch errors
if (not package_collection.has_package(pid)
or package_collection.get_package(pid)['last_modified']
!= short_package_collection.get_package(pid)['last_modified']):
# not in the cache
mp.append(pid)
return missing_packages
def download_package_metadata(self):
log(1, ["", _("Downloading package metadata")])
# Get the missing but uncached packages
missing_packages = self._missing_not_cached_packages()
stream_loader = StreamProducer(
sync_handlers.get_package_handler(),
self.xmlDataServer, 'getPackageXmlStream')
sorted_channels = sorted(missing_packages.items(), key=lambda x: x[0]) # sort by channel
for channel, pids in sorted_channels:
self._process_batch(channel, pids[:], messages.package_parsing,
stream_loader.process, is_slow=True)
stream_loader.close()
# Double-check that we got all the packages
missing_packages = self._missing_not_cached_packages()
for channel, pids in missing_packages.items():
if pids:
# Something may have changed from the moment we started to
# download the packages till now
raise ReprocessingNeeded
def download_srpms(self):
self._compute_unique_source_packages()
self._diff_source_packages()
log(1, ["", _("Downloading srpm packages")])
        # Let's go fetch the source packages and push them to their proper location:
sorted_channels = sorted(self._missing_fs_source_packages.items(), key=lambda x: x[0]) # sort by channel_label
for channel, missing_fs_source_packages in sorted_channels:
missing_source_packages_count = len(missing_fs_source_packages)
log(1, _(" Fetching any missing SRPMs: %s (%s)") %
(channel, missing_source_packages_count or _('NONE MISSING')))
if missing_source_packages_count == 0:
continue
# Fetch all SRPMs whose meta-data is marked for need to be imported
# (ie. high chance of not being there)
self._fetch_packages(channel, missing_fs_source_packages, sources=1)
continue
log(1, "Processing srpm packages complete")
def _compute_unique_source_packages(self):
""" process package metadata for one channel at a time"""
relevant = self._channel_req.get_requested_channels()
self._channel_source_packages = {}
self._channel_source_packages_full = {}
self._avail_channel_source_packages = {}
already_seen_ids = set()
for chn in relevant:
timestamp = self._get_channel_timestamp(chn)
channel_obj = self._channel_collection.get_channel(chn, timestamp)
sps = set(channel_obj['source_packages'])
if not sps:
# No source package info
continue
ret_sps = []
for sp in sps:
if isinstance(sp, types.StringType):
# Old style
ret_sps.append((sp, None))
else:
ret_sps.append((sp['id'], sp['last_modified']))
del sps
ret_sps.sort()
self._channel_source_packages[chn] = sorted(set(ret_sps) - already_seen_ids)
self._channel_source_packages_full[chn] = ret_sps
self._avail_channel_source_packages[chn] = ret_sps
already_seen_ids.update(ret_sps)
def _compute_not_cached_source_packages(self):
missing_sps = {}
# First, determine what has to be downloaded
sp_collection = sync_handlers.SourcePackageCollection()
for channel, sps in self._channel_source_packages.items():
missing_sps[channel] = []
if not sps:
# Nothing to see here
continue
missing_sps[channel] = [sp_id for (sp_id, _timestamp) in sps
if not sp_collection.has_package(sp_id)]
return missing_sps
_query_compare_source_packages = """
select ps.id, c.checksum_type, c.checksum, ps.path, ps.package_size,
TO_CHAR(ps.last_modified, 'YYYYMMDDHH24MISS') last_modified
from rhnPackageSource ps, rhnChecksumView c
where ps.source_rpm_id = lookup_source_name(:package_id)
and (ps.org_id = :org_id or
(ps.org_id is null and :org_id is null))
and ps.checksum_id = c.id
and c.checksum = :checksum
and c.checksum_type = :checksum_type
"""
def _diff_source_packages_process(self, chunk, channel_label):
package_collection = sync_handlers.SourcePackageCollection()
sql_params = ['package_id', 'checksum', 'checksum_type']
h = rhnSQL.prepare(self._query_compare_source_packages)
for pid, _timestamp in chunk:
package = package_collection.get_package(pid)
assert package is not None
params = {}
for t in sql_params:
params[t] = package[t] or ""
if package['org_id'] is not None:
params['org_id'] = OPTIONS.orgid or DEFAULT_ORG
package['org_id'] = OPTIONS.orgid or DEFAULT_ORG
else:
params['org_id'] = package['org_id']
h.execute(**params)
row = h.fetchone_dict()
self._process_package(pid, package, None, row,
self._missing_channel_src_packages[channel_label],
self._missing_fs_source_packages[channel_label])
# XXX the "is null" condition will have to change in multiorg satellites
def _diff_source_packages(self):
self._missing_channel_src_packages = {}
self._missing_fs_source_packages = {}
for channel_label, upids in self._channel_source_packages.items():
log(1, _("Diffing source package metadata (what's missing locally?): %s") % channel_label)
self._missing_channel_src_packages[channel_label] = []
self._missing_fs_source_packages[channel_label] = []
self._process_batch(channel_label, upids[:], None,
self._diff_source_packages_process,
_('Diffing: '),
[channel_label])
self._verify_missing_channel_packages(self._missing_channel_src_packages, sources=1)
def download_source_package_metadata(self):
log(1, ["", _("Downloading source package metadata")])
# Get the missing but uncached packages
missing_packages = self._compute_not_cached_source_packages()
stream_loader = StreamProducer(
sync_handlers.get_source_package_handler(),
self.xmlDataServer, 'getSourcePackageXmlStream')
for channel, pids in missing_packages.items():
self._process_batch(channel, pids[:], messages.package_parsing,
stream_loader.process, is_slow=True)
stream_loader.close()
# Double-check that we got all the packages
missing_packages = self._compute_not_cached_source_packages()
for channel, pids in missing_packages.items():
if pids:
# Something may have changed from the moment we started to
# download the packages till now
raise ReprocessingNeeded
def _compute_unique_kickstarts(self):
""" process package metadata for one channel at a time"""
relevant = self._channel_req.get_requested_channels()
self._channel_kickstarts = {}
already_seen_kickstarts = set()
for chn in relevant:
timestamp = self._get_channel_timestamp(chn)
channel_obj = self._channel_collection.get_channel(chn, timestamp)
self._channel_kickstarts[chn] = \
sorted(set(channel_obj['kickstartable_trees'])
- already_seen_kickstarts)
already_seen_kickstarts.update(self._channel_kickstarts[chn])
def _compute_missing_kickstarts(self):
""" process package metadata for one channel at a time"""
relevant = self._channel_req.get_requested_channels()
coll = sync_handlers.KickstartableTreesCollection()
missing_kickstarts = {}
for chn in relevant:
timestamp = self._get_channel_timestamp(chn)
channel_obj = self._channel_collection.get_channel(chn, timestamp)
kickstart_trees = channel_obj['kickstartable_trees']
for ktid in kickstart_trees:
# No timestamp for kickstartable trees
kt = coll.get_item(ktid, timestamp=None)
assert kt is not None
kt_label = kt['label']
# XXX rhnKickstartableTree does not have a last_modified
# Once we add it, we should be able to do more meaningful
# diffs
missing_kickstarts[kt_label] = None
ret = missing_kickstarts.items()
ret.sort()
return ret
def _download_kickstarts_file(self, chunk, channel_label):
cfg = config.initUp2dateConfig()
assert len(chunk) == 1
item = chunk[0]
label, base_path, relative_path, timestamp, file_size = item
path = os.path.join(base_path, relative_path)
f = FileManip(path, timestamp=timestamp, file_size=file_size)
# Retry a number of times, we may have network errors
for _try in range(cfg['networkRetries']):
stream = self._get_ks_file_stream(channel_label, label, relative_path)
try:
f.write_file(stream)
break # inner for
except FileCreationError, e:
msg = e.args[0]
log2disk(-1, _("Unable to save file %s: %s") % (path,
msg))
# Try again
continue
else: # for
# Retried a number of times and it still failed; log the
# file as being failed and move on
log2disk(-1, _("Failed to fetch file %s") % path)
def download_kickstarts(self):
"""Downloads all the kickstart-related information"""
log(1, ["", _("Downloading kickstartable trees metadata")])
self._compute_unique_kickstarts()
stream_loader = StreamProducer(
sync_handlers.get_kickstarts_handler(),
self.xmlDataServer, 'getKickstartsXmlStream')
for channel, ktids in self._channel_kickstarts.items():
self._process_batch(channel, ktids[:], messages.kickstart_parsing,
stream_loader.process)
stream_loader.close()
missing_ks_files = self._compute_missing_ks_files()
log(1, ["", _("Downloading kickstartable trees files")])
sorted_channels = sorted(missing_ks_files.items(), key=lambda x: x[0]) # sort by channel
for channel, files in sorted_channels:
self._process_batch(channel, files[:], messages.kickstart_downloading,
self._download_kickstarts_file,
nevermorethan=1,
process_function_args=[channel])
def _get_ks_file_stream(self, channel, kstree_label, relative_path):
if self.mountpoint:
s = xmlDiskSource.KickstartFileDiskSource(self.mountpoint)
s.setID(kstree_label)
s.set_relative_path(relative_path)
return s.load()
if CFG.ISS_PARENT:
return self.xmlDataServer.getKickstartFile(kstree_label, relative_path)
else:
srv = xmlWireSource.RPCGetWireSource(self.systemid, self.sslYN,
self.xml_dump_version)
return srv.getKickstartFileStream(channel, kstree_label, relative_path)
def _compute_missing_ks_files(self):
coll = sync_handlers.KickstartableTreesCollection()
missing_ks_files = {}
# download files for the ks trees
for channel, ktids in self._channel_kickstarts.items():
missing_ks_files[channel] = missing = []
for ktid in ktids:
# No timestamp for kickstartable trees
kt = coll.get_item(ktid, timestamp=None)
assert kt is not None
kt_label = kt['label']
base_path = kt['base_path']
files = kt['files']
for f in files:
relative_path = f['relative_path']
dest_path = os.path.join(base_path, relative_path)
timestamp = rhnLib.timestamp(f['last_modified'])
file_size = f['file_size']
errcode = self._verify_file(dest_path,
timestamp, file_size, f['checksum_type'], f['checksum'])
if errcode != 0:
# Have to download it
val = (kt_label, base_path, relative_path,
timestamp, file_size)
missing.append(val)
return missing_ks_files
def import_kickstarts(self):
"""Imports the kickstart-related information"""
missing_kickstarts = self._compute_missing_kickstarts()
if not missing_kickstarts:
log(1, messages.kickstart_import_nothing_to_do)
return
ks_count = len(missing_kickstarts)
log(1, messages.kickstart_importing % ks_count)
coll = sync_handlers.KickstartableTreesCollection()
batch = []
for ks, timestamp in missing_kickstarts:
ksobj = coll.get_item(ks, timestamp=timestamp)
assert ksobj is not None
if ksobj['org_id'] is not None:
ksobj['org_id'] = OPTIONS.orgid or DEFAULT_ORG
batch.append(ksobj)
_importer = sync_handlers.import_kickstarts(batch)
log(1, messages.kickstart_imported % ks_count)
def _compute_not_cached_errata(self):
missing_errata = {}
# First, determine what has to be downloaded
errata_collection = sync_handlers.ErrataCollection()
for channel, errata in self._channel_errata.items():
missing_errata[channel] = []
if not errata:
# Nothing to see here
continue
missing_errata[channel] = [eid for (eid, timestamp, _advisory_name) in errata
if not errata_collection.has_erratum(eid, timestamp)
or self.forceAllErrata]
return missing_errata
_query_get_db_errata = rhnSQL.Statement("""
select e.id, e.advisory_name,
TO_CHAR(e.last_modified, 'YYYYMMDDHH24MISS') last_modified
from rhnChannelErrata ce, rhnErrata e, rhnChannel c
where c.label = :channel
and ce.channel_id = c.id
and ce.errata_id = e.id
""")
def _get_db_channel_errata(self):
"""
Fetch the errata stored in the local satellite's database. Returned
as a hash of channel to another hash of advisory names to a tuple of
errata id and last modified date.
"""
db_channel_errata = {}
relevant = self._channel_req.get_requested_channels()
h = rhnSQL.prepare(self._query_get_db_errata)
for channel in relevant:
db_channel_errata[channel] = ce = {}
h.execute(channel=channel)
while 1:
row = h.fetchone_dict()
if not row:
break
advisory_name = row['advisory_name']
erratum_id = row['id']
last_modified = rhnLib.timestamp(row['last_modified'])
ce[advisory_name] = (erratum_id, last_modified)
return db_channel_errata
def _diff_errata(self):
""" Fetch the errata for this channel"""
db_channel_errata = self._get_db_channel_errata()
relevant = self._channel_req.get_requested_channels()
# Now get the channel's errata
channel_errata = {}
for chn in relevant:
db_ce = db_channel_errata[chn]
timestamp = self._get_channel_timestamp(chn)
channel_obj = self._channel_collection.get_channel(chn, timestamp)
errata_timestamps = channel_obj['errata_timestamps']
if errata_timestamps is None or self.forceAllErrata:
# No unique key information, so assume we need all errata
erratum_ids = channel_obj['errata']
errata = [(x, None, None) for x in erratum_ids]
log(2, _("Grabbing all errata for channel %s") % chn)
else:
errata = []
# Check the advisory name and last modification
for erratum in errata_timestamps:
erratum_id = erratum['id']
last_modified = erratum['last_modified']
last_modified = rhnLib.timestamp(last_modified)
advisory_name = erratum['advisory_name']
if db_ce.has_key(advisory_name):
_foo, db_last_modified = db_ce[advisory_name]
if last_modified == db_last_modified:
# We already have this erratum
continue
errata.append((erratum_id, last_modified, advisory_name))
errata.sort()
channel_errata[chn] = errata
# Uniquify the errata
already_seen_errata = set()
for channel, errata in channel_errata.items():
uq_errata = set(errata) - already_seen_errata
self._channel_errata[channel] = sorted(uq_errata)
already_seen_errata.update(uq_errata)
def _diff_db_errata(self):
""" Compute errata that are missing from the satellite
Kind of similar to diff_errata, if we had the timestamp and advisory
information available
"""
errata_collection = sync_handlers.ErrataCollection()
self._missing_channel_errata = missing_channel_errata = {}
db_channel_errata = self._get_db_channel_errata()
for channel, errata in self._channel_errata.items():
ch_erratum_ids = missing_channel_errata[channel] = []
for eid, timestamp, advisory_name in errata:
if timestamp is not None:
# Should have been caught by diff_errata
ch_erratum_ids.append((eid, timestamp, advisory_name))
continue
# timestamp is None, grab the erratum from the cache
erratum = errata_collection.get_erratum(eid, timestamp)
timestamp = rhnLib.timestamp(erratum['last_modified'])
advisory_name = erratum['advisory_name']
db_erratum = db_channel_errata[channel].get(advisory_name)
if db_erratum is None or db_erratum[1] != timestamp or \
self.forceAllErrata:
ch_erratum_ids.append((eid, timestamp, advisory_name))
def download_errata(self):
log(1, ["", _("Downloading errata data")])
if self.forceAllErrata:
log(2, _("Forcing download of all errata data for requested channels."))
self._diff_errata()
not_cached_errata = self._compute_not_cached_errata()
stream_loader = StreamProducer(
sync_handlers.get_errata_handler(),
self.xmlDataServer, 'getErrataXmlStream')
sorted_channels = sorted(not_cached_errata.items(), key=lambda x: x[0]) # sort by channel
for channel, erratum_ids in sorted_channels:
self._process_batch(channel, erratum_ids[:], messages.erratum_parsing,
stream_loader.process)
stream_loader.close()
# XXX This step should go away once the channel info contains the
# errata timestamps and advisory names
self._diff_db_errata()
log(1, _("Downloading errata data complete"))
# __private methods__
def _processWithProgressBar(self, batch, size,
process_function,
prompt=_('Downloading:'),
nevermorethan=None,
process_function_args=()):
pb = ProgressBar(prompt=prompt, endTag=_(' - complete'),
finalSize=size, finalBarLength=40, stream=sys.stdout)
if CFG.DEBUG > 2:
pb.redrawYN = 0
pb.printAll(1)
ss = SequenceServer(batch, nevermorethan=(nevermorethan or self._batch_size))
while not ss.doneYN():
chunk = ss.getChunk()
item_count = len(chunk)
process_function(chunk, *process_function_args)
ss.clearChunk()
pb.addTo(item_count)
pb.printIncrement()
pb.printComplete()
def _process_batch(self, channel, batch, log_msg,
process_function,
prompt=_('Downloading:'),
process_function_args=(),
nevermorethan=None,
is_slow=False):
count = len(batch)
if log_msg:
log(1, log_msg % (channel, count or _('NONE RELEVANT')))
if not count:
return
if is_slow:
log(1, messages.warning_slow)
self._processWithProgressBar(batch, count, process_function,
prompt, nevermorethan, process_function_args)
def _import_packages_process(self, chunk, sources):
batch = self._get_cached_package_batch(chunk, sources)
# check to make sure the orgs exported are valid
_validate_package_org(batch)
try:
sync_handlers.import_packages(batch, sources)
except (SQLError, SQLSchemaError, SQLConnectError), e:
# an SQL error is fatal... crash and burn
exitWithTraceback(e, 'Exception caught during import', 13)
def import_packages(self, sources=0):
if sources:
log(1, ["", _("Importing source package metadata")])
missing_channel_items = self._missing_channel_src_packages
else:
log(1, ["", _("Importing package metadata")])
missing_channel_items = self._missing_channel_packages
sorted_channels = sorted(missing_channel_items.items(), key=lambda x: x[0]) # sort by channel
for channel, packages in sorted_channels:
self._process_batch(channel, packages[:],
messages.package_importing,
self._import_packages_process,
_('Importing: '),
[sources])
return self._link_channel_packages()
def _link_channel_packages(self):
log(1, ["", messages.link_channel_packages])
short_package_collection = sync_handlers.ShortPackageCollection()
_package_collection = sync_handlers.PackageCollection()
uq_packages = {}
for chn, package_ids in self._channel_packages_full.items():
for pid in package_ids:
package = short_package_collection.get_package(pid)
if not package:
continue
assert package is not None
channel_obj = {'label': chn}
if uq_packages.has_key(pid):
# We've seen this package before - just add this channel
# to it
uq_packages[pid]['channels'].append(channel_obj)
else:
package['channels'] = [channel_obj]
uq_packages[pid] = package
uq_pkg_data = uq_packages.values()
# check to make sure the orgs exported are valid
_validate_package_org(uq_pkg_data)
try:
if OPTIONS.mount_point: # if OPTIONS.consider_full is not set interpret dump as incremental
importer = sync_handlers.link_channel_packages(uq_pkg_data, strict=OPTIONS.consider_full)
else:
importer = sync_handlers.link_channel_packages(uq_pkg_data)
except (SQLError, SQLSchemaError, SQLConnectError), e:
# an SQL error is fatal... crash and burn
exitWithTraceback(e, 'Exception caught during import', 14)
return importer.affected_channels
@staticmethod
def _get_cached_package_batch(chunk, sources=0):
""" short-circuit the most common case"""
if not chunk:
return []
short_package_collection = sync_handlers.ShortPackageCollection()
if sources:
package_collection = sync_handlers.SourcePackageCollection()
else:
package_collection = sync_handlers.PackageCollection()
batch = []
for pid in chunk:
package = package_collection.get_package(pid)
if (package is None or package['last_modified']
!= short_package_collection.get_package(pid)
['last_modified']):
# not in the cache
raise Exception(_("Package Not Found in Cache, Clear the Cache to \
Regenerate it."))
batch.append(package)
return batch
def import_errata(self):
log(1, ["", _("Importing channel errata")])
errata_collection = sync_handlers.ErrataCollection()
sorted_channels = sorted(self._missing_channel_errata.items(), key=lambda x: x[0]) # sort by channel_label
for chn, errata in sorted_channels:
log(2, _("Importing %s errata for channel %s.") % (len(errata), chn))
batch = []
for eid, timestamp, _advisory_name in errata:
erratum = errata_collection.get_erratum(eid, timestamp)
# bug 161144: it seems that incremental dumps can create an
# errata collection None
if erratum is not None:
self._fix_erratum(erratum)
batch.append(erratum)
self._process_batch(chn, batch, messages.errata_importing,
sync_handlers.import_errata)
@staticmethod
def _fix_erratum(erratum):
""" Replace the list of packages with references to short packages"""
sp_coll = sync_handlers.ShortPackageCollection()
pids = set(erratum['packages'] or [])
# map all the pkgs objects to the erratum
packages = []
# remove packages which are not in the export (e.g. archs we are not syncing)
for pid in pids:
if not sp_coll.has_package(pid):
# Package not found, go on - may be part of a channel we don't
# sync
continue
package = sp_coll.get_package(pid)
packages.append(package)
erratum['packages'] = packages
if erratum['org_id'] is not None:
erratum['org_id'] = OPTIONS.orgid or DEFAULT_ORG
# Associate errata to only channels that are being synced
# or are synced already
imported_channels = _getImportedChannels()
if OPTIONS.channel:
imported_channels += OPTIONS.channel
erratum['channels'] = [c for c in erratum['channels']
if c['label'] in imported_channels]
# Now fix the files
for errata_file in (erratum['files'] or []):
errata_file_package = errata_file.get('package')
errata_file_source_package = errata_file.get('source-package')
if errata_file['file_type'] == 'RPM' and \
errata_file_package is not None:
package = None
if sp_coll.has_package(errata_file_package):
package = sp_coll.get_package(errata_file_package)
errata_file['pkgobj'] = package
elif errata_file['file_type'] == 'SRPM' and \
errata_file_source_package is not None:
# XXX misa: deal with source rpms
errata_file['pkgobj'] = None
def _fetch_packages(self, channel, missing_fs_packages, sources=0):
short_package_collection = sync_handlers.ShortPackageCollection()
if sources:
# acronym = "SRPM"
package_collection = sync_handlers.SourcePackageCollection()
else:
# acronym = "RPM"
package_collection = sync_handlers.PackageCollection()
self._failed_fs_packages = Queue.Queue()
self._extinct_packages = Queue.Queue()
pkgs_total = len(missing_fs_packages)
pkg_current = 0
total_size = 0
queue = Queue.Queue()
out_queue = Queue.Queue()
lock = threading.Lock()
# count size of missing packages
for package_id, path in missing_fs_packages:
package = package_collection.get_package(package_id)
total_size = total_size + package['package_size']
queue.put((package_id, path))
log(1, messages.package_fetch_total_size %
(self._bytes_to_fuzzy(total_size)))
real_processed_size = processed_size = 0
real_total_size = total_size
start_time = round(time.time())
all_threads = []
for _thread in range(4):
t = ThreadDownload(lock, queue, out_queue, short_package_collection, package_collection,
self, self._failed_fs_packages, self._extinct_packages, sources, channel)
t.setDaemon(True)
t.start()
all_threads.append(t)
while ([x for x in all_threads if x.isAlive()]
and pkg_current < pkgs_total):
try:
(rpmManip, package, is_done) = out_queue.get(False, 0.1)
except Queue.Empty:
continue
pkg_current = pkg_current + 1
if not is_done: # package failed to download or already exist on disk
real_total_size -= package['package_size']
processed_size += package['package_size']
try:
out_queue.task_done()
except AttributeError:
pass
continue
# Package successfully saved
filename = os.path.basename(rpmManip.relative_path)
# Determine downloaded size and remaining time
size = package['package_size']
real_processed_size += size
processed_size += size
current_time = round(time.time())
            # timedelta cannot be multiplied by a float
remain_time = (datetime.timedelta(seconds=current_time - start_time)) * \
((real_total_size * 10000) / real_processed_size - 10000) / 10000
            # cut off milliseconds
remain_time = datetime.timedelta(remain_time.days, remain_time.seconds)
log(1, messages.package_fetch_remain_size_time %
(self._bytes_to_fuzzy(processed_size), self._bytes_to_fuzzy(total_size), remain_time))
log(1, messages.package_fetch_successful %
(pkg_current, pkgs_total, filename, size))
try:
out_queue.task_done()
except AttributeError:
pass
extinct_count = self._extinct_packages.qsize()
failed_count = self._failed_fs_packages.qsize()
# Printing summary
log(2, messages.package_fetch_summary % channel, notimeYN=1)
log(2, messages.package_fetch_summary_success %
(pkgs_total - extinct_count - failed_count), notimeYN=1)
log(2, messages.package_fetch_summary_failed % failed_count,
notimeYN=1)
log(2, messages.package_fetch_summary_extinct % extinct_count,
notimeYN=1)
# Translate x bytes to string "x MB", "x GB" or "x kB"
@staticmethod
def _bytes_to_fuzzy(b):
units = ['bytes', 'kiB', 'MiB', 'GiB', 'TiB', 'PiB']
base = 1024
fuzzy = b
for unit in units:
if fuzzy >= base:
fuzzy = float(fuzzy) / base
else:
break
int_len = len("%d" % fuzzy)
fract_len = 3 - int_len
# pylint: disable=W0631
return "%*.*f %s" % (int_len, fract_len, fuzzy, unit)
def _get_package_stream(self, channel, package_id, nvrea, sources):
""" returns (filepath, stream), so in the case of a "wire source",
the return value is, of course, (None, stream)
"""
# Returns a package stream from disk
if self.mountpoint:
rpmFile = rpmsPath(package_id, self.mountpoint, sources)
try:
stream = open(rpmFile)
except IOError, e:
if e.errno != 2: # No such file or directory
raise
return (rpmFile, None)
return (rpmFile, stream)
# Wire stream
if CFG.ISS_PARENT:
stream = self.xmlDataServer.getRpm(nvrea, channel)
else:
rpmServer = xmlWireSource.RPCGetWireSource(self.systemid, self.sslYN,
self.xml_dump_version)
stream = rpmServer.getPackageStream(channel, nvrea)
return (None, stream)
class ThreadDownload(threading.Thread):
def __init__(self, lock, queue, out_queue, short_package_collection, package_collection, syncer,
failed_fs_packages, extinct_packages, sources, channel):
threading.Thread.__init__(self)
self.queue = queue
self.out_queue = out_queue
self.short_package_collection = short_package_collection
self.package_collection = package_collection
self.syncer = syncer
self.failed_fs_packages = failed_fs_packages
self.extinct_packages = extinct_packages
self.sources = sources
self.channel = channel
self.lock = lock
def run(self):
while not self.queue.empty():
# grabs host from queue
(package_id, path) = self.queue.get()
package = self.package_collection.get_package(package_id)
last_modified = package['last_modified']
checksum_type = package['checksum_type']
checksum = package['checksum']
package_size = package['package_size']
if not path:
nevra = get_nevra(package)
orgid = None
if package['org_id']:
orgid = OPTIONS.orgid or DEFAULT_ORG
path = self.syncer._get_rel_package_path(nevra, orgid, self.sources,
checksum_type, checksum)
# update package path
package['path'] = path
self.package_collection.add_item(package)
errcode = self.syncer._verify_file(path, rhnLib.timestamp(last_modified),
package_size, checksum_type, checksum)
if errcode == 0:
# file is already there
# do not count this size to time estimate
try:
self.queue.task_done()
except AttributeError:
pass
self.out_queue.put((None, package, False))
continue
cfg = config.initUp2dateConfig()
rpmManip = RpmManip(package, path)
nvrea = rpmManip.nvrea()
# Retry a number of times, we may have network errors
for _try in range(cfg['networkRetries']):
self.lock.acquire()
try:
rpmFile, stream = self.syncer._get_package_stream(self.channel,
package_id, nvrea, self.sources)
except:
self.lock.release()
raise
self.lock.release()
if stream is None:
# Mark the package as extinct
self.extinct_packages.put(package_id)
log(1, messages.package_fetch_extinct %
(os.path.basename(path)))
break # inner for
try:
rpmManip.write_file(stream)
break # inner for
except FileCreationError, e:
msg = e.args[0]
log2disk(-1, _("Unable to save file %s: %s") % (
rpmManip.full_path, msg))
# Try again
continue # inner for
else: # for
# Ran out of iterations
# Mark the package as failed and move on
self.failed_fs_packages.put(package_id)
log(1, messages.package_fetch_failed %
(os.path.basename(path)))
# Move to the next package
try:
self.queue.task_done()
except AttributeError:
pass
self.out_queue.put((rpmManip, package, False))
continue
if stream is None:
# Package is extinct. Move on
try:
self.queue.task_done()
except AttributeError:
pass
self.out_queue.put((rpmManip, package, False))
continue
if self.syncer.mountpoint and not self.syncer.keep_rpms:
# Channel dumps import; try to unlink to preserve disk space
# rpmFile is always returned by _get_package_stream for
# disk-based imports
assert(rpmFile is not None)
try:
os.unlink(rpmFile)
except (OSError, IOError):
pass
# signals to queue job is done
try:
self.queue.task_done()
except AttributeError:
pass
self.out_queue.put((rpmManip, package, True))
class StreamProducer:
def __init__(self, handler, data_source_class, source_func):
self.handler = handler
self.is_disk_loader = data_source_class.is_disk_loader()
if self.is_disk_loader:
self.loader = getattr(data_source_class, source_func)()
else:
self.loader = getattr(data_source_class, source_func)
self._args = ()
def set_args(self, *args):
self._args = args
def close(self):
self.handler.close()
def process(self, batch):
if self.is_disk_loader:
for oid in batch:
self.loader.setID(oid)
stream = self.loader.load()
self.handler.process(stream)
else:
# Only use the extra arguments if needed, for now
args = self._args or (batch, )
stream = self.loader(*args)
self.handler.process(stream)
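# Usage sketch (illustrative only, mirroring download_errata() above):
#   stream_loader = StreamProducer(sync_handlers.get_errata_handler(),
#                                  self.xmlDataServer, 'getErrataXmlStream')
#   stream_loader.process(erratum_ids)
#   stream_loader.close()
# For disk-based sources the loader is iterated per object id; for wire
# sources the whole batch (or the set_args() arguments) is passed in one call.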
def _verifyPkgRepMountPoint():
""" Checks the base package repository directory tree for
        existence and permissions.
Creates base dir if need be, and chowns to apache.root (required
for rhnpush).
"""
if not CFG.MOUNT_POINT:
# Incomplete configuration
log(-1, _("ERROR: server.mount_point not set in the configuration file"))
sys.exit(16)
if not os.path.exists(fileutils.cleanupAbsPath(CFG.MOUNT_POINT)):
log(-1, _("ERROR: server.mount_point %s do not exist")
% fileutils.cleanupAbsPath(CFG.MOUNT_POINT))
sys.exit(26)
if not os.path.exists(fileutils.cleanupAbsPath(CFG.MOUNT_POINT + '/' + CFG.PREPENDED_DIR)):
log(-1, _("ERROR: path under server.mount_point (%s) do not exist")
% fileutils.cleanupAbsPath(CFG.MOUNT_POINT + '/' + CFG.PREPENDED_DIR))
sys.exit(26)
def _validate_package_org(batch):
"""Validate the orgids associated with packages.
If its redhat channel default to Null org
If custom channel and org is specified use that.
If custom and package org is not valid default to org 1
"""
orgid = OPTIONS.orgid or None
for pkg in batch:
if not pkg['org_id'] or pkg['org_id'] == 'None':
# default to Null so do nothing
pkg['org_id'] = None
elif orgid:
# if options.orgid specified use it
pkg['org_id'] = orgid
else:
# org from server is not valid
pkg['org_id'] = DEFAULT_ORG
def _getImportedChannels():
"Retrieves the channels already imported in the satellite's database"
try:
if OPTIONS.include_custom_channels:
h = rhnSQL.prepare("""select label from rhnChannel""")
else:
h = rhnSQL.prepare("""select label from rhnChannel where org_id is null""")
h.execute()
return [x['label'] for x in h.fetchall_dict() or []]
except (SQLError, SQLSchemaError, SQLConnectError), e:
# An SQL error is fatal... crash and burn
exitWithTraceback(e, 'SQL ERROR during xml processing', 17)
return []
def getDbIssParent():
sql = "select label from rhnISSMaster where is_current_master = 'Y'"
h = rhnSQL.prepare(sql)
h.execute()
row = h.fetchone_dict()
if not row:
return None
return row['label']
def getDbCaChain(master):
sql = "select ca_cert from rhnISSMaster where label = :label"
h = rhnSQL.prepare(sql)
h.execute(label=master)
row = h.fetchone_dict()
if not row:
return None
return row['ca_cert']
def processCommandline():
"process the commandline, setting the OPTIONS object"
log2disk(-1, _("Commandline: %s") % repr(sys.argv))
optionsTable = [
Option('--batch-size', action='store',
help=_('DEBUG ONLY: max. batch-size for XML/database-import processing (1..%s).'
+ '"man satellite-sync" for more information.') % SequenceServer.NEVER_MORE_THAN),
Option('--ca-cert', action='store',
help=_('alternative SSL CA Cert (fullpath to cert file)')),
Option('-c', '--channel', action='append',
help=_('process data for this channel only')),
Option('--consider-full', action='store_true',
help=_('disk dump will be considered to be a full export; '
'see "man satellite-sync" for more information.')),
Option('--include-custom-channels', action='store_true',
help=_('existing custom channels will also be synced (unless -c is used)')),
Option('--debug-level', action='store',
help=_('override debug level set in /etc/rhn/rhn.conf (which is currently set at %s).') % CFG.DEBUG),
Option('--dump-version', action='store',
help=_("requested version of XML dump (default: %s)") % constants.PROTOCOL_VERSION),
Option('--email', action='store_true',
help=_('e-mail a report of what was synced/imported')),
Option('--force-all-errata', action='store_true',
help=_('forcibly process all (not a diff of) errata metadata')),
Option('--force-all-packages', action='store_true',
help=_('forcibly process all (not a diff of) package metadata')),
Option('--http-proxy', action='store',
help=_('alternative http proxy (hostname:port)')),
Option('--http-proxy-username', action='store',
help=_('alternative http proxy username')),
Option('--http-proxy-password', action='store',
help=_('alternative http proxy password')),
Option('--iss-parent', action='store',
help=_('parent satellite to import content from')),
Option('-l', '--list-channels', action='store_true',
help=_('list all available channels and exit')),
Option('--list-error-codes', action='store_true',
help=_("help on all error codes satellite-sync returns")),
Option('-m', '--mount-point', action='store',
help=_('source mount point for import - disk update only')),
Option('--no-errata', action='store_true',
help=_('do not process errata data')),
Option('--no-kickstarts', action='store_true',
help=_('do not process kickstart data (provisioning only)')),
Option('--no-packages', action='store_true',
help=_('do not process full package metadata')),
Option('--no-rpms', action='store_true',
help=_('do not download, or process any RPMs')),
Option('--no-ssl', action='store_true',
help=_('turn off SSL (not recommended)')),
Option('--orgid', action='store',
help=_('org to which the sync imports data. defaults to the admin account')),
Option('-p', '--print-configuration', action='store_true',
help=_('print the configuration and exit')),
Option('--rhn-cert', action='store',
help=_('satellite certificate to import ') +
_('(use with --mount-point only)')),
Option('-s', '--server', action='store',
help=_('alternative server with which to connect (hostname)')),
Option('--step', action='store',
help=_('synchronize to this step (man satellite-sync for more info)')),
Option('--systemid', action='store',
help=_("DEBUG ONLY: alternative path to digital system id")),
Option('--traceback-mail', action='store',
help=_('alternative email address(es) for sync output (--email option)')),
Option('--keep-rpms', action='store_true',
help=_('do not remove rpms when importing from local dump')),
Option('--master', action='store',
help=_('the fully qualified domain name of the master Satellite. '
'Valid with --mount-point only. '
'Required if you want to import org data and channel permissions.')),
]
optionParser = OptionParser(option_list=optionsTable)
global OPTIONS
OPTIONS, args = optionParser.parse_args()
# we take extra commandline arguments that are not linked to an option
if args:
msg = _("ERROR: these arguments make no sense in this context (try --help): %s") % repr(args)
log2stderr(-1, msg, 1, 1)
sys.exit(19)
#
# process anything CFG related (db, debug, server, and print)
#
try:
rhnSQL.initDB()
rhnSQL.clear_log_id()
rhnSQL.set_log_auth_login('SETUP')
except (SQLError, SQLSchemaError, SQLConnectError), e:
# An SQL error is fatal... crash and burn
log(-1, _("ERROR: Can't connect to the database: %s") % e, stream=sys.stderr)
log(-1, _("ERROR: Check if your database is running."), stream=sys.stderr)
sys.exit(20)
CFG.set("ISS_Parent", getDbIssParent())
CFG.set("TRACEBACK_MAIL", OPTIONS.traceback_mail or CFG.TRACEBACK_MAIL)
CFG.set("RHN_PARENT", idn_ascii_to_puny(OPTIONS.iss_parent or OPTIONS.server or
CFG.ISS_PARENT or CFG.RHN_PARENT))
if OPTIONS.server and not OPTIONS.iss_parent:
        # server option on command line should override ISS parent from config
CFG.set("ISS_PARENT", None)
else:
CFG.set("ISS_PARENT", idn_ascii_to_puny(OPTIONS.iss_parent or CFG.ISS_PARENT))
CFG.set("ISS_CA_CHAIN", OPTIONS.ca_cert or getDbCaChain(CFG.RHN_PARENT)
or CFG.CA_CHAIN)
CFG.set("HTTP_PROXY", idn_ascii_to_puny(OPTIONS.http_proxy or CFG.HTTP_PROXY))
CFG.set("HTTP_PROXY_USERNAME", OPTIONS.http_proxy_username or CFG.HTTP_PROXY_USERNAME)
CFG.set("HTTP_PROXY_PASSWORD", OPTIONS.http_proxy_password or CFG.HTTP_PROXY_PASSWORD)
CFG.set("CA_CHAIN", OPTIONS.ca_cert or CFG.CA_CHAIN)
# check the validity of the debug level
if OPTIONS.debug_level:
debugRange = 6
try:
debugLevel = int(OPTIONS.debug_level)
if not (0 <= debugLevel <= debugRange):
raise RhnSyncException, "exception will be caught", sys.exc_info()[2]
except KeyboardInterrupt, e:
raise
# pylint: disable=E0012, W0703
except Exception:
msg = [_("ERROR: --debug-level takes an in integer value within the range %s.")
% repr(tuple(range(debugRange + 1))),
_(" 0 - little logging/messaging."),
_(" 1 - minimal logging/messaging."),
_(" 2 - normal level of logging/messaging."),
_(" 3 - lots of logging/messaging."),
_(" 4+ - excessive logging/messaging.")]
log(-1, msg, 1, 1, sys.stderr)
sys.exit(21)
else:
CFG.set('DEBUG', debugLevel)
initLOG(CFG.LOG_FILE, debugLevel)
if OPTIONS.print_configuration:
CFG.show()
sys.exit(0)
if OPTIONS.master:
if not OPTIONS.mount_point:
msg = _("ERROR: The --master option is only valid with the --mount-point option")
log2stderr(-1, msg, cleanYN=1)
sys.exit(28)
elif CFG.ISS_PARENT:
OPTIONS.master = CFG.ISS_PARENT
if OPTIONS.orgid:
# verify if its a valid org
orgs = [a['id'] for a in satCerts.get_all_orgs()]
if int(OPTIONS.orgid) not in orgs:
msg = _("ERROR: Unable to lookup Org Id %s") % OPTIONS.orgid
log2stderr(-1, msg, cleanYN=1)
sys.exit(27)
# the action dictionary used throughout
actionDict = {}
if OPTIONS.list_channels:
if OPTIONS.step:
log(-1, _("WARNING: --list-channels option overrides any --step option. --step ignored."))
OPTIONS.step = 'channels'
actionDict['list-channels'] = 1
else:
actionDict['list-channels'] = 0
#
# validate the --step option and set up the hierarchy of sync process steps.
#
stepHierarchy = Runner.step_hierarchy
# if no step stated... we do all steps.
if not OPTIONS.step:
OPTIONS.step = stepHierarchy[-1]
if OPTIONS.step not in stepHierarchy:
log2stderr(-1, _("ERROR: '%s' is not a valid step. See 'man satellite-sync' for more detail.")
% OPTIONS.step, 1, 1)
sys.exit(22)
# XXX: --source is deferred for the time being
#OPTIONS.source = OPTIONS.step in sourceSteps
# populate the action dictionary
for step in stepHierarchy:
actionDict[step] = 1
if step == OPTIONS.step:
break
# make sure *all* steps in the actionDict are handled.
for step in stepHierarchy:
actionDict[step] = actionDict.has_key(step)
channels = OPTIONS.channel or []
if OPTIONS.list_channels:
actionDict['channels'] = 1
actionDict['arches'] = 0
actionDict['channel-families'] = 1
channels = []
# Cleanup selected channels.
# if no channels selected, the default is to "freshen", or select the
# already existing channels in the local database.
if not channels:
channels = _getImportedChannels()
if not channels:
if actionDict['channels'] and not actionDict['list-channels']:
msg = _("ERROR: No channels currently imported; try satellite-sync --list-channels; "
+ "then satellite-sync -c chn0 -c chn1...")
log2disk(-1, msg)
log2stderr(-1, msg, cleanYN=1)
sys.exit(0)
# add all the "other" actions specified.
otherActions = {"no_rpms": 'no-rpms',
#"no_srpms" : 'no-srpms',
"no_packages": 'no-packages',
#"no_source_packages" : 'no-source-packages',
"no_errata": 'no-errata',
"no_kickstarts": 'no-kickstarts',
"force_all_packages": 'force-all-packages',
"force_all_errata": 'force-all-errata',
'no_ssl': 'no-ssl'}
for oa in otherActions.keys():
if getattr(OPTIONS, oa):
actionDict[otherActions[oa]] = 1
else:
actionDict[otherActions[oa]] = 0
if actionDict['no-kickstarts']:
actionDict['kickstarts'] = 0
if actionDict['no-errata']:
actionDict['errata'] = 0
# if actionDict['no-source-packages']:
actionDict['source-packages'] = 0
if actionDict['no-packages']:
actionDict['packages'] = 0
actionDict['short'] = 0
actionDict['download-packages'] = 0
actionDict['rpms'] = 0
if actionDict['no-rpms']:
actionDict['rpms'] = 0
# if actionDict['no-srpms']:
actionDict['srpms'] = 0
if not OPTIONS.master:
actionDict['orgs'] = 0
if OPTIONS.batch_size:
try:
OPTIONS.batch_size = int(OPTIONS.batch_size)
if OPTIONS.batch_size not in range(1, 51):
raise ValueError(_("ERROR: --batch-size must have a value within the range: 1..50"))
except (ValueError, TypeError):
# int(None) --> TypeError
# int('a') --> ValueError
raise ValueError(_("ERROR: --batch-size must have a value within the range: 1..50")), \
None, sys.exc_info()[2]
OPTIONS.mount_point = fileutils.cleanupAbsPath(OPTIONS.mount_point)
OPTIONS.rhn_cert = fileutils.cleanupAbsPath(OPTIONS.rhn_cert)
OPTIONS.systemid = fileutils.cleanupAbsPath(OPTIONS.systemid)
if OPTIONS.rhn_cert:
if not OPTIONS.mount_point:
msg = _("ERROR: --rhn-cert requires --mount-point")
log2stderr(-1, msg, cleanYN=1)
sys.exit(23)
if not os.path.isfile(OPTIONS.rhn_cert):
msg = _("ERROR: no such file %s") % OPTIONS.rhn_cert
log2stderr(-1, msg, cleanYN=1)
sys.exit(24)
if OPTIONS.mount_point:
if not os.path.isdir(OPTIONS.mount_point):
msg = _("ERROR: no such directory %s") % OPTIONS.mount_point
log2stderr(-1, msg, cleanYN=1)
sys.exit(25)
if OPTIONS.list_error_codes:
msg = [_("Error Codes: Returned codes means:"),
_(" -1 - Could not lock file or KeyboardInterrupt or SystemExit"),
_(" 0 - User interrupted or intentional exit"),
_(" 1 - attempting to run more than one instance of satellite-sync."),
_(" 2 - Unable to find synchronization tools."),
_(" 3 - a general socket exception occurred"),
_(" 4 - an SSL error occurred. Recheck your SSL settings."),
_(" 5 - RHN error"),
_(" 6 - unhandled exception occurred"),
_(" 7 - unknown sync error"),
_(" 8 - ERROR: must be root to execute"),
_(" 9 - rpclib fault during synchronization init"),
_(" 10 - synchronization init error"),
_(" 11 - Error parsing XML stream"),
_(" 12 - Channel do not exist"),
_(" 13 - SQL error during importing package metadata"),
_(" 14 - SQL error during linking channel packages"),
_(" 15 - SQL error during xml processing"),
_(" 16 - server.mount_point not set in the configuration file"),
_(" 17 - SQL error during retrieving the channels already imported in the satellite's database"),
_(" 18 - Wrong db connection string in rhn.conf"),
_(" 19 - Bad arguments"),
_(" 20 - Could not connect to db."),
_(" 21 - Bad debug level"),
_(" 22 - Not valid step"),
_(" 23 - error: --rhn-cert requires --mount-point"),
_(" 24 - no such file"),
_(" 25 - no such directory"),
_(" 26 - mount_point does not exist"),
_(" 27 - No such org"),
_(" 28 - error: --master is only valid with --mount-point"), ]
log(-1, msg, 1, 1, sys.stderr)
sys.exit(0)
if OPTIONS.dump_version:
OPTIONS.dump_version = str(OPTIONS.dump_version)
if OPTIONS.dump_version not in constants.ALLOWED_SYNC_PROTOCOL_VERSIONS:
msg = _("ERROR: unknown dump version, try one of %s") % \
constants.ALLOWED_SYNC_PROTOCOL_VERSIONS
log2stderr(-1, msg, cleanYN=1)
sys.exit(19)
# return the dictionary of actions, channels
return actionDict, channels
def formatDateTime(dtstring=None, dt=None):
""" Format the date time using your locale settings. This assume that your setlocale has been alread called. """
if not dt:
dt = time.strptime(dtstring, '%Y%m%d%H%M%S')
return time.strftime("%c", dt)
if __name__ == '__main__':
sys.stderr.write("!!! running this directly is advisable *ONLY* for testing"
" purposes !!!\n")
try:
sys.exit(Runner().main() or 0)
except (KeyboardInterrupt, SystemExit), ex:
sys.exit(ex)
except Exception: # pylint: disable=E0012, W0703
from spacewalk.common.rhnTB import fetchTraceback
tb = 'TRACEBACK: ' + fetchTraceback(with_locals=1)
log2disk(-1, tb)
log2email(-1, tb)
sendMail()
sys.exit(-1)
| gpl-2.0 | -891,730,889,753,331,100 | 40.603019 | 120 | 0.564289 | false |
fifoforlifo/pynja | test2/remake.py | 1 | 8988 | #!/usr/bin/python3.3
import sys
import os
rootDir = sys.path[0]
sys.path.append(os.path.normpath(os.path.join(rootDir, "..", "packages")))
sys.path.append(os.path.join(rootDir, "build"))
import pynja
import repo
build_cpp = True
build_java = True
def generate_ninja_build(projectMan):
# define cpp_variants and toolchains on a per-OS basis
cpp_variants = []
java_variants = []
deploy_variants = []
isTargetWindows = (os.name == 'nt')
if os.name == 'nt':
if build_cpp:
def toolchain_assign_winsdk(toolchain, msvcVer):
if msvcVer < 10:
toolchain.winsdkVer = 71
toolchain.winsdkDir = pynja.rootPaths.winsdk71
elif msvcVer < 14:
toolchain.winsdkVer = 80
toolchain.winsdkDir = pynja.rootPaths.winsdk80
elif msvcVer == 14:
toolchain.winsdkVer = 81
toolchain.winsdkDir = pynja.rootPaths.winsdk81
toolchain.ucrtVer = pynja.rootPaths.ucrt100Ver
toolchain.ucrtDir = pynja.rootPaths.ucrt100
else:
raise Exception('unsupported MSVC version')
def make_msvc_tool_chain(msvcVer, installDir, arch):
name = "msvc{msvcVer}-{arch}".format(**locals())
toolchain = pynja.MsvcToolChain(name, installDir, arch, msvcVer)
toolchain_assign_winsdk(toolchain, msvcVer)
return toolchain
def make_clang_msvc_tool_chain(msvcVer, installDir, arch, llvmDir):
name = "clang{msvcVer}-{arch}".format(**locals())
toolchain = pynja.ClangMsvcToolChain(name, installDir, arch, msvcVer, llvmDir)
toolchain_assign_winsdk(toolchain, msvcVer)
return toolchain
def make_nvcc_tool_chain(nvccVer, nvccInstallDir, msvcVer, msvcInstallDir, arch):
name = "nvcc{nvccVer}_msvc{msvcVer}-{arch}".format(**locals())
if arch == 'x86':
nvccArch = "-m32"
elif arch == 'amd64':
nvccArch = "-m64"
else:
raise Exception('invalid arch %s' % (arch))
toolchain = pynja.NvccToolChain(name, nvccInstallDir, "msvc%s" % msvcVer, msvcInstallDir, nvccArch, isTargetWindows)
toolchain_assign_winsdk(toolchain, msvcVer)
return toolchain
if os.path.exists(pynja.rootPaths.msvc10):
projectMan.add_toolchain(make_msvc_tool_chain(10, pynja.rootPaths.msvc10, "x86"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc10-x86-dbg-dcrt"))
#projectMan.add_toolchain(make_nvcc_tool_chain(50, pynja.rootPaths.cuda50, 10, pynja.rootPaths.msvc10, "amd64"))
#cpp_variants.append(repo.cpp.CppVariant("windows-msvc10-amd64-dbg-dcrt"))
projectMan.add_toolchain(make_nvcc_tool_chain(50, pynja.rootPaths.cuda50, 10, pynja.rootPaths.msvc10, "x86"))
cpp_variants.append(repo.cpp.CppVariant("windows-nvcc50_msvc10-x86-dbg-dcrt"))
if os.path.exists(pynja.rootPaths.msvc11):
projectMan.add_toolchain(make_msvc_tool_chain(11, pynja.rootPaths.msvc11, "x86"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc11-x86-dbg-dcrt"))
projectMan.add_toolchain(make_msvc_tool_chain(11, pynja.rootPaths.msvc11, "amd64"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc11-amd64-dbg-dcrt"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc11-amd64-rel-dcrt"))
projectMan.add_toolchain(make_nvcc_tool_chain(50, pynja.rootPaths.cuda50, 11, pynja.rootPaths.msvc11, "amd64"))
cpp_variants.append(repo.cpp.CppVariant("windows-nvcc50_msvc11-amd64-dbg-dcrt"))
if os.path.exists(pynja.rootPaths.msvc12):
projectMan.add_toolchain(make_msvc_tool_chain(12, pynja.rootPaths.msvc12, "x86"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc12-x86-dbg-dcrt"))
projectMan.add_toolchain(make_msvc_tool_chain(12, pynja.rootPaths.msvc12, "amd64"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc12-amd64-dbg-dcrt"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc12-amd64-rel-dcrt"))
if os.path.exists(pynja.rootPaths.llvmDir):
projectMan.add_toolchain(make_clang_msvc_tool_chain(12, pynja.rootPaths.msvc12, "amd64", pynja.rootPaths.llvmDir))
cpp_variants.append(repo.cpp.CppVariant("windows-clang12-amd64-dbg-dcrt"))
cpp_variants.append(repo.cpp.CppVariant("windows-clang12-amd64-rel-dcrt"))
if os.path.exists(pynja.rootPaths.msvc14):
projectMan.add_toolchain(make_msvc_tool_chain(14, pynja.rootPaths.msvc14, "x86"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc14-x86-dbg-dcrt"))
projectMan.add_toolchain(make_msvc_tool_chain(14, pynja.rootPaths.msvc14, "amd64"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc14-amd64-dbg-dcrt"))
cpp_variants.append(repo.cpp.CppVariant("windows-msvc14-amd64-rel-dcrt"))
if os.path.exists(pynja.rootPaths.mingw64):
projectMan.add_toolchain(pynja.GccToolChain("mingw64-x86", pynja.rootPaths.mingw64, isTargetWindows))
cpp_variants.append(repo.cpp.CppVariant("windows-mingw64-x86-dbg-dcrt"))
projectMan.add_toolchain(pynja.GccToolChain("mingw64-amd64", pynja.rootPaths.mingw64, isTargetWindows))
cpp_variants.append(repo.cpp.CppVariant("windows-mingw64-amd64-dbg-dcrt"))
cpp_variants.append(repo.cpp.CppVariant("windows-mingw64-amd64-rel-dcrt"))
if os.path.exists(pynja.rootPaths.android_ndk_r8d):
projectMan.add_toolchain(pynja.AndroidGccToolChain("android_arm_gcc-aarch32", pynja.rootPaths.android_ndk_r8d, "4.7", 14, "armeabi", prefix="arm-linux-androideabi-"))
cpp_variants.append(repo.cpp.CppVariant("android-android_arm_gcc-aarch32-dbg-dcrt"))
projectMan.add_toolchain(pynja.qt.QtToolChain('qt5vc11', pynja.rootPaths.qt5vc11BinDir))
if build_java:
if os.path.exists(pynja.rootPaths.jdk15):
projectMan.add_toolchain(pynja.JavacToolChain("javac", pynja.rootPaths.jdk15))
java_variants.append(repo.java.JavaVariant("javac"))
elif os.name == 'posix':
if build_cpp:
projectMan.add_toolchain(pynja.GccToolChain("gcc-x86", "/usr", isTargetWindows))
cpp_variants.append(repo.cpp.CppVariant("linux-gcc-x86-dbg-dcrt"))
projectMan.add_toolchain(pynja.GccToolChain("gcc-amd64", "/usr", isTargetWindows))
cpp_variants.append(repo.cpp.CppVariant("linux-gcc-amd64-dbg-dcrt"))
projectMan.add_toolchain(pynja.ClangToolChain("clang-amd64", "/home/lolo/Downloads/clang+llvm-3.2-x86_64-linux-ubuntu-12.04", isTargetWindows))
cpp_variants.append(repo.cpp.CppVariant("linux-clang-amd64-dbg-dcrt"))
if build_java:
if os.path.exists("/usr/bin/javac"):
projectMan.add_toolchain(pynja.JavacToolChain("javac", "/usr/bin"))
java_variants.append(repo.java.JavaVariant("javac"))
else:
raise Exception("Not implemented")
deploy_variants.append(repo.DeployVariant("app32-dbg"))
deploy_variants.append(repo.DeployVariant("app32-rel"))
deploy_variants.append(repo.DeployVariant("app64-dbg"))
deploy_variants.append(repo.DeployVariant("app64-rel"))
# assume protoc is in the path
projectMan.add_toolchain(pynja.protoc.ProtocToolChain("protoc"))
# add re2c
projectMan.add_toolchain(pynja.re2c.Re2cToolChain(pynja.rootPaths.re2c))
projectMan.emit_rules()
projectMan.ninjaFile.write("\n");
projectMan.ninjaFile.write("#############################################\n");
projectMan.ninjaFile.write("# Begin files.\n");
projectMan.ninjaFile.write("\n");
for variant in cpp_variants:
projectMan.get_project("prog0", variant)
for variant in java_variants:
projectMan.get_project("java2", variant)
for variant in deploy_variants:
projectMan.get_project("test2", variant)
currentScriptPath = os.path.join(pynja.rootDir, os.path.basename(__file__))
projectMan.emit_deploy_targets()
projectMan.emit_phony_targets()
projectMan.emit_regenerator_target(currentScriptPath)
################################################################################
# Main script
print("generating with rootDir=%s" % pynja.rootDir)
repo.init()
pynja.import_file('code/test2.py')
pynja.regenerate_build(generate_ninja_build, pynja.rootPaths.built, pynja.rootPaths.codeBrowsing)
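# Note: running this script regenerates the ninja build files through
# pynja.regenerate_build(); the output locations come from
# pynja.rootPaths.built and pynja.rootPaths.codeBrowsing, which are defined
# elsewhere in this repo (assumed, not shown here).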
| apache-2.0 | -8,794,421,707,129,115,000 | 47.847826 | 182 | 0.630952 | false |
Smetterleen/Neopets-Python-API | examples/Player.py | 1 | 2994 | import secrets
import logging
import traceback
import sys
from datetime import datetime
def main():
logging.basicConfig(filename='neopapi.log', level=logging.DEBUG, format='%(asctime)s|%(levelname)s|%(name)s|%(msg)s', datefmt="%x-%X")
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
logging.getLogger().addHandler(ch)
logger.info('-------------- Starting ---------------')
try:
from datetime import timedelta
from neopapi.core import Time
from neopapi.main import User
import time
import Trainer, Banker, Investor, Maller
tasks = []
# plugins = [Investor, Trainer, Banker]
plugins = [Maller]
for plugin in plugins:
time.sleep(1)
tasks.append((Time.NST_time(), plugin))
while True:
if not User.is_logged_in(secrets.username):
logger.info("User is not logged in. Logging in")
User.login(secrets.username, secrets.password)
ordered_tasks = sorted(tasks, key=lambda x: x[0], reverse=True)
first_task = ordered_tasks.pop()
logger.info('Plugin ' + first_task[1].__name__ + ' is first on the list')
if first_task[0] > Time.NST_time():
logger.info('Waiting until %s NST (localtime: %s) to start %s' % (first_task[0].strftime('%x %X'), (first_task[0] + timedelta(hours=9)).strftime('%X'), first_task[1].__name__))
time.sleep((first_task[0] - Time.NST_time()).total_seconds())
logger.info('Running ' + first_task[1].__name__)
next_task_time = first_task[1].run()
if not isinstance(next_task_time, datetime):
raise Exception('Task did not return datetime')
ordered_tasks.append((next_task_time, first_task[1]))
tasks = ordered_tasks
except Exception as e:
logger.exception('Encoutered an Exception, mail was sent')
# Import smtplib for the actual sending function
import smtplib
# Import the email modules we'll need
from email.mime.text import MIMEText
# Create a text/plain message
        msg = MIMEText(str(e) + '\n\n' + traceback.format_exc())
# me == the sender's email address
# you == the recipient's email address
msg['Subject'] = 'Error'
msg['From'] = secrets.email
msg['To'] = secrets.email
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP(secrets.smtp_server, secrets.smtp_port)
s.ehlo()
s.starttls()
s.login(secrets.email, secrets.email_pw)
s.sendmail(secrets.email, [secrets.email], msg.as_string())
s.quit()
if __name__ == '__main__':
main()
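# Note: each module listed in `plugins` is assumed to expose a run() method
# that returns a datetime for its next scheduled execution; the loop in
# main() raises otherwise, and any exception is reported by e-mail.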
| gpl-3.0 | -7,169,292,783,107,106,000 | 37.883117 | 192 | 0.569806 | false |
frascoweb/frasco | frasco/push/__init__.py | 1 | 6769 | from flask import g, has_request_context, request, current_app
from frasco.ext import *
from frasco.users import is_user_logged_in, current_user
from frasco.models import delayed_tx_calls
from frasco.ctx import ContextStack
from frasco.assets import expose_package
from itsdangerous import URLSafeTimedSerializer
from redis import Redis
import hashlib
import logging
import subprocess
import sys
import pickle
import uuid
import click
suppress_push_events = ContextStack(False, default_item=True, ignore_nested=True)
testing_push_events = ContextStack(None, list, ignore_nested=True)
dont_skip_self_push_events = ContextStack(False, default_item=True, ignore_nested=True)
logger = logging.getLogger('frasco.push')
class FrascoPushState(ExtensionState):
def __init__(self, *args, **kwargs):
super(FrascoPushState, self).__init__(*args, **kwargs)
self.current_user_loader = default_current_user_loader
class FrascoPush(Extension):
name = 'frasco_push'
state_class = FrascoPushState
defaults = {"redis_url": None,
"server_url": None,
"server_port": 8888,
"server_secured": False,
"channel": "socketio",
"secret": None,
"prefix_event_with_room": True,
"default_current_user_loader": True,
"testing_ignore_redis_publish": True}
def _init_app(self, app, state):
expose_package(app, "frasco_push", __name__)
if state.options['secret'] is None:
state.options["secret"] = app.config['SECRET_KEY']
if not state.options['redis_url'] and has_extension('frasco_redis', app):
state.options['redis_url'] = app.extensions.frasco_redis.options['url']
state.server_cli = ["python", "-m", "frasco.push.server",
"--channel", state.options["channel"],
"--redis", state.options["redis_url"],
"--port", str(state.options["server_port"])]
if state.options['secret']:
state.server_cli.extend(["--secret", state.options["secret"]])
if app.debug or app.testing:
state.server_cli.append("--debug")
if state.options["server_url"]:
state.server_url = state.options['server_url']
else:
server_name = app.config.get('SERVER_NAME') or 'localhost'
state.server_url = "%s://%s:%s" % (
"https" if state.options['server_secured'] else "http",
server_name.split(':')[0], state.options['server_port'])
state.token_serializer = URLSafeTimedSerializer(state.options['secret'])
state.redis = Redis.from_url(state.options["redis_url"])
state.host_id = uuid.uuid4().hex
@app.cli.command('push-server')
@click.option('--access-logs', is_flag=True)
@click.option('--debug', is_flag=True)
def cli_server(access_logs=False, debug=False):
"""Start the push server"""
args = list(state.server_cli)
if access_logs:
args.append("--access-logs")
if debug:
args.append("--debug")
process = subprocess.Popen(args)
process.wait()
sys.exit(process.returncode)
@app.cli.command('push-server-cli')
def cli_print_cmd():
"""Print the command line to start the push server independently from the app"""
click.echo(" ".join(state.server_cli))
@app.before_request
def before_request():
if state.options['secret']:
user_id, user_info, allowed_rooms = state.current_user_loader()
g.socketio_token = create_push_token(user_info, get_user_room_name(user_id), allowed_rooms)
@ext_stateful_method
def current_user_loader(self, state, func):
state.current_user_loader = func
return func
def default_current_user_loader():
if not is_user_logged_in():
return None, {"guest": True}, None
allowed_rooms = None
if hasattr(current_user, 'get_allowed_push_rooms'):
allowed_rooms = current_user.get_allowed_push_rooms()
info = {"guest": False}
info['username'] = getattr(current_user, 'username', current_user.email)
if has_extension('frasco_users_avatar'):
info['avatar_url'] = current_user.avatar_url
return current_user.get_id(), info, allowed_rooms
def create_push_token(user_info=None, user_room=None, allowed_rooms=None):
return get_extension_state('frasco_push').token_serializer.dumps([user_info, user_room, allowed_rooms])
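# Usage sketch (assumption: clients forward the token to the push server,
# which checks it with the same shared secret via itsdangerous):
#   token = create_push_token({"guest": False}, get_user_room_name(user_id))
# This is what the before_request hook above stores on g.socketio_token.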
@delayed_tx_calls.proxy
def emit_push_event(event, data=None, skip_self=None, room=None, namespace=None, prefix_event_with_room=True):
if suppress_push_events.top:
return
state = get_extension_state('frasco_push')
if current_app.testing and testing_push_events.top is not None:
testing_push_events.top.append((event, data, skip_self, room, namespace))
if state.options['testing_ignore_redis_publish']:
return
if state.options['prefix_event_with_room'] and prefix_event_with_room and room:
event = "%s:%s" % (room, event)
if skip_self is None:
skip_self = not dont_skip_self_push_events.top
skip_sid = None
if skip_self and has_request_context() and 'x-socketio-sid' in request.headers:
skip_sid = request.headers['x-socketio-sid']
logger.debug("Push event '%s' to {namespace=%s, room=%s, skip_sid=%s}: %s" % (event, namespace, room, skip_sid, data))
# we are publishing the events ourselves rather than using socketio.RedisManager because
# importing socketio while using flask debugger causes an error due to signals
return state.redis.publish(state.options['channel'], format_emit_data(event, data, namespace, room, skip_sid))
def emit_user_push_event(user_id, event, data=None, **kwargs):
return emit_push_event(event, data, room=get_user_room_name(user_id), prefix_event_with_room=False, **kwargs)
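# Usage sketch (hypothetical caller code, not part of this module):
#   emit_user_push_event(user_id, 'notification', {'title': 'Hi'})
# resolves the user's private room via get_user_room_name() and publishes a
# pickled 'emit' payload on the configured Redis channel for the push server.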
def get_user_room_name(user_id):
state = get_extension_state('frasco_push')
if not state.options['secret']:
raise Exception('A secret must be set to use emit_direct()')
return hashlib.sha1((str(user_id) + state.options['secret']).encode('utf-8')).hexdigest()
def format_emit_data(event, data, namespace=None, room=None, skip_sid=None):
# See https://github.com/miguelgrinberg/python-socketio/blob/master/socketio/pubsub_manager.py#L65
return pickle.dumps({
'method': 'emit',
'event': event,
'data': data,
'namespace': namespace or '/',
'room': room,
'skip_sid': skip_sid,
'callback': None,
'host_id': get_extension_state('frasco_push').host_id
})
| mit | 964,026,032,669,104,500 | 39.291667 | 122 | 0.636579 | false |
o108minmin/pint | pint/floattools.py | 1 | 2814 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import fractions
'''
floattools
tools for floating-point numbers
'''
def stringtofraction(arg):
'''
arg : str
-> fractions.Fraction
    Generate an approximate fractions.Fraction from the string arg.
    The return value is a fractions.Fraction; see the fractions module
    for details.
    Accepted argument formats:
    # integer
    stringtofraction("1")
    # decimal
    stringtofraction("0.1")
    # exponent notation
    stringtofraction("1e100")
    # decimal with exponent notation
    stringtofraction("3.14e100")
'''
if arg.find("-") >= 0:
arg_sign = -1
else:
arg_sign = 1
if arg.find("e") >= 0:
arg_num, arg_e = arg.split("e")
e = fractions.Fraction(10, 1) ** int(arg_e)
else:
arg_num = arg
e = fractions.Fraction(1, 1)
if arg_num.find(".") >= 0:
arg_up, arg_down = arg_num.split(".")
ans = fractions.Fraction(int(arg_down), 10 ** len(arg_down))
ans += fractions.Fraction(int(arg_up), 1)
else:
ans = fractions.Fraction(int(arg_num), 1)
ans *= e
if math.copysign(ans, arg_sign) != ans:
ans *= arg_sign
return ans
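# Worked examples (traced from the code above):
#   stringtofraction("0.1")    == fractions.Fraction(1, 10)
#   stringtofraction("3.14e2") == fractions.Fraction(314, 1)
#   stringtofraction("-0.5")   == fractions.Fraction(-1, 2)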
def verified_digits(x, y):
digits = 0
str_x = format(x, '.64f')
str_y = format(y, '.64f')
if len(str_x) > len(str_y):
for i in range(0, len(str_x)):
str_y += '0'
elif len(str_y) > len(str_x):
for i in range(0, len(str_y)):
str_x += '0'
if str_x.find('e') > 0:
x_num, x_e = str_x.split('e')
else:
x_num = str_x
x_e = 0
if str_y.find('e') > 0:
y_num, y_e = str_y.split('e')
else:
y_num = str_y
y_e = 0
if x_e != y_e:
return 0
if x_num.find('.') > 0:
x_up, x_down = x_num.split('.')
else:
x_up = x_num
x_down = ''
if y_num.find('.') > 0:
y_up, y_down = y_num.split('.')
else:
y_up = y_num
y_down = ''
if x_up == y_up:
digits += len(x_up)
else:
if len(x_up) != len(y_up):
return 0
tf = [x_up[i] == y_up[i] for i in range(min([len(x_up), len(y_up)]))]
tf.append(False)
digits += tf.index(False)
return digits
if x_down == y_down:
digits += len(x_down) + 1
else:
tf = [x_down[i] == y_down[i] for i in range(min([len(x_down), len(y_down)]))]
tf.append(False)
digits += tf.index(False) + 1
return digits
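# Note: verified_digits(x, y) counts how many leading characters of the two
# fixed-point renderings (format(v, '.64f')) agree: the integer digits, plus
# one extra for the decimal point once the fractional parts are compared.
# It returns 0 when the exponents or integer-part lengths differ.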
def numbertofractton(arg):
pass
| mit | 4,094,020,670,867,079,000 | 22.086207 | 85 | 0.506721 | false |
parmarmanojkumar/MITx_Python | 6002x/week5/ProblemSet5/ps5.py | 1 | 12686 | # 6.00.2x Problem Set 5
# Graph optimization
# Finding shortest paths through MIT buildings
#
import string
# This imports everything from `graph.py` as if it was defined in this file!
from graph import *
#
# Problem 2: Building up the Campus Map
#
# Before you write any code, write a couple of sentences here
# describing how you will model this problem as a graph.
# This is a helpful exercise to help you organize your
# thoughts before you tackle a big design problem!
#
def load_map(mapFilename):
"""
Parses the map file and constructs a directed graph
Parameters:
mapFilename : name of the map file
Assumes:
Each entry in the map file consists of the following four positive
integers, separated by a blank space:
From To TotalDistance DistanceOutdoors
e.g.
32 76 54 23
This entry would become an edge from 32 to 76.
Returns:
a directed graph representing the map
"""
# TODO
print "Loading map from file..."
gMap = WeightedDigraph()
with open(mapFilename ,'r', 0) as ipFile:
for line in ipFile:
data = line.split()
srcNode = Node(str(data[0]))
destNode = Node(str(data[1]))
if not gMap.hasNode(srcNode):
gMap.addNode(srcNode)
if not gMap.hasNode(destNode):
gMap.addNode(destNode)
aEdge = WeightedEdge(srcNode, destNode, int(data[2]), int(data[3]))
gMap.addEdge(aEdge)
return gMap
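# Illustrative note (based on how digraph.edges is consumed below, not on
# graph.py itself): after load_map each node maps to entries of the form
# [destNode, (totalDistance, distanceOutdoors)], so the line "32 76 54 23"
# becomes an edge Node('32') -> Node('76') with weights (54, 23).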
# Problem 3: Finding the Shortest Path using Brute Force Search
#
# State the optimization problem as a function to minimize
# and what the constraints are
#
def bruteForceSearch(digraph, start, end, maxTotalDist, maxDistOutdoors):
"""
Finds the shortest path from start to end using brute-force approach.
The total distance travelled on the path must not exceed maxTotalDist, and
the distance spent outdoor on this path must not exceed maxDistOutdoors.
Parameters:
digraph: instance of class Digraph or its subclass
start, end: start & end building numbers (strings)
maxTotalDist : maximum total distance on a path (integer)
maxDistOutdoors: maximum distance spent outdoors on a path (integer)
Assumes:
start and end are numbers for existing buildings in graph
Returns:
The shortest-path from start to end, represented by
a list of building numbers (in strings), [n_1, n_2, ..., n_k],
where there exists an edge from n_i to n_(i+1) in digraph,
for all 1 <= i < k.
If there exists no path that satisfies maxTotalDist and
maxDistOutdoors constraints, then raises a ValueError.
"""
##### Internal Functions Start
def getPaths(graph, start, end, path=[]):
path = path + [start]
if start == end:
return [path]
paths = []
for node in graph.childrenOf(start):
if node not in path:
newpaths = getPaths(graph, node, end, path)
if newpaths != None:
for newpath in newpaths:
paths.append(newpath)
return paths
def calcPathLength(digraph,aPath):
totDist = 0.0
outDist = 0.0
for idx in xrange(len(aPath)-1):
nextNode = aPath[idx+1]
for link in digraph.edges[aPath[idx]]:
if link[0] == nextNode:
totDist += link[1][0]
outDist += link[1][1]
return totDist, outDist
def calcPathsDetails(digraph, pathsList):
pathsDetail = []
for path in pathsList:
totDist, outDist = calcPathLength(digraph, path)
pathsDetail.append([path, totDist, outDist])
return pathsDetail[:]
def calcShortestPathWithCriteria(pathsDetailed, \
maxTotalDist, maxDistOutdoors):
shortestPath = []
shortestPathVal = float(maxTotalDist)
for path in pathsDetailed:
if path[1] <= maxTotalDist and path[2] <= maxDistOutdoors:
if path[1] <= shortestPathVal:
shortestPathVal = path[1]
shortestPath = path[0]
if len(shortestPath) == 0:
return list(), None
else :
sPath = []
for node in shortestPath:
sPath.append(node.getName())
return sPath[:], shortestPathVal
##### Internal Functions End
    # Step 0: load map | the loaded map is available as `digraph`
    # Step 1: calculate all available paths
pathsAvailable = getPaths(digraph, Node(start), Node(end))
# Step2 : Calculate path distances for available paths
pathsAvailable = calcPathsDetails(digraph, pathsAvailable)
# Step3 : Calculate Shortest path meeting criteria for total distance and
# outdoor distance
sPath, sPathVal = calcShortestPathWithCriteria(pathsAvailable,
maxTotalDist,
maxDistOutdoors)
if len(sPath) == 0:
raise ValueError(" No path available meeting criteria")
else:
return sPath
#
# Problem 4: Finding the Shorest Path using Optimized Search Method
#
def directedDFS(digraph, start, end, maxTotalDist, maxDistOutdoors):
"""
Finds the shortest path from start to end using directed depth-first.
search approach. The total distance travelled on the path must not
exceed maxTotalDist, and the distance spent outdoor on this path must
not exceed maxDistOutdoors.
Parameters:
digraph: instance of class Digraph or its subclass
start, end: start & end building numbers (strings)
maxTotalDist : maximum total distance on a path (integer)
maxDistOutdoors: maximum distance spent outdoors on a path (integer)
Assumes:
start and end are numbers for existing buildings in graph
Returns:
The shortest-path from start to end, represented by
a list of building numbers (in strings), [n_1, n_2, ..., n_k],
where there exists an edge from n_i to n_(i+1) in digraph,
for all 1 <= i < k.
If there exists no path that satisfies maxTotalDist and
maxDistOutdoors constraints, then raises a ValueError.
"""
stack = []
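    # Iterative depth-first search: each stack entry holds a partial path plus its
    # accumulated (total, outdoor) distances; extensions that exceed the current
    # best total or either distance limit are pruned before being pushed.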
stack.append([[Node(start)], (0.0,0.0)])
sDist = maxTotalDist + 1.0
sPath = []
totalDist = 0.0
outdoorsDist = 0.0
nEnd = Node(end)
while len(stack) != 0:
popEntry = stack.pop()
path = popEntry[0]
curNode = path[-1]
for destNode, (nodeTotDist, nodeOutDist) in digraph.edges[curNode]:
totalDist = popEntry[1][0]
outdoorsDist = popEntry[1][1]
if destNode not in path :
newPath = path + [destNode]
totalDist += nodeTotDist
outdoorsDist += nodeOutDist
criteria = (totalDist > sDist) or (totalDist > maxTotalDist) or (outdoorsDist > maxDistOutdoors)
if criteria :
continue
stack.append([newPath, (totalDist, outdoorsDist)])
if destNode == nEnd:
sPath = newPath
sDist = totalDist
if len(sPath) == 0:
raise ValueError(" No path available meeting criteria")
else:
shortestPath = []
for node in sPath:
shortestPath.append(node.getName())
return shortestPath[:]
# Uncomment below when ready to test
#### NOTE! These tests may take a few minutes to run!! ####
if __name__ == '__main__':
#Test cases
mitMap = load_map("mit_map.txt")
print isinstance(mitMap, Digraph)
print isinstance(mitMap, WeightedDigraph)
print 'nodes', mitMap.nodes
print 'edges', mitMap.edges
LARGE_DIST = 1000000
# Test case 1
print "---------------"
print "Test case 1:"
print "Find the shortest-path from Building 32 to 56"
expectedPath1 = ['32', '56']
brutePath1 = bruteForceSearch(mitMap, '32', '56', LARGE_DIST, LARGE_DIST)
dfsPath1 = directedDFS(mitMap, '32', '56', LARGE_DIST, LARGE_DIST)
# dfsPath1 = brutePath1
print "Expected: ", expectedPath1
print "Brute-force: ", brutePath1
print "DFS: ", dfsPath1
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath1 == brutePath1, expectedPath1 == dfsPath1)
# Test case 2
print "---------------"
print "Test case 2:"
print "Find the shortest-path from Building 32 to 56 without going outdoors"
expectedPath2 = ['32', '36', '26', '16', '56']
brutePath2 = bruteForceSearch(mitMap, '32', '56', LARGE_DIST, 0)
dfsPath2 = directedDFS(mitMap, '32', '56', LARGE_DIST, 0)
# dfsPath2 = brutePath2
print "Expected: ", expectedPath2
print "Brute-force: ", brutePath2
print "DFS: ", dfsPath2
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath2 == brutePath2, expectedPath2 == dfsPath2)
# Test case 3
print "---------------"
print "Test case 3:"
print "Find the shortest-path from Building 2 to 9"
expectedPath3 = ['2', '3', '7', '9']
brutePath3 = bruteForceSearch(mitMap, '2', '9', LARGE_DIST, LARGE_DIST)
dfsPath3 = directedDFS(mitMap, '2', '9', LARGE_DIST, LARGE_DIST)
# dfsPath3 = brutePath3
print "Expected: ", expectedPath3
print "Brute-force: ", brutePath3
print "DFS: ", dfsPath3
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath3 == brutePath3, expectedPath3 == dfsPath3)
# Test case 4
print "---------------"
print "Test case 4:"
print "Find the shortest-path from Building 2 to 9 without going outdoors"
expectedPath4 = ['2', '4', '10', '13', '9']
brutePath4 = bruteForceSearch(mitMap, '2', '9', LARGE_DIST, 0)
dfsPath4 = directedDFS(mitMap, '2', '9', LARGE_DIST, 0)
# dfsPath4 = brutePath4
print "Expected: ", expectedPath4
print "Brute-force: ", brutePath4
print "DFS: ", dfsPath4
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath4 == brutePath4, expectedPath4 == dfsPath4)
# Test case 5
print "---------------"
print "Test case 5:"
print "Find the shortest-path from Building 1 to 32"
expectedPath5 = ['1', '4', '12', '32']
brutePath5 = bruteForceSearch(mitMap, '1', '32', LARGE_DIST, LARGE_DIST)
dfsPath5 = directedDFS(mitMap, '1', '32', LARGE_DIST, LARGE_DIST)
# dfsPath5 = brutePath5
print "Expected: ", expectedPath5
print "Brute-force: ", brutePath5
print "DFS: ", dfsPath5
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath5 == brutePath5, expectedPath5 == dfsPath5)
# Test case 6
print "---------------"
print "Test case 6:"
print "Find the shortest-path from Building 1 to 32 without going outdoors"
expectedPath6 = ['1', '3', '10', '4', '12', '24', '34', '36', '32']
brutePath6 = bruteForceSearch(mitMap, '1', '32', LARGE_DIST, 0)
dfsPath6 = directedDFS(mitMap, '1', '32', LARGE_DIST, 0)
# dfsPath6 = brutePath6
print "Expected: ", expectedPath6
print "Brute-force: ", brutePath6
print "DFS: ", dfsPath6
print "Correct? BFS: {0}; DFS: {1}".format(expectedPath6 == brutePath6, expectedPath6 == dfsPath6)
# Test case 7
print "---------------"
print "Test case 7:"
print "Find the shortest-path from Building 8 to 50 without going outdoors"
bruteRaisedErr = 'No'
dfsRaisedErr = 'No'
try:
bruteForceSearch(mitMap, '8', '50', LARGE_DIST, 0)
except ValueError:
bruteRaisedErr = 'Yes'
try:
directedDFS(mitMap, '8', '50', LARGE_DIST, 0)
except ValueError:
dfsRaisedErr = 'Yes'
print "Expected: No such path! Should throw a value error."
print "Did brute force search raise an error?", bruteRaisedErr
print "Did DFS search raise an error?", dfsRaisedErr
# Test case 8
print "---------------"
print "Test case 8:"
print "Find the shortest-path from Building 10 to 32 without walking"
print "more than 100 meters in total"
bruteRaisedErr = 'No'
dfsRaisedErr = 'No'
try:
bruteForceSearch(mitMap, '10', '32', 100, LARGE_DIST)
except ValueError:
bruteRaisedErr = 'Yes'
try:
directedDFS(mitMap, '10', '32', 100, LARGE_DIST)
except ValueError:
dfsRaisedErr = 'Yes'
print "Expected: No such path! Should throw a value error."
print "Did brute force search raise an error?", bruteRaisedErr
print "Did DFS search raise an error?", dfsRaisedErr
| mit | -1,558,743,471,224,885,800 | 35.771014 | 112 | 0.606417 | false |
triple-j/base16-palette | build/build_schemes.py | 1 | 1510 | #!/usr/bin/env python
import os
from git import Repo
from shutil import rmtree
from glob import glob
import xml.etree.ElementTree as etree
import re
git_url = "https://github.com/chriskempson/base16.git"
repo_dir = "./base16"
scss_dir = "../scss/color-schemes"
labels = [
"base00",
"base01",
"base02",
"base03",
"base04",
"base05",
"base06",
"base07",
"base08",
"base09",
"base0A",
"base0B",
"base0C",
"base0D",
"base0E",
"base0F"
]
if os.path.isdir(repo_dir):
# delete repo_dir
rmtree(repo_dir)
# clone repo
Repo.clone_from(git_url, repo_dir)
# find all svg palettes
palettes = glob(repo_dir+"/scheme-previews/*.svg")
for palette in palettes:
tree = etree.parse(palette)
root = tree.getroot()
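    # Derive the SCSS partial name from the SVG file name (drop the "base16-"
    # prefix and the ".svg" extension, then prepend "_" and append ".scss").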
filename = os.path.basename(palette)
filename = re.sub(r'^base16-', '', filename, flags=re.IGNORECASE)
filename = re.sub(r'\.svg$', '', filename, flags=re.IGNORECASE)
filename = "_" + filename + ".scss"
filepath = scss_dir + "/" + filename
text = "";
title = root.find(".//{http://www.w3.org/2000/svg}title").text
text += "// " + title + "\n\n"
colors = {}
for label in labels:
colors[label] = root.find(".//*[@id='" + label + "']").attrib['stroke']
text += "$" + label + ": " + colors[label] + " !default;\n"
print "Writing: " + title + " (" + filepath + ")"
f = open(filepath, 'w')
f.write(text)
f.close()
| mit | 2,400,405,034,719,158,000 | 20.884058 | 79 | 0.561589 | false |
Duncan93/dbm-project2 | Ttest.py | 1 | 5148 | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 22 22:52:24 2015
@author: lorraine
"""
import json
from pprint import pprint
import numpy as np
from scipy.stats import mstats
from scipy import stats
import csv
import pandas as pd
#json_data=open("data/{0}_B.json".format("pizza")).read()
#data = json.loads(json_data)
#pprint(data)
def normaltest_data(category):
data,population = load_rating_data(category)
z,pval = mstats.normaltest(data)
print(category+" p value is "+str(pval))
if(pval < 0.01):
print "Not normal distribution"
else:
print "normal"
# normaltest_data
# null hypothesis is that the pizza ratings all over the states follow a normal distribution
# A significance level of 0.01 was chosen.
#Since the calculated p value is greater than the significance level, we do not reject the null hypothesis
#Therefore we can safely assume the ratings follow a normal distribution
#Suppose the top-40 rated pizza rating nationwide is 4.0, the one sample t-test returns a p value of 0.0019 < significance level=0.05
#therefore we can reject the null hypothesis. Do not have sufficient evidence to conclude the population mean is 4.0
#one-sided t-test, H0: score = 4.0, H1: score < 4.0
# As t<0 & p/2<alpha, we reject null hypothesis. Enough evidence to conclude best pizza score < 4.0
#assume the best pizza and best chinese have the same score in the population
#p-val = 2.32e-07 < 0.01, reject the null hypothesis. Do not have sufficient confidence to conclude the best scores are the same
#One-tailed greater than test. H0: pizza = chinese, H1:pizza >= chinese.
#As t>0 & p/2<alpha, we reject null hypothesis. Enough evidence to conclude that best pizza score is significantly greater than best chinese food
#two side p-val=0.003<0.01, t>0, reject null
#H0: best pizza score = best mexican, H1:best pizza >= mexican
#As t>0 and p/2<alpha, we reject null hypothesis. Best pizza is significantly greater than best mexican
#H0: best chinese = best mexican
#H1: best chinese not equal
# p>0.01, do not reject null. Mexican rating is not significantly different than Chinese
#assume the best pizza and the best bar have the same score in the population
#p-val=0.64 > 0.05, do not reject the null hypothesis. The best bar score is not significantly different from best pizza
def anova_test(cat1,cat2,cat3,cat4):
x1,pop1=load_rating_data(cat1)
x2,pop2=load_rating_data(cat2)
x3,pop3=load_rating_data(cat3)
x4,pop4=load_rating_data(cat4)
F_val, p_val_anova = stats.f_oneway(x1,x2,x3,x4)
print("anova f val"+str(F_val))
print("anova p val"+str(p_val_anova))
# anova test null hypothesis:the population mean of the best pizza, bar, chinese and mexican restaurant ratings are the same
#p_val=1.13e-05<0.01, reject null hypothesis
#need to state the assumption of Anova Test
def pearson_rapop(category):
rating,population = load_rating_data(category)
pearson, p_val = stats.pearsonr(rating,population)
print("pearson rapop is "+str(pearson))
print("pearson rapop p_val is "+str(p_val))
# pearson coefficient = 0.23, 0.20<pearson<0.29,weak positive correlation
# p_val=0.09>0.05, H0: There is so statistically significant relationship between the two variables
# do not reject null hypothesis
def load_rating_data(category):
with open("data/{0}_B.json".format(category),"r") as f:
cat = f.read()
cat = json.loads(cat)
rating=[]
population=[]
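    # Each entry holds one {'rating': ..., 'population': ...} dict;
    # collect the two series in parallel lists.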
for i in xrange(len(cat[category])):
score = cat[category][i].values()
rating.append(score[0]["rating"])
population.append(score[0]["population"])
return rating,population
def pearson_raAge(category):
rating,population = load_rating_data(category)
rating = np.array(rating)
population=np.array(population)
age = []
f = open('data/MedianAge.csv')
csv_f = csv.reader(f)
for row in csv_f:
age.append(float(row[2]))
#rating = np.array(rating)
age=np.array(age)
pearson, p_val = stats.pearsonr(rating,age)
print("pearson raAge is "+str(pearson))
print("pearson raAge p_val is "+str(p_val))
#neglible correlation between rating and median age
def one_sample_ttest(category,base):
rating,population=load_rating_data(category)
rating = np.array(rating)
population=np.array(population)
t4, prob4 = stats.ttest_1samp(rating,base)
print("t value of "+category+str(t4))
print("p value of "+category+str(prob4))
def two_sample_ttest(category1, category2):
data1,populaton1=load_rating_data(category1)
data1 = np.array(data1)
data2,population2=load_rating_data(category2)
data2 = np.array(data2)
t, prob = stats.ttest_rel(data1,data2)
print("t value of "+ category1+category2+str(t))
print("p value of "+ category1+category2+str(prob))
category_filter = ["pizza","chinese","mexican","bars"]
#for category in category_filter:
normaltest_data("pizza")
# pearson_raAge("pizza")
# pearson_rapop("pizza")
# one_sample_ttest("pizza",4)
# two_sample_ttest("pizza","chinese")
# anova_test("pizza","chinese","mexican","bars")
| mit | -4,440,707,402,355,450,000 | 37.691729 | 144 | 0.705402 | false |
dotoscat/toyblock | doc/source/conf.py | 1 | 5240 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# toyblock documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 4 12:31:07 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'toyblock'
copyright = '2017, Oscar Triano \'dotoscat\''
author = 'Oscar Triano \'dotoscat\''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.0.0'
# The full version, including alpha/beta/rc tags.
release = '2.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'toyblockdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'toyblock.tex', 'toyblock Documentation',
'Oscar Triano \'dotoscat\'', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'toyblock', 'toyblock Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'toyblock', 'toyblock Documentation',
author, 'toyblock', 'One line description of project.',
'Miscellaneous'),
]
| lgpl-3.0 | -3,109,284,729,114,547,000 | 29.823529 | 79 | 0.671947 | false |
Micronaet/micronaet-migration | report_purchase_custom/report/company/parser.py | 1 | 10924 | ##############################################################################
#
# Copyright (c) 2008-2010 SIA "KN dati". (http://kndati.lv) All Rights Reserved.
# General contacts <[email protected]>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.report import report_sxw
from openerp.report.report_sxw import rml_parse
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_volume_item':self.get_volume_item,
'get_q_x_pack': self.get_q_x_pack,
'get_supplier_code': self.get_supplier_code,
# Multi pack insert depend on Campaign module:
'multipack_dimension_list': self.multipack_dimension_list,
'multipack_dimension_volume': self.multipack_dimension_volume,
'multipack_dimension_volume_total':
self.multipack_dimension_volume_total,
# TODO remove:
'total_volume':self.total_volume,
'get_total_volume':self.get_total_volume,
'get_unit_volume':self.get_unit_volume,
'get_price': self.get_price,
'get_subtotal': self.get_subtotal,
'get_total': self.get_total,
'get_lang_field': self.get_lang_field,
'total_USD':self.total_USD,
'get_couple': self.get_couple,
})
# -------------------------------------------------------------------------
# Multipack block:
# -------------------------------------------------------------------------
# Utility:
def multipack_extract_info(self, detail, data='list'):
''' Extract data from product detail
data:
'list' for list of elements
                'volume' for the volume of a single unit
                'total' for the total volume (quantity x unit volume)
'''
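        # Multi-package products yield one entry per pack (repeated pack.number
        # times); single-package products fall back to the product's own pack
        # dimensions, with the quantity divided by the pieces-per-pack value.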
res = []
volume = 0
product = detail.product_id
qty = detail.product_qty or 0
if product.has_multipackage:
for pack in product.multi_pack_ids:
for loop in range(0, pack.number or 1):
res.append('%s x %s x %s' % (
pack.height, pack.width, pack.length,
))
volume_1 = pack.height * pack.width * pack.length / \
1000000.0
if data == 'total':
volume += qty * volume_1
elif data == 'volume':
volume += volume_1
else:
res.append('%s x %s x %s' % (
product.pack_l, product.pack_h, product.pack_p
))
q_x_pack = self.get_q_x_pack(product)
volume_1 = \
product.pack_l * product.pack_h * product.pack_p / 1000000.0
if data == 'volume':
volume = volume_1
elif data == 'total':
volume = qty * volume_1 / q_x_pack
if data == 'list':
return res
# elif 'volume':
return volume
# Get pack list:
def multipack_dimension_list(self, detail, as_list=True):
''' Create list of elements
return as_list or as text formatted
'''
res = self.multipack_extract_info(detail, data='list')
if as_list:
return '\n'.join(res)
else:
return res
# Get volume
def multipack_dimension_volume(self, detail, data='volume'):
''' Calculate volume multipack or product pack data
data: 'volume' for one 'totat' for total
'''
volume = self.multipack_extract_info(detail, data=data)
return '%2.3f' % volume
# Get total volume
def multipack_dimension_volume_total(self, order):
''' Get total volume
'''
volume = 0.0
for detail in order.order_line:
volume += self.multipack_extract_info(detail, data='total')
return '%2.3f' % volume
def get_q_x_pack(self, product):
        # Old method based on packaging_ids is kept below, commented out, for reference
if product.has_multipackage:
return 1
#elif len(product.packaging_ids) == 1:
# return int(product.packaging_ids[0].qty or 1.0)
else:
if product.q_x_pack and product.q_x_pack < 1:
return 1 # XXX for 0.5 1 x pack
else:
return int(product.q_x_pack or 1)
# -------------------------------------------------------------------------
def get_supplier_code(self, product):
if product.default_supplier_code:
return '[%s]' % (product.default_supplier_code)
elif product.seller_ids and product.seller_ids[0].product_code:
return '[%s]' % (product.seller_ids[0].product_code)
else:
return '/'
def get_lang_field(self, pool, item_id, field, lang):
''' Get field from obj in lang passed
'''
context = {'lang': lang}
obj_pool = self.pool.get(pool)
obj_proxy = obj_pool.browse(self.cr, self.uid, item_id, context=context)
return obj_proxy.__getattribute__(field)
def get_couple(self, order_line):
''' Couple the elements
'''
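        # Expand every line into one entry per coll ("i/colls") and group the
        # entries two per row (presumably for a two-column report layout).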
res = []
position = 0 # next is 0
for element in order_line:
# Colls:
try:
colls = int(element.product_id.colls) or 1
except:
colls = 1 # error in conversion
# Q x Pack.
try:
q_x_pack = int(element.product_id.q_x_pack) or '/'
except:
q_x_pack = '/' # error in conversion
for i in range(1, colls + 1):
position += 1
part_number = "%s/%s" % (i, colls)
if position % 2 == 1: # odd
                    # first element:
res.append([
[element, part_number, q_x_pack],
[False, False, False]
])
                else: # even
# second element:
res[(position-1) / 2][1][0] = element
res[(position-1) / 2][1][1] = part_number
res[(position-1) / 2][1][2] = q_x_pack
return res
def get_price(self, item, order):
''' Return cost price from pricelist in currency indicated
'''
try:
currency = order.partner_id.property_product_pricelist_purchase.currency_id.name
if currency == "EUR":
return "%2.2f"%(item.product_id.seller_ids[0].pricelist_ids[0].price,)
elif currency == "USD":
return "%2.2f"%(item.product_id.seller_ids[0].pricelist_ids[0].price_usd,)
except:
pass # on error price is empty
return "0.0"
def get_subtotal(self, item, order):
try:
currency = order.partner_id.property_product_pricelist_purchase.currency_id.name
if currency == "EUR":
return "%2.2f"%(float("%2.2f"%(
item.product_id.seller_ids[
0].pricelist_ids[0].price)) * item.product_qty)
elif currency == "USD":
return "%2.2f"%(float("%2.2f"%(
item.product_id.seller_ids[
0].pricelist_ids[0].price_usd)) * item.product_qty)
except:
pass # on error price is empty
return "0.0"
def get_total(self, items, order):
total=0.0
for item in items:
total += float(self.get_subtotal(item, order))
return "%2.2f"%(total)
def get_unit_volume(self, item):
''' get unit volume
'''
#if len(item.product_id.packaging_ids) == 1:
return "%2.3f" % ((
item.product_qty * \
item.product_id.pack_l * \
item.product_id.pack_h * \
item.product_id.pack_p / 1000000.0 / (
self.get_q_x_pack(item.product_id))) or 0.0)
#else:
# return '/'
def get_total_volume(self, o):
''' Function that compute total volume for 1 or more items
'''
res = 0.0
for item in o.order_line:
#if len(item.product_id.packaging_ids) == 1:
res += (item.product_qty * \
item.product_id.pack_l * \
item.product_id.pack_h * \
item.product_id.pack_p / 1000000.0 / (
self.get_q_x_pack(item.product_id))) or 0.0
return '%2.3f' % res
def get_volume_item(self, item_id):
''' calculate total volume for item line
Pz / Pz. x box * (volume of the box => l x w x h)
'''
return self.get_total_volume([item_id])
def total_volume(self, o):
''' calculate total volume for all items present in order
'''
return '' #self.get_total_volume([item.id for item in item_list])
def total_USD(self, order_id):
''' calculate total USD for all items present in order
'''
total=0.0
for item in self.pool.get('purchase.order').browse(self.cr, self.uid, order_id).order_line:
if item.product_id and item.product_id.fob_cost_supplier:
total += float("%2.2f"%(item.product_id.fob_cost_supplier,)) * item.product_qty
return "%2.2f"%(total,)
| agpl-3.0 | 6,611,097,650,410,658,000 | 38.294964 | 99 | 0.502014 | false |
mitodl/bootcamp-ecommerce | hubspot/management/commands/sync_hubspot.py | 1 | 4122 | """
Management command to sync all Users, Orders, Products, and Lines with Hubspot
and Line Items
"""
from django.contrib.auth.models import User
from django.core.management import BaseCommand
from applications.models import BootcampApplication
from hubspot.api import (
make_contact_sync_message,
make_product_sync_message,
make_deal_sync_message,
make_line_sync_message,
)
from hubspot.tasks import sync_bulk_with_hubspot
from klasses.models import BootcampRun
class Command(BaseCommand):
"""
Command to sync Contact, Product, and Deal information with Hubspot
"""
    help = (
        "Sync all Contacts, Deals, and Products with Hubspot. Hubspot API key must be set and Hubspot "
        "settings must be configured with configure_hubspot_settings"
    )
@staticmethod
def bulk_sync_model(objects, make_object_sync_message, object_type, **kwargs):
"""
Sync all database objects of a certain type with hubspot
Args:
objects (iterable) objects to sync
make_object_sync_message (function) function that takes an objectID and
returns a sync message for that model
object_type (str) one of "CONTACT", "DEAL", "PRODUCT", "LINE_ITEM"
"""
sync_bulk_with_hubspot(
objects,
make_object_sync_message,
object_type,
print_to_console=True,
**kwargs,
)
def sync_contacts(self):
"""
Sync all profiles with contacts in hubspot
"""
print(" Syncing users with hubspot contacts...")
self.bulk_sync_model(
User.objects.filter(profile__isnull=False),
make_contact_sync_message,
"CONTACT",
)
print(" Finished")
def sync_products(self):
"""
Sync all Bootcamps with products in hubspot
"""
print(" Syncing products with hubspot products...")
self.bulk_sync_model(
BootcampRun.objects.all(), make_product_sync_message, "PRODUCT"
)
print(" Finished")
def sync_deals(self):
"""
Sync all deals with deals in hubspot. Hubspot deal information is stored in both PersonalPrice
and the ecommerce Order
"""
print(" Syncing orders with hubspot deals...")
self.bulk_sync_model(
BootcampApplication.objects.all(), make_deal_sync_message, "DEAL"
)
self.bulk_sync_model(
BootcampApplication.objects.all(), make_line_sync_message, "LINE_ITEM"
)
print(" Finished")
def sync_all(self):
"""
Sync all Users, Orders, Products, and Lines with Hubspot.
"""
self.sync_contacts()
self.sync_products()
self.sync_deals()
def add_arguments(self, parser):
"""
Definition of arguments this command accepts
"""
parser.add_argument(
"--contacts",
"--users",
dest="sync_contacts",
action="store_true",
help="Sync all users",
)
parser.add_argument(
"--products",
dest="sync_products",
action="store_true",
help="Sync all products",
)
parser.add_argument(
"--deals",
"--orders",
dest="sync_deals",
action="store_true",
help="Sync all orders",
)
def handle(self, *args, **options):
print("Syncing with hubspot...")
if not (
options["sync_contacts"]
or options["sync_products"]
or options["sync_deals"]
):
# If no flags are set, sync everything
self.sync_all()
else:
# If some flags are set, sync the specified models
if options["sync_contacts"]:
self.sync_contacts()
if options["sync_products"]:
self.sync_products()
if options["sync_deals"]:
self.sync_deals()
print("Hubspot sync complete")
| bsd-3-clause | 7,408,869,084,964,508,000 | 29.761194 | 111 | 0.565745 | false |
jlongstaf/f5-openstack-lbaasv2-driver | f5lbaasdriver/v2/bigip/disconnected_service.py | 1 | 3379 | # Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from neutron.db import segments_db
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import models
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class DisconnectedService(object):
def __init__(self):
self.supported_encapsulations = ['vlan']
# Retain this method for future use in case a particular ML2 implementation
# decouples network_id from physical_network name. The implementation in
# neutron db.py requires a network_id.
def get_network_segments(self, session):
with session.begin(subtransactions=True):
query = (session.query(models.NetworkSegment).
order_by(models.NetworkSegment.segment_index))
records = query.all()
result = {}
for record in records:
if record.network_id not in result:
result[record.network_id] = []
result[record.network_id].append(db._make_segment_dict(record))
return result
def get_network_segment(self, context, agent_configuration, network):
data = None
network_segment_physical_network = \
agent_configuration.get('network_segment_physical_network', None)
supported_encapsulations = [
x.lower() for x in self.supported_encapsulations +
agent_configuration.get('tunnel_types', [])
]
# look up segment details in the ml2_network_segments table
segments = segments_db.get_network_segments(context, network['id'],
filter_dynamic=None)
for segment in segments:
LOG.debug("F5 disconnected service check segment: %s" % segment)
if ((network_segment_physical_network ==
segment['physical_network']) and
(segment['network_type'].lower() in
supported_encapsulations)):
data = segment
break
elif (network['provider:network_type'] == 'opflex' and
segment['network_type'] == 'vlan'):
data = segment
LOG.debug("Got OPFLEX segment: %s" % segment)
break
if not data:
LOG.debug('Using default segment for network %s' %
(network['id']))
# neutron is expected to provide this data immediately
data = {
'segmentation_id': network['provider:segmentation_id']
}
if 'provider:network_type' in network:
data['network_type'] = network['provider:network_type']
if 'provider:physical_network' in network:
data['physical_network'] = network['provider:physical_network']
return data
| apache-2.0 | 364,617,107,890,503,300 | 38.752941 | 79 | 0.612311 | false |
Azure/WALinuxAgent | azurelinuxagent/common/osutil/suse.py | 1 | 6891 | #
# Copyright 2018 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import time
import azurelinuxagent.common.logger as logger
import azurelinuxagent.common.utils.fileutil as fileutil
import azurelinuxagent.common.utils.shellutil as shellutil # pylint: disable=W0611
from azurelinuxagent.common.exception import OSUtilError # pylint: disable=W0611
from azurelinuxagent.common.future import ustr # pylint: disable=W0611
from azurelinuxagent.common.osutil.default import DefaultOSUtil
class SUSE11OSUtil(DefaultOSUtil):
def __init__(self):
super(SUSE11OSUtil, self).__init__()
self.jit_enabled = True
self.dhclient_name = 'dhcpcd'
def set_hostname(self, hostname):
fileutil.write_file('/etc/HOSTNAME', hostname)
self._run_command_without_raising(["hostname", hostname], log_error=False)
def get_dhcp_pid(self):
return self._get_dhcp_pid(["pidof", self.dhclient_name])
def is_dhcp_enabled(self):
return True
def stop_dhcp_service(self):
self._run_command_without_raising(["/sbin/service", self.dhclient_name, "stop"], log_error=False)
def start_dhcp_service(self):
self._run_command_without_raising(["/sbin/service", self.dhclient_name, "start"], log_error=False)
def start_network(self):
self._run_command_without_raising(["/sbin/service", "network", "start"], log_error=False)
def restart_ssh_service(self):
self._run_command_without_raising(["/sbin/service", "sshd", "restart"], log_error=False)
def stop_agent_service(self):
self._run_command_without_raising(["/sbin/service", self.service_name, "stop"], log_error=False)
def start_agent_service(self):
self._run_command_without_raising(["/sbin/service", self.service_name, "start"], log_error=False)
def register_agent_service(self):
self._run_command_without_raising(["/sbin/insserv", self.service_name], log_error=False)
def unregister_agent_service(self):
self._run_command_without_raising(["/sbin/insserv", "-r", self.service_name], log_error=False)
class SUSEOSUtil(SUSE11OSUtil):
def __init__(self):
super(SUSEOSUtil, self).__init__()
self.dhclient_name = 'wickedd-dhcp4'
def publish_hostname(self, hostname):
self.set_dhcp_hostname(hostname)
self.set_hostname_record(hostname)
ifname = self.get_if_name()
# To push the hostname to the dhcp server we do not need to
# bring down the interface, just make the make ifup do whatever is
# necessary
self.ifup(ifname)
def ifup(self, ifname, retries=3, wait=5):
logger.info('Interface {0} bounce with ifup'.format(ifname))
        retry_limit = retries + 1
for attempt in range(1, retry_limit):
try:
shellutil.run_command(['ifup', ifname], log_error=True)
except Exception:
if attempt < retry_limit:
logger.info("retrying in {0} seconds".format(wait))
time.sleep(wait)
else:
logger.warn("exceeded restart retries")
@staticmethod
def get_systemd_unit_file_install_path():
return "/usr/lib/systemd/system"
def set_hostname(self, hostname):
self._run_command_without_raising(
["hostnamectl", "set-hostname", hostname], log_error=False
)
def set_dhcp_hostname(self, hostname):
dhcp_config_file_path = '/etc/sysconfig/network/dhcp'
hostname_send_setting = fileutil.get_line_startingwith(
'DHCLIENT_HOSTNAME_OPTION', dhcp_config_file_path
)
if hostname_send_setting:
value = hostname_send_setting.split('=')[-1]
if value == '"AUTO"' or value == '"{0}"'.format(hostname):
# Return if auto send host-name is configured or the current
# hostname is already set up to be sent
return
else:
# Do not use update_conf_file as it moves the setting to the
# end of the file separating it from the contextual comment
new_conf = []
dhcp_conf = fileutil.read_file(
dhcp_config_file_path).split('\n')
for entry in dhcp_conf:
if entry.startswith('DHCLIENT_HOSTNAME_OPTION'):
new_conf.append(
'DHCLIENT_HOSTNAME_OPTION="{0}"'. format(hostname)
)
continue
new_conf.append(entry)
fileutil.write_file(dhcp_config_file_path, '\n'.join(new_conf))
else:
fileutil.append_file(
dhcp_config_file_path,
'DHCLIENT_HOSTNAME_OPTION="{0}"'. format(hostname)
)
def stop_dhcp_service(self):
self._run_command_without_raising(["systemctl", "stop", "{}.service".format(self.dhclient_name)],
log_error=False)
def start_dhcp_service(self):
self._run_command_without_raising(["systemctl", "start", "{}.service".format(self.dhclient_name)],
log_error=False)
def start_network(self):
self._run_command_without_raising(["systemctl", "start", "network.service"], log_error=False)
def restart_ssh_service(self):
self._run_command_without_raising(["systemctl", "restart", "sshd.service"], log_error=False)
def stop_agent_service(self):
self._run_command_without_raising(["systemctl", "stop", "{}.service".format(self.service_name)],
log_error=False)
def start_agent_service(self):
self._run_command_without_raising(["systemctl", "start", "{}.service".format(self.service_name)],
log_error=False)
def register_agent_service(self):
self._run_command_without_raising(["systemctl", "enable", "{}.service".format(self.service_name)],
log_error=False)
def unregister_agent_service(self):
self._run_command_without_raising(["systemctl", "disable", "{}.service".format(self.service_name)],
log_error=False)
| apache-2.0 | 7,067,188,137,874,378,000 | 40.763636 | 107 | 0.612103 | false |
brynpickering/calliope | calliope/test/test_cli.py | 1 | 4812 | import os
import tempfile
import pytest # pylint: disable=unused-import
from click.testing import CliRunner
import calliope
from calliope import cli, AttrDict
_THIS_DIR = os.path.dirname(__file__)
_MODEL_NATIONAL = os.path.join(
_THIS_DIR,
'..', 'example_models', 'national_scale', 'model.yaml'
)
class TestCLI:
def test_new(self):
runner = CliRunner()
with tempfile.TemporaryDirectory() as tempdir:
new_path = os.path.join(tempdir, 'test')
result = runner.invoke(cli.new, [new_path])
assert result.exit_code == 0
# Assert that `model.yaml` in the target dir exists
assert os.path.isfile(os.path.join(tempdir, 'test', 'model.yaml'))
def test_run_from_yaml(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
result = runner.invoke(cli.run, [_MODEL_NATIONAL, '--save_netcdf=output.nc', '--save_plots=results.html'])
assert calliope._logger.level == 19
assert result.exit_code == 0
assert os.path.isfile(os.path.join(tempdir, 'output.nc'))
assert os.path.isfile(os.path.join(tempdir, 'results.html'))
def test_run_from_netcdf(self):
runner = CliRunner()
model = calliope.examples.national_scale()
with runner.isolated_filesystem() as tempdir:
model_file = os.path.join(tempdir, 'model.nc')
out_file = os.path.join(tempdir, 'output.nc')
model.to_netcdf(model_file)
result = runner.invoke(cli.run, [model_file, '--save_netcdf=output.nc'])
assert result.exit_code == 0
assert os.path.isfile(out_file)
def test_generate_runs_bash(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
result = runner.invoke(cli.generate_runs, [
_MODEL_NATIONAL, 'test.sh', '--kind=bash',
'--scenarios="run1;run2;run3;run4"'
])
assert result.exit_code == 0
assert os.path.isfile(os.path.join(tempdir, 'test.sh'))
def test_generate_runs_windows(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
result = runner.invoke(cli.generate_runs, [
_MODEL_NATIONAL, 'test.bat', '--kind=windows',
'--scenarios="run1;run2;run3;run4"'
])
assert result.exit_code == 0
assert os.path.isfile(os.path.join(tempdir, 'test.bat'))
def test_generate_runs_bsub(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
result = runner.invoke(cli.generate_runs, [
_MODEL_NATIONAL, 'test.sh', '--kind=bsub',
'--scenarios="run1;run2;run3;run4"',
'--cluster_mem=1G', '--cluster_time=100'
])
assert result.exit_code == 0
assert os.path.isfile(os.path.join(tempdir, 'test.sh'))
assert os.path.isfile(os.path.join(tempdir, 'test.sh.array.sh'))
def test_generate_runs_sbatch(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
result = runner.invoke(cli.generate_runs, [
_MODEL_NATIONAL, 'test.sh', '--kind=sbatch',
'--scenarios="run1;run2;run3;run4"',
'--cluster_mem=1G', '--cluster_time=100'
])
assert result.exit_code == 0
assert os.path.isfile(os.path.join(tempdir, 'test.sh'))
assert os.path.isfile(os.path.join(tempdir, 'test.sh.array.sh'))
def test_debug(self):
runner = CliRunner()
result = runner.invoke(cli.run, ['foo.yaml', '--debug'])
assert result.exit_code == 1
assert 'Traceback (most recent call last)' in result.output
result = runner.invoke(cli.run, ['foo.yaml'])
assert result.exit_code == 1
assert 'Traceback (most recent call last)' not in result.output
def test_generate_scenarios(self):
runner = CliRunner()
with runner.isolated_filesystem() as tempdir:
out_file = os.path.join(tempdir, 'scenarios.yaml')
result = runner.invoke(cli.generate_scenarios, [
_MODEL_NATIONAL, out_file,
'cold_fusion',
'run1;run2',
'group_share_cold_fusion_cap;group_share_cold_fusion_prod',
])
assert result.exit_code == 0
assert os.path.isfile(out_file)
scenarios = AttrDict.from_yaml(out_file)
assert 'scenario_0' not in scenarios['scenarios']
assert scenarios['scenarios']['scenario_1'] == [
'cold_fusion', 'run1', 'group_share_cold_fusion_cap'
]
| apache-2.0 | -8,558,425,467,268,571,000 | 38.442623 | 118 | 0.576891 | false |
gusDuarte/software-center-5.2 | softwarecenter/ui/gtk3/views/webkit.py | 1 | 6532 | # Copyright (C) 2010 Canonical
#
# Authors:
# Michael Vogt
# Gary Lasker
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
from gi.repository import WebKit as webkit
from gi.repository import Gtk
from gi.repository import Pango
import urlparse
from softwarecenter.i18n import get_language
from softwarecenter.paths import SOFTWARE_CENTER_CACHE_DIR
from softwarecenter.enums import WEBKIT_USER_AGENT_SUFFIX
from softwarecenter.utils import get_oem_channel_descriptor
from gi.repository import Soup
from gi.repository import WebKit
def global_webkit_init():
session = WebKit.get_default_session()
fname = os.path.join(SOFTWARE_CENTER_CACHE_DIR, "cookies.txt")
# clear cookies again in a new session, see #1018347 comment #4
# there is no "logout" support right now on any of the USC pages
try:
os.remove(fname)
except OSError:
pass
cookie_jar = Soup.CookieJarText.new(fname, False)
session.add_feature(cookie_jar)
global_webkit_init()
class SoftwareCenterWebView(webkit.WebView):
""" A customized version of the regular webview
It will:
- send Accept-Language headers from the users language
- disable plugings
- send a custom user-agent string
- auto-fill in id_email in login.ubuntu.com
"""
# javascript to auto fill email login on login.ubuntu.com
AUTO_FILL_SERVER = "https://login.ubuntu.com"
AUTO_FILL_EMAIL_JS = """
document.getElementById("id_email").value="%s";
document.getElementById("id_password").focus();
"""
def __init__(self):
# actual webkit init
webkit.WebView.__init__(self)
self.connect("resource-request-starting",
self._on_resource_request_starting)
self.connect("notify::load-status",
self._on_load_status_changed)
settings = self.get_settings()
settings.set_property("enable-plugins", False)
settings.set_property("user-agent", self._get_user_agent_string())
self._auto_fill_email = ""
def set_auto_insert_email(self, email):
self._auto_fill_email = email
def _get_user_agent_string(self):
settings = self.get_settings()
user_agent_string = settings.get_property("user-agent")
user_agent_string += " %s " % WEBKIT_USER_AGENT_SUFFIX
user_agent_string += get_oem_channel_descriptor()
return user_agent_string
def _on_resource_request_starting(self, view, frame, res, req, resp):
lang = get_language()
if lang:
message = req.get_message()
if message:
headers = message.get_property("request-headers")
headers.append("Accept-Language", lang)
#def _show_header(name, value, data):
# print name, value
#headers.foreach(_show_header, None)
def _maybe_auto_fill_in_username(self):
uri = self.get_uri()
if self._auto_fill_email and uri.startswith(self.AUTO_FILL_SERVER):
self.execute_script(
self.AUTO_FILL_EMAIL_JS % self._auto_fill_email)
# ensure that we have the keyboard focus
self.grab_focus()
def _on_load_status_changed(self, view, pspec):
prop = pspec.name
status = view.get_property(prop)
if status == webkit.LoadStatus.FINISHED:
self._maybe_auto_fill_in_username()
class ScrolledWebkitWindow(Gtk.VBox):
def __init__(self, include_progress_ui=False):
super(ScrolledWebkitWindow, self).__init__()
# get webkit
self.webkit = SoftwareCenterWebView()
# add progress UI if needed
if include_progress_ui:
self._add_progress_ui()
# create main webkitview
self.scroll = Gtk.ScrolledWindow()
self.scroll.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
self.pack_start(self.scroll, True, True, 0)
# embed the webkit view in a scrolled window
self.scroll.add(self.webkit)
self.show_all()
def _add_progress_ui(self):
# create toolbar box
self.header = Gtk.HBox()
# add spinner
self.spinner = Gtk.Spinner()
self.header.pack_start(self.spinner, False, False, 6)
# add a url to the toolbar
self.url = Gtk.Label()
self.url.set_ellipsize(Pango.EllipsizeMode.END)
self.url.set_alignment(0.0, 0.5)
self.url.set_text("")
self.header.pack_start(self.url, True, True, 0)
# frame around the box
self.frame = Gtk.Frame()
self.frame.set_border_width(3)
self.frame.add(self.header)
self.pack_start(self.frame, False, False, 6)
# connect the webkit stuff
self.webkit.connect("notify::uri", self._on_uri_changed)
self.webkit.connect("notify::load-status",
self._on_load_status_changed)
def _on_uri_changed(self, view, pspec):
prop = pspec.name
uri = view.get_property(prop)
        # the full uri is irrelevant for the purchase view, but it is
        # interesting to know what protocol/netloc is in use so that the
        # user can verify it's https on the sites they are expecting
scheme, netloc, path, params, query, frag = urlparse.urlparse(uri)
if scheme == "file" and netloc == "":
self.url.set_text("")
else:
self.url.set_text("%s://%s" % (scheme, netloc))
# start spinner when the uri changes
#self.spinner.start()
def _on_load_status_changed(self, view, pspec):
prop = pspec.name
status = view.get_property(prop)
#print status
if status == webkit.LoadStatus.PROVISIONAL:
self.spinner.start()
self.spinner.show()
if (status == webkit.LoadStatus.FINISHED or
status == webkit.LoadStatus.FAILED):
self.spinner.stop()
self.spinner.hide()
| lgpl-3.0 | -6,916,611,946,628,458,000 | 35.49162 | 79 | 0.640539 | false |
geodynamics/pylith | pylith/bc/AuxSubfieldsTimeDependent.py | 1 | 2804 | # ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University at Buffalo
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2021 University of California, Davis
#
# See LICENSE.md for license information.
#
# ----------------------------------------------------------------------
#
# @file pylith/materials/AuxSubfieldsTimeDependent.py
#
# @brief Python subfields container for isotropic, linear elasticity
# subfields.
from pylith.utils.PetscComponent import PetscComponent
class AuxSubfieldsTimeDependent(PetscComponent):
"""Python subfields container for time dependent boundary conditions.
f(x,t) = f_0(x) + \dot{f}_1(x)(t-t_1(x)) + f_2(x)a(t-t_2(x))
f_0(x): initial_amplitude
\dot{f}_1(x): rate_amplitude
t_1(x): rate_start
f_2(x): time_history_amplitude
t_2(x): time_history_start
FACTORY: auxiliary_subfields
"""
import pythia.pyre.inventory
from pylith.topology.Subfield import Subfield
initialAmplitude = pythia.pyre.inventory.facility("initial_amplitude", family="auxiliary_subfield", factory=Subfield)
initialAmplitude.meta['tip'] = "Initial amplitude, f_0(x), subfield."
rateAmplitude = pythia.pyre.inventory.facility("rate_amplitude", family="auxiliary_subfield", factory=Subfield)
rateAmplitude.meta['tip'] = "Rate amplitude, \dot{f}_1(x), subfield."
rateStart = pythia.pyre.inventory.facility("rate_start_time", family="auxiliary_subfield", factory=Subfield)
rateStart.meta['tip'] = "Rate starting time, t_1(x), subfield."
timeHistoryAmplitude = pythia.pyre.inventory.facility(
"time_history_amplitude", family="auxiliary_subfield", factory=Subfield)
timeHistoryAmplitude.meta['tip'] = "Time history amplitude, f_2(x). subfield"
timeHistoryStart = pythia.pyre.inventory.facility("time_history_start_time", family="auxiliary_subfield", factory=Subfield)
timeHistoryStart.meta['tip'] = "Time history starting time, t_2(s), subfield."
# PUBLIC METHODS /////////////////////////////////////////////////////
def __init__(self, name="auxfieldstimedependent"):
"""Constructor.
"""
PetscComponent.__init__(self, name, facility="auxiliary_fields")
return
# PRIVATE METHODS ////////////////////////////////////////////////////
def _configure(self):
PetscComponent._configure(self)
return
# FACTORIES ////////////////////////////////////////////////////////////
def auxiliary_subfields():
"""Factory associated with AuxSubfieldsAbsorbingDampers.
"""
return AuxSubfieldsTimeDependent()
# End of file
| mit | 8,185,914,683,688,222,000 | 33.617284 | 127 | 0.628031 | false |
bitmazk/django-download-stats | download_stats/tests/test_settings.py | 1 | 1778 | """Settings that need to be set in order to run the tests."""
import os
DEBUG = True
SITE_ID = 1
APP_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
ROOT_URLCONF = 'download_stats.tests.urls'
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(APP_ROOT, '../static')
MEDIA_ROOT = os.path.join(APP_ROOT, '../media')
STATICFILES_DIRS = (
os.path.join(APP_ROOT, 'static'),
)
TEMPLATE_DIRS = (
os.path.join(APP_ROOT, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
'download_stats.context_processors.download_url',
]
COVERAGE_REPORT_HTML_OUTPUT_DIR = os.path.join(
os.path.join(APP_ROOT, 'tests/coverage'))
COVERAGE_MODULE_EXCLUDES = [
'tests$', 'settings$', 'urls$', 'locale$',
'migrations', 'fixtures', 'admin$', 'django_extensions',
]
EXTERNAL_APPS = [
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.sitemaps',
'django.contrib.sites',
'django_nose',
]
INTERNAL_APPS = [
'download_stats',
]
INSTALLED_APPS = EXTERNAL_APPS + INTERNAL_APPS
COVERAGE_MODULE_EXCLUDES += EXTERNAL_APPS
SECRET_KEY = 'foobar'
| mit | -3,483,909,533,195,000,000 | 23.356164 | 61 | 0.664792 | false |
shipperizer/reimagined-garbanzo | server.py | 1 | 3198 | #!/usr/bin/env python
import logging
import sys
from os import environ
from time import sleep
import json
from uuid import uuid4
from celery import Celery
import pika
def init_logging():
logger = logging.getLogger('server')
logger.setLevel(logging.INFO)
sh = logging.StreamHandler()
formatter = logging.Formatter('[%(levelname)s] - [%(asctime)s] - %(message)s')
sh.setFormatter(formatter)
logger.addHandler(sh)
return logger
def init_celery():
redis = environ.get('REDIS_HOST', 'localhost')
app = Celery('server', broker='redis://{}:6379/2'.format(redis), backend='redis://{}:6379/3'.format(redis))
@app.task
def data():
connection = mq_connection()
channel = connection.channel()
channel.exchange_declare(exchange='logs', type='fanout')
message = json.dumps({'data': {'uuid': str(uuid4()), 'message': 'Payload incoming', 'type': 'data'}})
channel.basic_publish(exchange='logs', routing_key='', body=message)
logger.info("[x] Sent {0}".format(message))
return app
logger = init_logging()
celery = init_celery()
def mq_connection(blocking=True):
credentials = pika.PlainCredentials(environ.get('RABBITMQ_USER', 'rabbit'), environ.get('RABBITMQ_PASS', 'rabbit'))
ssl_opts = {'ca_certs': '/tmp/ca/cacert.pem', 'certfile': '/tmp/client/cert.pem', 'keyfile': '/tmp/client/key.pem'}
if blocking:
return pika.BlockingConnection(pika.ConnectionParameters(
host=environ.get('RABBITMQ_HOST', 'localhost'), port=5671, credentials=credentials, ssl=True, ssl_options=ssl_opts)
)
else:
raise Exception('Only blocking is supported right now')
def registrator():
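    # Consume 'registration' and 'heartbeat' messages that clients publish on the
    # 'registrator' fanout exchange, via an auto-named queue bound to it.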
logger.info(' [*] Waiting for clients. To exit press CTRL+C')
connection = mq_connection()
channel = connection.channel()
channel.exchange_declare(exchange='registrator', type='fanout')
result = channel.queue_declare()
queue_name = result.method.queue
channel.queue_bind(exchange='registrator', queue=queue_name)
def callback(ch, method, properties, body):
if json.loads(body).get('type') == 'registration':
logger.info('Registered client {}'.format(json.loads(body).get('client')))
elif json.loads(body).get('type') == 'heartbeat':
logger.info('Client {} alive'.format(json.loads(body).get('client')))
else:
logger.warning('Unknown message')
channel.basic_consume(callback, queue=queue_name, no_ack=True)
channel.start_consuming()
def run():
connection = mq_connection()
channel = connection.channel()
channel.exchange_declare(exchange='logs',
type='fanout')
for i in range(10000):
message = json.dumps({'message': "Here's the server, over!"})
channel.basic_publish(exchange='logs',
routing_key='',
body=message)
logger.info("[x] Sent {0} #{1}".format(message, i))
sleep(15)
connection.close()
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1:][0] == 'registrator':
registrator()
else:
run()
| mit | 5,642,650,292,563,329,000 | 29.169811 | 127 | 0.626329 | false |
amaork/libi2c | example/pylibi2c_tools.py | 1 | 2343 | # -*- coding: utf-8 -*-
import argparse
import pylibi2c
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# Required args
parser.add_argument('-b', '--bus', help='i2c bus, such as /dev/i2c-1', type=str, required=True)
parser.add_argument('-d', '--device', help='i2c device address, such as 0x56', type=str, required=True)
# Device option args
parser.add_argument('--iaddr', help='i2c internal address', type=str, default="0x0")
parser.add_argument('--delay', help='i2c r/w delay, unit is msec', type=int, default=1)
parser.add_argument('--iaddr_bytes', help='i2c internal address bytes', type=int, default=1)
parser.add_argument('--page_bytes', help='i2c per page max number of bytes', type=int, default=8)
# Read/write options
parser.add_argument('--data', help='write data', type=str)
parser.add_argument('--size', help='read data size', type=int)
parser.add_argument('--ioctl', help='using ioctl r/w i2c', type=bool, default=False)
args = vars(parser.parse_args())
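    # Example invocations (illustrative; the bus and device address are assumptions):
    #   python pylibi2c_tools.py -b /dev/i2c-1 -d 0x56 --size 16
    #   python pylibi2c_tools.py -b /dev/i2c-1 -d 0x56 --data "hello"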
try:
bus = args.get('bus')
device = int(args.get('device'), 16)
delay = args.get('delay')
iaddr = int(args.get('iaddr'), 16)
page_bytes = args.get('page_bytes')
iaddr_bytes = args.get('iaddr_bytes')
data = args.get('data')
size = args.get('size')
ioctl = args.get('ioctl')
if data is None and size is None:
            raise RuntimeError("either 'data' or 'size' must be specified: 'data' for write, 'size' for read")
# Create a i2c device
i2c = pylibi2c.I2CDevice(bus=bus, addr=device, page_bytes=page_bytes, iaddr_bytes=iaddr_bytes, delay=delay)
if data:
write_handle = i2c.ioctl_write if ioctl else i2c.write
ret = write_handle(iaddr, bytes(data.encode("ascii")))
print("Write: '{0:s}' to address: 0x{1:x}".format(data, iaddr))
print("Result:{}".format(ret))
else:
read_handle = i2c.ioctl_read if ioctl else i2c.read
data = read_handle(iaddr, size)
print("Read: {0:d} bytes data from address: 0x{1:x}".format(size, iaddr))
print("Result:'{}'".format(data.decode("ascii")))
except (TypeError, IOError, ValueError, RuntimeError) as err:
print("I2C R/W error:{}".format(err))
| mit | -5,291,170,457,087,977,000 | 40.105263 | 115 | 0.608621 | false |
crisisking/skybot | plugins/misc.py | 1 | 1609 | import socket
import subprocess
import time
from util import hook, http
socket.setdefaulttimeout(10) # global setting
def get_version():
try:
stdout = subprocess.check_output(['git', 'log', '--format=%h'])
except:
revnumber = 0
shorthash = '????'
else:
revs = stdout.splitlines()
revnumber = len(revs)
shorthash = revs[0]
shorthash = shorthash.decode('utf-8')
http.ua_skybot = 'Skybot/r%d %s (http://github.com/rmmh/skybot)' \
% (revnumber, shorthash)
return shorthash, revnumber
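# Note: get_version() derives the revision number from the total number of
# commits listed by `git log --format=%h` and takes revs[0] (the newest commit)
# as the short hash; it also sets http.ua_skybot as a side effect, which is why
# it is called once more at the bottom of this module.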
# autorejoin channels
@hook.event('KICK')
def rejoin(paraml, conn=None):
if paraml[1] == conn.nick:
if paraml[0].lower() in conn.channels:
conn.join(paraml[0])
# join channels when invited
@hook.event('INVITE')
def invite(paraml, conn=None):
conn.join(paraml[-1])
@hook.event('004')
def onjoin(paraml, conn=None):
# identify to services
nickserv_password = conn.nickserv_password
nickserv_name = conn.nickserv_name
nickserv_command = conn.nickserv_command
if nickserv_password:
conn.msg(nickserv_name, nickserv_command % nickserv_password)
time.sleep(1)
# set mode on self
mode = conn.user_mode
if mode:
conn.cmd('MODE', [conn.nick, mode])
conn.join_channels()
# set user-agent as a side effect of reading the version
get_version()
@hook.regex(r'^\x01VERSION\x01$')
def version(inp, notice=None):
ident, rev = get_version()
notice('\x01VERSION skybot %s r%d - http://github.com/rmmh/'
'skybot/\x01' % (ident, rev))
| unlicense | -14,688,733,195,193,010 | 22.661765 | 71 | 0.63207 | false |
hackaugusto/raiden | raiden/tests/unit/test_matrix_presence.py | 1 | 10106 | from typing import Callable, Dict, Optional, Union
from unittest.mock import Mock
import pytest
from eth_utils import to_canonical_address
from matrix_client.user import User
from raiden.network.transport.matrix import AddressReachability, UserPresence
from raiden.network.transport.matrix.utils import USERID_RE, UserAddressManager
from raiden.utils import Address
class DummyUser:
def __init__(self, user_id: str):
self.user_id = user_id
self.displayname = "dummy"
def __eq__(self, other):
return isinstance(other, (DummyUser, User)) and self.user_id == other.user_id
def __repr__(self):
return f"<{self.__class__.__name__} user_id={self.user_id}>"
def __hash__(self):
return hash(self.user_id)
class DummyMatrixClient:
def __init__(self, user_id: str):
self._presence_callback = None
self.user_id = user_id
def add_presence_listener(self, callback: Callable):
if self._presence_callback is not None:
raise RuntimeError("Callback has already been registered")
self._presence_callback = callback
# Test helper
def trigger_presence_callback(self, user_states: Dict[str, UserPresence]):
"""Trigger the registered presence listener with the given user presence"""
if self._presence_callback is None:
raise RuntimeError("No callback has been registered")
for user_id, presence in user_states.items():
event = {
"sender": user_id,
"type": "m.presence",
"content": {"presence": presence.value},
}
self._presence_callback(event)
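# The synthetic event built above mirrors the shape of a real Matrix "m.presence"
# event (sender, type and content.presence), which is what the UserAddressManager
# presence listener expects to receive from the matrix client.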
class NonValidatingUserAddressManager(UserAddressManager):
@staticmethod
def _validate_userid_signature(user: User) -> Optional[Address]:
match = USERID_RE.match(user.user_id)
if not match:
return None
return to_canonical_address(match.group(1))
def dummy_get_user(user_or_id: Union[str, User]) -> User:
if isinstance(user_or_id, User):
return user_or_id
return DummyUser(user_id=user_or_id)
ADDR1 = b"\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11"
ADDR2 = b'""""""""""""""""""""'
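# ADDR1 and ADDR2 are 20-byte raw addresses; b'"' is byte 0x22, so ADDR2 is the
# address 0x2222...22 and matches the USER2_* ids below, just as ADDR1 matches
# the 0x1111...11 USER1_* ids.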
INVALID_USER_ID = "bla:bla"
USER0_ID = "@0x0000000000000000000000000000000000000000:server1"
USER1_S1_ID = "@0x1111111111111111111111111111111111111111:server1"
USER1_S2_ID = "@0x1111111111111111111111111111111111111111:server2"
USER2_S1_ID = "@0x2222222222222222222222222222222222222222:server1"
USER2_S2_ID = "@0x2222222222222222222222222222222222222222:server2"
USER1_S1 = DummyUser(USER1_S1_ID)
USER1_S2 = DummyUser(USER1_S2_ID)
USER2_S1 = DummyUser(USER2_S1_ID)
USER2_S2 = DummyUser(USER2_S2_ID)
@pytest.fixture
def dummy_matrix_client():
return DummyMatrixClient(USER0_ID)
@pytest.fixture
def user_presence():
"""Storage `user_presence_callback` will update. Useful to assert over in tests."""
return {}
@pytest.fixture
def address_reachability():
"""Storage `address_reachability_callback` will update. Useful to assert over in tests."""
return {}
@pytest.fixture
def user_presence_callback(user_presence):
def _callback(user, presence):
user_presence[user] = presence
return _callback
@pytest.fixture
def address_reachability_callback(address_reachability):
def _callback(address, reachability):
address_reachability[address] = reachability
return _callback
@pytest.fixture
def user_addr_mgr(dummy_matrix_client, address_reachability_callback, user_presence_callback):
return NonValidatingUserAddressManager(
client=dummy_matrix_client,
get_user_callable=dummy_get_user,
address_reachability_changed_callback=address_reachability_callback,
user_presence_changed_callback=user_presence_callback,
stop_event=None,
)
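# The manager under test is wired to the dummy client and to callbacks that
# record into the `user_presence` and `address_reachability` dicts, so the tests
# below can assert both on the manager's own state and on the callback side
# effects.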
def test_user_addr_mgr_basics(
user_addr_mgr, dummy_matrix_client, address_reachability, user_presence
):
# This will do nothing since the address isn't known / whitelisted
dummy_matrix_client.trigger_presence_callback({USER1_S1_ID: UserPresence.ONLINE})
# This won't do anything either since the user has an invalid id format
dummy_matrix_client.trigger_presence_callback({INVALID_USER_ID: UserPresence.ONLINE})
# Nothing again, due to using our own user
dummy_matrix_client.trigger_presence_callback({USER0_ID: UserPresence.ONLINE})
assert user_addr_mgr.known_addresses == set()
assert not user_addr_mgr.is_address_known(ADDR1)
assert user_addr_mgr.get_userids_for_address(ADDR1) == set()
assert user_addr_mgr.get_address_reachability(ADDR1) is AddressReachability.UNKNOWN
assert len(address_reachability) == 0
assert len(user_presence) == 0
user_addr_mgr.add_address(ADDR1)
dummy_matrix_client.trigger_presence_callback({USER1_S1_ID: UserPresence.ONLINE})
assert user_addr_mgr.known_addresses == {ADDR1}
assert user_addr_mgr.is_address_known(ADDR1)
assert user_addr_mgr.get_userids_for_address(ADDR1) == {USER1_S1_ID}
assert user_addr_mgr.get_address_reachability(ADDR1) is AddressReachability.REACHABLE
assert len(address_reachability) == 1
assert address_reachability[ADDR1] is AddressReachability.REACHABLE
assert len(user_presence) == 1
print(user_presence)
assert user_presence[USER1_S1] is UserPresence.ONLINE
def test_user_addr_mgr_compound(
user_addr_mgr, dummy_matrix_client, address_reachability, user_presence
):
user_addr_mgr.add_address(ADDR1)
dummy_matrix_client.trigger_presence_callback({USER1_S1_ID: UserPresence.ONLINE})
assert user_addr_mgr.get_address_reachability(ADDR1) == AddressReachability.REACHABLE
assert address_reachability[ADDR1] is AddressReachability.REACHABLE
assert user_addr_mgr.get_userid_presence(USER1_S1_ID) is UserPresence.ONLINE
assert user_presence[USER1_S1] is UserPresence.ONLINE
dummy_matrix_client.trigger_presence_callback({USER1_S1_ID: UserPresence.OFFLINE})
assert user_addr_mgr.get_address_reachability(ADDR1) == AddressReachability.UNREACHABLE
assert address_reachability[ADDR1] is AddressReachability.UNREACHABLE
assert user_addr_mgr.get_userid_presence(USER1_S1_ID) is UserPresence.OFFLINE
assert user_addr_mgr.get_userid_presence(USER1_S2_ID) is UserPresence.UNKNOWN
assert user_presence[USER1_S1] is UserPresence.OFFLINE
# The duplicate `ONLINE` item is intentional to test both sides of a branch
for presence in [UserPresence.ONLINE, UserPresence.ONLINE, UserPresence.UNAVAILABLE]:
dummy_matrix_client.trigger_presence_callback({USER1_S2_ID: presence})
assert user_addr_mgr.get_address_reachability(ADDR1) == AddressReachability.REACHABLE
assert address_reachability[ADDR1] is AddressReachability.REACHABLE
assert user_addr_mgr.get_userid_presence(USER1_S1_ID) is UserPresence.OFFLINE
assert user_addr_mgr.get_userid_presence(USER1_S2_ID) is presence
assert user_presence[USER1_S1] is UserPresence.OFFLINE
assert user_presence[USER1_S2] is presence
dummy_matrix_client.trigger_presence_callback({USER1_S2_ID: UserPresence.OFFLINE})
assert user_addr_mgr.get_address_reachability(ADDR1) == AddressReachability.UNREACHABLE
assert address_reachability[ADDR1] is AddressReachability.UNREACHABLE
assert user_addr_mgr.get_userid_presence(USER2_S1_ID) is UserPresence.UNKNOWN
assert user_addr_mgr.get_userid_presence(USER2_S2_ID) is UserPresence.UNKNOWN
assert user_addr_mgr.get_address_reachability(ADDR2) is AddressReachability.UNKNOWN
def test_user_addr_mgr_force(user_addr_mgr, address_reachability, user_presence):
assert not user_addr_mgr.is_address_known(ADDR1)
assert user_addr_mgr.known_addresses == set()
user_addr_mgr.add_userid_for_address(ADDR1, USER1_S1_ID)
    # This only updates the internal user presence state, but calls no callbacks and also doesn't
    # update the address reachability
user_addr_mgr.force_user_presence(USER1_S1, UserPresence.ONLINE)
assert user_addr_mgr.get_userid_presence(USER1_S1_ID) is UserPresence.ONLINE
assert user_addr_mgr.get_address_reachability(ADDR1) is AddressReachability.UNKNOWN
assert len(user_presence) == 0
assert len(address_reachability) == 0
# Update address presence from previously forced user state
user_addr_mgr.refresh_address_presence(ADDR1)
assert user_addr_mgr.get_address_reachability(ADDR1) is AddressReachability.REACHABLE
assert len(user_presence) == 0
assert len(address_reachability) == 1
assert address_reachability[ADDR1] is AddressReachability.REACHABLE
def test_user_addr_mgr_fetch_presence(
user_addr_mgr, dummy_matrix_client, address_reachability, user_presence
):
dummy_matrix_client.get_user_presence = Mock(return_value=UserPresence.ONLINE.value)
user_addr_mgr.add_userid_for_address(ADDR1, USER1_S1_ID)
    # We have not provided or forced any explicit user presence,
# therefore the client will be queried
user_addr_mgr.refresh_address_presence(ADDR1)
assert user_addr_mgr.get_address_reachability(ADDR1) is AddressReachability.REACHABLE
assert len(user_presence) == 0
assert len(address_reachability) == 1
assert address_reachability[ADDR1] is AddressReachability.REACHABLE
assert dummy_matrix_client.get_user_presence.called_with(USER1_S1_ID)
def test_user_addr_mgr_fetch_misc(
user_addr_mgr, dummy_matrix_client, address_reachability, user_presence
):
user2s = {USER2_S1_ID, USER2_S2_ID}
user_addr_mgr.add_userids_for_address(ADDR2, user2s)
assert user_addr_mgr.is_address_known(ADDR2)
assert user_addr_mgr.get_userids_for_address(ADDR2) == user2s
# Set stop event, no more presence updates should be processed
user_addr_mgr._stop_event.set()
dummy_matrix_client.trigger_presence_callback({USER2_S2_ID: UserPresence.ONLINE})
assert len(user_presence) == 0
assert len(address_reachability) == 0
assert user_addr_mgr.get_userid_presence(USER2_S2_ID) is UserPresence.UNKNOWN
| mit | -5,618,101,307,904,100,000 | 39.103175 | 97 | 0.730556 | false |
tecan/xchat-rt | plugins/scripts/encryption/supybot-bitcoin-marketmonitor-master/GPG/plugin.py | 1 | 48123 | ###
# GPG - supybot plugin to authenticate users via GPG keys
# Copyright (C) 2011, Daniel Folkinshteyn <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###
from supybot import conf
from supybot import ircmsgs
from supybot import world
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import supybot.log
import sqlite3
import re
import os
import os.path
import errno
import hashlib
import time
import copy
import logging
import traceback
try:
gnupg = utils.python.universalImport('gnupg', 'local.gnupg')
except ImportError:
raise callbacks.Error, \
"You need the gnupg module installed to use this plugin."
try:
bitcoinsig = utils.python.universalImport('local.bitcoinsig')
except ImportError:
raise callbacks.Error, \
"You are possibly missing the ecdsa module."
domainRe = re.compile('^' + utils.web._domain + '$', re.I)
urlRe = re.compile('^' + utils.web._urlRe + '$', re.I)
class GPGDB(object):
def __init__(self, filename):
self.filename = filename
self.db = None
def _commit(self):
'''a commit wrapper to give it another few tries if it errors.
which sometimes happens due to:
OperationalError: database is locked'''
for i in xrange(10):
try:
self.db.commit()
except:
time.sleep(1)
def open(self):
if os.path.exists(self.filename):
db = sqlite3.connect(self.filename, timeout=10, check_same_thread = False)
db.text_factory = str
self.db = db
return
db = sqlite3.connect(self.filename, timeout=10, check_same_thread = False)
db.text_factory = str
self.db = db
cursor = self.db.cursor()
cursor.execute("""CREATE TABLE users (
id INTEGER PRIMARY KEY,
keyid TEXT,
fingerprint TEXT,
bitcoinaddress TEXT,
registered_at INTEGER,
nick TEXT)
""")
self._commit()
return
def close(self):
self.db.close()
def getByNick(self, nick):
cursor = self.db.cursor()
nick = nick.replace('|','||').replace('_','|_').replace('%','|%')
cursor.execute("""SELECT * FROM users WHERE nick LIKE ? ESCAPE '|'""", (nick,))
return cursor.fetchall()
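    # Note on getByNick(): the nick is matched with LIKE (case-insensitive for
    # ASCII in sqlite) and '|' is declared as the ESCAPE character, so literal
    # '_' and '%' characters in nicks are not treated as LIKE wildcards.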
def getByKey(self, keyid):
cursor = self.db.cursor()
cursor.execute("""SELECT * FROM users WHERE keyid = ?""", (keyid,))
return cursor.fetchall()
def getByAddr(self, address):
cursor = self.db.cursor()
cursor.execute("""SELECT * FROM users WHERE bitcoinaddress = ?""", (address,))
return cursor.fetchall()
def getCount(self):
cursor = self.db.cursor()
cursor.execute("""SELECT count(*) FROM users""")
return cursor.fetchall()
def register(self, keyid, fingerprint, bitcoinaddress, timestamp, nick):
cursor = self.db.cursor()
cursor.execute("""INSERT INTO users VALUES
(NULL, ?, ?, ?, ?, ?)""",
(keyid, fingerprint, bitcoinaddress, timestamp, nick))
self._commit()
def changenick(self, oldnick, newnick):
cursor = self.db.cursor()
cursor.execute("""UPDATE users SET nick = ? WHERE nick = ?""",
(newnick, oldnick,))
self._commit()
def changekey(self, nick, oldkeyid, newkeyid, newkeyfingerprint):
cursor = self.db.cursor()
cursor.execute("""UPDATE users SET keyid = ?, fingerprint = ?
WHERE (keyid = ? OR keyid IS NULL) and nick = ?""",
(newkeyid, newkeyfingerprint, oldkeyid, nick))
self._commit()
def changeaddress(self, nick, oldaddress, newaddress):
cursor = self.db.cursor()
cursor.execute("""UPDATE users SET bitcoinaddress = ?
WHERE nick = ? AND (bitcoinaddress = ? OR bitcoinaddress IS NULL)""",
(newaddress, nick, oldaddress,))
self._commit()
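# Rows returned by the GPGDB queries above are positional tuples in the CREATE
# TABLE order: [0] id, [1] keyid, [2] fingerprint, [3] bitcoinaddress,
# [4] registered_at, [5] nick. The command methods below index them by these
# positions.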
def getGPGKeyID(irc, msg, args, state, type='GPG key id. Please use the long form 16 digit key id'):
v = args[0]
m = re.search(r'^(0x)?([0-9A-Fa-f]{16})$', v)
if m is None:
state.errorInvalid(type, args[0])
return
state.args.append(m.group(2).upper())
del args[0]
def getUsername(irc, msg, args, state, type='username. Usernames must contain only printable ASCII characters with no whitespace'):
v = args[0]
m = re.search(r"^[!-~]+$", v)
if m is None:
state.errorInvalid(type, args[0])
return
state.args.append(m.group(0))
del args[0]
addConverter('keyid', getGPGKeyID)
addConverter('username', getUsername)
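# Registering these converters lets the wrap() specs used below (for example
# ['username', 'keyid']) validate and normalize command arguments before the
# command body runs.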
class GPG(callbacks.Plugin):
"""This plugin lets users create identities based on GPG keys,
and to authenticate via GPG signed messages."""
threaded = True
def __init__(self, irc):
self.__parent = super(GPG, self)
self.__parent.__init__(irc)
self.filename = conf.supybot.directories.data.dirize('GPG.db')
self.db = GPGDB(self.filename)
self.db.open()
try:
os.makedirs(conf.supybot.directories.data.dirize('otps'))
except OSError, e:
if e.errno != errno.EEXIST:
raise
self.gpg = gnupg.GPG(gnupghome = conf.supybot.directories.data.dirize('GPGkeyring'))
try: #restore auth dicts, if we're reloading the plugin
self.authed_users = utils.gpg_authed_users
utils.gpg_authed_users = {}
self.pending_auth = utils.gpg_pending_auth
utils.gpg_pending_auth = {}
except AttributeError:
self.pending_auth = {}
self.authed_users = {}
authlogfilename = os.path.join(conf.supybot.directories.log(), 'gpgauthlog.log')
authlog = logging.getLogger('GPGauth')
authlog.setLevel(-1)
if len(authlog.handlers) == 0:
handler = supybot.log.BetterFileHandler(authlogfilename)
handler.setLevel(-1)
handler.setFormatter(supybot.log.pluginFormatter)
authlog.addHandler(handler)
self.authlog = authlog
self.authlog.info("***** loading GPG plugin. *****")
def die(self):
self.__parent.die()
self.db.close()
# save auth dicts, in case we're reloading the plugin
utils.gpg_authed_users = self.authed_users
utils.gpg_pending_auth = self.pending_auth
self.authlog.info("***** quitting or unloading GPG plugin. *****")
def _recv_key(self, keyservers, keyid):
for ks in keyservers:
try:
result = self.gpg.recv_keys(ks, keyid)
if result.results[0].has_key('ok'):
return result.results[0]['fingerprint']
except:
continue
else:
raise Exception(result.stderr)
def _removeExpiredRequests(self):
pending_auth_copy = copy.deepcopy(self.pending_auth)
for hostmask,auth in pending_auth_copy.iteritems():
try:
if time.time() - auth['expiry'] > self.registryValue('authRequestTimeout'):
if auth['type'] == 'register' and not self.db.getByKey(auth['keyid']):
try:
self.gpg.delete_keys(auth['fingerprint'])
except:
pass
del self.pending_auth[hostmask]
except:
pass #let's keep going
def _checkURLWhitelist(self, url):
if not self.registryValue('pastebinWhitelist'):
return True
passed = False
for wu in self.registryValue('pastebinWhitelist'):
if wu.endswith('/') and url.find(wu) == 0:
passed = True
break
if (not wu.endswith('/')) and (url.find(wu + '/') == 0):
passed = True
break
return passed
def register(self, irc, msg, args, nick, keyid):
"""<nick> <keyid>
Register your GPG identity, associating GPG key <keyid> with <nick>.
<keyid> is a 16 digit key id, with or without the '0x' prefix.
We look on servers listed in 'plugins.GPG.keyservers' config.
You will be given a random passphrase to clearsign with your key, and
submit to the bot with the 'verify' command.
Your passphrase will expire in 10 minutes.
"""
self._removeExpiredRequests()
if self.db.getByNick(nick):
irc.error("Username already registered. Try a different username.")
return
if self.db.getByKey(keyid):
irc.error("This key already registered in the database.")
return
rs = irc.getCallback('RatingSystem')
rsdata = rs.db.get(nick)
if len(rsdata) != 0:
irc.error("This username is reserved for a legacy user. "
"Contact otc administrator to reclaim the account, if "
"you are an oldtimer since before key auth.")
return
keyservers = self.registryValue('keyservers').split(',')
try:
fingerprint = self._recv_key(keyservers, keyid)
except Exception as e:
irc.error("Could not retrieve your key from keyserver. "
"Either it isn't there, or it is invalid.")
self.log.info("GPG register: failed to retrieve key %s from keyservers %s. Details: %s" % \
(keyid, keyservers, e,))
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'keyid':keyid,
'nick':nick, 'expiry':time.time(),
'type':'register', 'fingerprint':fingerprint,
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("register request from hostmask %s for user %s, keyid %s." %\
(msg.prefix, nick, keyid, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(nick, msg.prefix, challenge,))
register = wrap(register, ['username', 'keyid'])
def eregister(self, irc, msg, args, nick, keyid):
"""<nick> <keyid>
Register your GPG identity, associating GPG key <keyid> with <nick>.
<keyid> is a 16 digit key id, with or without the '0x' prefix.
We look on servers listed in 'plugins.GPG.keyservers' config.
You will be given a link to a page which contains a one time password
encrypted with your key. Decrypt, and use the 'everify' command with it.
Your passphrase will expire in 10 minutes.
"""
self._removeExpiredRequests()
if self.db.getByNick(nick):
irc.error("Username already registered. Try a different username.")
return
if self.db.getByKey(keyid):
irc.error("This key already registered in the database.")
return
rs = irc.getCallback('RatingSystem')
rsdata = rs.db.get(nick)
if len(rsdata) != 0:
irc.error("This username is reserved for a legacy user. "
"Contact otc administrator to reclaim the account, if "
"you are an oldtimer since before key auth.")
return
keyservers = self.registryValue('keyservers').split(',')
try:
fingerprint = self._recv_key(keyservers, keyid)
except Exception as e:
irc.error("Could not retrieve your key from keyserver. "
"Either it isn't there, or it is invalid.")
self.log.info("GPG eregister: failed to retrieve key %s from keyservers %s. Details: %s" % \
(keyid, keyservers, e,))
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
try:
data = self.gpg.encrypt(challenge + '\n', keyid, always_trust=True)
if data.status != "encryption ok":
raise ValueError, "problem encrypting otp"
otpfn = conf.supybot.directories.data.dirize('otps/%s' % (keyid,))
f = open(otpfn, 'w')
f.write(data.data)
f.close()
except Exception, e:
irc.error("Problem creating encrypted OTP file.")
self.log.info("GPG eregister: key %s, otp creation %s, exception %s" % \
(keyid, data.stderr, e,))
return
request = {msg.prefix: {'keyid':keyid,
'nick':nick, 'expiry':time.time(),
'type':'eregister', 'fingerprint':fingerprint,
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("eregister request from hostmask %s for user %s, keyid %s." %\
(msg.prefix, nick, keyid,))
irc.reply("Request successful for user %s, hostmask %s. Get your encrypted OTP from %s" %\
(nick, msg.prefix, 'http://bitcoin-otc.com/otps/%s' % (keyid,),))
eregister = wrap(eregister, ['username', 'keyid'])
def bcregister(self, irc, msg, args, nick, bitcoinaddress):
"""<nick> <bitcoinaddress>
Register your identity, associating bitcoin address key <bitcoinaddress>
with <nick>.
<bitcoinaddress> should be a standard-type bitcoin address, starting with 1.
You will be given a random passphrase to sign with your address key, and
submit to the bot with the 'bcverify' command.
Your passphrase will expire in 10 minutes.
"""
self._removeExpiredRequests()
if self.db.getByNick(nick):
irc.error("Username already registered. Try a different username.")
return
if self.db.getByAddr(bitcoinaddress):
irc.error("This address is already registered in the database.")
return
rs = irc.getCallback('RatingSystem')
rsdata = rs.db.get(nick)
if len(rsdata) != 0:
irc.error("This username is reserved for a legacy user. "
"Contact otc administrator to reclaim the account, if "
"you are an oldtimer since before key auth.")
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'bitcoinaddress':bitcoinaddress,
'nick':nick, 'expiry':time.time(),
'type':'bcregister',
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("bcregister request from hostmask %s for user %s, bitcoinaddress %s." %\
(msg.prefix, nick, bitcoinaddress, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(nick, msg.prefix, challenge,))
bcregister = wrap(bcregister, ['username', 'something'])
def auth(self, irc, msg, args, nick):
"""<nick>
Initiate authentication for user <nick>.
You must have registered a GPG key with the bot for this to work.
You will be given a random passphrase to clearsign with your key, and
submit to the bot with the 'verify' command.
Your passphrase will expire within 10 minutes.
"""
self._removeExpiredRequests()
userdata = self.db.getByNick(nick)
if len(userdata) == 0:
irc.error("This nick is not registered. Please register.")
return
keyid = userdata[0][1]
fingerprint = userdata[0][2]
if keyid is None:
irc.error("You have not registered a GPG key. Try using bcauth instead, or register a GPG key first.")
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'nick':userdata[0][5],
'expiry':time.time(), 'keyid':keyid,
'type':'auth', 'challenge':challenge,
'fingerprint':fingerprint}}
self.pending_auth.update(request)
self.authlog.info("auth request from hostmask %s for user %s, keyid %s." %\
(msg.prefix, nick, keyid, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(nick, msg.prefix, challenge,))
auth = wrap(auth, ['username'])
def eauth(self, irc, msg, args, nick):
"""<nick>
Initiate authentication for user <nick>.
You must have registered a GPG key with the bot for this to work.
You will be given a link to a page which contains a one time password
encrypted with your key. Decrypt, and use the 'everify' command with it.
Your passphrase will expire in 10 minutes.
"""
self._removeExpiredRequests()
userdata = self.db.getByNick(nick)
if len(userdata) == 0:
irc.error("This nick is not registered. Please register.")
return
keyid = userdata[0][1]
fingerprint = userdata[0][2]
if keyid is None:
irc.error("You have not registered a GPG key. Try using bcauth instead, or register a GPG key first.")
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
try:
data = None
data = self.gpg.encrypt(challenge + '\n', keyid, always_trust=True)
if data.status != "encryption ok":
raise ValueError, "problem encrypting otp"
otpfn = conf.supybot.directories.data.dirize('otps/%s' % (keyid,))
f = open(otpfn, 'w')
f.write(data.data)
f.close()
except Exception, e:
irc.error("Problem creating encrypted OTP file.")
if 'stderr' in dir(data):
gpgerroroutput = data.stderr
else:
gpgerroroutput = None
self.log.info("GPG eauth: key %s, otp creation %s, exception %s" % \
(keyid, gpgerroroutput, e,))
return
request = {msg.prefix: {'nick':userdata[0][5],
'expiry':time.time(), 'keyid':keyid,
'type':'eauth', 'challenge':challenge,
'fingerprint':fingerprint}}
self.pending_auth.update(request)
self.authlog.info("eauth request from hostmask %s for user %s, keyid %s." %\
(msg.prefix, nick, keyid, ))
irc.reply("Request successful for user %s, hostmask %s. Get your encrypted OTP from %s" %\
(nick, msg.prefix, 'http://bitcoin-otc.com/otps/%s' % (keyid,),))
eauth = wrap(eauth, ['username'])
def bcauth(self, irc, msg, args, nick):
"""<nick>
Initiate authentication for user <nick>.
You must have registered with the bot with a bitcoin address for this to work.
You will be given a random passphrase to sign with your address, and
submit to the bot with the 'bcverify' command.
Your passphrase will expire within 10 minutes.
"""
self._removeExpiredRequests()
userdata = self.db.getByNick(nick)
if len(userdata) == 0:
irc.error("This nick is not registered. Please register.")
return
bitcoinaddress = userdata[0][3]
if bitcoinaddress is None:
irc.error("You have not registered a bitcoin address. Try using auth/eauth instead, or register an address first.")
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'nick':userdata[0][5],
'expiry':time.time(),
'type':'bcauth', 'challenge':challenge,
'bitcoinaddress':bitcoinaddress}}
self.pending_auth.update(request)
self.authlog.info("bcauth request from hostmask %s for user %s, bitcoinaddress %s." %\
(msg.prefix, nick, bitcoinaddress, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(nick, msg.prefix, challenge,))
bcauth = wrap(bcauth, ['username'])
def _unauth(self, irc, hostmask):
try:
logmsg = "Terminating session for hostmask %s, authenticated to user %s, keyid %s, bitcoinaddress %s" % (hostmask, self.authed_users[hostmask]['nick'], self.authed_users[hostmask]['keyid'],self.authed_users[hostmask]['bitcoinaddress'],)
self.authlog.info(logmsg)
del self.authed_users[hostmask]
if not world.testing:
irc.queueMsg(ircmsgs.privmsg("#bitcoin-otc-auth", logmsg))
return True
except KeyError:
return False
def unauth(self, irc, msg, args):
"""takes no arguments
Unauthenticate, 'logout' of your GPG session.
"""
if self._unauth(irc, msg.prefix):
irc.reply("Your GPG session has been terminated.")
else:
irc.error("You do not have a GPG session to terminate.")
unauth = wrap(unauth)
def _testPresenceInChannels(self, irc, nick):
"""Make sure authenticating user is present in channels being monitored."""
for channel in self.registryValue('channels').split(';'):
try:
if nick in irc.state.channels[channel].users:
return True
except KeyError:
pass
else:
return False
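    # The for/else above uses Python's loop-else semantics: the else branch
    # (return False) runs only when the loop finishes without returning, i.e.
    # the nick was not found in any of the monitored channels.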
def verify(self, irc, msg, args, url):
"""<url>
Verify the latest authentication request by providing a pastebin <url>
which contains the challenge string clearsigned with your GPG key
of record. If verified, you'll be authenticated for the duration of the bot's
or your IRC session on channel (whichever is shorter).
"""
self._removeExpiredRequests()
if not self._checkURLWhitelist(url):
irc.error("Only these pastebins are supported: %s" % \
self.registryValue('pastebinWhitelist'))
return
if not self._testPresenceInChannels(irc, msg.nick):
irc.error("In order to authenticate, you must be present in one "
"of the following channels: %s" % (self.registryValue('channels'),))
return
try:
authrequest = self.pending_auth[msg.prefix]
except KeyError:
irc.error("Could not find a pending authentication request from your hostmask. "
"Either it expired, or you changed hostmask, or you haven't made one.")
return
if authrequest['type'] not in ['register','auth','changekey']:
irc.error("No outstanding GPG signature-based request found.")
return
try:
rawdata = utils.web.getUrl(url)
m = re.search(r'-----BEGIN PGP SIGNED MESSAGE-----\r?\nHash.*?\n-----END PGP SIGNATURE-----', rawdata, re.S)
data = m.group(0)
except:
irc.error("Failed to retrieve clearsigned data. Check your url.")
return
if authrequest['challenge'] not in data:
irc.error("Challenge string not present in signed message.")
return
try:
vo = self.gpg.verify(data)
if not vo.valid:
irc.error("Signature verification failed.")
self.log.info("Signature verification from %s failed. Details: %s" % \
(msg.prefix, vo.stderr))
return
if vo.key_id != authrequest['keyid'] and vo.pubkey_fingerprint[-16:] != authrequest['keyid']:
irc.error("Signature is not made with the key on record for this nick.")
return
except:
irc.error("Authentication failed. Please try again.")
return
response = ""
if authrequest['type'] == 'register':
if self.db.getByNick(authrequest['nick']) or self.db.getByKey(authrequest['keyid']):
irc.error("Username or key already in the database.")
return
self.db.register(authrequest['keyid'], authrequest['fingerprint'], None,
time.time(), authrequest['nick'])
response = "Registration successful. "
elif authrequest['type'] == 'changekey':
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered key.")
return
if self.db.getByKey(authrequest['keyid']):
irc.error("This key id already registered. Try a different key.")
return
self.db.changekey(gpgauth['nick'], gpgauth['keyid'], authrequest['keyid'], authrequest['fingerprint'])
response = "Successfully changed key for user %s from %s to %s. " %\
(gpgauth['nick'], gpgauth['keyid'], authrequest['keyid'],)
userdata = self.db.getByNick(authrequest['nick'])
self.authed_users[msg.prefix] = {'timestamp':time.time(),
'keyid': authrequest['keyid'], 'nick':authrequest['nick'],
'bitcoinaddress':userdata[0][3],
'fingerprint':authrequest['fingerprint']}
del self.pending_auth[msg.prefix]
logmsg = "verify success from hostmask %s for user %s, keyid %s." %\
(msg.prefix, authrequest['nick'], authrequest['keyid'],) + response
self.authlog.info(logmsg)
if not world.testing:
irc.queueMsg(ircmsgs.privmsg("#bitcoin-otc-auth", logmsg))
irc.reply(response + "You are now authenticated for user '%s' with key %s" %\
(authrequest['nick'], authrequest['keyid']))
verify = wrap(verify, ['httpUrl'])
def everify(self, irc, msg, args, otp):
"""<otp>
Verify the latest encrypt-authentication request by providing your decrypted
one-time password.
If verified, you'll be authenticated for the duration of the bot's
or your IRC session on channel (whichever is shorter).
"""
self._removeExpiredRequests()
if not self._testPresenceInChannels(irc, msg.nick):
irc.error("In order to authenticate, you must be present in one "
"of the following channels: %s" % (self.registryValue('channels'),))
return
try:
authrequest = self.pending_auth[msg.prefix]
except KeyError:
irc.error("Could not find a pending authentication request from your hostmask. "
"Either it expired, or you changed hostmask, or you haven't made one.")
return
if authrequest['type'] not in ['eregister','eauth','echangekey']:
irc.error("No outstanding encryption-based request found.")
return
if authrequest['challenge'] != otp:
irc.error("Incorrect one-time password. Try again.")
return
response = ""
if authrequest['type'] == 'eregister':
if self.db.getByNick(authrequest['nick']) or self.db.getByKey(authrequest['keyid']):
irc.error("Username or key already in the database.")
return
self.db.register(authrequest['keyid'], authrequest['fingerprint'], None,
time.time(), authrequest['nick'])
response = "Registration successful. "
elif authrequest['type'] == 'echangekey':
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered key.")
return
if self.db.getByKey(authrequest['keyid']):
irc.error("This key id already registered. Try a different key.")
return
self.db.changekey(gpgauth['nick'], gpgauth['keyid'], authrequest['keyid'], authrequest['fingerprint'])
response = "Successfully changed key for user %s from %s to %s. " %\
(gpgauth['nick'], gpgauth['keyid'], authrequest['keyid'],)
userdata = self.db.getByNick(authrequest['nick'])
self.authed_users[msg.prefix] = {'timestamp':time.time(),
'keyid': authrequest['keyid'], 'nick':authrequest['nick'],
'bitcoinaddress':userdata[0][3],
'fingerprint':authrequest['fingerprint']}
del self.pending_auth[msg.prefix]
logmsg = "everify success from hostmask %s for user %s, keyid %s." %\
(msg.prefix, authrequest['nick'], authrequest['keyid'],) + response
self.authlog.info(logmsg)
if not world.testing:
irc.queueMsg(ircmsgs.privmsg("#bitcoin-otc-auth", logmsg))
irc.reply(response + "You are now authenticated for user %s with key %s" %\
(authrequest['nick'], authrequest['keyid']))
everify = wrap(everify, ['something'])
def bcverify(self, irc, msg, args, data):
"""<signedmessage>
Verify the latest authentication request by providing the <signedmessage>
which contains the challenge string signed with your bitcoin address
of record. If verified, you'll be authenticated for the duration of the bot's
or your IRC session on channel (whichever is shorter).
"""
self._removeExpiredRequests()
if not self._testPresenceInChannels(irc, msg.nick):
irc.error("In order to authenticate, you must be present in one "
"of the following channels: %s" % (self.registryValue('channels'),))
return
try:
authrequest = self.pending_auth[msg.prefix]
except KeyError:
irc.error("Could not find a pending authentication request from your hostmask. "
"Either it expired, or you changed hostmask, or you haven't made one.")
return
if authrequest['type'] not in ['bcregister','bcauth','bcchangekey']:
irc.error("No outstanding bitcoin-signature-based request found.")
return
try:
result = bitcoinsig.verify_message(authrequest['bitcoinaddress'], data, authrequest['challenge'])
if not result:
irc.error("Signature verification failed.")
return
except:
irc.error("Authentication failed. Please try again.")
self.log.info("bcverify traceback: \n%s" % (traceback.format_exc()))
return
response = ""
if authrequest['type'] == 'bcregister':
if self.db.getByNick(authrequest['nick']) or self.db.getByAddr(authrequest['bitcoinaddress']):
irc.error("Username or key already in the database.")
return
self.db.register(None, None, authrequest['bitcoinaddress'],
time.time(), authrequest['nick'])
response = "Registration successful. "
elif authrequest['type'] == 'bcchangekey':
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered address.")
return
if self.db.getByAddr(authrequest['bitcoinaddress']):
irc.error("This address is already registered. Try a different one.")
return
self.db.changeaddress(gpgauth['nick'], gpgauth['bitcoinaddress'], authrequest['bitcoinaddress'])
response = "Successfully changed address for user %s from %s to %s. " %\
(gpgauth['nick'], gpgauth['bitcoinaddress'], authrequest['bitcoinaddress'],)
userdata = self.db.getByNick(authrequest['nick'])
self.authed_users[msg.prefix] = {'timestamp':time.time(),
'keyid': userdata[0][1], 'nick':authrequest['nick'],
'bitcoinaddress':authrequest['bitcoinaddress'],
'fingerprint':userdata[0][2]}
del self.pending_auth[msg.prefix]
logmsg = "bcverify success from hostmask %s for user %s, address %s." %\
(msg.prefix, authrequest['nick'], authrequest['bitcoinaddress'],) + response
self.authlog.info(logmsg)
if not world.testing:
irc.queueMsg(ircmsgs.privmsg("#bitcoin-otc-auth", logmsg))
irc.reply(response + "You are now authenticated for user '%s' with address %s" %\
(authrequest['nick'], authrequest['bitcoinaddress']))
bcverify = wrap(bcverify, ['something'])
#~ def changenick(self, irc, msg, args, newnick):
#~ """<newnick>
#~ Changes your GPG registered username to <newnick>.
#~ You must be authenticated in order to use this command.
#~ """
#~ self._removeExpiredRequests()
#~ gpgauth = self._ident(msg.prefix)
#~ if gpgauth is None:
#~ irc.error("You must be authenticated in order to change your registered username.")
#~ return
#~ if self.db.getByNick(newnick):
#~ irc.error("Username already registered. Try a different username.")
#~ return
#~ oldnick = gpgauth['nick']
#~ self.db.changenick(oldnick, newnick)
#~ gpgauth['nick'] = newnick
#~ irc.reply("Successfully changed your nick from %s to %s." % (oldnick, newnick,))
#~ changenick = wrap(changenick, ['something'])
def changekey(self, irc, msg, args, keyid):
"""<keyid>
Changes your GPG registered key to <keyid>.
<keyid> is a 16 digit key id, with or without the '0x' prefix.
We look on servers listed in 'plugins.GPG.keyservers' config.
You will be given a random passphrase to clearsign with your key, and
submit to the bot with the 'verify' command.
You must be authenticated in order to use this command.
"""
self._removeExpiredRequests()
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered key.")
return
if self.db.getByKey(keyid):
irc.error("This key id already registered. Try a different key.")
return
keyservers = self.registryValue('keyservers').split(',')
try:
fingerprint = self._recv_key(keyservers, keyid)
except Exception as e:
irc.error("Could not retrieve your key from keyserver. "
"Either it isn't there, or it is invalid.")
self.log.info("GPG changekey: failed to retrieve key %s from keyservers %s. Details: %s" % \
(keyid, keyservers, e,))
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'keyid':keyid,
'nick':gpgauth['nick'], 'expiry':time.time(),
'type':'changekey', 'fingerprint':fingerprint,
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("changekey request from hostmask %s for user %s, oldkeyid %s, newkeyid %s." %\
(msg.prefix, gpgauth['nick'], gpgauth['keyid'], keyid, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(gpgauth['nick'], msg.prefix, challenge,))
changekey = wrap(changekey, ['keyid',])
def echangekey(self, irc, msg, args, keyid):
"""<keyid>
Changes your GPG registered key to <keyid>.
<keyid> is a 16 digit key id, with or without the '0x' prefix.
We look on servers listed in 'plugins.GPG.keyservers' config.
You will be given a link to a page which contains a one time password
encrypted with your key. Decrypt, and use the 'everify' command with it.
You must be authenticated in order to use this command.
"""
self._removeExpiredRequests()
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered key.")
return
if self.db.getByKey(keyid):
irc.error("This key id already registered. Try a different key.")
return
keyservers = self.registryValue('keyservers').split(',')
try:
fingerprint = self._recv_key(keyservers, keyid)
except Exception as e:
irc.error("Could not retrieve your key from keyserver. "
"Either it isn't there, or it is invalid.")
self.log.info("GPG echangekey: failed to retrieve key %s from keyservers %s. Details: %s" % \
(keyid, keyservers, e,))
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
try:
data = self.gpg.encrypt(challenge + '\n', keyid, always_trust=True)
if data.status != "encryption ok":
raise ValueError, "problem encrypting otp"
otpfn = conf.supybot.directories.data.dirize('otps/%s' % (keyid,))
f = open(otpfn, 'w')
f.write(data.data)
f.close()
except Exception, e:
irc.error("Problem creating encrypted OTP file.")
self.log.info("GPG echangekey: key %s, otp creation %s, exception %s" % \
(keyid, data.stderr, e,))
return
request = {msg.prefix: {'keyid':keyid,
'nick':gpgauth['nick'], 'expiry':time.time(),
'type':'echangekey', 'fingerprint':fingerprint,
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("echangekey request from hostmask %s for user %s, oldkeyid %s, newkeyid %s." %\
(msg.prefix, gpgauth['nick'], gpgauth['keyid'], keyid, ))
irc.reply("Request successful for user %s, hostmask %s. Get your encrypted OTP from %s" %\
(gpgauth['nick'], msg.prefix, 'http://bitcoin-otc.com/otps/%s' % (keyid,),))
echangekey = wrap(echangekey, ['keyid',])
def changeaddress(self, irc, msg, args, bitcoinaddress):
"""<bitcoinaddress>
Changes your registered address to <bitcoinaddress>.
You will be given a random passphrase to sign with your new address, and
submit to the bot with the 'bcverify' command.
You must be authenticated in order to use this command.
"""
self._removeExpiredRequests()
gpgauth = self._ident(msg.prefix)
if gpgauth is None:
irc.error("You must be authenticated in order to change your registered key.")
return
if self.db.getByAddr(bitcoinaddress):
irc.error("This address is already registered. Try a different one.")
return
challenge = "freenode:#bitcoin-otc:" + hashlib.sha256(os.urandom(128)).hexdigest()[:-8]
request = {msg.prefix: {'bitcoinaddress':bitcoinaddress,
'nick':gpgauth['nick'], 'expiry':time.time(),
'type':'bcchangekey',
'challenge':challenge}}
self.pending_auth.update(request)
self.authlog.info("changeaddress request from hostmask %s for user %s, oldaddress %s, newaddress %s." %\
(msg.prefix, gpgauth['nick'], gpgauth['bitcoinaddress'], bitcoinaddress, ))
irc.reply("Request successful for user %s, hostmask %s. Your challenge string is: %s" %\
(gpgauth['nick'], msg.prefix, challenge,))
changeaddress = wrap(changeaddress, ['something'])
def ident(self, irc, msg, args, nick):
"""[<nick>]
Returns details about your GPG identity with the bot, or notes the
absence thereof.
If optional <nick> is given, tells you about <nick> instead.
"""
if nick is not None:
try:
hostmask = irc.state.nickToHostmask(nick)
except KeyError:
irc.error("I am not seeing this user on IRC. "
"If you want information about a registered gpg user, "
"try the 'gpg info' command instead.")
return
else:
hostmask = msg.prefix
nick = msg.nick
response = "Nick '%s', with hostmask '%s', is " % (nick, hostmask,)
try:
authinfo = self.authed_users[hostmask]
if irc.nested:
response = authinfo['nick']
else:
if authinfo['nick'].upper() != nick.upper():
response = "\x02CAUTION: irc nick differs from otc registered nick.\x02 " + response
response += ("identified as user '%s', with GPG key id %s, " + \
"key fingerprint %s, and bitcoin address %s") % (authinfo['nick'],
authinfo['keyid'],
authinfo['fingerprint'],
authinfo['bitcoinaddress'])
except KeyError:
if irc.nested:
response = ""
else:
response += "not identified."
irc.reply(response)
ident = wrap(ident, [optional('something')])
def _info(self, nick):
"""Return info on registered user. For use from other plugins."""
result = self.db.getByNick(nick)
if len(result) == 0:
return None
else:
return result[0]
def info(self, irc, msg, args, optlist, nick):
"""[--key|--address] <nick>
Returns the registration details of registered user <nick>.
If '--key' option is given, interpret <nick> as a GPG key ID.
"""
if 'key' in dict(optlist).keys():
result = self.db.getByKey(nick)
elif 'address' in dict(optlist).keys():
result = self.db.getByAddr(nick)
else:
result = self.db.getByNick(nick)
if len(result) == 0:
irc.reply("No such user registered.")
return
result = result[0]
authhost = self._identByNick(result[5])
if authhost is not None:
authstatus = " Currently authenticated from hostmask %s ." % (authhost,)
if authhost.split('!')[0].upper() != result[5].upper():
authstatus += " CAUTION: irc nick differs from otc registered nick."
else:
authstatus = " Currently not authenticated."
irc.reply("User '%s', with keyid %s, fingerprint %s, and bitcoin address %s, registered on %s. http://b-otc.com/vg?nick=%s .%s" %\
(result[5], result[1], result[2], result[3], time.ctime(result[4]), utils.web.urlquote(result[5]), authstatus))
info = wrap(info, [getopts({'key': '','address':'',}),'something'])
def stats(self, irc, msg, args):
"""takes no arguments
Gives the statistics on number of registered users,
number of authenticated users, number of pending authentications.
"""
self._removeExpiredRequests()
try:
regusers = self.db.getCount()[0][0]
authedusers = len(self.authed_users)
pendingauths = len(self.pending_auth)
except:
irc.error("Problem retrieving statistics. Try again later.")
return
irc.reply("There are %s registered users, %s currently authenticated. "
"There are also %s pending authentication requests." % \
(regusers, authedusers, pendingauths,))
stats = wrap(stats)
def _ident(self, hostmask):
"""Use to check identity status from other plugins."""
return self.authed_users.get(hostmask, None)
def _identByNick(self, nick):
for k,v in self.authed_users.iteritems():
if v['nick'].lower() == nick.lower():
return k
return None
def doQuit(self, irc, msg):
"""Kill the authentication when user quits."""
if irc.network == self.registryValue('network'):
self._unauth(irc, msg.prefix)
def doPart(self, irc, msg):
"""Kill the authentication when user parts all channels."""
channels = self.registryValue('channels').split(';')
if msg.args[0] in channels and irc.network == self.registryValue('network'):
for channel in channels:
try:
if msg.nick in irc.state.channels[channel].users:
break
except KeyError:
pass #oh well, we're not in one of our monitored channels
else:
if ircutils.strEqual(msg.nick, irc.nick): #we're parting
self.authlog.info("***** clearing authed_users due to self-part. *****")
self.authed_users.clear()
else:
self._unauth(irc, msg.prefix)
def doError(self, irc, msg):
"""Reset the auth dict when bot gets disconnected."""
if irc.network == self.registryValue('network'):
self.authlog.info("***** clearing authed_users due to network error. *****")
self.authed_users.clear()
def doKick(self, irc, msg):
"""Kill the authentication when user gets kicked."""
channels = self.registryValue('channels').split(';')
if msg.args[0] in channels and irc.network == self.registryValue('network'):
(channel, nick) = msg.args[:2]
if ircutils.toLower(irc.nick) in ircutils.toLower(nick):
self.authlog.info("***** clearing authed_users due to self-kick. *****")
self.authed_users.clear()
else:
try:
hostmask = irc.state.nickToHostmask(nick)
self._unauth(irc, hostmask)
except KeyError:
pass
def doNick(self, irc, msg):
if msg.prefix in self.authed_users.keys():
newprefix = msg.args[0] + '!' + msg.prefix.split('!',1)[1]
logmsg = "Attaching authentication for hostmask %s to new hostmask %s due to nick change." %\
(msg.prefix, newprefix,)
self.authlog.info(logmsg)
if not world.testing:
irc.queueMsg(ircmsgs.privmsg("#bitcoin-otc-auth", logmsg))
self.authed_users[newprefix] = self.authed_users[msg.prefix]
self._unauth(irc, msg.prefix)
Class = GPG
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| gpl-2.0 | 1,254,094,022,718,385,700 | 45.227666 | 248 | 0.573655 | false |
MCLConsortium/mcl-site | src/jpl.mcl.site.policy/setup.py | 1 | 2947 | # encoding: utf-8
from setuptools import setup, find_packages
import os.path
# Package data
# ------------
_name = 'jpl.mcl.site.policy'
_version = '1.0.2'
_description = "Policy package for MCL"
_url = 'http://mcl.jpl.nasa.gov/software/' + _name
_downloadURL = 'http://oodt.jpl.nasa.gov/dist/mcl/' + _name + '-' + _version + '.tar.gz'
_author = 'Sean Kelly'
_authorEmail = '[email protected]'
_maintainer = 'Sean Kelly'
_maintainerEmail = '[email protected]'
_license = 'Proprietary'
_namespaces = ['jpl', 'jpl.mcl', 'jpl.mcl.site']
_zipSafe = False
_keywords = 'plone zope site mcl policy dependency'
_testSuite = 'jpl.mcl.site.policy.tests.test_suite'
_extras = {
'test': ['plone.app.testing'],
}
_entryPoints = {
'z3c.autoinclude.plugin': ['target=plone'],
}
_requirements = [
'setuptools',
'collective.captchacontactinfo',
'collective.recaptcha',
'eea.faceted.vocabularies',
'eea.facetednavigation',
'five.formlib == 1.0.4',
'jpl.mcl.site.knowledge',
'jpl.mcl.site.sciencedata',
'plone.api',
'plone.app.collection',
'plone.app.form',
'plone.app.imaging',
'plone.app.ldap',
'plone.app.upgrade',
'plone.formwidget.recaptcha',
'Products.Archetypes',
'Products.ATContentTypes',
'Products.CMFPlacefulWorkflow',
'Products.CMFPlone',
'Products.PloneFormGen',
'yafowil.plone',
'z3c.jbot',
]
_classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Plone',
'License :: Other/Proprietary License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
]
# Setup Metadata
# --------------
#
# Nothing below here should require updating.
def _read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
_header = '*' * len(_name) + '\n' + _name + '\n' + '*' * len(_name)
_longDescription = _header + '\n\n' + _read('README.rst') + '\n\n' + _read('docs', 'INSTALL.txt') + '\n\n' \
+ _read('docs', 'HISTORY.txt')
open('doc.txt', 'w').write(_longDescription)
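# Side effect: running this setup.py also writes the assembled long description
# (header + README.rst + docs/INSTALL.txt + docs/HISTORY.txt) to doc.txt.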
setup(
author=_author,
author_email=_authorEmail,
classifiers=_classifiers,
description=_description,
download_url=_downloadURL,
entry_points=_entryPoints,
extras_require=_extras,
include_package_data=True,
install_requires=_requirements,
keywords=_keywords,
license=_license,
long_description=_longDescription,
maintainer=_maintainer,
maintainer_email=_maintainerEmail,
name=_name,
namespace_packages=_namespaces,
packages=find_packages('src', exclude=['ez_setup', 'bootstrap']),
package_dir={'': 'src'},
test_suite=_testSuite,
url=_url,
version=_version,
zip_safe=_zipSafe,
)
| apache-2.0 | 366,627,224,946,995,260 | 28.47 | 108 | 0.620631 | false |
glenntanner3/devedeng | src/devedeng/avconv.py | 1 | 16737 | #!/usr/bin/env python3
# Copyright 2014 (C) Raster Software Vigo (Sergio Costas)
#
# This file is part of DeVeDe-NG
#
# DeVeDe-NG is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# DeVeDe-NG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import subprocess
import os
import devedeng.configuration_data
import devedeng.executor
import devedeng.mux_dvd_menu
class avconv(devedeng.executor.executor):
supports_analize = False
supports_play = False
supports_convert = True
supports_menu = True
supports_mkiso = False
supports_burn = False
display_name = "AVCONV"
disc_types = []
@staticmethod
def check_is_installed():
try:
handle = subprocess.Popen(["avconv","-codecs"], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
(stdout, stderr) = handle.communicate()
if 0==handle.wait():
mp2 = False
mp3 = False
ac3 = False
mpeg1 = False
mpeg2 = False
divx = False
h264 = False
for line in stdout.decode("latin-1").split("\n"):
parts = line.strip().split(" ")
if len(parts) < 2:
continue
if len(parts[0]) != 6:
continue
capabilities = parts[0]
codec = parts[1]
if capabilities[1] != 'E':
continue
if (codec == "mpeg1video"):
mpeg1 = True
continue
if (codec == "mpeg2video"):
mpeg2 = True
continue
if (codec == "mp2"):
mp2 = True
continue
if (codec == "mp3"):
mp3 = True
continue
if (codec == "ac3"):
ac3 = True
continue
if (codec == "h264") or (codec == "H264"):
h264 = True
continue
if (codec == "mpeg4"):
divx = True
continue
if (mpeg1 and mp2):
devedeng.avconv.avconv.disc_types.append("vcd")
if (mpeg2 and mp2):
devedeng.avconv.avconv.disc_types.append("svcd")
devedeng.avconv.avconv.disc_types.append("cvd")
if (mpeg2 and mp2 and ac3):
devedeng.avconv.avconv.disc_types.append("dvd")
if (divx and mp3):
devedeng.avconv.avconv.disc_types.append("divx")
if (h264 and mp3):
devedeng.avconv.avconv.disc_types.append("mkv")
return True
else:
return False
except:
return False
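    # Codec availability maps to disc types as follows (per the checks above):
    # mpeg1+mp2 -> vcd; mpeg2+mp2 -> svcd and cvd; mpeg2+mp2+ac3 -> dvd;
    # mpeg4(divx)+mp3 -> divx; h264+mp3 -> mkv.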
def __init__(self):
devedeng.executor.executor.__init__(self)
self.config = devedeng.configuration_data.configuration.get_config()
def convert_file(self,file_project,output_file,video_length,pass2 = False):
if file_project.two_pass_encoding:
if pass2:
self.text = _("Converting %(X)s (pass 2)") % {"X" : file_project.title_name}
else:
self.text = _("Converting %(X)s (pass 1)") % {"X" : file_project.title_name}
                # Prepare the conversion process for the second pass
tmp = devedeng.avconv.avconv()
tmp.convert_file(file_project, output_file, video_length, True)
                # it depends on this process
tmp.add_dependency(self)
# add it as a child process of this one
self.add_child_process(tmp)
else:
self.text = _("Converting %(X)s") % {"X" : file_project.title_name}
if (pass2 == False) and (file_project.two_pass_encoding == True):
            # this is the first pass of a 2-pass encoding
second_pass = False
else:
            # second_pass is TRUE in the second pass of a 2-pass encoding, and also when not doing 2-pass encoding
            # It is used to skip steps that are unnecessary during the first pass but needed on the second pass, or when not using 2-pass encoding
second_pass = True
if (video_length == 0):
self.final_length = file_project.original_length
else:
self.final_length = video_length
self.command_var=[]
self.command_var.append("avconv")
self.command_var.append("-i")
self.command_var.append(file_project.file_name)
if (file_project.volume!=100) and second_pass:
self.command_var.append("-vol")
            self.command_var.append(str(int((256*file_project.volume)/100))) # -vol expects an integer value
if (file_project.audio_delay != 0.0) and (file_project.copy_sound==False) and (file_project.no_reencode_audio_video==False) and second_pass:
self.command_var.append("-itsoffset")
self.command_var.append(str(file_project.audio_delay))
self.command_var.append("-i")
self.command_var.append(file_project.file_name)
self.command_var.append("-map")
self.command_var.append("1:"+str(file_project.video_list[0]))
if (not file_project.copy_sound) and (not file_project.no_reencode_audio_video):
for l in file_project.audio_list:
self.command_var.append("-map")
self.command_var.append("0:"+str(l))
if (file_project.no_reencode_audio_video==False) and second_pass:
cmd_line=""
if file_project.deinterlace=="deinterlace_yadif":
cmd_line+="yadif"
vflip=False
hflip=False
if (file_project.rotation=="rotation_90"):
if (cmd_line!=""):
cmd_line+=",fifo,"
cmd_line+="transpose=1"
elif (file_project.rotation=="rotation_270"):
if (cmd_line!=""):
cmd_line+=",fifo,"
cmd_line+="transpose=2"
elif (file_project.rotation=="rotation_180"):
vflip=True
hflip=True
if (file_project.mirror_vertical):
vflip=not vflip
if (file_project.mirror_horizontal):
hflip=not hflip
if (vflip):
if (cmd_line!=""):
cmd_line+=",fifo,"
cmd_line+="vflip"
if (hflip):
if (cmd_line!=""):
cmd_line+=",fifo,"
cmd_line+="hflip"
if (file_project.width_midle != file_project.original_width) or (file_project.height_midle != file_project.original_height):
if (cmd_line!=""):
cmd_line+=",fifo,"
x = int((file_project.width_midle - file_project.original_width) /2)
y = int((file_project.height_midle - file_project.original_height) /2)
if (x > 0) or (y > 0):
cmd_line+="pad="+str(file_project.width_midle)+":"+str(file_project.height_midle)+":"+str(x)+":"+str(y)+":0x000000"
else:
cmd_line+="crop="+str(file_project.width_midle)+":"+str(file_project.height_midle)+":"+str(x)+":"+str(y)
if (file_project.width_final != file_project.width_midle) or (file_project.height_final != file_project.height_midle):
if (cmd_line!=""):
cmd_line+=",fifo,"
cmd_line+="scale=w="+str(file_project.width_final)+":h="+str(file_project.height_final)
if cmd_line!="":
self.command_var.append("-vf")
self.command_var.append(cmd_line)
self.command_var.append("-y")
vcd=False
if (self.config.disc_type == "divx"):
self.command_var.append("-vcodec")
self.command_var.append("mpeg4")
self.command_var.append("-acodec")
self.command_var.append("libmp3lame")
self.command_var.append("-f")
self.command_var.append("avi")
elif (self.config.disc_type == "mkv"):
self.command_var.append("-vcodec")
self.command_var.append("h264")
self.command_var.append("-acodec")
self.command_var.append("libmp3lame")
self.command_var.append("-f")
self.command_var.append("matroska")
else:
self.command_var.append("-target")
if (self.config.disc_type=="dvd"):
if not file_project.format_pal:
self.command_var.append("ntsc-dvd")
elif (file_project.original_fps==24):
self.command_var.append("film-dvd")
else:
self.command_var.append("pal-dvd")
if (not file_project.copy_sound):
if file_project.sound5_1:
self.command_var.append("-acodec")
self.command_var.append("ac3")
elif (self.config.disc_type=="vcd"):
vcd=True
if not file_project.format_pal:
self.command_var.append("ntsc-vcd")
else:
self.command_var.append("pal-vcd")
elif (self.config.disc_type=="svcd"):
if not file_project.format_pal:
self.command_var.append("ntsc-svcd")
else:
self.command_var.append("pal-svcd")
elif (self.config.disc_type=="cvd"):
if not file_project.format_pal:
self.command_var.append("ntsc-svcd")
else:
self.command_var.append("pal-svcd")
if (not file_project.no_reencode_audio_video):
self.command_var.append("-sn") # no subtitles
if file_project.copy_sound or file_project.no_reencode_audio_video:
self.command_var.append("-acodec")
self.command_var.append("copy")
if file_project.no_reencode_audio_video:
self.command_var.append("-vcodec")
self.command_var.append("copy")
if (vcd==False):
if not file_project.format_pal:
if (file_project.original_fps==24) and ((self.config.disc_type=="dvd")):
keyintv=15
else:
keyintv=18
else:
keyintv=15
if not file_project.gop12:
self.command_var.append("-g")
self.command_var.append(str(keyintv))
if (self.config.disc_type=="divx") or (self.config.disc_type=="mkv"):
self.command_var.append("-g")
self.command_var.append("300")
elif file_project.gop12 and (file_project.no_reencode_audio_video==False):
self.command_var.append("-g")
self.command_var.append("12")
self.command_var.append("-bf")
self.command_var.append("2")
self.command_var.append("-strict")
self.command_var.append("1")
if video_length != 0:
self.command_var.append("-t")
self.command_var.append(str(video_length))
self.command_var.append("-ac")
if (file_project.sound5_1) and ((self.config.disc_type=="dvd") or (self.config.disc_type=="divx") or (self.config.disc_type=="mkv")):
self.command_var.append("6")
else:
self.command_var.append("2")
self.command_var.append("-aspect")
self.command_var.append(str(file_project.aspect_ratio_final))
if self.config.disc_type=="divx":
self.command_var.append("-vtag")
self.command_var.append("DX50")
if (file_project.deinterlace == "deinterlace_ffmpeg") and (file_project.no_reencode_audio_video==False) and second_pass:
self.command_var.append("-deinterlace")
if (file_project.no_reencode_audio_video==False) and (vcd==False) and second_pass:
self.command_var.append("-s")
self.command_var.append(str(file_project.width_final)+"x"+str(file_project.height_final))
if second_pass:
self.command_var.append("-trellis")
self.command_var.append("1")
self.command_var.append("-mbd")
self.command_var.append("2")
else:
self.command_var.append("-trellis")
self.command_var.append("0")
self.command_var.append("-mbd")
self.command_var.append("0")
if (vcd == False) and (file_project.no_reencode_audio_video == False):
self.command_var.append("-b:a")
self.command_var.append(str(file_project.audio_rate_final)+"k")
self.command_var.append("-b:v")
self.command_var.append(str(file_project.video_rate_final)+"k")
if file_project.two_pass_encoding == True:
self.command_var.append("-passlogfile")
self.command_var.append(output_file)
self.command_var.append("-pass")
if pass2:
self.command_var.append("2")
else:
self.command_var.append("1")
self.command_var.append(output_file)
def create_menu_mpeg(self,n_page,background_music,sound_length,pal,video_rate, audio_rate,output_path, use_mp2):
self.n_page = n_page
self.final_length = float(sound_length)
self.text = _("Creating menu %(X)d") % {"X": self.n_page}
self.command_var=[]
self.command_var.append("avconv")
self.command_var.append("-loop")
self.command_var.append("1")
self.command_var.append("-f")
self.command_var.append("image2")
self.command_var.append("-i")
self.command_var.append(os.path.join(output_path,"menu_"+str(n_page)+"_bg.png"))
self.command_var.append("-i")
self.command_var.append(background_music)
self.command_var.append("-y")
self.command_var.append("-target")
if pal:
self.command_var.append("pal-dvd")
else:
self.command_var.append("ntsc-dvd")
self.command_var.append("-acodec")
if (use_mp2):
self.command_var.append("mp2")
if (audio_rate > 384):
audio_rate = 384 #max bitrate for mp2
else:
self.command_var.append("ac3")
self.command_var.append("-s")
if pal:
self.command_var.append("720x576")
else:
self.command_var.append("720x480")
self.command_var.append("-g")
self.command_var.append("12")
self.command_var.append("-b:v")
self.command_var.append(str(video_rate)+"k")
self.command_var.append("-b:a")
self.command_var.append(str(audio_rate)+"k")
self.command_var.append("-aspect")
self.command_var.append("4:3")
self.command_var.append("-t")
self.command_var.append(str(1+sound_length))
movie_path = os.path.join(output_path,"menu_"+str(n_page)+".mpg")
self.command_var.append(movie_path)
muxer = devedeng.mux_dvd_menu.mux_dvd_menu()
final_path = muxer.create_mpg(n_page,output_path,movie_path)
        # the muxer process depends on the converter process
muxer.add_dependency(self)
self.add_child_process(muxer)
return (final_path)
def process_stdout(self,data):
return
def process_stderr(self,data):
pos = data[0].find("time=")
if (pos != -1):
pos+=5
pos2 = data[0].find(" ",pos)
if (pos2 != -1):
parts = data[0][pos:pos2].split(":")
t = 0.0
for e in parts:
t *= 60.0
t += float(e)
t /= self.final_length
self.progress_bar[1].set_fraction(t)
self.progress_bar[1].set_text("%.1f%%" % (100.0 * t)) | gpl-3.0 | 3,150,726,217,531,143,000 | 38.016317 | 154 | 0.530561 | false |
guildai/guild | guild/commands/s3_sync_impl.py | 1 | 2872 | # Copyright 2017-2021 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import subprocess
import re
import time
from guild import cli
from guild import util
from guild import var
from . import service_impl_support
NAME = "s3-sync"
TITLE = "S3 sync service"
class State(object):
def __init__(self, runs_dir, s3_uri, log):
self.runs_dir = runs_dir
self.s3_uri = s3_uri
self.log = log
def start(args):
_check_cli()
run = lambda log: _run(args, log)
service_impl_support.start(
NAME,
run,
args,
TITLE,
log_max_size=(args.log_max_size * 1024 * 1024),
log_backups=args.log_backups,
)
def _check_cli():
if not util.which("aws"):
cli.error(
"%s requires the AWS Command Line Interface\n"
"Refer to https://docs.aws.amazon.com/cli/latest/"
"userguide/installing.html for details." % NAME
)
def _run(args, log):
assert args.sync_interval >= 5, args
log.info("%s started", TITLE)
runs_dir = var.runs_dir()
s3_uri = _s3_uri(args)
log.info("Synchronizing %s with runs in %s", s3_uri, runs_dir)
state = State(runs_dir, s3_uri, log)
sync_once = lambda: _sync_once(state)
util.loop(sync_once, time.sleep, args.sync_interval, 0)
def _s3_uri(args):
m = re.match(r"s3://([^/]+)(.*)", args.uri)
if not m:
cli.error("URI must be in the format s3://BUCKET[/PATH]")
bucket, path = m.groups()
if path[-1:] == "/":
path = path[:-1]
if path:
return "s3://{}/{}".format(bucket, path)
else:
return "s3://{}".format(bucket)
def _sync_once(state):
log = state.log
log.info("Sync started")
cmd = [
"aws",
"s3",
"sync",
"--delete",
"--size-only",
"--no-progress",
state.runs_dir,
state.s3_uri + "/runs/",
]
p = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
while True:
line = p.stdout.readline()
if not line:
break
log.info(line[:-1].decode())
log.info("Sync stopped")
def stop():
service_impl_support.stop(NAME, TITLE)
def status():
service_impl_support.status(NAME, TITLE)
| apache-2.0 | -9,071,415,864,090,001,000 | 23.758621 | 74 | 0.602019 | false |
theandygross/Figures | src/Figures/Boxplots.py | 1 | 11851 | """
Created on Apr 24, 2013
@author: agross
"""
import numpy as np
import pandas as pd
import matplotlib.pylab as plt
import Stats.Scipy as Stats
from Figures.FigureHelpers import latex_float, init_ax
from Figures.FigureHelpers import prettify_ax
from Helpers.Pandas import match_series, true_index
colors = plt.rcParams['axes.color_cycle'] * 10
def _violin_plot(ax, data, pos=[], bp=False):
"""
http://pyinsci.blogspot.com/2009/09/violin-plot-with-matplotlib.html
    Create violin plots on an axis. Internal to the module as it does not
    use Pandas data structures. This is split off because it reuses code
    from the blog post linked above, and I wanted to keep the original
    code untouched.
"""
from scipy.stats import gaussian_kde
from numpy import arange
# dist = max(pos)-min(pos)
dist = len(pos)
w = min(0.25 * max(dist, 1.0), 0.5)
for p, d in enumerate(data):
try:
k = gaussian_kde(d) # calculates the kernel density
m = k.dataset.min() # lower bound of violin
M = k.dataset.max() # upper bound of violin
x = arange(m, M, (M - m) / 100.) # support for violin
v = k.evaluate(x) # violin profile (density curve)
v = v / v.max() * w # scaling the violin to the available space
ax.fill_betweenx(x, p, v + p, facecolor='y', alpha=0.1)
ax.fill_betweenx(x, p, -v + p, facecolor='y', alpha=0.1)
except:
pass
if bp:
box_plot = ax.boxplot(data, notch=1, positions=range(len(pos)), vert=1,
widths=.25)
return box_plot
def box_plot_pandas(bin_vec, real_vec, ax=None, order=None):
"""
Wrapper around matplotlib's boxplot function.
Inputs
bin_vec: Series of labels
real_vec: Series of measurements to be grouped according to bin_vec
"""
_, ax = init_ax(ax)
bin_vec, real_vec = match_series(bin_vec, real_vec)
if order is not None:
categories = order
else:
categories = bin_vec.value_counts().index
data = [real_vec[bin_vec == num] for num in categories]
bp = ax.boxplot(data, positions=range(len(categories)), widths=.3,
patch_artist=True)
if real_vec.name:
ax.set_ylabel(real_vec.name)
if bin_vec.name:
ax.set_xlabel(bin_vec.name)
ax.set_xticklabels(categories)
[p.set_visible(False) for p in bp['fliers']]
[p.set_visible(False) for p in bp['caps']]
[p.set_visible(False) for p in bp['whiskers']]
for p in bp['medians']:
p.set_color(colors[0])
p.set_lw(3)
p.set_alpha(.8)
for i, p in enumerate(bp['boxes']):
p.set_color('grey')
p.set_lw(3)
p.set_alpha(.7)
if len(data[i]) < 3:
p.set_alpha(0)
def violin_plot_pandas(bin_vec, real_vec, ann='p', order=None, ax=None,
filename=None):
"""
http://pyinsci.blogspot.com/2009/09/violin-plot-with-matplotlib.html
Wrapper around matplotlib's boxplot function to add violin profile.
Inputs
bin_vec: Series of labels
real_vec: Series of measurements to be grouped according to bin_vec
"""
fig, ax = init_ax(ax)
ax.set_ylabel(real_vec.name)
ax.set_xlabel(bin_vec.name)
bin_vec, real_vec = match_series(bin_vec, real_vec)
try:
if order is None:
categories = bin_vec.value_counts().index
else:
categories = order
_violin_plot(ax, [real_vec[bin_vec == num] for num in categories],
pos=categories, bp=True)
ax.set_xticklabels([str(c) + '\n(n=%i)' % sum(bin_vec == c)
for c in categories])
except:
box_plot_pandas(bin_vec, real_vec, ax=ax)
#if type(bin_vec.name) == str:
# ax.set_title(str(bin_vec.name) + ' x ' + str(real_vec.name))
p_value = Stats.kruskal_pandas(bin_vec, real_vec)['p']
if ann == 'p_fancy':
ax.annotate('$p = {}$'.format(latex_float(p_value)), (.95, -.02),
xycoords='axes fraction', ha='right', va='bottom', size=14)
if ann == 'p':
ax.annotate('p = {0:.1e}'.format(p_value), (.95, .02),
xycoords='axes fraction', ha='right', va='bottom', size=12)
elif ann is not None:
ax.annotate(ann, (.95, .02), xycoords='axes fraction', ha='right',
va='bottom', size=12)
if filename is not None:
fig.savefig(filename)
return
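# Example usage (illustrative only; `mutation_status` and `expression` are
# hypothetical pandas Series sharing an index of sample IDs):
#
#   violin_plot_pandas(mutation_status, expression)
#
# draws one violin/box per group found in `mutation_status` and annotates the
# Kruskal-Wallis p-value in the lower-right corner of the axis.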
def violin_plot_series(s, **kw_args):
"""
Wrapper for drawing a violin plot on a series with a multi-index.
The second level of the index is used as the binning variable.
"""
assert s.index.levshape[1] > 1
violin_plot_pandas(pd.Series(s.index.get_level_values(1), s.index), s,
**kw_args)
def paired_boxplot_o(boxes):
"""
Wrapper around plt.boxplot to draw paired boxplots
for a set of boxes.
Input is the same as plt.boxplot:
Array or a sequence of vectors.
"""
fig = plt.figure(figsize=(len(boxes) / 2.5, 4))
ax1 = fig.add_subplot(111)
plt.subplots_adjust(left=0.075, right=0.95, top=0.9, bottom=0.25)
bp = ax1.boxplot(boxes, notch=0, positions=np.arange(len(boxes)) +
1.5 * (np.arange(len(boxes)) / 2), patch_artist=True)
[p.set_color(colors[0]) for p in bp['boxes'][::2]]
[p.set_color('black') for p in bp['whiskers']]
[p.set_color('black') for p in bp['fliers']]
[p.set_alpha(.4) for p in bp['fliers']]
[p.set_alpha(.6) for p in bp['boxes']]
[p.set_edgecolor('black') for p in bp['boxes']]
ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',
alpha=0.5)
# Hide these grid behind plot objects
ax1.set_axisbelow(True)
ax1.set_ylabel('$Log_{2}$ RNA Expression')
ax1.set_xticks(3.5 * np.arange(len(boxes) / 2) + .5)
return ax1, bp
def paired_boxplot(boxes, ax1=None):
if not ax1:
fig = plt.figure(figsize=(len(boxes) / 2.5, 4))
ax1 = fig.add_subplot(111)
plt.subplots_adjust(left=0.075, right=0.95, top=0.9, bottom=0.25)
bp = ax1.boxplot(boxes, notch=0, positions=np.arange(len(boxes)) +
1.5 * (np.arange(len(boxes)) / 2), patch_artist=True)
[p.set_color(colors[0]) for p in bp['boxes'][::2]]
[p.set_color(colors[1]) for p in bp['boxes'][1::2]]
[p.set_color('black') for p in bp['whiskers']]
[p.set_color('black') for p in bp['fliers']]
[p.set_alpha(.4) for p in bp['fliers']]
[p.set_alpha(.8) for p in bp['boxes']]
[p.set_edgecolor('black') for p in bp['boxes']]
ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',
alpha=0.5)
    # Hide the grid lines behind the plot objects
ax1.set_axisbelow(True)
ax1.set_ylabel('$Log_{2}$ RNA Expression')
ax1.set_xticks(3.5 * np.arange(len(boxes) / 2) + .5)
return ax1, bp
def paired_boxplot_tumor_normal(df, sig=True, cutoffs=[.01, .00001],
order=None, ax=None):
"""
Draws a paired boxplot given a DataFrame with both tumor and normal
samples on the index. '01' and '11' are hard-coded as the ids for
tumor/normal.
"""
n = df.groupby(level=0).size() == 2
df = df.ix[n[n].index]
if order is None:
o = df.xs('11', level=1).median().order().index
df = df[o[::-1]]
else:
df = df[order]
l1 = list(df.xs('01', level=1).as_matrix().T)
l2 = list(df.xs('11', level=1).as_matrix().T)
boxes = [x for t in zip(l1, l2) for x in t]
ax1, bp = paired_boxplot(boxes, ax)
test = lambda v: Stats.ttest_rel(v.unstack()['01'], v.unstack()['11'])
res = df.apply(test).T
p = res.p
if sig:
pts = [(i * 3.5 + .5, 18) for i, n in enumerate(p) if n < cutoffs[1]]
if len(pts) > 0:
s1 = ax1.scatter(*zip(*pts), marker='$**$', label='$p<10^{-5}$', s=200)
else:
s1 = None
pts = [(i * 3.5 + .5, 18) for i, n in enumerate(p)
if (n < cutoffs[0]) and (n > cutoffs[1])]
if len(pts) > 0:
s2 = ax1.scatter(*zip(*pts), marker='$*$', label='$p<10^{-2}$', s=30)
else:
s2 = None
ax1.legend(bp['boxes'][:2] + [s2, s1],
('Tumor', 'Normal', '$p<10^{-2}$', '$p<10^{-5}$'),
loc='best', scatterpoints=1)
else:
ax1.legend(bp['boxes'][:2], ('Tumor', 'Normal'), loc='best')
ax1.set_xticklabels(df.columns)
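# Note (illustrative, not from the original source): `df` is expected to carry a
# two-level index of (patient_id, sample_code), where sample_code is '01' for
# tumor and '11' for matched normal, e.g.
#   ('TCGA-AB-1234', '01'), ('TCGA-AB-1234', '11'), ...
# Patients that do not have exactly one of each sample are dropped before the
# paired t-test is run.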
def boxplot_panel(hit_vec, response_df):
"""
Draws a series of paired boxplots with the rows of the response_df
split according to hit_vec.
"""
b = response_df.copy()
b.columns = pd.MultiIndex.from_arrays([b.columns, hit_vec.ix[b.columns]])
b = b.T
v1, v2 = hit_vec.unique()
test = lambda v: Stats.anova(v.reset_index(level=1)[v.index.names[1]],
v.reset_index(level=1)[v.name])
res = b.apply(test).T
p = res.p.order()
b = b.ix[:, p.index]
l1 = list(b.xs(v1, level=1).as_matrix().T)
l2 = list(b.xs(v2, level=1).as_matrix().T)
boxes = [x for t in zip(l1, l2) for x in t]
ax1, bp = paired_boxplot(boxes)
y_lim = (response_df.T.quantile(.9).max()) * 1.2
pts = [(i * 3.5 + .5, y_lim) for i, n in enumerate(p) if n < .00001]
if len(pts) > 0:
s1 = ax1.scatter(*zip(*pts), marker='$**$', label='$p<10^{-5}$', s=200)
else:
s1 = None
pts = [(i * 3.5 + .5, y_lim) for i, n in enumerate(p) if (n < .01)
and (n > .00001)]
if len(pts) > 0:
s2 = ax1.scatter(*zip(*pts), marker='$*$', label='$p<10^{-2}$', s=30)
else:
s2 = None
ax1.set_xticklabels(b.columns)
ax1.legend(bp['boxes'][:2] + [s2, s1],
(v1, v2, '$p<10^{-2}$', '$p<10^{-5}$'),
loc='best', scatterpoints=1)
def paired_bp_tn_split(vec, assignment, ax=None, split_vals=('01', '11'),
data_type='gene expression'):
"""
Paired boxplot for a single Series, with splitting on the index,
grouped by assignment. I.E. Tumor-Normal gene expression split by
cancer.
vec:
vector of values to plot.
assignment:
vector mapping keys to group assignment
ax (None):
matplotlib axis to plot on or None
split_vals ('01','11'):
Values to split the boxplot pairing on. The default of
('01','11') indicates tumor vs. normal in the standard
TCGA barcode nomenclature. This should coorespond to values
on the second level of the index for vec and assignment.
**both vec and assignment should have an overlapping index with
multiple levels**
"""
_, ax = init_ax(ax, figsize=(8, 3))
if vec.name != None:
label = vec.name # lose label in manipulation
else:
label = ''
g1 = split_vals[0]
g2 = split_vals[1]
vec = pd.concat([vec[:, g1], vec[:, g2]], keys=[g1, g2],
axis=1)
vec = vec.dropna().stack()
counts = vec.unstack().groupby(assignment).size()
groups = list(true_index(counts > 5))
groups = vec.unstack().groupby(assignment).median()[g1].ix[groups]
groups = groups.order().index[::-1]
l1 = [np.array(vec[:, g1].ix[true_index(assignment == c)].dropna())
for c in groups]
l2 = [np.array(vec[:, g2].ix[true_index(assignment == c)].dropna())
for c in groups]
boxes = [x for t in zip(l1, l2) for x in t if len(t[1]) > 5]
ax, bp = paired_boxplot(boxes, ax)
labels = ['{}\n({})'.format(c, counts[c]) for c in groups]
ax.set_xticklabels(labels)
prettify_ax(ax)
ax.set_ylabel('{} {}'.format(label, data_type))
| mit | -5,644,501,510,842,958,000 | 35.24159 | 83 | 0.562062 | false |
jackuess/pirateplay.se | lib/pirateplay/lib/rerequest.py | 1 | 2797 | import itertools, re, urllib2
DEBUG = False
def set_debug(debug):
global DEBUG
DEBUG = debug
def debug_print(s):
if DEBUG:
print(' DEBUG ' + s)
def del_empty_values(d):
for key in d.keys():
if d[key] == '' or d[key] == None:
del d[key]
return d
def req_key(req):
key = req.get_full_url()
key += str(req.header_items())
if req.has_data():
key += req.get_data()
return key
def get_vars(requestchain, content, cache = {}):
debug_print('Testing with service = %s' % requestchain.get('title', 'untitled'))
cumulated_vars = requestchain.get('startvars', {})
for item in requestchain.get('items', []):
new_vars = item.create_vars(content, cumulated_vars)
try:
first = new_vars.next()
except StopIteration:
break
new_vars = itertools.chain([first], new_vars)
if 'final_url' in first:
return [del_empty_values(dict(cumulated_vars, **v)) for v in new_vars]
for v in new_vars:
cumulated_vars.update(v)
if 'req_url' in cumulated_vars:
req = item.create_req(cumulated_vars)
rk = req_key(req)
if rk in cache:
content = cache[rk]
else:
content = item.create_content(req)
cache[rk] = content
return []
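# Illustrative sketch (shape inferred from get_vars() above, names are made up) of
# the dict that describes a request chain:
#
#   requestchain = {
#       'title': 'example service',
#       'startvars': {'req_url': 'http://example.com/video/1'},
#       'items': [TemplateRequest(r'"stream":\s*"(?P<final_url>[^"]+)"')],
#   }
#
# get_vars(requestchain, content) walks the items, feeding each match's named
# groups into the accumulated variables until one of them yields 'final_url'.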
class TemplateRequest:
def __init__(self, re,
encode_vars = lambda x: x, decode_content = lambda c, v: c,
handlerchain = None):
self.re = re
self.encode_vars = encode_vars
self.decode_content = decode_content
self.handlerchain = handlerchain
def create_vars(self, content, cumulated_vars):
content = self.decode_content(content, cumulated_vars)
#Make sure req_data and req_headers are empty
if 'req_data' in cumulated_vars:
del cumulated_vars['req_data']
if 'req_headers' in cumulated_vars:
del cumulated_vars['req_headers']
for match in re.finditer(self.re, content, re.DOTALL):
d = dict(cumulated_vars, **match.groupdict())
d.update(self.encode_vars(d))
yield d
def create_req(self, cumulated_vars):
url = cumulated_vars['req_url']
if not url.startswith('http'):
url = 'http://' + url
req = urllib2.Request(url)
if 'req_data' in cumulated_vars:
debug_print('Adding post data to request: ' + cumulated_vars['req_data'])
req.add_data(cumulated_vars['req_data'])
for header, value in cumulated_vars.get('req_headers', {}).items():
debug_print('Adding header to request: %s = %s' % (header, value))
req.add_header(header, value)
return req
def create_content(self, req):
debug_print('Opening URL: ' + req.get_full_url())
try:
try:
f = urllib2.build_opener(self.handlerchain).open(req)
except TypeError:
f = urllib2.urlopen(req)
content = f.read()
f.close()
except (urllib2.HTTPError, urllib2.URLError):
content = ''
return content
def to_dict(self):
return { 'test': self.re } | gpl-3.0 | 6,237,002,380,573,614,000 | 23.982143 | 81 | 0.65463 | false |
bakhtout/odoo-educ | addons/openeducat_erp/op_book_movement/op_book_movement.py | 1 | 6674 | # -*- coding: utf-8 -*-
#/#############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.tech-receptives.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#/#############################################################################
from openerp.osv import osv, fields
from datetime import datetime
from openerp.tools.translate import _
def days_between(d1, d2):
d1 = datetime.strptime(d1, "%Y-%m-%d")
d2 = datetime.strptime(d2, "%Y-%m-%d")
return abs((d2 - d1).days)
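# Example (illustrative): days_between("2014-01-01", "2014-01-15") == 14,
# regardless of argument order, since the absolute difference is returned.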
class op_book_movement(osv.osv):
_name = 'op.book.movement'
_rec_name = 'book_id'
_columns = {
'book_id': fields.many2one('op.book', string='Book', required=True),
'quantity': fields.integer('No. Of Books', size=256, required=True),
'type': fields.selection([('student', 'Student'), ('faculty', 'Faculty')], 'Student/Faculty', required=True),
'student_id': fields.many2one('op.student', string='Student'),
'faculty_id': fields.many2one('op.faculty', string='Faculty'),
'library_card_id': fields.many2one('op.library.card', 'Library Card', required=True),
'issued_date': fields.date(string='Issued Date', required=True),
'return_date': fields.date(string='Return Date', required=True),
'actual_return_date': fields.date(string='Actual Return Date'),
'penalty': fields.float(string='Penalty'),
'partner_id': fields.many2one('res.partner', 'Person'),
'reserver_name': fields.char('Person Name', size=256),
'state': fields.selection([('i','Issued'),('a','Available'),('l','Lost'),('r','Reserved')], string='Status'),
}
_defaults = {'state': 'a'}
def _check_date(self, cr, uid, ids, context=None):
for self_obj in self.browse(cr, uid, ids):
if self_obj.issued_date > self_obj.return_date:
return False
return True
_constraints = [
        (_check_date, 'Issued Date should not be later than the Return Date.', ['Date']),
]
def onchange_book_id(self, cr, uid, ids, book, context=None):
res = {}
res = {
'state': self.pool.get('op.book').browse(cr, uid, book).state
}
return {'value': res}
def issue_book(self, cr, uid, ids, context={}):
        ''' function to issue a book '''
book_pool = self.pool.get('op.book')
for obj in self.browse(cr, uid, ids, context):
if obj.book_id.state and obj.book_id.state == 'a':
book_pool.write(cr, uid, obj.book_id.id, {'state': 'i'})
self.write(cr, uid, obj.id, {'state': 'i'})
else: return True
# else:
# book_state = obj.book_id.state == 'i' and 'Issued' or \
# obj.book_id.state == 'a' and 'Available' or \
# obj.book_id.state == 'l' and 'Lost' or \
# obj.book_id.state == 'r' and 'Reserved'
# raise osv.except_osv(('Error!'),("Book Can not be issued because book state is : %s") %(book_state))
return True
def calculate_penalty(self, cr, uid, obj, context={}):
book_pool = self.pool.get('op.book')
penalty_amt = 0
penalty_days = 0
for obj in self.browse(cr, uid, obj, context):
standard_diff = days_between(obj.return_date,obj.issued_date)
actual_diff = days_between(obj.actual_return_date,obj.issued_date)
if obj.library_card_id and obj.library_card_id.library_card_type_id:
penalty_days = actual_diff > (standard_diff + obj.library_card_id.library_card_type_id.duration) and actual_diff - (standard_diff + obj.library_card_id.library_card_type_id.duration) or penalty_days
penalty_amt = round(penalty_days - penalty_days/7) * obj.library_card_id.library_card_type_id.penalty_amt_per_day
self.write(cr, uid, obj.id, {'penalty':penalty_amt,'state': 'a'})
book_pool.write(cr, uid, obj.book_id.id, {'state': 'a'})
return True
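    # Worked example (illustrative figures only): a book issued with a planned loan
    # of 5 days, a card type allowing 7 extra days with a penalty of 1.0 per day,
    # and actually returned 20 days after issue gives
    #   penalty_days = 20 - (5 + 7) = 8
    #   penalty_amt  = round(8 - 8/7) * 1.0   # -> 7.0 (integer division under Python 2)
    # i.e. roughly one penalty-free day is granted per week of delay.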
def return_book(self, cr, uid, ids, context={}):
        ''' function to return a book '''
for obj in self.browse(cr, uid, ids, context):
if obj.book_id.state and obj.book_id.state == 'i':
#wizard call for return date
value = {}
data_obj = self.pool.get('ir.model.data')
view_id = data_obj._get_id(cr, uid, 'openeducat_erp', 'return_date_act')
value = {
'name': _('Return Date'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'return.date',
'type': 'ir.actions.act_window',
'target':'new',
}
return value
else: return True
# book_state = obj.book_id.state == 'i' and 'Issued' or \
# obj.book_id.state == 'a' and 'Available' or \
# obj.book_id.state == 'l' and 'Lost' or \
# obj.book_id.state == 'r' and 'Reserved'
# raise osv.except_osv(('Error!'),("Book Can not be issued because book state is : %s") %(book_state))
return True
def do_book_reservation(self, cr, uid, ids, context={}):
        ''' function to reserve a book '''
value = {}
value = {
'name': _('Book Reservation'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'reserve.book',
'type': 'ir.actions.act_window',
'target':'new',
}
return value
op_book_movement()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 8,279,460,501,516,231,000 | 45.027586 | 214 | 0.534612 | false |
PitchBlackPL/Solar | solar_vis.py | 1 | 4499 | # coding: utf-8
# license: GPLv3
"""Модуль визуализации.
Нигде, кроме этого модуля, не используются экранные координаты объектов.
Функции, создающие гaрафические объекты и перемещающие их на экране, принимают физические координаты
"""
header_font = "Arial-16"
"""Шрифт в заголовке"""
window_width = 800
"""Ширина окна"""
window_height = 800
"""Высота окна"""
scale_factor = None
"""Масштабирование экранных координат по отношению к физическим.
Тип: float
Мера: количество пикселей на один метр."""
def calculate_scale_factor(max_distance):
"""Вычисляет значение глобальной переменной **scale_factor** по данной характерной длине"""
global scale_factor
scale_factor = 0.4*min(window_height, window_width)/max_distance
print('Scale factor:', scale_factor)
def scale_x(x):
"""Возвращает экранную **x** координату по **x** координате модели.
Принимает вещественное число, возвращает целое число.
В случае выхода **x** координаты за пределы экрана возвращает
координату, лежащую за пределами холста.
Параметры:
**x** — x-координата модели.
"""
return int(x*scale_factor) + window_width//2
def scale_y(y):
"""Возвращает экранную **y** координату по **y** координате модели.
Принимает вещественное число, возвращает целое число.
В случае выхода **y** координаты за пределы экрана возвращает
координату, лежащую за пределами холста.
Направление оси развёрнуто, чтобы у модели ось **y** смотрела вверх.
Параметры:
**y** — y-координата модели.
"""
    return window_height//2 - int(y*scale_factor)  # flipped so that model y points up on screen
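# Illustrative example (assuming window_width == window_height == 800 and
# calculate_scale_factor() already called): the model origin (0, 0) maps to the
# canvas centre (400, 400), and positive model y moves a point up the screen
# (towards smaller canvas y values).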
def create_star_image(space, star):
"""Создаёт отображаемый объект звезды.
Параметры:
**space** — холст для рисования.
**star** — объект звезды.
"""
x = scale_x(star.x)
y = scale_y(star.y)
r = star.R
star.image = space.create_oval([x - r, y - r], [x + r, y + r], fill=star.color)
def create_planet_image(space, planet):
"""Создаёт отображаемый объект планеты.
Параметры:
**space** — холст для рисования.
**planet** — объект планеты.
"""
x = scale_x(planet.x)
y = scale_y(planet.y)
r = planet.R
    planet.image = space.create_oval([x - r, y - r], [x + r, y + r], fill=planet.color)  # drawn the same way as the star
def update_system_name(space, system_name):
"""Создаёт на холсте текст с названием системы небесных тел.
Если текст уже был, обновляет его содержание.
Параметры:
**space** — холст для рисования.
**system_name** — название системы тел.
"""
space.create_text(30, 80, tag="header", text=system_name, font=header_font)
def update_object_position(space, body):
"""Перемещает отображаемый объект на холсте.
Параметры:
**space** — холст для рисования.
**body** — тело, которое нужно переместить.
"""
x = scale_x(body.x)
y = scale_y(body.y)
r = body.R
    if x + r < 0 or x - r > window_width or y + r < 0 or y - r > window_height:
        space.coords(body.image, window_width + r, window_height + r,
                     window_width + 2*r, window_height + 2*r)  # park it outside the window
        return
    space.coords(body.image, x - r, y - r, x + r, y + r)
if __name__ == "__main__":
print("This module is not for direct call!")
| gpl-3.0 | -5,636,459,889,650,575,000 | 26.588235 | 117 | 0.651538 | false |
anchore/anchore-engine | anchore_engine/services/apiext/api/controllers/images.py | 1 | 55254 | import datetime
import io
import json
import re
import tarfile
from connexion import request
import anchore_engine.apis
import anchore_engine.common
import anchore_engine.common.images
import anchore_engine.configuration.localconfig
import anchore_engine.subsys.metrics
from anchore_engine import utils
from anchore_engine.apis import exceptions as api_exceptions
from anchore_engine.apis.authorization import (
get_authorizer,
RequestingAccountValue,
ActionBoundPermission,
)
from anchore_engine.apis.context import ApiRequestContextProxy
from anchore_engine.clients.services import internal_client_for
from anchore_engine.clients.services.catalog import CatalogClient
from anchore_engine.clients.services.policy_engine import PolicyEngineClient
from anchore_engine.common.helpers import make_response_error
from anchore_engine.db.entities.common import anchore_now
from anchore_engine.services.apiext.api.controllers.utils import (
normalize_image_add_source,
validate_image_add_source,
# make_response_vulnerability,
make_response_vulnerability_report,
)
from anchore_engine.subsys import taskstate, logger
from anchore_engine.subsys.metrics import flask_metrics
from anchore_engine.utils import parse_dockerimage_string
authorizer = get_authorizer()
def make_response_policyeval(eval_record, params, catalog_client):
ret = {}
try:
tag = eval_record["tag"]
ret[tag] = {}
if eval_record["evalId"] and eval_record["policyId"]:
ret[tag]["detail"] = {}
if params and "detail" in params and params["detail"]:
eval_data = eval_record["result"]
ret[tag]["detail"]["result"] = eval_data
bundle_data = catalog_client.get_document(
"policy_bundles", eval_record["policyId"]
)
ret[tag]["detail"]["policy"] = bundle_data
ret[tag]["policyId"] = eval_record["policyId"]
if eval_record["final_action"].upper() in ["GO", "WARN"]:
ret[tag]["status"] = "pass"
else:
ret[tag]["status"] = "fail"
ret[tag]["last_evaluation"] = (
datetime.datetime.utcfromtimestamp(
eval_record["created_at"]
).isoformat()
+ "Z"
)
else:
ret[tag]["policyId"] = "N/A"
ret[tag]["final_action"] = "fail"
ret[tag]["last_evaluation"] = "N/A"
ret[tag]["detail"] = {}
except Exception as err:
raise Exception("failed to format policy eval response: " + str(err))
return ret
def make_response_image(image_record, include_detail=True):
ret = image_record
image_content = {"metadata": {}}
for key in [
"arch",
"distro",
"distro_version",
"dockerfile_mode",
"image_size",
"layer_count",
]:
val = image_record.pop(key, None)
image_content["metadata"][key] = val
image_record["image_content"] = image_content
if image_record["annotations"]:
try:
annotation_data = json.loads(image_record["annotations"])
image_record["annotations"] = annotation_data
except:
pass
# try to assemble full strings
if image_record and "image_detail" in image_record:
for image_detail in image_record["image_detail"]:
try:
image_detail["fulldigest"] = (
image_detail["registry"]
+ "/"
+ image_detail["repo"]
+ "@"
+ image_detail["digest"]
)
image_detail["fulltag"] = (
image_detail["registry"]
+ "/"
+ image_detail["repo"]
+ ":"
+ image_detail["tag"]
)
except:
image_detail["fulldigest"] = None
image_detail["fulltag"] = None
for removekey in ["record_state_val", "record_state_key"]:
image_detail.pop(removekey, None)
for datekey in ["last_updated", "created_at", "tag_detected_at"]:
try:
image_detail[datekey] = (
datetime.datetime.utcfromtimestamp(
image_detail[datekey]
).isoformat()
+ "Z"
)
except:
pass
if not include_detail:
image_record["image_detail"] = []
for datekey in ["last_updated", "created_at", "analyzed_at"]:
try:
image_record[datekey] = (
datetime.datetime.utcfromtimestamp(image_record[datekey]).isoformat()
+ "Z"
)
except:
pass
for removekey in ["record_state_val", "record_state_key"]:
image_record.pop(removekey, None)
return ret
def lookup_imageDigest_from_imageId(request_inputs, imageId):
user_auth = request_inputs["auth"]
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs["params"]
userId = request_inputs["userId"]
ret = None
try:
client = internal_client_for(CatalogClient, request_inputs["userId"])
image_records = client.get_image_by_id(imageId=imageId)
if image_records:
image_record = image_records[0]
imageDigest = image_record["imageDigest"]
ret = imageDigest
except Exception as err:
logger.debug("operation exception: " + str(err))
raise err
return ret
def vulnerability_query(
account,
digest,
vulnerability_type,
force_refresh=False,
vendor_only=True,
doformat=False,
):
# user_auth = request_inputs['auth']
# method = request_inputs['method']
# bodycontent = request_inputs['bodycontent']
# params = request_inputs['params']
return_object = {}
httpcode = 500
# userId = request_inputs['userId']
localconfig = anchore_engine.configuration.localconfig.get_config()
system_user_auth = localconfig["system_user_auth"]
verify = localconfig["internal_ssl_verify"]
# force_refresh = params.get('force_refresh', False)
# vendor_only = params.get('vendor_only', True)
try:
if (
vulnerability_type
not in anchore_engine.common.image_vulnerability_types + ["all"]
):
httpcode = 404
raise Exception(
"content type (" + str(vulnerability_type) + ") not available"
)
# tag = params.pop('tag', None)
# imageDigest = params.pop('imageDigest', None)
# digest = params.pop('digest', None)
catalog_client = internal_client_for(CatalogClient, account)
image_report = catalog_client.get_image(digest)
if image_report and image_report["analysis_status"] != taskstate.complete_state(
"analyze"
):
httpcode = 404
raise Exception(
"image is not analyzed - analysis_status: "
+ image_report["analysis_status"]
)
imageDigest = image_report["imageDigest"]
try:
image_detail = image_report["image_detail"][0]
imageId = image_detail["imageId"]
client = internal_client_for(PolicyEngineClient, account)
resp = client.get_image_vulnerabilities(
user_id=account,
image_id=imageId,
force_refresh=force_refresh,
vendor_only=vendor_only,
)
if doformat:
# ret = make_response_vulnerability(vulnerability_type, resp)
ret = make_response_vulnerability_report(vulnerability_type, resp)
return_object[imageDigest] = ret
else:
return_object[imageDigest] = resp
httpcode = 200
except Exception as err:
httpcode = 500
raise Exception("could not fetch vulnerabilities - exception: " + str(err))
httpcode = 200
except Exception as err:
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
def get_content(request_inputs, content_type):
params = request_inputs["params"]
return_object = {}
http_code = 500
try:
localconfig = anchore_engine.configuration.localconfig.get_config()
all_content_types = localconfig.get(
"image_content_types", []
) + localconfig.get("image_metadata_types", [])
if content_type not in all_content_types:
raise Exception("content type (" + str(content_type) + ") not available")
image_digest = params.pop("imageDigest", None)
client = internal_client_for(CatalogClient, request_inputs["userId"])
return_object[image_digest] = client.get_image_content(
image_digest, content_type
)
http_code = 200
except Exception as err:
logger.exception("Failed content lookup")
return_object = make_response_error(err, in_httpcode=http_code)
http_code = return_object["httpcode"]
return return_object, http_code
# repositories
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def add_repository(repository=None, autosubscribe=False, dryrun=False):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request,
default_params={
"autosubscribe": autosubscribe,
"repository": repository,
"dryrun": dryrun,
},
)
return_object, httpcode = repositories(request_inputs)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
def repositories(request_inputs):
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs["params"]
return_object = {}
httpcode = 500
input_repo = None
if params and "repository" in params:
input_repo = params["repository"]
autosubscribe = False
if params and "autosubscribe" in params:
autosubscribe = params["autosubscribe"]
dryrun = False
if params and "dryrun" in params:
dryrun = params["dryrun"]
try:
if method == "POST":
logger.debug("handling POST: ")
try:
client = internal_client_for(CatalogClient, request_inputs["userId"])
return_object = []
repo_records = client.add_repo(
regrepo=input_repo,
autosubscribe=autosubscribe,
dryrun=dryrun,
)
for repo_record in repo_records:
return_object.append(repo_record)
httpcode = 200
except Exception as err:
raise err
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
# images CRUD
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_imagetags(image_status=None):
try:
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
user_auth = request_inputs["auth"]
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs["params"]
return_object = {}
httpcode = 500
client = internal_client_for(CatalogClient, request_inputs["userId"])
return_object = client.get_imagetags(image_status)
httpcode = 200
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_archive(archive_file):
httpcode = 500
try:
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
request_account = request_inputs["userId"]
# TODO perform the archive format validation here, for now just a READ
try:
archive_buffer = archive_file.read()
except Exception as err:
httpcode = 409
raise Exception(
"invalid archive format (must be an image archive tar.gz generated by anchore) - exception: {}".format(
err
)
)
# get some information out of the archive for input validation
archive_account = None
archive_digest = None
with tarfile.open(
fileobj=io.BytesIO(archive_buffer), format=tarfile.PAX_FORMAT
) as TFH:
try:
with TFH.extractfile("archive_manifest") as AMFH:
archive_manifest = json.loads(utils.ensure_str(AMFH.read()))
archive_account = archive_manifest["account"]
archive_digest = archive_manifest["image_digest"]
except Exception as err:
httpcode = 409
raise Exception(
"cannot extract/parse archive_manifest from archive file - exception: {}".format(
err
)
)
        # removed the below validation check as the checks are now performed in the archiving subsystem, based on the authenticated account
# perform verification that the account set in the archive matches the calling account namespace
# if (not request_account or not archive_account) or (request_account != archive_account):
# httpcode = 409
# raise Exception ("account in import archive ({}) does not match API request account ({})".format(archive_account, request_account))
# make the import call to the catalog
client = internal_client_for(CatalogClient, request_inputs["userId"])
catalog_import_return_object = client.import_archive(
archive_digest, io.BytesIO(archive_buffer)
)
        # finally grab the image record from the catalog, prep the response and return
image_record = client.get_image(archive_digest)
return_object = [make_response_image(image_record, include_detail=True)]
httpcode = 200
except api_exceptions.AnchoreApiError as err:
return_object = make_response_error(err, in_httpcode=err.__response_code__)
httpcode = err.__response_code__
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_images(
history=None,
fulltag=None,
detail=False,
image_status="active",
analysis_status=None,
):
httpcode = 500
try:
digest = None
return_object = do_list_images(
account=ApiRequestContextProxy.namespace(),
filter_digest=digest,
filter_tag=fulltag,
history=history,
image_status=image_status,
analysis_status=analysis_status,
)
httpcode = 200
except api_exceptions.AnchoreApiError as err:
return_object = make_response_error(err, in_httpcode=err.__response_code__)
httpcode = err.__response_code__
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def delete_images_async(imageDigests, force=False):
return_object = {}
httpcode = 500
try:
logger.debug("Handling DELETE on imageDigests: %s" % imageDigests)
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
rc = client.delete_images_async(imageDigests, force=force)
if rc:
return_object = rc
httpcode = 200
else:
httpcode = 500
raise Exception(
"Operation failed due to an error/connectivity issue with catalog"
)
except Exception as err:
logger.exception("Error in asynchronous deletion of images")
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
def validate_pullstring_is_tag(pullstring):
try:
parsed = parse_dockerimage_string(pullstring)
return parsed.get("tag") is not None
except Exception as e:
logger.debug_exception(
"Error parsing pullstring {}. Err = {}".format(pullstring, e)
)
raise ValueError("Error parsing pullstring {}".format(pullstring))
def validate_pullstring_is_digest(pullstring):
try:
parsed = parse_dockerimage_string(pullstring)
return parsed.get("digest") is not None
except Exception as e:
logger.debug_exception(
"Error parsing pullstring {}. Err = {}".format(pullstring, e)
)
raise ValueError("Error parsing pullstring {}".format(pullstring))
digest_regex = re.compile("sha256:[a-fA-F0-9]{64}")
def validate_archive_digest(digest: str):
return digest is not None and digest_regex.match(digest.strip())
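# Illustrative examples (made-up digests) of what validate_archive_digest accepts:
#   "sha256:" + "a" * 64   -> truthy (matches the sha256 digest pattern)
#   "sha256:1234"          -> None   (too short)
#   "md5:abcd"             -> None   (wrong algorithm prefix)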
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def add_image(image, force=False, autosubscribe=False):
# TODO: use for validation pass
spec = ApiRequestContextProxy.get_service().api_spec
httpcode = 500
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"force": force}
)
try:
normalized = normalize_image_add_source(analysis_request_dict=image)
validate_image_add_source(normalized, spec)
except api_exceptions.AnchoreApiError:
raise
except Exception as e:
raise api_exceptions.BadRequest(
"Could not validate request due to error",
detail={"validation_error": str(e)},
)
enable_subscriptions = ["analysis_update"]
if autosubscribe:
enable_subscriptions.append("tag_update")
source = normalized["source"]
return_object = analyze_image(
ApiRequestContextProxy.namespace(),
source,
force,
enable_subscriptions,
image.get("annotations"),
)
httpcode = 200
except api_exceptions.AnchoreApiError as err:
raise err
# httpcode = err.__response_code__
# return_object = make_response_error(err.message, details=err.detail, in_httpcode=httpcode)
except ValueError as err:
httpcode = 400
return_object = make_response_error(str(err), in_httpcode=400)
except Exception as err:
logger.debug("operation exception: {}".format(str(err)))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def delete_image(imageDigest, force=False):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"force": force}
)
return_object, httpcode = images_imageDigest(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image(imageDigest, history=None):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"history": False}
)
return_object, httpcode = images_imageDigest(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_by_imageId(imageId, history=None):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"history": False}
)
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
return_object, httpcode = images_imageDigest(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def delete_image_by_imageId(imageId, force=False):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"force": force}
)
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
return_object, httpcode = images_imageDigest(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_policy_check(
imageDigest, policyId=None, tag=None, detail=True, history=False
):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request,
default_params={
"tag": None,
"detail": True,
"history": False,
"policyId": None,
},
)
return_object, httpcode = images_imageDigest_check(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_policy_check_by_imageId(
imageId, policyId=None, tag=None, detail=None, history=None
):
try:
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
request_inputs = anchore_engine.apis.do_request_prep(
request,
default_params={
"tag": None,
"detail": True,
"history": False,
"policyId": None,
},
)
return_object, httpcode = images_imageDigest_check(request_inputs, imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_image_metadata(imageDigest):
try:
localconfig = anchore_engine.configuration.localconfig.get_config()
return_object = localconfig.get("image_metadata_types", [])
httpcode = 200
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_metadata_by_type(imageDigest, mtype):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"imageDigest": imageDigest}
)
return_object, httpcode = get_content(request_inputs, mtype)
if httpcode == 200:
return_object = {
"imageDigest": imageDigest,
"metadata_type": mtype,
"metadata": list(return_object.values())[0],
}
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_image_content(imageDigest):
try:
localconfig = anchore_engine.configuration.localconfig.get_config()
return_object = localconfig.get("image_content_types", [])
httpcode = 200
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_image_content_by_imageid(imageId):
try:
localconfig = anchore_engine.configuration.localconfig.get_config()
return_object = localconfig.get("image_content_types", [])
httpcode = 200
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type(imageDigest, ctype):
try:
request_inputs = anchore_engine.apis.do_request_prep(
request, default_params={"imageDigest": imageDigest}
)
return_object, httpcode = get_content(request_inputs, ctype)
if httpcode == 200:
return_object = {
"imageDigest": imageDigest,
"content_type": ctype,
"content": list(return_object.values())[0],
}
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_files(imageDigest):
return get_image_content_by_type(imageDigest, "files")
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_javapackage(imageDigest):
return get_image_content_by_type(imageDigest, "java")
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_malware(imageDigest):
return get_image_content_by_type(imageDigest, "malware")
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_imageId(imageId, ctype):
try:
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
return_object, httpcode = get_image_content_by_type(imageDigest, ctype)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_imageId_files(imageId):
return get_image_content_by_type_imageId(imageId, "files")
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_content_by_type_imageId_javapackage(imageId):
return get_image_content_by_type_imageId(imageId, "java")
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_vulnerability_types(imageDigest):
try:
return_object = anchore_engine.common.image_vulnerability_types + ["all"]
httpcode = 200
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_vulnerability_types_by_imageId(imageId):
try:
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
return_object, httpcode = get_image_vulnerability_types(imageDigest)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_vulnerabilities_by_type(
imageDigest, vtype, force_refresh=False, vendor_only=True
):
try:
vulnerability_type = vtype
return_object, httpcode = vulnerability_query(
ApiRequestContextProxy.namespace(),
imageDigest,
vulnerability_type,
force_refresh,
vendor_only,
doformat=True,
)
if httpcode == 200:
return_object = {
"imageDigest": imageDigest,
"vulnerability_type": vulnerability_type,
"vulnerabilities": list(return_object.values())[0],
}
except Exception as err:
logger.exception("Exception getting vulns")
httpcode = 500
return_object = str(err)
return return_object, httpcode
@flask_metrics.do_not_track()
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_image_vulnerabilities_by_type_imageId(imageId, vtype):
try:
vulnerability_type = vtype
request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
try:
imageDigest = lookup_imageDigest_from_imageId(request_inputs, imageId)
except:
imageDigest = imageId
return_object, httpcode = get_image_vulnerabilities_by_type(
imageDigest, vulnerability_type
)
except Exception as err:
httpcode = 500
return_object = str(err)
return return_object, httpcode
# @flask_metrics.do_not_track()
# @authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
# def import_image(analysis_report):
# try:
# request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
# return_object, httpcode = do_import_image(request_inputs, analysis_report)
#
# except Exception as err:
# httpcode = 500
# return_object = str(err)
#
# return return_object, httpcode
# def do_import_image(request_inputs, importRequest):
# user_auth = request_inputs['auth']
# method = request_inputs['method']
# bodycontent = request_inputs['bodycontent']
# params = request_inputs['params']
#
# return_object = {}
# httpcode = 500
#
# userId, pw = user_auth
#
# try:
# client = internal_client_for(CatalogClient, request_inputs['userId'])
# return_object = []
# image_records = client.import_image(json.loads(bodycontent))
# for image_record in image_records:
# return_object.append(make_response_image(image_record))
# httpcode = 200
#
# except Exception as err:
# logger.debug("operation exception: " + str(err))
# return_object = make_response_error(err, in_httpcode=httpcode)
# httpcode = return_object['httpcode']
#
# return(return_object, httpcode)
def do_list_images(
account,
filter_tag=None,
filter_digest=None,
history=False,
image_status=None,
analysis_status=None,
):
client = internal_client_for(CatalogClient, account)
try:
# Query param fulltag has precedence for search
image_records = client.list_images(
tag=filter_tag,
digest=filter_digest,
history=history,
image_status=image_status,
analysis_status=analysis_status,
)
return [
make_response_image(image_record, include_detail=True)
for image_record in image_records
]
except Exception as err:
logger.debug("operation exception: " + str(err))
raise err
def analyze_image(
account, source, force=False, enable_subscriptions=None, annotations=None
):
"""
Analyze an image from a source where a source can be one of:
'digest': {
      'pullstring': str, (digest or tag, e.g. docker.io/alpine@sha256:abc),
'tag': str, the tag itself to associate (e.g. docker.io/alpine:latest),
      'creation_timestamp_override': str, rfc3339 format; necessary only if not doing a force re-analysis of an existing image,
'dockerfile': str, the base64 encoded dockerfile content to associate with this tag at analysis time. optional
}
'tag': {
'pullstring': str, the full tag-style pull string for docker (e.g. docker.io/nginx:latest),
      'dockerfile': str, optional base64 encoded dockerfile content to associate with this tag at analysis time
}
'archive': {
'digest': str, the digest to restore from the analysis archive
}
:param account: str account id
:param source: dict source object with keys: 'tag', 'digest', and 'archive', with associated config for pulling source from each. See the api spec for schema details
:param force: bool, if true re-analyze existing image
:param enable_subscriptions: the list of subscriptions to enable at add time. Optional
:param annotations: Dict of k/v annotations. Optional.
:return: resulting image record
"""
if not source:
raise Exception("Must have source to fetch image or analysis from")
client = internal_client_for(CatalogClient, account)
tag = None
digest = None
ts = None
is_from_archive = False
dockerfile = None
image_check = None
image_record = None
try:
logger.debug(
"handling POST: source={}, force={}, enable_subscriptions={}, annotations={}".format(
source, force, enable_subscriptions, annotations
)
)
        # if the image is not already present, add it and set it up to be analyzed
if source.get("import"):
client = internal_client_for(
CatalogClient, userId=ApiRequestContextProxy.namespace()
)
image_record = client.import_image(
source.get("import"), annotations=annotations, force=force
)
# The import path will fail with an expected error if the image is already analyzed and not in a failed state
# and the user did not specify a force re-load of the image. The regular image analysis path will allow such
# a case for idempotent operation and to permit updates to things like annotations.
else:
if source.get("archive"):
img_source = source.get("archive")
# Do archive-based add
digest = img_source["digest"]
is_from_archive = True
elif source.get("tag"):
# Do tag-based add
img_source = source.get("tag")
tag = img_source["pullstring"]
dockerfile = img_source.get("dockerfile")
elif source.get("digest"):
# Do digest-based add
img_source = source.get("digest")
tag = img_source["tag"]
digest_info = anchore_engine.utils.parse_dockerimage_string(
img_source["pullstring"]
)
digest = digest_info["digest"]
dockerfile = img_source.get("dockerfile")
ts = img_source.get("creation_timestamp_override")
if ts:
try:
ts = utils.rfc3339str_to_epoch(ts)
except Exception as err:
raise api_exceptions.InvalidDateFormat(
"source.creation_timestamp_override", ts
)
if force:
# Grab the trailing digest sha section and ensure it exists
try:
image_check = client.get_image(digest)
if not image_check:
raise Exception(
"No image found for digest {}".format(digest)
)
if not ts:
# Timestamp required for analysis by digest & tag (if none specified,
# default to previous image's timestamp)
ts = image_check.get("created_at", anchore_now())
except Exception as err:
raise ValueError(
"image digest must already exist to force re-analyze using tag+digest"
)
elif not ts:
# If a new analysis of an image by digest + tag, we need a timestamp to insert into the tag history
# properly. Therefore, if no timestamp is provided, we use the current time
ts = anchore_now()
else:
raise ValueError(
"The source property must have at least one of tag, digest, or archive set to non-null"
)
image_record = client.add_image(
tag=tag,
digest=digest,
dockerfile=dockerfile,
annotations=annotations,
created_at=ts,
from_archive=is_from_archive,
allow_dockerfile_update=force,
)
# finally, do any state updates and return
if image_record:
imageDigest = image_record["imageDigest"]
logger.debug("added image: " + str(imageDigest))
initialize_subscriptions(client, image_record, enable_subscriptions)
imageDigest = image_record["imageDigest"]
# set the state of the image appropriately
currstate = image_record["analysis_status"]
if not currstate:
newstate = taskstate.init_state("analyze", None)
elif force or currstate == taskstate.fault_state("analyze"):
newstate = taskstate.reset_state("analyze")
elif image_record["image_status"] != taskstate.base_state("image_status"):
newstate = taskstate.reset_state("analyze")
else:
newstate = currstate
if (currstate != newstate) or (force):
logger.debug(
"state change detected: " + str(currstate) + " : " + str(newstate)
)
image_record.update(
{
"image_status": taskstate.reset_state("image_status"),
"analysis_status": newstate,
}
)
updated_image_record = client.update_image(imageDigest, image_record)
if updated_image_record:
image_record = updated_image_record[0]
else:
logger.debug(
"no state change detected: "
+ str(currstate)
+ " : "
+ str(newstate)
)
return [make_response_image(image_record, include_detail=True)]
except Exception as err:
logger.debug("operation exception: " + str(err))
raise err
def initialize_subscriptions(
catalog_client: CatalogClient, image_record, enable_subscriptions=None
):
"""
Setup the subscriptions for an image record
:param image_record:
:param enable_subscriptions:
:return:
"""
for image_detail in image_record["image_detail"]:
fulltag = (
image_detail["registry"]
+ "/"
+ image_detail["repo"]
+ ":"
+ image_detail["tag"]
)
foundtypes = []
try:
subscription_records = catalog_client.get_subscription(
subscription_key=fulltag
)
except Exception as err:
subscription_records = []
for subscription_record in subscription_records:
if subscription_record["subscription_key"] == fulltag:
foundtypes.append(subscription_record["subscription_type"])
sub_types = anchore_engine.common.tag_subscription_types
for sub_type in sub_types:
if sub_type not in foundtypes:
try:
default_active = False
if enable_subscriptions and sub_type in enable_subscriptions:
logger.debug("auto-subscribing image: " + str(sub_type))
default_active = True
catalog_client.add_subscription(
{
"active": default_active,
"subscription_type": sub_type,
"subscription_key": fulltag,
}
)
except:
try:
catalog_client.update_subscription(
{
"subscription_type": sub_type,
"subscription_key": fulltag,
}
)
except:
pass
else:
if enable_subscriptions and sub_type in enable_subscriptions:
catalog_client.update_subscription(
{
"active": True,
"subscription_type": sub_type,
"subscription_key": fulltag,
}
)
def next_analysis_state(image_record, force=False):
"""
Return the next state for the image record to transition to
:param currstate:
:param force:
:return:
"""
currstate = image_record["analysis_status"]
if not currstate:
newstate = taskstate.init_state("analyze", None)
elif force or currstate == taskstate.fault_state("analyze"):
newstate = taskstate.reset_state("analyze")
elif image_record["image_status"] != taskstate.base_state("image_status"):
newstate = taskstate.reset_state("analyze")
else:
newstate = currstate
return newstate
def update_image_status(
catalog_client: CatalogClient, image_record, to_status: str, force=False
) -> dict:
"""
Update the image status to the requested new status, idempotently
    If the transition is not valid, a ConflictingRequest exception is raised
:param image_record:
:param to_status:
:param force: bool to force the transition if the state machine doesn't already support it (e.g. re-analyze requested by user)
:return:
"""
analysis_status = image_record["analysis_status"]
next_status = next_analysis_state(image_record, force=force)
# Code carried over from previous impl. Not sure if this has any effect if force=True but the states are the same
    # The only thing may be annotation updates etc. that force the body to update even though the status is the same
# That needs to be fixed to use another route or PUT/PATCH explicitly rather than another POST
if next_status != analysis_status or force:
logger.debug(
"state change detected: " + str(analysis_status) + " : " + str(next_status)
)
image_record.update(
{
"image_status": taskstate.reset_state("image_status"),
"analysis_status": next_status,
}
)
        # Yes, this returns an array; that needs fixing, but it is always an array of size 1
updated_image_records = catalog_client.update_image(
image_record["imageDigest"], image_record
)
if updated_image_records:
image_record = updated_image_records[0]
else:
raise Exception("no response found from image update API call to catalog")
else:
logger.debug(
"no state change detected: "
+ str(analysis_status)
+ " : "
+ str(next_status)
)
return image_record
def images_imageDigest(request_inputs, imageDigest):
user_auth = request_inputs["auth"]
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs.get("params", {})
return_object = {}
httpcode = 500
username, pw = user_auth
userId = request_inputs["userId"]
try:
client = internal_client_for(CatalogClient, request_inputs["userId"])
if method == "GET":
logger.debug("handling GET on imageDigest: " + str(imageDigest))
image_record = client.get_image(imageDigest)
if image_record:
if "detail" in params and not params.get("detail"):
detail = False
else:
detail = True
return_object = [
make_response_image(image_record, include_detail=detail)
]
httpcode = 200
else:
httpcode = 404
raise Exception("cannot locate specified image")
elif method == "DELETE":
logger.debug("handling DELETE on imageDigest: " + str(imageDigest))
rc = False
try:
rc = client.delete_image(imageDigest, force=params["force"])
except Exception as err:
raise err
if rc:
return_object = rc
httpcode = 200
else:
httpcode = 500
raise Exception("failed to delete")
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
def images_check_impl(request_inputs, image_records):
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs["params"]
return_object = []
httpcode = 500
userId = request_inputs["userId"]
try:
client = internal_client_for(CatalogClient, request_inputs["userId"])
if "policyId" in params and params["policyId"]:
bundle_records = client.get_policy(policyId=params["policyId"])
policyId = params["policyId"]
else:
bundle_records = client.get_active_policy()
policyId = None
if not bundle_records:
httpcode = 404
raise Exception("user has no active policy to evaluate: " + str(userId))
        # this is to check that we got at least one evaluation in the response; otherwise the routine should throw a 404
atleastone = False
if image_records:
for image_record in image_records:
imageDigest = image_record["imageDigest"]
return_object_el = {}
return_object_el[imageDigest] = {}
tags = []
if params and "tag" in params and params["tag"]:
image_info = anchore_engine.common.images.get_image_info(
userId,
"docker",
params["tag"],
registry_lookup=False,
registry_creds=[],
)
if "fulltag" in image_info and image_info["fulltag"]:
params["tag"] = image_info["fulltag"]
tags.append(params["tag"])
else:
for image_detail in image_record["image_detail"]:
fulltag = (
image_detail["registry"]
+ "/"
+ image_detail["repo"]
+ ":"
+ image_detail["tag"]
)
tags.append(fulltag)
for tag in tags:
if tag not in return_object_el[imageDigest]:
return_object_el[imageDigest][tag] = []
try:
if params and params.get("history", False):
results = client.get_evals(
imageDigest=imageDigest, tag=tag, policyId=policyId
)
elif params and params.get("interactive", False):
results = [
client.get_eval_interactive(
imageDigest=imageDigest, tag=tag, policyId=policyId
)
]
else:
results = [
client.get_eval_latest(
imageDigest=imageDigest, tag=tag, policyId=policyId
)
]
except Exception as err:
results = []
httpcode = 200
for result in results:
fresult = make_response_policyeval(result, params, client)
return_object_el[imageDigest][tag].append(fresult[tag])
atleastone = True
if return_object_el:
return_object.append(return_object_el)
else:
httpcode = 404
raise Exception("could not find image record(s) input imageDigest(s)")
if not atleastone:
httpcode = 404
raise Exception("could not find any evaluations for input images")
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
def images_imageDigest_check(request_inputs, imageDigest):
user_auth = request_inputs["auth"]
method = request_inputs["method"]
bodycontent = request_inputs["bodycontent"]
params = request_inputs["params"]
return_object = {}
httpcode = 500
username, pw = user_auth
userId = request_inputs["userId"]
try:
client = internal_client_for(CatalogClient, request_inputs["userId"])
image_record = client.get_image(imageDigest)
if image_record and image_record["analysis_status"] != taskstate.complete_state(
"analyze"
):
httpcode = 404
raise Exception(
"image is not analyzed - analysis_status: "
+ str(image_record["analysis_status"])
)
# Use a list of records here for backwards compat of api
return_object, httpcode = images_check_impl(request_inputs, [image_record])
except Exception as err:
logger.debug("operation exception: " + str(err))
return_object = make_response_error(err, in_httpcode=httpcode)
httpcode = return_object["httpcode"]
return return_object, httpcode
def _get_image_ok(account, imageDigest):
"""
Get the image id if the image exists and is analyzed, else raise error
:param account:
:param imageDigest:
:return:
"""
catalog_client = internal_client_for(CatalogClient, account)
image_report = catalog_client.get_image(imageDigest)
if image_report and image_report["analysis_status"] != taskstate.complete_state(
"analyze"
):
raise api_exceptions.ResourceNotFound(
"artifacts",
detail={
"details": "image is not analyzed - analysis_status: "
+ image_report["analysis_status"]
},
)
elif not image_report:
raise api_exceptions.ResourceNotFound(imageDigest, detail={})
image_detail = image_report["image_detail"][0]
imageId = image_detail["imageId"]
return imageId
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_retrieved_files(imageDigest):
"""
GET /images/{imageDigest}/artifacts/retrieved_files
:param imageDigest:
:param artifactType:
:return:
"""
account = ApiRequestContextProxy.namespace()
try:
imageId = _get_image_ok(account, imageDigest)
client = internal_client_for(PolicyEngineClient, account)
resp = client.list_image_analysis_artifacts(
user_id=account, image_id=imageId, artifact_type="retrieved_files"
)
return resp, 200
except api_exceptions.AnchoreApiError:
raise
except Exception as err:
raise api_exceptions.InternalError(str(err), detail={})
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_file_content_search_results(imageDigest):
"""
GET /images/{imageDigest}/artifacts/file_content_search
:param imageDigest:
:param artifactType:
:return:
"""
account = ApiRequestContextProxy.namespace()
try:
imageId = _get_image_ok(account, imageDigest)
client = internal_client_for(PolicyEngineClient, account)
resp = client.list_image_analysis_artifacts(
user_id=account, image_id=imageId, artifact_type="file_content_search"
)
return resp, 200
except api_exceptions.AnchoreApiError:
raise
except Exception as err:
raise api_exceptions.InternalError(str(err), detail={})
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_secret_search_results(imageDigest):
"""
GET /images/{imageDigest}/artifacts/secret_search
:param imageDigest:
:param artifactType:
:return:
"""
account = ApiRequestContextProxy.namespace()
try:
imageId = _get_image_ok(account, imageDigest)
client = internal_client_for(PolicyEngineClient, account)
resp = client.list_image_analysis_artifacts(
user_id=account, image_id=imageId, artifact_type="secret_search"
)
return resp, 200
except api_exceptions.AnchoreApiError:
raise
except Exception as err:
raise api_exceptions.InternalError(str(err), detail={})
| apache-2.0 | 974,177,585,576,685,700 | 33.255425 | 169 | 0.59326 | false |
bradyt/taskdsetup | taskdsetup/client.py | 1 | 1148 |
import os
import shutil
import subprocess
from .core import (ensure_dir, canonicalize)
def main(data, server, port, data_orgs_dict):
task_dir = os.path.expanduser('~/.task')
ensure_dir(task_dir)
d = data_orgs_dict
for org in d:
for uuid in d[org]:
full_name = d[org][uuid]
user_name = canonicalize(full_name)
for cert in [ user_name + '.cert.pem',
user_name + '.key.pem',
'ca.cert.pem' ]:
shutil.copy(os.path.join(data, 'pki', cert), task_dir)
for setting in [ [ 'taskd.certificate', '--', '~/.task/' + user_name + '.cert.pem' ],
[ 'taskd.key', '--', '~/.task/' + user_name + '.key.pem' ],
[ 'taskd.ca', '--', '~/.task/ca.cert.pem' ],
[ 'taskd.server', '--', server + ':' + port ],
[ 'taskd.credentials', '--', org + '/' + full_name + '/' + uuid ] ]:
subprocess.run(['task', 'rc.confirmation=0', 'config'] + setting)
| gpl-3.0 | -2,863,920,050,727,676,000 | 46.833333 | 99 | 0.436411 | false |
openstack/python-heatclient | heatclient/common/utils.py | 1 | 14093 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import logging
import os
import textwrap
import uuid
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import prettytable
import six
from six.moves.urllib import error
from six.moves.urllib import parse
from six.moves.urllib import request
import yaml
from heatclient._i18n import _
from heatclient import exc
LOG = logging.getLogger(__name__)
supported_formats = {
"json": lambda x: jsonutils.dumps(x, indent=2),
"yaml": yaml.safe_dump
}
def arg(*args, **kwargs):
"""Decorator for CLI args.
Example:
>>> @arg("name", help="Name of the new entity")
... def entity_create(args):
... pass
"""
def _decorator(func):
add_arg(func, *args, **kwargs)
return func
return _decorator
def env(*args, **kwargs):
"""Returns the first environment variable set.
If all are empty, defaults to '' or keyword arg `default`.
"""
for arg in args:
value = os.environ.get(arg)
if value:
return value
return kwargs.get('default', '')
def add_arg(func, *args, **kwargs):
"""Bind CLI arguments to a shell.py `do_foo` function."""
if not hasattr(func, 'arguments'):
func.arguments = []
# NOTE(sirp): avoid dups that can occur when the module is shared across
# tests.
if (args, kwargs) not in func.arguments:
# Because of the semantics of decorator composition if we just append
# to the options list positional options will appear to be backwards.
func.arguments.insert(0, (args, kwargs))
def print_list(objs, fields, formatters=None, sortby_index=0,
mixed_case_fields=None, field_labels=None):
"""Print a list of objects as a table, one row per object.
:param objs: iterable of :class:`Resource`
:param fields: attributes that correspond to columns, in order
:param formatters: `dict` of callables for field formatting
:param sortby_index: index of the field for sorting table rows
:param mixed_case_fields: fields corresponding to object attributes that
have mixed case names (e.g., 'serverId')
:param field_labels: Labels to use in the heading of the table, default to
fields.
"""
formatters = formatters or {}
mixed_case_fields = mixed_case_fields or []
field_labels = field_labels or fields
if len(field_labels) != len(fields):
raise ValueError(_("Field labels list %(labels)s has different number "
"of elements than fields list %(fields)s"),
{'labels': field_labels, 'fields': fields})
if sortby_index is None:
kwargs = {}
else:
kwargs = {'sortby': field_labels[sortby_index]}
pt = prettytable.PrettyTable(field_labels)
pt.align = 'l'
for o in objs:
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](o))
else:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
data = getattr(o, field_name, '')
row.append(data)
pt.add_row(row)
if six.PY3:
print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
else:
print(encodeutils.safe_encode(pt.get_string(**kwargs)))
def link_formatter(links):
def format_link(l):
if 'rel' in l:
return "%s (%s)" % (l.get('href', ''), l.get('rel', ''))
else:
return "%s" % (l.get('href', ''))
return '\n'.join(format_link(l) for l in links or [])
def resource_nested_identifier(rsrc):
nested_link = [l for l in rsrc.links or []
if l.get('rel') == 'nested']
if nested_link:
nested_href = nested_link[0].get('href')
nested_identifier = nested_href.split("/")[-2:]
return "/".join(nested_identifier)
def json_formatter(js):
return jsonutils.dumps(js, indent=2, ensure_ascii=False,
separators=(', ', ': '))
def yaml_formatter(js):
return yaml.safe_dump(js, default_flow_style=False)
def text_wrap_formatter(d):
return '\n'.join(textwrap.wrap(d or '', 55))
def newline_list_formatter(r):
return '\n'.join(r or [])
def print_dict(d, formatters=None):
formatters = formatters or {}
pt = prettytable.PrettyTable(['Property', 'Value'],
caching=False, print_empty=False)
pt.align = 'l'
for field in d:
if field in formatters:
pt.add_row([field, formatters[field](d[field])])
else:
pt.add_row([field, d[field]])
print(pt.get_string(sortby='Property'))
class EventLogContext(object):
def __init__(self):
# key is a stack id or the name of the nested stack, value is a tuple
# of the parent stack id, and the name of the resource in the parent
# stack
self.id_to_res_info = {}
def prepend_paths(self, resource_path, stack_id):
if stack_id not in self.id_to_res_info:
return
stack_id, res_name = self.id_to_res_info.get(stack_id)
if res_name in self.id_to_res_info:
# do a double lookup to skip the ugly stack name that doesn't
# correspond to an actual resource name
n_stack_id, res_name = self.id_to_res_info.get(res_name)
resource_path.insert(0, res_name)
self.prepend_paths(resource_path, n_stack_id)
elif res_name:
resource_path.insert(0, res_name)
def build_resource_name(self, event):
res_name = getattr(event, 'resource_name')
# Contribute this event to self.id_to_res_info to assist with
# future calls to build_resource_name
def get_stack_id():
if getattr(event, 'stack_id', None) is not None:
return event.stack_id
for l in getattr(event, 'links', []):
if l.get('rel') == 'stack':
if 'href' not in l:
return None
stack_link = l['href']
return stack_link.split('/')[-1]
stack_id = get_stack_id()
if not stack_id:
return res_name
phys_id = getattr(event, 'physical_resource_id', None)
status = getattr(event, 'resource_status', None)
is_stack_event = stack_id == phys_id
if is_stack_event:
# this is an event for a stack
self.id_to_res_info[stack_id] = (stack_id, res_name)
elif phys_id and status == 'CREATE_IN_PROGRESS':
# this might be an event for a resource which creates a stack
self.id_to_res_info[phys_id] = (stack_id, res_name)
# Now build this resource path based on previous calls to
# build_resource_name
resource_path = []
if res_name and not is_stack_event:
resource_path.append(res_name)
self.prepend_paths(resource_path, stack_id)
return '.'.join(resource_path)
def event_log_formatter(events, event_log_context=None):
"""Return the events in log format."""
event_log = []
log_format = ("%(event_time)s "
"[%(rsrc_name)s]: %(rsrc_status)s %(rsrc_status_reason)s")
# It is preferable for a context to be passed in, but there might be enough
# events in this call to build a better resource name, so create a context
# anyway
if event_log_context is None:
event_log_context = EventLogContext()
for event in events:
rsrc_name = event_log_context.build_resource_name(event)
event_time = getattr(event, 'event_time', '')
log = log_format % {
'event_time': event_time.replace('T', ' '),
'rsrc_name': rsrc_name,
'rsrc_status': getattr(event, 'resource_status', ''),
'rsrc_status_reason': getattr(event, 'resource_status_reason', '')
}
event_log.append(log)
return "\n".join(event_log)
def print_update_list(lst, fields, formatters=None):
"""Print the stack-update --dry-run output as a table.
This function is necessary to print the stack-update --dry-run
output, which contains additional information about the update.
"""
formatters = formatters or {}
pt = prettytable.PrettyTable(fields, caching=False, print_empty=False)
pt.align = 'l'
for change in lst:
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](change.get(field, None)))
else:
row.append(change.get(field, None))
pt.add_row(row)
if six.PY3:
print(encodeutils.safe_encode(pt.get_string()).decode())
else:
print(encodeutils.safe_encode(pt.get_string()))
def find_resource(manager, name_or_id):
"""Helper for the _find_* methods."""
# first try to get entity as integer id
try:
if isinstance(name_or_id, int) or name_or_id.isdigit():
return manager.get(int(name_or_id))
except exc.NotFound:
pass
# now try to get entity as uuid
try:
uuid.UUID(str(name_or_id))
return manager.get(name_or_id)
except (ValueError, exc.NotFound):
pass
# finally try to find entity by name
try:
return manager.find(name=name_or_id)
except exc.NotFound:
msg = (
_("No %(name)s with a name or ID of "
"'%(name_or_id)s' exists.")
% {
'name': manager.resource_class.__name__.lower(),
'name_or_id': name_or_id
})
raise exc.CommandError(msg)
def format_parameters(params, parse_semicolon=True):
'''Reformat parameters into dict of format expected by the API.'''
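    # Behaviour sketch added for clarity (not in the original source): a single
    # value may carry several pairs separated by ";", and repeated keys are
    # collected into a list. Assumed example inputs:
    #   format_parameters(['a=1;b=2'])    -> {'a': '1', 'b': '2'}
    #   format_parameters(['x=1', 'x=2']) -> {'x': ['1', '2']}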
if not params:
return {}
if parse_semicolon:
# expect multiple invocations of --parameters but fall back
# to ; delimited if only one --parameters is specified
if len(params) == 1:
params = params[0].split(';')
parameters = {}
for p in params:
try:
(n, v) = p.split(('='), 1)
except ValueError:
msg = _('Malformed parameter(%s). Use the key=value format.') % p
raise exc.CommandError(msg)
if n not in parameters:
parameters[n] = v
else:
if not isinstance(parameters[n], list):
parameters[n] = [parameters[n]]
parameters[n].append(v)
return parameters
def format_all_parameters(params, param_files,
template_file=None, template_url=None):
parameters = {}
parameters.update(format_parameters(params))
parameters.update(format_parameter_file(
param_files,
template_file,
template_url))
return parameters
def format_parameter_file(param_files, template_file=None,
template_url=None):
'''Reformat file parameters into dict of format expected by the API.'''
if not param_files:
return {}
params = format_parameters(param_files, False)
template_base_url = None
if template_file or template_url:
template_base_url = base_url_for_url(get_template_url(
template_file, template_url))
param_file = {}
for key, value in params.items():
param_file[key] = resolve_param_get_file(value,
template_base_url)
return param_file
def resolve_param_get_file(file, base_url):
if base_url and not base_url.endswith('/'):
base_url = base_url + '/'
str_url = parse.urljoin(base_url, file)
return read_url_content(str_url)
def format_output(output, format='yaml'):
"""Format the supplied dict as specified."""
output_format = format.lower()
try:
return supported_formats[output_format](output)
except KeyError:
raise exc.HTTPUnsupported(_("The format(%s) is unsupported.")
% output_format)
def parse_query_url(url):
base_url, query_params = url.split('?')
return base_url, parse.parse_qs(query_params)
def get_template_url(template_file=None, template_url=None):
if template_file:
template_url = normalise_file_path_to_url(template_file)
return template_url
def read_url_content(url):
try:
content = request.urlopen(url).read()
except error.URLError:
raise exc.CommandError(_('Could not fetch contents for %s') % url)
if content:
try:
content.decode('utf-8')
except ValueError:
content = base64.encodebytes(content)
return content
def base_url_for_url(url):
parsed = parse.urlparse(url)
parsed_dir = os.path.dirname(parsed.path)
return parse.urljoin(url, parsed_dir)
def normalise_file_path_to_url(path):
if parse.urlparse(path).scheme:
return path
path = os.path.abspath(path)
return parse.urljoin('file:', request.pathname2url(path))
def get_response_body(resp):
body = resp.content
if 'application/json' in resp.headers.get('content-type', ''):
try:
body = resp.json()
except ValueError:
LOG.error('Could not decode response body as JSON')
else:
body = None
return body
| apache-2.0 | 5,589,027,641,212,562,000 | 30.387528 | 79 | 0.598027 | false |
DSP-sleep/CUBIC-Atlas | cell_mapping.py | 1 | 19899 | # cell_mapping.py annotates the detected cells by registering
# the sample brain to the CUBIC-Atlas.
# Please see the help.txt file for details.
#
# Copyright (C) 2017, Tatsuya C. Murakami
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import csv
from time import localtime, strftime
import numpy as np
import math
import json
import sys
import shutil
from subprocess import call
import cv2
def readdist(xyzfilename_list,z_size,y_size,x_size):
mv_img = np.zeros((3,int(z_size), int(y_size), int(x_size)))
xyz_counter = 0
for filename in xyzfilename_list:
f = open(filename, "r")
flag = 0
slice_num = 0
for row in f:
if flag == int(y_size):
flag = 0
slice_num += 1
continue
if slice_num == int(z_size):
break
row_list = row.split()
row_list.reverse()
mv_img[xyz_counter,slice_num,flag,:] = np.array(row_list) #mv_img[z, y, x] order
flag += 1
f.close()
xyz_counter += 1
return mv_img
def affinematrix_calc(filename):
affine = np.zeros((4,4))
f = open(filename, 'r')
affine_file = f.read()
f.close()
lines = affine_file.split('\n')
Parameter = lines[3].split(' ')
FixedParameter = lines[4].split(' ')
matrix = [[float(Parameter[1]),float(Parameter[2]),float(Parameter[3])],[float(Parameter[4]),float(Parameter[5]),float(Parameter[6])],[float(Parameter[7]),float(Parameter[8]),float(Parameter[9])]]
translation = [float(Parameter[10]),float(Parameter[11]),float(Parameter[12])]
center = [float(FixedParameter[1]),float(FixedParameter[2]),float(FixedParameter[3])]
offset = [0, 0, 0]
for i in range(0,3):
offset[i] = translation[i] + center[i]
for j in range(0,3):
offset[i] -= matrix[i][j] * center[j]
inv_matrix = np.array([[float(Parameter[1]),float(Parameter[2]),float(Parameter[3]),float(offset[0])],[float(Parameter[4]),float(Parameter[5]),float(Parameter[6]),float(offset[1])],[float(Parameter[7]),float(Parameter[8]),float(Parameter[9]),float(offset[2])],[0,0,0,1]])
affine_matrix = np.linalg.inv(inv_matrix)
affine = affine_matrix
return affine
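# Note added for clarity (an assumption, not from the original authors): the
# parsing above expects the plain-text ITK/ANTS affine transform layout, where
# the fourth line carries "Parameters:" with 9 matrix terms plus 3 translation
# terms and the fifth line carries "FixedParameters:" with the 3 center-of-
# rotation coordinates, e.g.:
#
#   #Insight Transform File V1.0
#   #Transform 0
#   Transform: AffineTransform_double_3_3
#   Parameters: 1 0 0 0 1 0 0 0 1 0 0 0
#   FixedParameters: 0 0 0
#
# The matrix and offset are then composed and the inverse transform is returned.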
def ANTSregistration(iterationL,iterationM,iterationN,output_path,atlas_img,compress_img_path):
cmd = "ANTS 3 -i " + str(iterationL) + "x" + str(iterationM) + "x" + str(iterationN) + \
" -o " + output_path + "registered.nii --MI-option 64x300000 " + \
"-m CC[" + atlas_img + "," + compress_img_path + ",1,5]"
call([cmd], shell=True)
cmd = "WarpImageMultiTransform 3 " + compress_img_path + " " + output_path + "registered.tif -R " + atlas_img + " " + output_path + "registeredWarp.nii " + output_path + "registeredAffine.txt"
call([cmd], shell=True)
cmd = "fsl2ascii " + output_path + "registeredInverseWarp.nii " + output_path + "registeredInverseWarp.txt"
call([cmd], shell=True)
affine_path = output_path + "registeredAffine.txt"
moving_path = output_path + "registeredInverseWarp.txt"
moving_list = [moving_path + "00000",moving_path + "00001",moving_path + "00002"]
return [affine_path,moving_list]
def atlasmapping(output_path,csv_path,coordinate_info,affine,mv_img,x_size,y_size,z_size,compression_x,compression_y,compression_z):
output_csv_name = output_path + "registered_points.csv"
with open(output_csv_name,'a') as f_write:
with open(csv_path, 'r') as f_read:
reader = csv.reader(f_read)
headers = reader.next()
x_index = headers.index(coordinate_info[0])
y_index = headers.index(coordinate_info[1])
z_index = headers.index(coordinate_info[2])
counter = 0
for k in reader:
counter += 1
x = float(k[x_index]) / compression_x
y = float(k[y_index]) / compression_y
z = float(k[z_index]) / compression_z
RX = x * affine[0][0] + y * affine[0][1] + z * affine[0][2] + affine[0][3]
RY = x * affine[1][0] + y * affine[1][1] + z * affine[1][2] + affine[1][3]
RZ = x * affine[2][0] + y * affine[2][1] + z * affine[2][2] + affine[2][3]
X = int(RX)
Y = int(RY)
Z = int(RZ)
if RX >= 0 and X + 1 < int(x_size) and RY >= 0 and Y + 1 < int(y_size) and RZ >= 0 and Z + 1 < int(z_size):
#following seems complicated, but it calculates (linear interpolation of each point with mv_img)
SyN_x = RX + (mv_img[0,Z,Y,X] * (1-(RX - X)) + mv_img[0,Z,Y,X+1] * (RX - X))
SyN_y = RY + (mv_img[1,Z,Y,X] * (1-(RY - Y)) + mv_img[1,Z,Y+1,X] * (RY - Y))
SyN_z = RZ + (mv_img[2,Z,Y,X] * (1-(RZ - Z)) + mv_img[2,Z+1,Y,X] * (RZ - Z))
writer = csv.writer(f_write, lineterminator='\n')
writer.writerow([round(SyN_x,5),round(SyN_y,5),round(SyN_z,5),counter])
def id_finder(indict,id_number):
if isinstance(indict,dict):
for key, value in indict.items():
if isinstance(value, list):
#print "yes"
if value == []:
pass
else:
for d in value:
try:
return id_finder(d,id_number)
except ValueError:
pass
elif key == 'id':
if value == id_number:
return [indict['name'], indict['parent_structure_id']]
#return indict['parent_structure_id']
raise ValueError("Request file not found")
def color_finder(indict,id_number):
if isinstance(indict,dict):
for key, value in indict.items():
if isinstance(value, list):
if value == []:
pass
else:
for d in value:
try:
return color_finder(d,id_number)
except ValueError:
pass
elif key == 'id':
if value == id_number:
return indict['color_hex_triplet']
raise ValueError("Request file not found")
def annotation(output_path,array_x,array_y,array_z,whole_list):
id_dic = {}
input_csv = output_path + "registered_points.csv"
with open(input_csv, 'r') as f_read:
reader = csv.reader(f_read)
for k in reader:
x = int(float(k[0]))
y = int(float(k[1]))
z = int(float(k[2]))
if x >= 0 and x < array_x and y >= 0 and y < array_y and z >= 0 and z < array_z:
number = (z - 1) * array_x * array_y + (y - 1) * array_x + x
cell_list_array = whole_list[number]
zero_confirmation = cell_list_array.size
if zero_confirmation != 0:
row_num, column_num = cell_list_array.shape
my_pos = np.array([float(k[0]),float(k[1]),float(k[2])])
dist_temp_array = np.subtract(cell_list_array[:,0:3],my_pos)
dist_temp_array = np.square(dist_temp_array)
dist_array = np.sum(dist_temp_array, axis=1)
min_index = np.argmin(dist_array)
atlas_id = int(cell_list_array[min_index,3])
id_dic[int(k[3])] = atlas_id
else:
atlas_id = 0
id_dic[int(k[3])] = atlas_id
else:
atlas_id = 0
id_dic[int(k[3])] = atlas_id
return id_dic
def mergecsv(output_path,coordinate_info,csv_path,id_dic,additional_info):
annotated_csv = output_path + "result.csv"
with open(annotated_csv,'a') as f_write:
with open(csv_path, 'r') as f_read:
reader = csv.reader(f_read)
headers = reader.next()
x_index = headers.index(coordinate_info[0])
y_index = headers.index(coordinate_info[1])
z_index = headers.index(coordinate_info[2])
counter = 0
additional_length = len(additional_info)
additional_idx_list = []
for addition in additional_info:
additional_idx = headers.index(addition)
additional_idx_list.append(additional_idx)
for k in reader:
counter += 1
x = float(k[x_index])
y = float(k[y_index])
z = float(k[z_index])
additionals = []
for addition in additional_idx_list:
additionals.append(float(k[addition]))
if counter in id_dic:
allocated_atlas_id = id_dic[counter]
else:
allocated_atlas_id = 0
writer = csv.writer(f_write, lineterminator='\n')
out = [x,y,z,allocated_atlas_id]
out.extend(additionals)
writer.writerow(out)
def count_number(output_path,additional_info):
annotated_csv = output_path + "result.csv"
output_csv = output_path + "regional_result.csv"
count_array = np.genfromtxt(annotated_csv,delimiter = ",")
ID_count_array = count_array[:,3]
(A, B) = count_array.shape
additional_num = len(additional_info)
ids, counts = np.unique(ID_count_array,return_counts = True)
ids = ids.tolist()
counts = counts.tolist()
counter_dict = {}
for (i, ID) in enumerate(ids):
additional_arr = np.array([])
additional_arr = count_array[count_array[:,3]==ID,4:B]
average_arr = additional_arr.mean(axis=0)
average_list = average_arr.tolist()
SD_arr = additional_arr.std(axis=0)
SD_list = SD_arr.tolist()
out = [counts[i]]
out.extend(average_list)
out.extend(SD_list)
counter_dict[ID] = out
with open(output_csv,'a') as f_write:
header = ['parent_structure','atlas_ID','Name','counts']
for info in additional_info:
header.append('average_' + info)
for info in additional_info:
header.append('SD_' + info)
writer = csv.writer(f_write, lineterminator='\n')
writer.writerow(header)
for child_id in unique_list:
if child_id == 0:
row = [[],0.0,"None"]
row.extend(counter_dict[child_id])
else:
row = []
parent_id_list = []
[name, parent] = id_finder(structure['msg'][0],child_id)
while parent != None:
parent_id_list.append(int(parent))
[parent_name, parent] = id_finder(structure['msg'][0],parent)
row.append(parent_id_list[::-1])
row.append(child_id)
row.append(name)
if child_id in ids:
row.extend(counter_dict[child_id])
else:
row.append(0)
row.extend([0]*(additional_num*2))
writer = csv.writer(f_write, lineterminator='\n')
writer.writerow(row)
def image_out(output_path,compress_x,compress_y,compress_z):
if not os.path.isdir(output_path + "annotated_points"):
os.mkdir(output_path + "annotated_points")
image_output_path = output_path + "annotated_points/"
compress_x = int(compress_x)
compress_y = int(compress_y)
compress_z = int(compress_z)
image_array = np.zeros((compress_z, compress_y, compress_x, 3),dtype = np.uint8)
with open(output_path + "result.csv", 'r') as f_read:
reader = csv.reader(f_read)
for k in reader:
z = int(float(k[2])/compression_z)
y = int(float(k[1])/compression_y)
x = int(float(k[0])/compression_x)
atlas_id = int(float(k[3]))
if z >= 0 and z < compress_z and y >= 0 and y < compress_y and x >= 0 and x < compress_x:
if atlas_id == 0:
BGR = "000000"
else:
BGR = str(color_finder(structure['msg'][0],int(float(k[3]))))
image_array[z, y, x, 0] = int(BGR[0:2],16)
image_array[z, y, x, 1] = int(BGR[2:4],16)
image_array[z, y, x, 2] = int(BGR[4:6],16)
for k in range(0, compress_z):
str_k = str(k)
while(len(str_k) < 5):
str_k = "0" + str_k
filename = image_output_path + "image" + str_k + ".tif"
cv2.imwrite(filename, image_array[k,:,:,:])
def cleanup(output_path):
reg_output_path = output_path + "registration_information"
if not os.path.isdir(reg_output_path):
os.mkdir(reg_output_path)
reg_output_path = output_path + "registration_information/"
shutil.move(output_path + 'registeredAffine.txt',reg_output_path)
shutil.move(output_path + 'registeredInverseWarp.nii',reg_output_path)
shutil.move(output_path + 'registeredWarp.nii',reg_output_path)
shutil.move(output_path + 'registeredInverseWarp.txt00000',reg_output_path)
shutil.move(output_path + 'registeredInverseWarp.txt00001',reg_output_path)
shutil.move(output_path + 'registeredInverseWarp.txt00002',reg_output_path)
shutil.move(output_path + 'registered_points.csv',reg_output_path)
if len(sys.argv)!=2:
print "\nUsage: "+sys.argv[0]+" <parameter_file>"
quit()
# Reading the parameters
parameter_file = open(sys.argv[1],'r')
parameters = []
for line in parameter_file:
if line[0] == "#":
continue
parameters.append(line.rstrip())
parameter_file.close()
# Processing the parameters
output_path_parent = parameters[0]
atlas_folder = parameters[1]
size_info = parameters[2]
iterationL = parameters[3]
iterationM = parameters[4]
iterationN = parameters[5]
compress_img_path_parent = parameters[6]
csv_path_parent = parameters[7]
coordinate_info = [parameters[8],parameters[9],parameters[10]]
additional_info = []
for i in range(11,len(parameters)):
additional_info.append(parameters[i])
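# Example parameter file (a hedged reconstruction from the parsing code above,
# not an official template; all paths and column names are placeholders).
# Lines starting with "#" are skipped and values are read in this order:
#
#   /path/to/output_dir           <- output path
#   /path/to/CUBIC-Atlas          <- atlas folder
#   /path/to/size_info.csv        <- sample size information
#   100                           <- ANTS iteration L
#   100                           <- ANTS iteration M
#   10                            <- ANTS iteration N
#   /path/to/compressed_images    <- compressed nucleus-stained images (<sample>.tif)
#   /path/to/cell_csvs            <- detected-cell tables (<sample>.csv)
#   X                             <- column name of the x coordinate
#   Y                             <- column name of the y coordinate
#   Z                             <- column name of the z coordinate
#   intensity                     <- optional additional column(s), one per line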
# Checking the parameters
print "The method will: "
print " - save resulting data to "+output_path_parent
print " - read atlas from "+atlas_folder
print " - read size information from : " + size_info
print "\nANTS registration iteration: "
print iterationL,iterationM,iterationN
print " - read compressed nucleus stained image from "+compress_img_path_parent
print " - read csv from "+csv_path_parent
print " - read coordinate information from following column, X: "+coordinate_info[0]+" Y: "+coordinate_info[1]+" Z: "+coordinate_info[2]
print " - read additional information "+str(additional_info)########## test
print " "
while 1:
feedback = raw_input("Is this correct? (yes/no)\t").rstrip()
if feedback == "yes":
print "Program starting...\n"
break
if feedback == "no":
print "Please edit the parameter file."
quit()
#Fixed inputs
atlas_folder = atlas_folder + "/"
x_size = 241.
y_size = 286.
z_size = 135.
atlas_img = atlas_folder + "reference120.tif"
atlas_csv = atlas_folder + "atlas120.csv"
allen_structure_path = atlas_folder + "structure_graph.json"
unique_id = atlas_folder + "uniqueID.csv"
#Make output folder
if not os.path.isdir(output_path_parent):
os.mkdir(output_path_parent)
log = output_path_parent + "/log_step2.txt"
log_file = open(log,'w')
output_message = strftime("%H:%M:%S", localtime())+": Preparing for calculation "
print output_message
log_file.write(output_message+"\n")
#Obtain sample information
sample_info = []
with open(size_info, 'r') as f_read:
reader = csv.reader(f_read)
for k in reader:
sample_info.append([k[0],float(k[1]),float(k[2]),float(k[3]),float(k[4]),float(k[5]),float(k[6])])
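# Hedged note on the size-information CSV (inferred from how sample_info is used
# below, not from original documentation): each row appears to hold
#   sample_name, original_x, original_y, original_z, compressed_x, compressed_y, compressed_z
# e.g. "brain01,2000,2400,1000,241,286,135", since columns 1-3 divided by
# columns 4-6 give the compression factors and columns 4-6 give the compressed
# image dimensions.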
#Read structure graph
f = open(allen_structure_path,'r')
structure = json.load(f)
f.close()
#Read unique ID in allen brain atlas annotation image
unique_array = np.genfromtxt(unique_id,delimiter = ",")
unique_list = unique_array.tolist()
#Read CUBIC-Atlas csv
array_x = int(x_size)
array_y = int(y_size)
array_z = int(z_size)
whole_list = [np.empty((0,4))] * (array_x*array_y*array_z)
with open(atlas_csv, 'r') as f_read:
reader = csv.reader(f_read)
for k in reader:
x = int(float(k[0]))
y = int(float(k[1]))
z = int(float(k[2]))
if x >= 0 and x < array_x and y >= 0 and y < array_y and z >= 0 and z < array_z:
number = (z - 1) * array_x * array_y + (y - 1) * array_x + x
temp_array = np.append(whole_list[number],np.array([[float(k[0]),float(k[1]),float(k[2]),int(k[3])]]), axis = 0)
whole_list[number] = temp_array
for sample in sample_info:
sample_name = sample[0]
output_message = sample_name
print output_message
log_file.write(output_message+"\n")
compression_x = sample[1] / sample[4]
compression_y = sample[2] / sample[5]
compression_z = sample[3] / sample[6]
compress_x = int(sample[4])
compress_y = int(sample[5])
compress_z = int(sample[6])
output_path = output_path_parent + "/" + sample_name
if not os.path.isdir(output_path):
os.mkdir(output_path)
output_path = output_path + "/"
csv_path = csv_path_parent + "/" + sample_name + ".csv"
compress_img_path = compress_img_path_parent + "/" + sample_name + ".tif"
#ANTS symmmetric normalization registration
output_message = strftime("%H:%M:%S", localtime())+": Registration to CUBIC-Atlas "
print output_message
log_file.write(output_message+"\n")
[affine_path,moving_list] = ANTSregistration(iterationL,iterationM,iterationN,output_path,atlas_img,compress_img_path)
#Application of deformation field to detected cells
output_message = strftime("%H:%M:%S", localtime())+": Application of deformation field to detected cells "
print output_message
log_file.write(output_message+"\n")
affine = affinematrix_calc(affine_path)
mv_img = readdist(moving_list,z_size,y_size,x_size)
atlasmapping(output_path,csv_path,coordinate_info,affine,mv_img,x_size,y_size,z_size, compression_x, compression_y, compression_z)
#Annotation to the detected cells
output_message = strftime("%H:%M:%S", localtime())+": Annotation "
print output_message
log_file.write(output_message+"\n")
id_dictionary = annotation(output_path,array_x,array_y,array_z,whole_list)
mergecsv(output_path,coordinate_info,csv_path,id_dictionary,additional_info)
    # Count cell number in each anatomical region
output_message = strftime("%H:%M:%S", localtime())+": Counting cell number in each anatomical region "
print output_message
log_file.write(output_message+"\n")
count_number(output_path,additional_info)
# Print image of annotated cells
output_message = strftime("%H:%M:%S", localtime())+": Image output "
print output_message
log_file.write(output_message+"\n")
image_out(output_path,compress_x,compress_y,compress_z)
output_message = strftime("%H:%M:%S", localtime())+": Done."
print output_message
log_file.write(output_message+"\n")
log_file.close()
| gpl-3.0 | -1,083,745,853,646,201,300 | 39.281377 | 275 | 0.582843 | false |
JointBox/jbaccess-server | jbaccess/jba_core/migrations/0001_initial.py | 1 | 4911 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-03 12:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='BaseACLEntry',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('type', models.IntegerField()),
],
),
migrations.CreateModel(
name='Controller',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('controller_id', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name='Door',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('access_id', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name='Key',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('access_key', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Place',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('doors', models.ManyToManyField(to='jba_core.Door')),
],
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='SimpleRecurringPattern',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('from_time', models.TimeField()),
('until_time', models.TimeField()),
('days_of_week', models.CharField(max_length=255)),
('days_of_month', models.CharField(max_length=255)),
('months', models.CharField(max_length=255)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PersonACLEntry',
fields=[
('baseaclentry_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='jba_core.BaseACLEntry')),
],
bases=('jba_core.baseaclentry',),
),
migrations.CreateModel(
name='RoleACLEntry',
fields=[
('baseaclentry_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='jba_core.BaseACLEntry')),
('role', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jba_core.Role')),
],
bases=('jba_core.baseaclentry',),
),
migrations.AddField(
model_name='simplerecurringpattern',
name='acl',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='jba_core.BaseACLEntry'),
),
migrations.AddField(
model_name='person',
name='roles',
field=models.ManyToManyField(to='jba_core.Role'),
),
migrations.AddField(
model_name='key',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jba_core.Person'),
),
migrations.AddField(
model_name='controller',
name='doors',
field=models.ManyToManyField(to='jba_core.Door'),
),
migrations.AddField(
model_name='baseaclentry',
name='place',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jba_core.Place'),
),
migrations.AddField(
model_name='personaclentry',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jba_core.Person'),
),
]
| gpl-3.0 | 390,449,312,591,841,100 | 37.069767 | 204 | 0.534922 | false |
erudit/eruditorg | eruditorg/erudit/migrations/0057_auto_20161123_1703.py | 1 | 1817 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-11-23 22:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("erudit", "0056_auto_20161111_1125"),
]
operations = [
migrations.CreateModel(
name="ArticleSubtitle",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
(
"title",
models.CharField(blank=True, max_length=600, null=True, verbose_name="Titre"),
),
(
"language",
models.CharField(
blank=True, max_length=10, null=True, verbose_name="Code langue"
),
),
("paral", models.BooleanField(default=False, verbose_name="Titre parallèle")),
(
"article",
models.ForeignKey(
on_delete=models.deletion.CASCADE,
related_name="subtitles",
to="erudit.Article",
verbose_name="Article",
),
),
],
options={
"verbose_name_plural": "Sous-titres d'articles",
"verbose_name": "Sous-titre d'article",
},
),
migrations.AlterModelOptions(
name="articletitle",
options={"verbose_name": "Titre d'article", "verbose_name_plural": "Titres d'articles"},
),
]
| gpl-3.0 | 4,950,922,311,403,660,000 | 32.018182 | 100 | 0.449339 | false |
strahlex/pymachinetalk | pymachinetalk/machinetalk_core/common/subscribe.py | 1 | 9269 | # coding=utf-8
import zmq
import threading
import uuid
from google.protobuf.message import DecodeError
from fysom import Fysom
import machinetalk.protobuf.types_pb2 as pb
from machinetalk.protobuf.message_pb2 import Container
class Subscribe(object):
def __init__(self, debuglevel=0, debugname='Subscribe'):
self.debuglevel = debuglevel
self.debugname = debugname
self._error_string = ''
self.on_error_string_changed = []
# ZeroMQ
context = zmq.Context()
context.linger = 0
self._context = context
# pipe to signalize a shutdown
self._shutdown = context.socket(zmq.PUSH)
self._shutdown_uri = b'inproc://shutdown-%s' % str(uuid.uuid4()).encode()
self._shutdown.bind(self._shutdown_uri)
self._thread = None # socket worker tread
self._tx_lock = threading.Lock() # lock for outgoing messages
# Socket
self.socket_uri = ''
self._socket_topics = set()
# more efficient to reuse protobuf messages
self._socket_rx = Container()
# Heartbeat
self._heartbeat_lock = threading.Lock()
self._heartbeat_interval = 2500
self._heartbeat_timer = None
self._heartbeat_active = False
self._heartbeat_liveness = 0
self._heartbeat_reset_liveness = 5
# callbacks
self.on_socket_message_received = []
self.on_state_changed = []
# fsm
self._fsm = Fysom(
{
'initial': 'down',
'events': [
{'name': 'start', 'src': 'down', 'dst': 'trying'},
{'name': 'full_update_received', 'src': 'trying', 'dst': 'up'},
{'name': 'stop', 'src': 'trying', 'dst': 'down'},
{'name': 'heartbeat_timeout', 'src': 'up', 'dst': 'trying'},
{'name': 'heartbeat_tick', 'src': 'up', 'dst': 'up'},
{'name': 'any_msg_received', 'src': 'up', 'dst': 'up'},
{'name': 'stop', 'src': 'up', 'dst': 'down'},
],
}
)
self._fsm.ondown = self._on_fsm_down
self._fsm.onafterstart = self._on_fsm_start
self._fsm.ontrying = self._on_fsm_trying
self._fsm.onafterfull_update_received = self._on_fsm_full_update_received
self._fsm.onafterstop = self._on_fsm_stop
self._fsm.onup = self._on_fsm_up
self._fsm.onafterheartbeat_timeout = self._on_fsm_heartbeat_timeout
self._fsm.onafterheartbeat_tick = self._on_fsm_heartbeat_tick
self._fsm.onafterany_msg_received = self._on_fsm_any_msg_received
def _on_fsm_down(self, _):
if self.debuglevel > 0:
print('[%s]: state DOWN' % self.debugname)
for cb in self.on_state_changed:
cb('down')
return True
def _on_fsm_start(self, _):
if self.debuglevel > 0:
print('[%s]: event START' % self.debugname)
self.start_socket()
return True
def _on_fsm_trying(self, _):
if self.debuglevel > 0:
print('[%s]: state TRYING' % self.debugname)
for cb in self.on_state_changed:
cb('trying')
return True
def _on_fsm_full_update_received(self, _):
if self.debuglevel > 0:
print('[%s]: event FULL UPDATE RECEIVED' % self.debugname)
self.reset_heartbeat_liveness()
self.start_heartbeat_timer()
return True
def _on_fsm_stop(self, _):
if self.debuglevel > 0:
print('[%s]: event STOP' % self.debugname)
self.stop_heartbeat_timer()
self.stop_socket()
return True
def _on_fsm_up(self, _):
if self.debuglevel > 0:
print('[%s]: state UP' % self.debugname)
for cb in self.on_state_changed:
cb('up')
return True
def _on_fsm_heartbeat_timeout(self, _):
if self.debuglevel > 0:
print('[%s]: event HEARTBEAT TIMEOUT' % self.debugname)
self.stop_heartbeat_timer()
self.stop_socket()
self.start_socket()
return True
def _on_fsm_heartbeat_tick(self, _):
if self.debuglevel > 0:
print('[%s]: event HEARTBEAT TICK' % self.debugname)
self.reset_heartbeat_timer()
return True
def _on_fsm_any_msg_received(self, _):
if self.debuglevel > 0:
print('[%s]: event ANY MSG RECEIVED' % self.debugname)
self.reset_heartbeat_liveness()
self.reset_heartbeat_timer()
return True
@property
def error_string(self):
return self._error_string
@error_string.setter
def error_string(self, string):
if self._error_string is string:
return
self._error_string = string
for cb in self.on_error_string_changed:
cb(string)
def start(self):
if self._fsm.isstate('down'):
self._fsm.start()
def stop(self):
if self._fsm.isstate('trying'):
self._fsm.stop()
elif self._fsm.isstate('up'):
self._fsm.stop()
def add_socket_topic(self, name):
self._socket_topics.add(name)
def remove_socket_topic(self, name):
self._socket_topics.remove(name)
def clear_socket_topics(self):
self._socket_topics.clear()
def _socket_worker(self, context, uri):
poll = zmq.Poller()
socket = context.socket(zmq.SUB)
socket.setsockopt(zmq.LINGER, 0)
socket.connect(uri)
poll.register(socket, zmq.POLLIN)
# subscribe is always connected to socket creation
for topic in self._socket_topics:
socket.setsockopt(zmq.SUBSCRIBE, topic.encode())
shutdown = context.socket(zmq.PULL)
shutdown.connect(self._shutdown_uri)
poll.register(shutdown, zmq.POLLIN)
while True:
s = dict(poll.poll())
if shutdown in s:
shutdown.recv()
return # shutdown signal
if socket in s:
self._socket_message_received(socket)
def start_socket(self):
self._thread = threading.Thread(
target=self._socket_worker, args=(self._context, self.socket_uri)
)
self._thread.start()
def stop_socket(self):
self._shutdown.send(b' ') # trigger socket thread shutdown
self._thread = None
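    # Heartbeat bookkeeping: every timer tick decrements _heartbeat_liveness,
    # while any received message resets it via reset_heartbeat_liveness().
    # If liveness reaches zero while the fsm is 'up', heartbeat_timeout
    # recycles the socket (stop + start) and the fsm drops back to 'trying'.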
def _heartbeat_timer_tick(self):
with self._heartbeat_lock:
self._heartbeat_timer = None # timer is dead on tick
if self.debuglevel > 0:
print('[%s] heartbeat timer tick' % self.debugname)
self._heartbeat_liveness -= 1
if self._heartbeat_liveness == 0:
if self._fsm.isstate('up'):
self._fsm.heartbeat_timeout()
return
if self._fsm.isstate('up'):
self._fsm.heartbeat_tick()
def reset_heartbeat_liveness(self):
self._heartbeat_liveness = self._heartbeat_reset_liveness
def reset_heartbeat_timer(self):
if not self._heartbeat_active:
return
self._heartbeat_lock.acquire()
if self._heartbeat_timer:
self._heartbeat_timer.cancel()
self._heartbeat_timer = None
if self._heartbeat_interval > 0:
self._heartbeat_timer = threading.Timer(
self._heartbeat_interval / 1000.0, self._heartbeat_timer_tick
)
self._heartbeat_timer.start()
self._heartbeat_lock.release()
if self.debuglevel > 0:
print('[%s] heartbeat timer reset' % self.debugname)
def start_heartbeat_timer(self):
self._heartbeat_active = True
self.reset_heartbeat_timer()
def stop_heartbeat_timer(self):
self._heartbeat_active = False
self._heartbeat_lock.acquire()
if self._heartbeat_timer:
self._heartbeat_timer.cancel()
self._heartbeat_timer = None
self._heartbeat_lock.release()
# process all messages received on socket
def _socket_message_received(self, socket):
(identity, msg) = socket.recv_multipart() # identity is topic
try:
self._socket_rx.ParseFromString(msg)
except DecodeError as e:
note = 'Protobuf Decode Error: ' + str(e)
print(note) # TODO: decode error
return
if self.debuglevel > 0:
print('[%s] received message' % self.debugname)
if self.debuglevel > 1:
print(self._socket_rx)
rx = self._socket_rx
# react to any incoming message
if self._fsm.isstate('up'):
self._fsm.any_msg_received()
# react to ping message
if rx.type == pb.MT_PING:
return # ping is uninteresting
# react to full update message
elif rx.type == pb.MT_FULL_UPDATE:
if rx.HasField('pparams'):
interval = rx.pparams.keepalive_timer
self._heartbeat_interval = interval
if self._fsm.isstate('trying'):
self._fsm.full_update_received()
for cb in self.on_socket_message_received:
cb(identity, rx)
| mit | 6,050,195,010,412,286,000 | 31.985765 | 83 | 0.561657 | false |
tovmeod/anaf | anaf/events/urls.py | 1 | 1779 | """
Events module URLs
"""
from django.conf.urls import patterns, url
from anaf.events import views
urlpatterns = patterns('anaf.events.views',
url(r'^(\.(?P<response_format>\w+))?$', views.month_view, name='events'),
url(r'^index(\.(?P<response_format>\w+))?$', views.index, name='events_index'),
url(r'^upcoming(\.(?P<response_format>\w+))?/?$', views.upcoming, name='events_upcoming'),
url(r'^month(\.(?P<response_format>\w+))?/?$', views.month_view, name='events_month'),
url(r'^week(\.(?P<response_format>\w+))?/?$', views.week_view, name='events_week'),
url(r'^day(\.(?P<response_format>\w+))?/?$', views.day_view, name='events_day'),
# Events
url(r'^event/add(\.(?P<response_format>\w+))?/?$', views.event_add, name='events_event_add'),
url(r'^event/add/(?P<date>[0-9\-]+)/(?P<hour>[0-9]+)?(\.(?P<response_format>\w+))?/?$',
views.event_add, name='events_event_add_to_date'),
url(r'^event/view/(?P<event_id>\d+)(\.(?P<response_format>\w+))?/?$', views.event_view,
name='events_event_view'),
url(r'^event/edit/(?P<event_id>\d+)(\.(?P<response_format>\w+))?/?$', views.event_edit,
name='events_event_edit'),
url(r'^event/delete/(?P<event_id>\d+)(\.(?P<response_format>\w+))?/?$', views.event_delete,
name='events_event_delete'),
# Export iCalendar
url(r'^ical/?$', views.ical_all_event, name='events_all_ical'),
)
| bsd-3-clause | 1,345,017,845,240,748,800 | 54.59375 | 116 | 0.469927 | false |
naronald/SUMOoD | peopleCollection.py | 1 | 3732 | # -*- coding: utf-8 -*-
"""
@file peopleCollection.py
@author Nicole Ronald
@date 2014-03-17
@version
Defines a collection of people who request trips from the
demand-responsive transportation system.
SUMOoD (SUMO on Demand); see http://imod-au.info/sumood
Copyright (C) 2014 iMoD
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2008-2012 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
from person import Person
from stop import Stop, StopType
import csv
class PeopleCollection:
""" a collection of persons """
people = {}
def __init__(self):
self.people = {}
def readFile(self, fileName):
""" read people from a file
format:
id, callTime, requestTime, originLink, originPos, destLink, destPos
(string)"""
with open(fileName, 'rb') as pFile:
persons = csv.reader(pFile, delimiter=',')
for row in persons:
assert len(row) == 7
i = row[0]
entry = int(row[1])
request = int(row[2])
originLink = row[3]
originPos = float(row[4])
destLink = row[5]
destPos = float(row[6])
p = Person(i)
p.setCallTime(entry)
p.setRequestTime(request)
p.setOD(Stop(i, originLink, originPos, StopType.PICKUP,
request),
Stop(i, destLink, destPos, StopType.DROPOFF))
self.addPerson(p)
def addPerson(self, person):
""" add a person to the collection
(Person)"""
self.people[person.personID] = person
def updatePersonPickup(self, personID, time):
""" set a person's pickup time
(string, int)"""
self.people[personID].setPickupTime(time)
def updatePersonDropoff(self, personID, time):
""" set a person's dropoff time
(string, int)"""
self.people[personID].setDropoffTime(time)
def incrementPersonDistance(self, personID, distance):
""" increment the distance travelled by a person by the distance
provided
(string, float)"""
self.people[personID].incrementDistance(distance)
def getTravelTime(self, personID):
""" return tiem passenger was travelling for occupancy
(string) -> int"""
return self.people[personID].travelTime
def output(self, folder="./", code="0"):
""" print details for all people, to *-person.out.csv in an optionally
specified folder with an optionally specified run code
(string ,string)"""
with open(folder + code + '-person.out.csv', 'wb') \
as csvfile:
peopleWriter = csv.writer(csvfile,
delimiter=',')
peopleWriter.writerow(\
["personID","callTime","requestTime","directDistance",\
"actualDistance","directTime","waitTime","pickupTime",\
"travelTime", "dropoffTime", "excessTravelTime", \
"excessTime", "excessDistance","state"])
for p in self.people.values():
peopleWriter.writerow(p.getOutput())
def getList(self):
""" return a list of people
() -> Person[]"""
persons = []
for p in self.people.values():
persons.append(p)
return persons
def estimatePenalty(self, personID, dropoffTime):
""" estimate penalty for a person at an estimated dropoff time """
return self.people[personID].estimatePenalty(dropoffTime)
peopleCollection = PeopleCollection()
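# Minimal usage sketch, assuming a hypothetical 'requests.csv' in the format
# documented in PeopleCollection.readFile(); person id '1' and run code 'demo'
# are illustrative only, not part of SUMOoD itself.
if __name__ == '__main__':
    demo = PeopleCollection()
    demo.readFile('requests.csv')
    demo.updatePersonPickup('1', 120)
    demo.updatePersonDropoff('1', 480)
    demo.output(folder='./', code='demo')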
| gpl-2.0 | -7,704,165,522,916,638,000 | 32.026549 | 78 | 0.576367 | false |
rgtjf/Semantic-Texual-Similarity-Toolkits | stst/libs/word_aligner/aligner.py | 1 | 88789 | # coding: utf8
from .config import *
from .parseUtil import *
from .util import *
from .wordSim import *
##############################################################################################################################
def alignNouns(source, target, sourceParseResult, targetParseResult, existingAlignments):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
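    # e.g. a single element might look like [[0, 3], 1, 'The', 'the', 'DT']
    # (illustrative values only)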
global ppdbSim
global theta1
nounAlignments = []
sourceWordIndices = [i + 1 for i in range(len(source))]
targetWordIndices = [i + 1 for i in range(len(target))]
sourceWordIndicesAlreadyAligned = sorted(list(set([item[0] for item in existingAlignments])))
targetWordIndicesAlreadyAligned = sorted(list(set([item[1] for item in existingAlignments])))
sourceWords = [item[2] for item in source]
targetWords = [item[2] for item in target]
sourceLemmas = [item[3] for item in source]
targetLemmas = [item[3] for item in target]
sourcePosTags = [item[4] for item in source]
targetPosTags = [item[4] for item in target]
sourceDParse = dependencyParseAndPutOffsets(sourceParseResult)
targetDParse = dependencyParseAndPutOffsets(targetParseResult)
numberOfNounsInSource = 0
evidenceCountsMatrix = {}
relativeAlignmentsMatrix = {}
wordSimilarities = {}
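    # evidenceCountsMatrix[(i, j)]: accumulated similarity of dependency-based
    #   evidence (matching parents/children) for aligning source word i to
    #   target word j
    # relativeAlignmentsMatrix[(i, j)]: the neighbor index pairs that supplied
    #   that evidence, aligned together with (i, j) if it is chosen
    # wordSimilarities[(i, j)]: lexical similarity of the word pair itself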
# construct the two matrices in the following loop
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned \
or (sourcePosTags[i-1][0].lower() != 'n' and sourcePosTags[i-1].lower() != 'prp'):
continue
numberOfNounsInSource += 1
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned \
or (targetPosTags[j-1][0].lower() != 'n' and targetPosTags[j-1].lower() != 'prp'):
continue
if max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1])) < ppdbSim:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordParents = findParents(sourceDParse, i, sourceWords[i-1])
sourceWordChildren = findChildren(sourceDParse, i, sourceWords[i-1])
targetWordParents = findParents(targetDParse, j, targetWords[j-1])
targetWordChildren = findChildren(targetDParse, j, targetWords[j-1])
# search for common or equivalent parents
groupOfSimilarRelationsForNounParent = ['pos', 'nn', 'prep_of', 'prep_in', 'prep_at', 'prep_for']
group1OfSimilarRelationsForVerbParent = ['agent', 'nsubj', 'xsubj']
group2OfSimilarRelationsForVerbParent = ['ccomp', 'dobj', 'nsubjpass', 'rel', 'partmod']
            group3OfSimilarRelationsForVerbParent = ['tmod', 'prep_in', 'prep_at', 'prep_on']
group4OfSimilarRelationsForVerbParent = ['iobj', 'prep_to']
for ktem in sourceWordParents:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+nounAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2] == ltem[2]) or
(ktem[2] in groupOfSimilarRelationsForNounParent and ltem[2] in groupOfSimilarRelationsForNounParent) or
(ktem[2] in group1OfSimilarRelationsForVerbParent and ltem[2] in group1OfSimilarRelationsForVerbParent) or
(ktem[2] in group2OfSimilarRelationsForVerbParent and ltem[2] in group2OfSimilarRelationsForVerbParent) or
(ktem[2] in group3OfSimilarRelationsForVerbParent and ltem[2] in group3OfSimilarRelationsForVerbParent) or
(ktem[2] in group4OfSimilarRelationsForVerbParent and ltem[2] in group4OfSimilarRelationsForVerbParent)):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for common or equivalent children
            groupOfSimilarRelationsForNounChild = ['pos', 'nn', 'prep_of', 'prep_in', 'prep_at', 'prep_for']
groupOfSimilarRelationsForVerbChild = ['infmod', 'partmod', 'rcmod']
groupOfSimilarRelationsForAdjectiveChild = ['amod', 'rcmod']
for ktem in sourceWordChildren:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+nounAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsForNounChild and ltem[2] in groupOfSimilarRelationsForNounChild) or
(ktem[2] in groupOfSimilarRelationsForVerbChild and ltem[2] in groupOfSimilarRelationsForVerbChild) or
(ktem[2] in groupOfSimilarRelationsForAdjectiveChild and ltem[2] in groupOfSimilarRelationsForAdjectiveChild)):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent parent-child relations
groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild = [['nsubj'], ['amod', 'rcmod']]
groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['ccomp', 'dobj', 'nsubjpass', 'rel', 'partmod'], ['infmod', 'partmod', 'rcmod']]
group1OfSimilarRelationsInOppositeDirectionForNounParentAndChild = [['conj_and'], ['conj_and']]
group2OfSimilarRelationsInOppositeDirectionForNounParentAndChild = [['conj_or'], ['conj_or']]
group3OfSimilarRelationsInOppositeDirectionForNounParentAndChild = [['conj_nor'], ['conj_nor']]
for ktem in sourceWordParents:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+nounAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent child-parent relations
for ktem in sourceWordChildren:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+nounAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForNounParentAndChild[1] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForNounParentAndChild[0])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# now use the collected stats to align
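    # Greedy selection: each pass picks the still-unaligned (i, j) pair that
    # maximizes theta1 * wordSimilarities[(i, j)]
    # + (1 - theta1) * evidenceCountsMatrix[(i, j)], then also commits its
    # supporting neighbor alignments.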
for n in range(numberOfNounsInSource):
maxEvidenceCountForCurrentPass = 0
maxOverallValueForCurrentPass = 0
indexPairWithStrongestTieForCurrentPass = [-1, -1]
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'n' or sourceLemmas[i-1] in stopwords:
continue
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'n' or targetLemmas[j-1] in stopwords:
continue
if (i, j) in evidenceCountsMatrix and theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]>maxOverallValueForCurrentPass:
maxOverallValueForCurrentPass = theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]
maxEvidenceCountForCurrentPass = evidenceCountsMatrix[(i, j)]
indexPairWithStrongestTieForCurrentPass = [i, j]
if maxEvidenceCountForCurrentPass > 0:
nounAlignments.append(indexPairWithStrongestTieForCurrentPass)
sourceWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[0])
targetWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[1])
for item in relativeAlignmentsMatrix[(indexPairWithStrongestTieForCurrentPass[0], indexPairWithStrongestTieForCurrentPass[1])]:
if item[0]!=0 and item[1]!=0 and item[0] not in sourceWordIndicesAlreadyAligned and item[1] not in targetWordIndicesAlreadyAligned:
nounAlignments.append(item)
sourceWordIndicesAlreadyAligned.append(item[0])
targetWordIndicesAlreadyAligned.append(item[1])
else:
break
return nounAlignments
##############################################################################################################################
##############################################################################################################################
def alignMainVerbs(source, target, sourceParseResult, targetParseResult, existingAlignments):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
global ppdbSim
global theta1
mainVerbAlignments = []
sourceWordIndices = [i+1 for i in range(len(source))]
targetWordIndices = [i+1 for i in range(len(target))]
sourceWordIndicesAlreadyAligned = sorted(list(set([item[0] for item in existingAlignments])))
targetWordIndicesAlreadyAligned = sorted(list(set([item[1] for item in existingAlignments])))
sourceWords = [item[2] for item in source]
targetWords = [item[2] for item in target]
sourceLemmas = [item[3] for item in source]
targetLemmas = [item[3] for item in target]
sourcePosTags = [item[4] for item in source]
targetPosTags = [item[4] for item in target]
sourceDParse = dependencyParseAndPutOffsets(sourceParseResult)
targetDParse = dependencyParseAndPutOffsets(targetParseResult)
numberOfMainVerbsInSource = 0
evidenceCountsMatrix = {}
relativeAlignmentsMatrix = {}
wordSimilarities = {}
# construct the two matrices in the following loop
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'v' or sourceLemmas[i-1] in stopwords:
continue
numberOfMainVerbsInSource += 1
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'v' or targetLemmas[j-1] in stopwords:
continue
if max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))<ppdbSim:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordParents = findParents(sourceDParse, i, sourceWords[i-1])
sourceWordChildren = findChildren(sourceDParse, i, sourceWords[i-1])
targetWordParents = findParents(targetDParse, j, targetWords[j-1])
targetWordChildren = findChildren(targetDParse, j, targetWords[j-1])
# search for common or equivalent children
            group1OfSimilarRelationsForNounChild = ['agent', 'nsubj', 'xsubj']
            group2OfSimilarRelationsForNounChild = ['ccomp', 'dobj', 'nsubjpass', 'rel', 'partmod']
group3OfSimilarRelationsForNounChild = ['tmod', 'prep_in', 'prep_at', 'prep_on']
group4OfSimilarRelationsForNounChild = ['iobj', 'prep_to']
groupOfSimilarRelationsForVerbChild = ['purpcl', 'xcomp']
for ktem in sourceWordChildren:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+mainVerbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in group1OfSimilarRelationsForNounChild and ltem[2] in group1OfSimilarRelationsForNounChild) or
(ktem[2] in group2OfSimilarRelationsForNounChild and ltem[2] in group2OfSimilarRelationsForNounChild) or
(ktem[2] in group3OfSimilarRelationsForNounChild and ltem[2] in group3OfSimilarRelationsForNounChild) or
(ktem[2] in group4OfSimilarRelationsForNounChild and ltem[2] in group4OfSimilarRelationsForNounChild) or
(ktem[2] in groupOfSimilarRelationsForVerbChild and ltem[2] in groupOfSimilarRelationsForVerbChild)):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for common or equivalent parents
groupOfSimilarRelationsForNounParent = ['infmod', 'partmod', 'rcmod']
groupOfSimilarRelationsForVerbParent = ['purpcl', 'xcomp']
for ktem in sourceWordParents:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+mainVerbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsForNounParent and ltem[2] in groupOfSimilarRelationsForNounParent) or
(ktem[2] in groupOfSimilarRelationsForVerbParent and ltem[2] in groupOfSimilarRelationsForVerbParent)):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent parent-child pairs
groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild = [['cop', 'csubj'], ['acomp']]
group1OfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['csubj'], ['csubjpass']]
group2OfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['conj_and'], ['conj_and']]
group3OfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['conj_or'], ['conj_or']]
group4OfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['conj_nor'], ['conj_nor']]
for ktem in sourceWordParents:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+mainVerbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1]) or
(ktem[2] in group4OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in group4OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent child-parent pairs
for ktem in sourceWordChildren:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+mainVerbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0]) or
(ktem[2] in group4OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in group4OfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# now use the collected stats to align
for n in range(numberOfMainVerbsInSource):
maxEvidenceCountForCurrentPass = 0
maxOverallValueForCurrentPass = 0
indexPairWithStrongestTieForCurrentPass = [-1, -1]
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'v' or sourceLemmas[i-1] in stopwords:
continue
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'v' or targetLemmas[j-1] in stopwords:
continue
if (i, j) in evidenceCountsMatrix and theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]>maxOverallValueForCurrentPass:
maxOverallValueForCurrentPass = theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]
maxEvidenceCountForCurrentPass = evidenceCountsMatrix[(i, j)]
indexPairWithStrongestTieForCurrentPass = [i, j]
if maxEvidenceCountForCurrentPass > 0:
mainVerbAlignments.append(indexPairWithStrongestTieForCurrentPass)
sourceWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[0])
targetWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[1])
for item in relativeAlignmentsMatrix[(indexPairWithStrongestTieForCurrentPass[0], indexPairWithStrongestTieForCurrentPass[1])]:
if item[0]!=0 and item[1]!=0 and item[0] not in sourceWordIndicesAlreadyAligned and item[1] not in targetWordIndicesAlreadyAligned:
mainVerbAlignments.append(item)
sourceWordIndicesAlreadyAligned.append(item[0])
targetWordIndicesAlreadyAligned.append(item[1])
else:
break
return mainVerbAlignments
##############################################################################################################################
##############################################################################################################################
def alignAdjectives(source, target, sourceParseResult, targetParseResult, existingAlignments):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
global ppdbSim
global theta1
adjectiveAlignments = []
sourceWordIndices = [i+1 for i in range(len(source))]
targetWordIndices = [i+1 for i in range(len(target))]
sourceWordIndicesAlreadyAligned = sorted(list(set([item[0] for item in existingAlignments])))
targetWordIndicesAlreadyAligned = sorted(list(set([item[1] for item in existingAlignments])))
sourceWords = [item[2] for item in source]
targetWords = [item[2] for item in target]
sourceLemmas = [item[3] for item in source]
targetLemmas = [item[3] for item in target]
sourcePosTags = [item[4] for item in source]
targetPosTags = [item[4] for item in target]
sourceDParse = dependencyParseAndPutOffsets(sourceParseResult)
targetDParse = dependencyParseAndPutOffsets(targetParseResult)
numberOfAdjectivesInSource = 0
evidenceCountsMatrix = {}
relativeAlignmentsMatrix = {}
wordSimilarities = {}
# construct the two matrices in the following loop
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'j':
continue
numberOfAdjectivesInSource += 1
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'j':
continue
if max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))<ppdbSim:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordParents = findParents(sourceDParse, i, sourceWords[i-1])
sourceWordChildren = findChildren(sourceDParse, i, sourceWords[i-1])
targetWordParents = findParents(targetDParse, j, targetWords[j-1])
targetWordChildren = findChildren(targetDParse, j, targetWords[j-1])
# search for common or equivalent parents
groupOfSimilarRelationsForNounParent = ['amod', 'rcmod']
for ktem in sourceWordParents:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+adjectiveAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and ((ktem[2]==ltem[2]) or (ktem[2] in groupOfSimilarRelationsForNounParent and ltem[2] in groupOfSimilarRelationsForNounParent)):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for common children
for ktem in sourceWordChildren:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+adjectiveAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (ktem[2]==ltem[2]):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent parent-child pair
groupOfSimilarRelationsInOppositeDirectionForNounParentAndChild = [['amod', 'rcmod'], ['nsubj']]
groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild = [['acomp'], ['cop', 'csubj']]
group1OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild = [['conj_and'], ['conj_and']]
group2OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild = [['conj_or'], ['conj_or']]
group3OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild = [['conj_nor'], ['conj_nor']]
for ktem in sourceWordParents:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+adjectiveAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForNounParentAndChild[0] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForNounParentAndChild[1]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent child-parent pair
for ktem in sourceWordChildren:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+adjectiveAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForNounParentAndChild[1] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForNounParentAndChild[0]) or
(ktem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[1] and ltem[2] in groupOfSimilarRelationsInOppositeDirectionForVerbParentAndChild[0]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[1] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForAdjectiveParentAndChild[0])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# now use the collected stats to align
for n in range(numberOfAdjectivesInSource):
maxEvidenceCountForCurrentPass = 0
maxOverallValueForCurrentPass = 0
indexPairWithStrongestTieForCurrentPass = [-1, -1]
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'j' or sourceLemmas[i-1] in stopwords:
continue
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'j' or targetLemmas[j-1] in stopwords:
continue
if (i, j) in evidenceCountsMatrix and theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]>maxOverallValueForCurrentPass:
maxOverallValueForCurrentPass = theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]
maxEvidenceCountForCurrentPass = evidenceCountsMatrix[(i, j)]
indexPairWithStrongestTieForCurrentPass = [i, j]
if maxEvidenceCountForCurrentPass > 0:
adjectiveAlignments.append(indexPairWithStrongestTieForCurrentPass)
sourceWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[0])
targetWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[1])
for item in relativeAlignmentsMatrix[(indexPairWithStrongestTieForCurrentPass[0], indexPairWithStrongestTieForCurrentPass[1])]:
if item[0]!=0 and item[1]!=0 and item[0] not in sourceWordIndicesAlreadyAligned and item[1] not in targetWordIndicesAlreadyAligned:
adjectiveAlignments.append(item)
sourceWordIndicesAlreadyAligned.append(item[0])
targetWordIndicesAlreadyAligned.append(item[1])
else:
break
return adjectiveAlignments
##############################################################################################################################
##############################################################################################################################
def alignAdverbs(source, target, sourceParseResult, targetParseResult, existingAlignments):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
global ppdbSim
global theta1
adverbAlignments = []
sourceWordIndices = [i+1 for i in range(len(source))]
targetWordIndices = [i+1 for i in range(len(target))]
sourceWordIndicesAlreadyAligned = sorted(list(set([item[0] for item in existingAlignments])))
targetWordIndicesAlreadyAligned = sorted(list(set([item[1] for item in existingAlignments])))
sourceWords = [item[2] for item in source]
targetWords = [item[2] for item in target]
sourceLemmas = [item[3] for item in source]
targetLemmas = [item[3] for item in target]
sourcePosTags = [item[4] for item in source]
targetPosTags = [item[4] for item in target]
sourceDParse = dependencyParseAndPutOffsets(sourceParseResult)
targetDParse = dependencyParseAndPutOffsets(targetParseResult)
numberOfAdverbsInSource = 0
evidenceCountsMatrix = {}
relativeAlignmentsMatrix = {}
wordSimilarities = {}
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or (sourcePosTags[i-1][0].lower() != 'r'):
continue
numberOfAdverbsInSource += 1
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or (targetPosTags[j-1][0].lower() != 'r'):
continue
if max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))<ppdbSim:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordParents = findParents(sourceDParse, i, sourceWords[i-1])
sourceWordChildren = findChildren(sourceDParse, i, sourceWords[i-1])
targetWordParents = findParents(targetDParse, j, targetWords[j-1])
targetWordChildren = findChildren(targetDParse, j, targetWords[j-1])
# search for common parents
for ktem in sourceWordParents:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+adverbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (ktem[2]==ltem[2]):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for common children
for ktem in sourceWordChildren:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+adverbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (ktem[2]==ltem[2]):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent parent-child relationships
group1OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild = [['conj_and'], ['conj_and']]
group2OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild = [['conj_or'], ['conj_or']]
group3OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild = [['conj_nor'], ['conj_nor']]
for ktem in sourceWordParents:
for ltem in targetWordChildren:
if ((ktem[0], ltem[0]) in existingAlignments+adverbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# search for equivalent child-parent relationships
for ktem in sourceWordChildren:
for ltem in targetWordParents:
if ((ktem[0], ltem[0]) in existingAlignments+adverbAlignments or max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))>=ppdbSim) and (
(ktem[2]==ltem[2]) or
(ktem[2] in group1OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1] and ltem[2] in group1OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0]) or
(ktem[2] in group2OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1] and ltem[2] in group2OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0]) or
(ktem[2] in group3OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[1] and ltem[2] in group3OfSimilarRelationsInOppositeDirectionForAdverbParentAndChild[0])):
if (i, j) in evidenceCountsMatrix:
evidenceCountsMatrix[(i, j)] += max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
else:
evidenceCountsMatrix[(i, j)] = max(wordRelatedness(ktem[1], sourcePosTags[ktem[0]-1], ltem[1], targetPosTags[ltem[0]-1]), wordRelatedness(sourceLemmas[ktem[0]-1], sourcePosTags[ktem[0]-1], targetLemmas[ltem[0]-1], targetPosTags[ltem[0]-1]))
if (i, j) in relativeAlignmentsMatrix:
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
else:
relativeAlignmentsMatrix[(i, j)] = []
relativeAlignmentsMatrix[(i, j)].append([ktem[0], ltem[0]])
# now use the collected stats to align
for n in range(numberOfAdverbsInSource):
maxEvidenceCountForCurrentPass = 0
maxOverallValueForCurrentPass = 0
indexPairWithStrongestTieForCurrentPass = [-1, -1]
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourcePosTags[i-1][0].lower() != 'r' or sourceLemmas[i-1] in stopwords:
continue
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetPosTags[j-1][0].lower() != 'r' or targetLemmas[j-1] in stopwords:
continue
if (i, j) in evidenceCountsMatrix and theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]>maxOverallValueForCurrentPass:
maxOverallValueForCurrentPass = theta1*wordSimilarities[(i, j)]+(1-theta1)*evidenceCountsMatrix[(i, j)]
maxEvidenceCountForCurrentPass = evidenceCountsMatrix[(i, j)]
indexPairWithStrongestTieForCurrentPass = [i, j]
if maxEvidenceCountForCurrentPass > 0:
adverbAlignments.append(indexPairWithStrongestTieForCurrentPass)
sourceWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[0])
targetWordIndicesAlreadyAligned.append(indexPairWithStrongestTieForCurrentPass[1])
for item in relativeAlignmentsMatrix[(indexPairWithStrongestTieForCurrentPass[0], indexPairWithStrongestTieForCurrentPass[1])]:
if item[0]!=0 and item[1]!=0 and item[0] not in sourceWordIndicesAlreadyAligned and item[1] not in targetWordIndicesAlreadyAligned:
adverbAlignments.append(item)
sourceWordIndicesAlreadyAligned.append(item[0])
targetWordIndicesAlreadyAligned.append(item[1])
else:
break
return adverbAlignments
##############################################################################################################################
##############################################################################################################################
def alignNamedEntities(source, target, sourceParseResult, targetParseResult, existingAlignments):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
global punctuations
alignments = []
sourceNamedEntities = ner(sourceParseResult)
sourceNamedEntities = sorted(sourceNamedEntities, key=len)
targetNamedEntities = ner(targetParseResult)
targetNamedEntities = sorted(targetNamedEntities, key=len)
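    # each named entity entry has the form
    # [[character offset pairs], [word indices], [words], label]
    # where label is e.g. 'PERSON', 'ORGANIZATION' or 'LOCATION'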
# learn from the other sentence that a certain word/phrase is a named entity (learn for source from target)
for item in source:
alreadyIncluded = False
for jtem in sourceNamedEntities:
if item[1] in jtem[1]:
alreadyIncluded = True
break
if alreadyIncluded or (len(item[2]) >0 and not item[2][0].isupper()):
continue
for jtem in targetNamedEntities:
if item[2] in jtem[2]:
# construct the item
newItem = [[item[0]], [item[1]], [item[2]], jtem[3]]
# check if the current item is part of a named entity part of which has already been added (by checking contiguousness)
partOfABiggerName = False
for k in range(len(sourceNamedEntities)):
if sourceNamedEntities[k][1][len(sourceNamedEntities[k][1])-1] == newItem[1][0] - 1:
sourceNamedEntities[k][0].append(newItem[0][0])
sourceNamedEntities[k][1].append(newItem[1][0])
sourceNamedEntities[k][2].append(newItem[2][0])
partOfABiggerName = True
if not partOfABiggerName:
sourceNamedEntities.append(newItem)
elif isAcronym(item[2], jtem[2]) and [[item[0]], [item[1]], [item[2]], jtem[3]] not in sourceNamedEntities:
sourceNamedEntities.append([[item[0]], [item[1]], [item[2]], jtem[3]])
# learn from the other sentence that a certain word/phrase is a named entity (learn for target from source)
for item in target:
alreadyIncluded = False
for jtem in targetNamedEntities:
if item[1] in jtem[1]:
alreadyIncluded = True
break
if alreadyIncluded or (len(item[2]) >0 and not item[2][0].isupper()):
continue
for jtem in sourceNamedEntities:
if item[2] in jtem[2]:
# construct the item
newItem = [[item[0]], [item[1]], [item[2]], jtem[3]]
# check if the current item is part of a named entity part of which has already been added (by checking contiguousness)
partOfABiggerName = False
for k in range(len(targetNamedEntities)):
if targetNamedEntities[k][1][len(targetNamedEntities[k][1])-1] == newItem[1][0] - 1:
targetNamedEntities[k][0].append(newItem[0][0])
targetNamedEntities[k][1].append(newItem[1][0])
targetNamedEntities[k][2].append(newItem[2][0])
partOfABiggerName = True
if not partOfABiggerName:
targetNamedEntities.append(newItem)
elif isAcronym(item[2], jtem[2]) and [[item[0]], [item[1]], [item[2]], jtem[3]] not in targetNamedEntities:
targetNamedEntities.append([[item[0]], [item[1]], [item[2]], jtem[3]])
sourceWords = []
targetWords = []
for item in sourceNamedEntities:
for jtem in item[1]:
if item[3] in ['PERSON', 'ORGANIZATION', 'LOCATION']:
sourceWords.append(source[jtem-1][2])
for item in targetNamedEntities:
for jtem in item[1]:
if item[3] in ['PERSON', 'ORGANIZATION', 'LOCATION']:
targetWords.append(target[jtem-1][2])
if len(sourceNamedEntities) == 0 or len(targetNamedEntities) == 0:
return []
sourceNamedEntitiesAlreadyAligned = []
targetNamedEntitiesAlreadyAligned = []
# align all full matches
for item in sourceNamedEntities:
if item[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION']:
continue
# do not align if the current source entity is present more than once
count = 0
for ktem in sourceNamedEntities:
if ktem[2] == item[2]:
count += 1
if count > 1:
continue
for jtem in targetNamedEntities:
if jtem[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION']:
continue
# do not align if the current target entity is present more than once
count = 0
for ktem in targetNamedEntities:
if ktem[2] == jtem[2]:
count += 1
if count > 1:
continue
# get rid of dots and hyphens
            canonicalItemWord = [i.replace('.', '').replace('-', '') for i in item[2]]
            canonicalJtemWord = [j.replace('.', '').replace('-', '') for j in jtem[2]]
if canonicalItemWord == canonicalJtemWord:
for k in range(len(item[1])):
if ([item[1][k], jtem[1][k]]) not in alignments:
alignments.append([item[1][k], jtem[1][k]])
sourceNamedEntitiesAlreadyAligned.append(item)
targetNamedEntitiesAlreadyAligned.append(jtem)
# align acronyms with their elaborations
for item in sourceNamedEntities:
if item[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION']:
continue
for jtem in targetNamedEntities:
if jtem[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION']:
continue
if len(item[2])==1 and isAcronym(item[2][0], jtem[2]):
for i in range(len(jtem[1])):
if [item[1][0], jtem[1][i]] not in alignments:
alignments.append([item[1][0], jtem[1][i]])
sourceNamedEntitiesAlreadyAligned.append(item[1][0])
targetNamedEntitiesAlreadyAligned.append(jtem[1][i])
elif len(jtem[2])==1 and isAcronym(jtem[2][0], item[2]):
for i in range(len(item[1])):
if [item[1][i], jtem[1][0]] not in alignments:
alignments.append([item[1][i], jtem[1][0]])
sourceNamedEntitiesAlreadyAligned.append(item[1][i])
targetNamedEntitiesAlreadyAligned.append(jtem[1][0])
# align subset matches
for item in sourceNamedEntities:
if item[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION'] or item in sourceNamedEntitiesAlreadyAligned:
continue
# do not align if the current source entity is present more than once
count = 0
for ktem in sourceNamedEntities:
if ktem[2] == item[2]:
count += 1
if count > 1:
continue
for jtem in targetNamedEntities:
if jtem[3] not in ['PERSON', 'ORGANIZATION', 'LOCATION'] or jtem in targetNamedEntitiesAlreadyAligned:
continue
if item[3] != jtem[3]:
continue
# do not align if the current target entity is present more than once
count = 0
for ktem in targetNamedEntities:
if ktem[2] == jtem[2]:
count += 1
if count > 1:
continue
# find if the first is a part of the second
if isSublist(item[2], jtem[2]):
unalignedWordIndicesInTheLongerName = []
for ktem in jtem[1]:
unalignedWordIndicesInTheLongerName.append(ktem)
for k in range(len(item[2])):
for l in range(len(jtem[2])):
if item[2][k] == jtem[2][l] and [item[1][k], jtem[1][l]] not in alignments:
alignments.append([item[1][k], jtem[1][l]])
if jtem[1][l] in unalignedWordIndicesInTheLongerName:
unalignedWordIndicesInTheLongerName.remove(jtem[1][l])
for k in range(len(item[1])): # the shorter name
for l in range(len(jtem[1])): # the longer name
# find if the current term in the longer name has already been aligned (before calling alignNamedEntities()), do not align it in that case
alreadyInserted = False
for mtem in existingAlignments:
if mtem[1] == jtem[1][l]:
alreadyInserted = True
break
if jtem[1][l] not in unalignedWordIndicesInTheLongerName or alreadyInserted:
continue
if [item[1][k], jtem[1][l]] not in alignments and target[jtem[1][l]-1][2] not in sourceWords and item[2][k] not in punctuations and jtem[2][l] not in punctuations:
alignments.append([item[1][k], jtem[1][l]])
# else find if the second is a part of the first
elif isSublist(jtem[2], item[2]):
unalignedWordIndicesInTheLongerName = []
for ktem in item[1]:
unalignedWordIndicesInTheLongerName.append(ktem)
for k in range(len(jtem[2])):
for l in range(len(item[2])):
if jtem[2][k] == item[2][l] and [item[1][l], jtem[1][k]] not in alignments:
alignments.append([item[1][l], jtem[1][k]])
if item[1][l] in unalignedWordIndicesInTheLongerName:
unalignedWordIndicesInTheLongerName.remove(item[1][l])
for k in range(len(jtem[1])): # the shorter name
for l in range(len(item[1])): # the longer name
# find if the current term in the longer name has already been aligned (before calling alignNamedEntities()), do not align it in that case
alreadyInserted = False
for mtem in existingAlignments:
                            if mtem[0] == item[1][l]:
alreadyInserted = True
break
if item[1][l] not in unalignedWordIndicesInTheLongerName or alreadyInserted:
continue
                        if [item[1][l], jtem[1][k]] not in alignments and source[item[1][l]-1][2] not in targetWords and item[2][l] not in punctuations and jtem[2][k] not in punctuations:
alignments.append([item[1][l], jtem[1][k]])
# unalignedWordIndicesInTheLongerName.remove(jtem[1][l])
return alignments
##############################################################################################################################
##############################################################################################################################
def alignWords(source, target, sourceParseResult, targetParseResult):
# source and target:: each is a list of elements of the form:
# [[character begin offset, character end offset], word index, word, lemma, pos tag]
# function returns the word alignments from source to target - each alignment returned is of the following form:
# [
# [[source word character begin offset, source word character end offset], source word index, source word, source word lemma],
# [[target word character begin offset, target word character end offset], target word index, target word, target word lemma]
# ]
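    # The aligner works in ordered stages: sentence-ending punctuation, contiguous
    # multi-word matches, hyphenated-word pieces, named entities, dependency-based
    # alignment of main verbs/nouns/adjectives/adverbs, remaining content words by
    # textual-neighborhood evidence, stopwords by dependency evidence, and finally
    # stopwords/punctuation by textual-neighborhood evidence.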
global punctuations
sourceWordIndices = [i+1 for i in range(len(source))]
targetWordIndices = [i+1 for i in range(len(target))]
alignments = []
sourceWordIndicesAlreadyAligned= []
targetWordIndicesAlreadyAligned= []
sourceWords = [item[2] for item in source]
targetWords = [item[2] for item in target]
sourceLemmas = [item[3] for item in source]
targetLemmas = [item[3] for item in target]
sourcePosTags = [item[4] for item in source]
targetPosTags = [item[4] for item in target]
# align the sentence ending punctuation first
if (sourceWords[len(source)-1] in ['.', '!'] and targetWords[len(target)-1] in ['.', '!']) or sourceWords[len(source)-1]==targetWords[len(target)-1]:
alignments.append([len(source), len(target)])
sourceWordIndicesAlreadyAligned.append(len(source))
targetWordIndicesAlreadyAligned.append(len(target))
elif (sourceWords[len(source)-2] in ['.', '!'] and targetWords[len(target)-1] in ['.', '!']):
alignments.append([len(source)-1, len(target)])
sourceWordIndicesAlreadyAligned.append(len(source)-1)
targetWordIndicesAlreadyAligned.append(len(target))
elif sourceWords[len(source)-1] in ['.', '!'] and targetWords[len(target)-2] in ['.', '!']:
alignments.append([len(source), len(target)-1])
sourceWordIndicesAlreadyAligned.append(len(source))
targetWordIndicesAlreadyAligned.append(len(target)-1)
elif sourceWords[len(source)-2] in ['.', '!'] and targetWords[len(target)-2] in ['.', '!']:
alignments.append([len(source)-1, len(target)-1])
sourceWordIndicesAlreadyAligned.append(len(source)-1)
targetWordIndicesAlreadyAligned.append(len(target)-1)
# align all (>=2)-gram matches with at least one content word
commonContiguousSublists = findAllCommonContiguousSublists(sourceWords, targetWords, True)
for item in commonContiguousSublists:
allStopWords = True
        for jtem in item[0]:
            if sourceWords[jtem] not in stopwords and sourceWords[jtem] not in punctuations:
                allStopWords = False
                break
if len(item[0]) >= 2 and not allStopWords:
for j in range(len(item[0])):
if item[0][j]+1 not in sourceWordIndicesAlreadyAligned and item[1][j]+1 not in targetWordIndicesAlreadyAligned and [item[0][j]+1, item[1][j]+1] not in alignments:
alignments.append([item[0][j]+1, item[1][j]+1])
sourceWordIndicesAlreadyAligned.append(item[0][j]+1)
targetWordIndicesAlreadyAligned.append(item[1][j]+1)
# align hyphenated word groups
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned:
continue
if '-' in sourceWords[i-1] and sourceWords[i-1] != '-':
tokens = sourceWords[i-1].split('-')
commonContiguousSublists = findAllCommonContiguousSublists(tokens, targetWords)
for item in commonContiguousSublists:
if len(item[0]) > 1:
for jtem in item[1]:
if [i, jtem+1] not in alignments:
alignments.append([i, jtem+1])
sourceWordIndicesAlreadyAligned.append(i)
targetWordIndicesAlreadyAligned.append(jtem+1)
for i in targetWordIndices:
if i in targetWordIndicesAlreadyAligned:
continue
if '-' in target[i-1][2] and target[i-1][2] != '-':
tokens = target[i-1][2].split('-')
commonContiguousSublists = findAllCommonContiguousSublists(sourceWords, tokens)
for item in commonContiguousSublists:
if len(item[0]) > 1:
for jtem in item[0]:
if [jtem+1, i] not in alignments:
alignments.append([jtem+1, i])
sourceWordIndicesAlreadyAligned.append(jtem+1)
targetWordIndicesAlreadyAligned.append(i)
# align named entities
neAlignments = alignNamedEntities(source, target, sourceParseResult, targetParseResult, alignments)
for item in neAlignments:
if item not in alignments:
alignments.append(item)
if item[0] not in sourceWordIndicesAlreadyAligned:
sourceWordIndicesAlreadyAligned.append(item[0])
if item[1] not in targetWordIndicesAlreadyAligned:
targetWordIndicesAlreadyAligned.append(item[1])
# align words based on word and dependency match
sourceDParse = dependencyParseAndPutOffsets(sourceParseResult)
targetDParse = dependencyParseAndPutOffsets(targetParseResult)
mainVerbAlignments = alignMainVerbs(source, target, sourceParseResult, targetParseResult, alignments)
for item in mainVerbAlignments:
if item not in alignments:
alignments.append(item)
if item[0] not in sourceWordIndicesAlreadyAligned:
sourceWordIndicesAlreadyAligned.append(item[0])
if item[1] not in targetWordIndicesAlreadyAligned:
targetWordIndicesAlreadyAligned.append(item[1])
nounAlignments = alignNouns(source, target, sourceParseResult, targetParseResult, alignments)
for item in nounAlignments:
if item not in alignments:
alignments.append(item)
if item[0] not in sourceWordIndicesAlreadyAligned:
sourceWordIndicesAlreadyAligned.append(item[0])
if item[1] not in targetWordIndicesAlreadyAligned:
targetWordIndicesAlreadyAligned.append(item[1])
adjectiveAlignments = alignAdjectives(source, target, sourceParseResult, targetParseResult, alignments)
for item in adjectiveAlignments:
if item not in alignments:
alignments.append(item)
if item[0] not in sourceWordIndicesAlreadyAligned:
sourceWordIndicesAlreadyAligned.append(item[0])
if item[1] not in targetWordIndicesAlreadyAligned:
targetWordIndicesAlreadyAligned.append(item[1])
adverbAlignments = alignAdverbs(source, target, sourceParseResult, targetParseResult, alignments)
for item in adverbAlignments:
if item not in alignments:
alignments.append(item)
if item[0] not in sourceWordIndicesAlreadyAligned:
sourceWordIndicesAlreadyAligned.append(item[0])
if item[1] not in targetWordIndicesAlreadyAligned:
targetWordIndicesAlreadyAligned.append(item[1])
# collect evidence from textual neighborhood for aligning content words
wordSimilarities = {}
textualNeighborhoodSimilarities = {}
sourceWordIndicesBeingConsidered = []
targetWordIndicesBeingConsidered = []
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned or sourceLemmas[i-1] in stopwords + punctuations + ['\'s', '\'d', '\'ll']:
continue
for j in targetWordIndices:
if j in targetWordIndicesAlreadyAligned or targetLemmas[j-1] in stopwords + punctuations + ['\'s', '\'d', '\'ll']:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordIndicesBeingConsidered.append(i)
targetWordIndicesBeingConsidered.append(j)
# textual neighborhood similarities
sourceNeighborhood = findTextualNeighborhood(source, i, 3, 3)
targetNeighborhood = findTextualNeighborhood(target, j, 3, 3)
evidence = 0
for k in range(len(sourceNeighborhood[0])):
for l in range(len(targetNeighborhood[0])):
if (sourceNeighborhood[1][k] not in stopwords + punctuations) and ((sourceNeighborhood[0][k], targetNeighborhood[0][l]) in alignments or (wordRelatedness(sourceNeighborhood[1][k], 'none', targetNeighborhood[1][l], 'none')>=ppdbSim)):
evidence += wordRelatedness(sourceNeighborhood[1][k], 'none', targetNeighborhood[1][l], 'none')
textualNeighborhoodSimilarities[(i, j)] = evidence
numOfUnalignedWordsInSource = len(sourceWordIndicesBeingConsidered)
# now align: find the best alignment in each iteration of the following loop and include in alignments if good enough
for item in range(numOfUnalignedWordsInSource):
highestWeightedSim = 0
bestWordSim = 0
bestSourceIndex = -1
bestTargetIndex = -1
for i in sourceWordIndicesBeingConsidered:
if i in sourceWordIndicesAlreadyAligned:
continue
for j in targetWordIndicesBeingConsidered:
if j in targetWordIndicesAlreadyAligned:
continue
if (i, j) not in wordSimilarities:
continue
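                # theta1 weights the lexical similarity and (1 - theta1) the
                # contextual evidence; the pair with the highest weighted sum
                # is aligned in this iteration.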
theta2 = 1 - theta1
if theta1*wordSimilarities[(i, j)] + theta2*textualNeighborhoodSimilarities[(i, j)] > highestWeightedSim:
highestWeightedSim = theta1*wordSimilarities[(i, j)] + theta2*textualNeighborhoodSimilarities[(i, j)]
bestSourceIndex = i
bestTargetIndex = j
bestWordSim = wordSimilarities[(i, j)]
bestTextNeighborhoodSim = textualNeighborhoodSimilarities[(i, j)]
if bestWordSim>=ppdbSim and [bestSourceIndex, bestTargetIndex] not in alignments:
if sourceLemmas[bestSourceIndex-1] not in stopwords:
alignments.append([bestSourceIndex, bestTargetIndex])
sourceWordIndicesAlreadyAligned.append(bestSourceIndex)
targetWordIndicesAlreadyAligned.append(bestTargetIndex)
if bestSourceIndex in sourceWordIndicesBeingConsidered:
sourceWordIndicesBeingConsidered.remove(bestSourceIndex)
if bestTargetIndex in targetWordIndicesBeingConsidered:
targetWordIndicesBeingConsidered.remove(bestTargetIndex)
# look if any remaining word is a part of a hyphenated word
for i in sourceWordIndices:
if i in sourceWordIndicesAlreadyAligned:
continue
if '-' in sourceWords[i-1] and sourceWords[i-1] != '-':
tokens = sourceWords[i-1].split('-')
commonContiguousSublists = findAllCommonContiguousSublists(tokens, targetWords)
for item in commonContiguousSublists:
if len(item[0]) == 1 and target[item[1][0]][3] not in stopwords:
for jtem in item[1]:
if [i, jtem+1] not in alignments and jtem+1 not in targetWordIndicesAlreadyAligned:
alignments.append([i, jtem+1])
sourceWordIndicesAlreadyAligned.append(i)
targetWordIndicesAlreadyAligned.append(jtem+1)
for i in targetWordIndices:
if i in targetWordIndicesAlreadyAligned:
continue
if '-' in target[i-1][2] and target[i-1][2] != '-':
tokens = target[i-1][2].split('-')
commonContiguousSublists = findAllCommonContiguousSublists(sourceWords, tokens)
for item in commonContiguousSublists:
if len(item[0]) == 1 and source[item[0][0]][3] not in stopwords:
for jtem in item[0]:
if [jtem+1, i] not in alignments and i not in targetWordIndicesAlreadyAligned:
alignments.append([jtem+1, i])
sourceWordIndicesAlreadyAligned.append(jtem+1)
targetWordIndicesAlreadyAligned.append(i)
# collect evidence from dependency neighborhood for aligning stopwords
wordSimilarities = {}
dependencyNeighborhoodSimilarities = {}
sourceWordIndicesBeingConsidered = []
targetWordIndicesBeingConsidered = []
for i in sourceWordIndices:
if sourceLemmas[i-1] not in stopwords or i in sourceWordIndicesAlreadyAligned:
continue
for j in targetWordIndices:
if targetLemmas[j-1] not in stopwords or j in targetWordIndicesAlreadyAligned:
continue
if (sourceLemmas[i-1]!=targetLemmas[j-1]) and (wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1])<ppdbSim):
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordIndicesBeingConsidered.append(i)
targetWordIndicesBeingConsidered.append(j)
sourceWordParents = findParents(sourceDParse, i, sourceWords[i-1])
sourceWordChildren = findChildren(sourceDParse, i, sourceWords[i-1])
targetWordParents = findParents(targetDParse, j, targetWords[j-1])
targetWordChildren = findChildren(targetDParse, j, targetWords[j-1])
evidence = 0
for item in sourceWordParents:
for jtem in targetWordParents:
if [item[0], jtem[0]] in alignments:
evidence += 1
for item in sourceWordChildren:
for jtem in targetWordChildren:
if [item[0], jtem[0]] in alignments:
evidence += 1
dependencyNeighborhoodSimilarities[(i, j)] = evidence
numOfUnalignedWordsInSource = len(sourceWordIndicesBeingConsidered)
# now align: find the best alignment in each iteration of the following loop and include in alignments if good enough
for item in range(numOfUnalignedWordsInSource):
highestWeightedSim = 0
bestWordSim = 0
bestSourceIndex = -1
bestTargetIndex = -1
for i in sourceWordIndicesBeingConsidered:
for j in targetWordIndicesBeingConsidered:
if (i, j) not in wordSimilarities:
continue
theta2 = 1 - theta1
if theta1*wordSimilarities[(i, j)] + theta2*dependencyNeighborhoodSimilarities[(i, j)] > highestWeightedSim:
highestWeightedSim = theta1*wordSimilarities[(i, j)] + theta2*dependencyNeighborhoodSimilarities[(i, j)]
bestSourceIndex = i
bestTargetIndex = j
bestWordSim = wordSimilarities[(i, j)]
bestDependencyNeighborhoodSim = dependencyNeighborhoodSimilarities[(i, j)]
if bestWordSim>=ppdbSim and bestDependencyNeighborhoodSim>0 and [bestSourceIndex, bestTargetIndex] not in alignments:
alignments.append([bestSourceIndex, bestTargetIndex])
sourceWordIndicesAlreadyAligned.append(bestSourceIndex)
targetWordIndicesAlreadyAligned.append(bestTargetIndex)
if bestSourceIndex in sourceWordIndicesBeingConsidered:
sourceWordIndicesBeingConsidered.remove(bestSourceIndex)
if bestTargetIndex in targetWordIndicesBeingConsidered:
targetWordIndicesBeingConsidered.remove(bestTargetIndex)
# collect evidence from textual neighborhood for aligning stopwords and punctuations
wordSimilarities = {}
textualNeighborhoodSimilarities = {}
sourceWordIndicesBeingConsidered = []
targetWordIndicesBeingConsidered = []
for i in sourceWordIndices:
if (sourceLemmas[i-1] not in stopwords + punctuations + ['\'s', '\'d', '\'ll']) or i in sourceWordIndicesAlreadyAligned:
continue
for j in targetWordIndices:
if (targetLemmas[j-1] not in stopwords + punctuations + ['\'s', '\'d', '\'ll']) or j in targetWordIndicesAlreadyAligned:
continue
if wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]) < ppdbSim:
continue
wordSimilarities[(i, j)] = max(wordRelatedness(sourceWords[i-1], sourcePosTags[i-1], targetWords[j-1], targetPosTags[j-1]), wordRelatedness(sourceLemmas[i-1], sourcePosTags[i-1], targetLemmas[j-1], targetPosTags[j-1]))
sourceWordIndicesBeingConsidered.append(i)
targetWordIndicesBeingConsidered.append(j)
# textual neighborhood evidence
evidence = 0
if [i-1, j-1] in alignments:
evidence += 1
if [i+1, j+1] in alignments:
evidence += 1
            textualNeighborhoodSimilarities[(i, j)] = evidence
numOfUnalignedWordsInSource = len(sourceWordIndicesBeingConsidered)
# now align: find the best alignment in each iteration of the following loop and include in alignments if good enough
for item in range(numOfUnalignedWordsInSource):
highestWeightedSim = 0
bestWordSim = 0
bestSourceIndex = -1
bestTargetIndex = -1
for i in sourceWordIndicesBeingConsidered:
if i in sourceWordIndicesAlreadyAligned:
continue
for j in targetWordIndicesBeingConsidered:
if j in targetWordIndicesAlreadyAligned:
continue
if (i, j) not in wordSimilarities:
continue
theta2 = 1 - theta1
if theta1*wordSimilarities[(i, j)] + theta2*textualNeighborhoodSimilarities[(i, j)] > highestWeightedSim:
highestWeightedSim = theta1*wordSimilarities[(i, j)] + theta2*textualNeighborhoodSimilarities[(i, j)]
bestSourceIndex = i
bestTargetIndex = j
bestWordSim = wordSimilarities[(i, j)]
bestTextNeighborhoodSim = textualNeighborhoodSimilarities[(i, j)]
if bestWordSim>=ppdbSim and bestTextNeighborhoodSim>0 and [bestSourceIndex, bestTargetIndex] not in alignments:
alignments.append([bestSourceIndex, bestTargetIndex])
sourceWordIndicesAlreadyAligned.append(bestSourceIndex)
targetWordIndicesAlreadyAligned.append(bestTargetIndex)
if bestSourceIndex in sourceWordIndicesBeingConsidered:
sourceWordIndicesBeingConsidered.remove(bestSourceIndex)
if bestTargetIndex in targetWordIndicesBeingConsidered:
targetWordIndicesBeingConsidered.remove(bestTargetIndex)
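    # Word indices are 1-based, so drop any alignment that still refers to index 0.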
alignments = [item for item in alignments if item[0]!=0 and item[1]!=0]
return alignments
##############################################################################################################################
##############################################################################################################################
def align(sentence1, sentence2):
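    # Convenience wrapper: accepts raw sentences (strings or token lists), runs the
    # parse/lemmatize/POS-tag pipeline, and returns [index pairs, token pairs] with
    # 1-based word indices. For example, aligning two paraphrases that share the word
    # 'Boston' should yield an index pair linking the two occurrences (illustrative
    # example only).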
if isinstance(sentence1, list):
sentence1 = ' '.join(sentence1)
if isinstance(sentence2, list):
sentence2 = ' '.join(sentence2)
sentence1ParseResult = parseText(sentence1)
sentence2ParseResult = parseText(sentence2)
sentence1Lemmatized = lemmatize(sentence1ParseResult)
sentence2Lemmatized = lemmatize(sentence2ParseResult)
sentence1PosTagged = posTag(sentence1ParseResult)
sentence2PosTagged = posTag(sentence2ParseResult)
sentence1LemmasAndPosTags = []
for i in range(len(sentence1Lemmatized)):
sentence1LemmasAndPosTags.append([])
for i in range(len(sentence1Lemmatized)):
for item in sentence1Lemmatized[i]:
sentence1LemmasAndPosTags[i].append(item)
sentence1LemmasAndPosTags[i].append(sentence1PosTagged[i][3])
sentence2LemmasAndPosTags = []
for i in range(len(sentence2Lemmatized)):
sentence2LemmasAndPosTags.append([])
for i in range(len(sentence2Lemmatized)):
for item in sentence2Lemmatized[i]:
sentence2LemmasAndPosTags[i].append(item)
sentence2LemmasAndPosTags[i].append(sentence2PosTagged[i][3])
myWordAlignments = alignWords(sentence1LemmasAndPosTags, sentence2LemmasAndPosTags, sentence1ParseResult, sentence2ParseResult)
myWordAlignmentTokens = [[str(sentence1Lemmatized[item[0]-1][2]), str(sentence2Lemmatized[item[1]-1][2])] for item in myWordAlignments]
return [myWordAlignments, myWordAlignmentTokens]
def align_feats(parse_sa, parse_sb):
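    # Same as align(), but starting from pre-computed parse results. Returns
    # ([score], [index pairs, token pairs]) where score is the fraction of tokens,
    # source and target combined, that participate in at least one alignment.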
sentence1ParseResult = parse_sa
sentence2ParseResult = parse_sb
sentence1Lemmatized = lemmatize(sentence1ParseResult)
sentence2Lemmatized = lemmatize(sentence2ParseResult)
sentence1PosTagged = posTag(sentence1ParseResult)
sentence2PosTagged = posTag(sentence2ParseResult)
sentence1LemmasAndPosTags = []
for i in range(len(sentence1Lemmatized)):
sentence1LemmasAndPosTags.append([])
for i in range(len(sentence1Lemmatized)):
for item in sentence1Lemmatized[i]:
sentence1LemmasAndPosTags[i].append(item)
sentence1LemmasAndPosTags[i].append(sentence1PosTagged[i][3])
sentence2LemmasAndPosTags = []
for i in range(len(sentence2Lemmatized)):
sentence2LemmasAndPosTags.append([])
for i in range(len(sentence2Lemmatized)):
for item in sentence2Lemmatized[i]:
sentence2LemmasAndPosTags[i].append(item)
sentence2LemmasAndPosTags[i].append(sentence2PosTagged[i][3])
myWordAlignments = alignWords(sentence1LemmasAndPosTags, sentence2LemmasAndPosTags, sentence1ParseResult,
sentence2ParseResult)
myWordAlignmentTokens = [[ sentence1Lemmatized[item[0] - 1][2], sentence2Lemmatized[item[1] - 1][2] ] for
item in myWordAlignments]
n_ac1 = [pair[0] for pair in myWordAlignments]
n_ac1 = list(set(n_ac1))
n_ac2 = [pair[1] for pair in myWordAlignments]
n_ac2 = list(set(n_ac2))
n_c1 = len(parse_sa['sentences'][0]["tokens"])
n_c2 = len(parse_sb['sentences'][0]["tokens"])
score = 1.0 * (len(n_ac1) + len(n_ac2)) / (n_c1 + n_c2)
return [ score ], [myWordAlignments, myWordAlignmentTokens]
##############################################################################################################################
| mit | -1,147,417,126,804,859,300 | 60.83078 | 438 | 0.624526 | false |
Machyne/econ_comps | full_2011.py | 1 | 6827 | import os
import numpy as np
import pandas as pd
from pandas.tools.plotting import scatter_matrix
import pylab
import statsmodels.formula.api as smf
import statsmodels.stats.api as sms
from industry_to_days import get_census_mapper
"""
USAGE:
python full_2011.py
CREATES:
results/2011/clean.csv
results/2011/corr.txt
results/2011/het_breushpagan.txt
results/2011/ols1.txt
results/2011/ols2.txt
results/2011/scatter_matrix.png
results/2011/summary.txt
"""
COL_ORDER = ['vacation', 'paid_vacation', 'age', 'fam_size', 'is_female',
'income10', 'salary', 'is_employed']
PSID_CSV = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'psid', '2011.csv'))
def get_f_path(fname):
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'results', '2011', fname))
CLEAN_CSV = get_f_path('clean.csv')
CORR_TXT = get_f_path('corr.txt')
HET_BP_TXT = get_f_path('het_breushpagan.txt')
OLS1_TXT = get_f_path('ols1.txt')
OLS2_TXT = get_f_path('ols2.txt')
SCAT_MATRIX_PNG = get_f_path('scatter_matrix.png')
SUMMARY_TXT = get_f_path('summary.txt')
f_exists = (lambda file_: os.path.isfile(file_))
def _calc_vacation(row):
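    # Convert the survey's vacation answers into a single day count: sentinel codes
    # are treated as missing, weeks count as 5 days and months as 22 working days.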
took, days, weeks, months = (row['took_vac'], row['days_vac'],
row['weeks_vac'], row['months_vac'])
if took in [8, 9] or (days in [998, 999]) or (months in [98, 99]) or (
weeks in [98, 99]):
return np.nan
elif took == 5:
return 0
else:
return days + (5 * weeks) + (22 * months)
def _calc_salary(row):
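    # Annualize the reported salary: the unit code identifies the pay period (week,
    # fortnight, month or year); zero/sentinel amounts and unknown units become NaN.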
amt, unit = row['salary_amt'], row['salary_unit']
if amt in [0.0, 9999998.0] or unit in [0, 7, 8, 9]:
return np.nan
if unit == 3: # one week
scalar = 52.0
elif unit == 4: # two weeks
scalar = 26.0
elif unit == 5: # one month
scalar = 12.0
elif unit == 6: # one year
scalar = 1.0
return scalar * amt
def clean(df):
# make sex into dummy for is_female
df['is_female'] = df['sex'] - 1
# remove unknown age values
df.age = df.age.replace(999, np.nan)
# figure out total vacation taken
df['vacation'] = df.apply(_calc_vacation, axis=1)
# fix salary to be annual amount
df['salary'] = df.apply(_calc_salary, axis=1)
# remove outliers
df.ix[df.salary < 1e3] = np.nan
df.ix[df.salary >= 400e3] = np.nan
df.ix[df.income10 < 1e3] = np.nan
df.ix[df.income10 >= 400e3] = np.nan
# make employment into dummy for is_employed
df['is_employed'] = df.employment
# remove all those not working
for i in range(2,10) + [99]:
df.is_employed.replace(i, 0, inplace=True)
# merge industry data
df['paid_vacation'] = df.industry.map(get_census_mapper())
# drop old values
for col in ['took_vac', 'days_vac', 'weeks_vac', 'months_vac', 'industry',
'salary_amt', 'salary_unit', 'sex', 'employment']:
df.drop(col, axis=1, inplace=True)
df = df.reindex_axis(sorted(df.columns, key=COL_ORDER.index), axis=1)
return df
def do_stats(df):
# Only view those that received vacation and are employed
df.is_employed.replace(0.0, np.nan, inplace=True)
df.paid_vacation.replace(0.0, np.nan, inplace=True)
df.dropna(inplace=True)
# No longer need this dummy
df.drop('is_employed', axis=1, inplace=True)
# Summary stats
if not f_exists(SUMMARY_TXT):
summary = df.describe().T
summary = np.round(summary, decimals=3)
with open(SUMMARY_TXT, 'w') as f:
f.write(summary.to_string())
# Test for autocorrelation: scatter matrix, correlation, run OLS
if not f_exists(SCAT_MATRIX_PNG):
scatter_matrix(df, alpha=0.2, figsize=(64, 64), diagonal='hist')
pylab.savefig(SCAT_MATRIX_PNG, bbox_inches='tight')
if not f_exists(CORR_TXT):
corr = df.corr()
corr = corr.reindex_axis(
sorted(corr.columns, key=COL_ORDER.index), axis=0)
corr = corr.reindex_axis(
sorted(corr.columns, key=COL_ORDER.index), axis=1)
for i, k in enumerate(corr):
row = corr[k]
for j in range(len(row)):
if j > i:
row[j] = np.nan
with open(CORR_TXT, 'w') as f:
f.write(np.round(corr, decimals=3).to_string(na_rep=''))
if not f_exists(OLS1_TXT):
ols_results = smf.ols(
formula='vacation ~ paid_vacation + np.square(paid_vacation) + '
'age + fam_size + is_female + income10 + salary + '
'np.square(salary)',
data=df).fit()
with open(OLS1_TXT, 'w') as f:
f.write(str(ols_results.summary()))
f.write('\n\nCondition Number: {}'.format(
np.linalg.cond(ols_results.model.exog)))
# Need to drop salary, too much autocorrelation
df.drop('salary', axis=1, inplace=True)
    # Test for heteroskedasticity: Breusch-Pagan test on the reduced model
if not f_exists(HET_BP_TXT):
ols_results = smf.ols(
formula='vacation ~ paid_vacation + np.square(paid_vacation) + '
'age + fam_size + is_female + income10',
data=df).fit()
names = ['LM', 'LM P val.', 'F Stat.', 'F Stat. P val.']
test = sms.het_breushpagan(ols_results.resid, ols_results.model.exog)
f_p = test[3]
with open(HET_BP_TXT, 'w') as f:
str_ = '\n'.join('{}: {}'.format(n, v)
for n, v in zip(names, test))
f.write(str_ + '\n\n')
            if f_p < .01:
                f.write('Warning: Heteroskedasticity found!\n')
            else:
                f.write('No Heteroskedasticity found.\n')
# no Heteroskedasticity found
# final OLS results with robust standard errors
if not f_exists(OLS2_TXT):
ols_results = smf.ols(
formula='vacation ~ paid_vacation + np.square(paid_vacation) + '
'age + fam_size + is_female + income10',
data=df).fit().get_robustcov_results(cov_type='HAC', maxlags=1)
with open(OLS2_TXT, 'w') as f:
f.write(str(ols_results.summary()))
f.write('\n\nCondition Number: {}'.format(
np.linalg.cond(ols_results.model.exog)))
return df
def main():
df = None
if f_exists(CLEAN_CSV):
df = pd.io.parsers.read_csv(CLEAN_CSV)
df.drop('Unnamed: 0', axis=1, inplace=True)
else:
with open(PSID_CSV) as csv:
df = pd.io.parsers.read_csv(csv)
df = clean(df)
# write output to a file
with open(CLEAN_CSV, 'w+') as csv:
df.to_csv(path_or_buf=csv)
return do_stats(df)
if __name__ == '__main__':
main()
print '2011 succeeds! :)'
| bsd-3-clause | -5,579,685,436,232,137,000 | 32.465686 | 78 | 0.573458 | false |
jaggerkyne/PyKids | Part_02/Chapter_16/stick_figure_game.py | 1 | 4105 | # Another code written by Jagger Kyne
# Copyright 2006 - 2013 Jagger Kyne <[email protected]>
__author__ = 'Jagger Kyne'
from tkinter import *
import random
import time
class Game:
def __init__(self):
self.tk = Tk()
self.tk.title("Mr. Stick Man Race for the Exit")
self.tk.resizable(0,0)
self.tk.wm_attributes('-topmost',1)
self.canvas = Canvas(self.tk,width=500,height=500,highlightthickness=0)
self.canvas.pack()
self.canvas.update()
self.canvas_height = 500
self.canvas_width = 500
bg_path = '/Users/Wilson/Documents/Sites/Python_For_Kids/Part_02/Chapter_16/graphics/background.gif'
self.bg = PhotoImage(file=bg_path)
# test.gif
w = self.bg.width()
h = self.bg.height()
for x in range(0,5):
for y in range(0,5):
self.canvas.create_image(x*w,y*h,image=self.bg,anchor='nw')
self.sprites = []
self.running = True
def mainloop(self):
while 1:
if self.running == True:
for sprite in self.sprites:
sprite.move()
self.tk.update_idletasks()
self.tk.update()
time.sleep(0.01)
class Corrds:
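    # Holds the top-left (x1, y1) and bottom-right (x2, y2) corners of a sprite's
    # bounding rectangle on the canvas.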
def __init__(self,x1=0,y1=0,x2=0,y2=0):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
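# within_x and within_y report whether two bounding rectangles overlap on the
# horizontal and vertical axis respectively.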
def within_x(co1,co2):
if (co1.x1 > co2.x1 and co1.x1 < co2.x2)\
or (co1.x2 > co2.x1 and co1.x2 < co2.x2)\
            or (co2.x1 > co1.x1 and co2.x1 < co1.x2)\
            or (co2.x2 > co1.x1 and co2.x2 < co1.x2):
return True
else:
return False
def within_y(co1,co2):
if(co1.y1 > co2.y1 and co1.y1 <co2.y2)\
or(co1.y2 > co2.y1 and co1.y2 < co2.y2)\
or(co2.y1 > co1.y1 and co2.y1 < co1.y2)\
        or(co2.y2 > co1.y1 and co2.y2 < co1.y2):
return True
else:
return False
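# collided_left/right/top test whether co1 touches co2 on that side; collided_bottom
# also takes the sprite's vertical speed y, so it checks where the bottom edge will
# be after the next move.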
def collided_left(co1,co2):
if within_y(co1,co2):
if co1.x1 <= co2.x2 and co1.x1 >= co2.x1:
return True
return False
def collided_right(co1,co2):
if within_y(co1,co2):
if co1.x2 >= co2.x1 and co1.x2 <= co2.x2:
return True
return False
def collided_top(co1,co2):
if within_x(co1,co2):
if co1.y1 <= co2.y2 and co1.y1 >= co2.y1:
return True
return False
def collided_bottom(y,co1,co2):
if within_x(co1, co2):
y_cal = co1.y2 + y
if y_cal >= co2.y1 and y_cal <= co2.y2:
return True
return False
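# Base class for everything drawn on the canvas: the game loop calls move() on each
# sprite, and coords() exposes its bounding rectangle for collision tests.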
class Sprite:
def __init__(self,game):
self.game = game
self.endgame = False
self.coordinates = None
def move(self):
pass
def coords(self):
return self.coordinates
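# A platform drawn from an image; its bounding coordinates come from the image's
# position plus the given width and height.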
class PlatformSprite(Sprite):
def __init__(self,game,photo_image,x,y,width,height):
Sprite.__init__(self,game)
self.photo_image = photo_image
self.image = game.canvas.create_image(x,y,image=self.photo_image,anchor='nw')
self.coordinates = Corrds(x,y,x + width, y + height)
g = Game()
path1 = '/Users/Wilson/Documents/Sites/Python_For_Kids/Part_02/Chapter_16/graphics/platform1.gif'
path2 = '/Users/Wilson/Documents/Sites/Python_For_Kids/Part_02/Chapter_16/graphics/platform2.gif'
path3 = '/Users/Wilson/Documents/Sites/Python_For_Kids/Part_02/Chapter_16/graphics/platform3.gif'
platform1 = PlatformSprite(g,PhotoImage(file=path1),0,480,100,10)
platform2 = PlatformSprite(g,PhotoImage(file=path1),150,440,100,10)
platform3 = PlatformSprite(g,PhotoImage(file=path1),300,480,100,10)
platform4 = PlatformSprite(g,PhotoImage(file=path1),300,160,100,10)
platform5 = PlatformSprite(g,PhotoImage(file=path2),175,350,66,10)
platform6 = PlatformSprite(g,PhotoImage(file=path2),50,380,66,10)
platform7 = PlatformSprite(g,PhotoImage(file=path2),170,120,66,10)
platform8 = PlatformSprite(g,PhotoImage(file=path2),45,60,66,10)
platform9 = PlatformSprite(g,PhotoImage(file=path3),170,250,32,10)
platform10 = PlatformSprite(g,PhotoImage(file=path3),230,280,32,10)
g.sprites.append(platform1)
g.mainloop() | gpl-2.0 | -187,072,159,571,861,660 | 31.078125 | 108 | 0.613886 | false |
sagiss/sardana | sandbox/mntgrp_gui.py | 1 | 27553 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mg_gui.ui'
#
# Created: Fri Jul 29 15:42:51 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(501, 414)
self.gridLayout_2 = QtGui.QGridLayout(Form)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.groupBox = QtGui.QGroupBox(Form)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.taurusLabel = TaurusLabel(self.groupBox)
self.taurusLabel.setObjectName(_fromUtf8("taurusLabel"))
self.gridLayout.addWidget(self.taurusLabel, 0, 0, 1, 1)
self.taurusLabel_2 = TaurusLabel(self.groupBox)
self.taurusLabel_2.setObjectName(_fromUtf8("taurusLabel_2"))
self.gridLayout.addWidget(self.taurusLabel_2, 0, 2, 1, 1)
self.taurusLed_2 = TaurusLed(self.groupBox)
self.taurusLed_2.setObjectName(_fromUtf8("taurusLed_2"))
self.gridLayout.addWidget(self.taurusLed_2, 0, 3, 1, 1)
self.taurusLabel_3 = TaurusLabel(self.groupBox)
self.taurusLabel_3.setObjectName(_fromUtf8("taurusLabel_3"))
self.gridLayout.addWidget(self.taurusLabel_3, 1, 0, 1, 1)
self.taurusLabel_4 = TaurusLabel(self.groupBox)
self.taurusLabel_4.setObjectName(_fromUtf8("taurusLabel_4"))
self.gridLayout.addWidget(self.taurusLabel_4, 1, 2, 1, 1)
self.taurusLed_3 = TaurusLed(self.groupBox)
self.taurusLed_3.setObjectName(_fromUtf8("taurusLed_3"))
self.gridLayout.addWidget(self.taurusLed_3, 1, 3, 1, 1)
self.taurusLabel_5 = TaurusLabel(self.groupBox)
self.taurusLabel_5.setObjectName(_fromUtf8("taurusLabel_5"))
self.gridLayout.addWidget(self.taurusLabel_5, 2, 0, 1, 1)
self.taurusLabel_6 = TaurusLabel(self.groupBox)
self.taurusLabel_6.setObjectName(_fromUtf8("taurusLabel_6"))
self.gridLayout.addWidget(self.taurusLabel_6, 2, 2, 1, 1)
self.taurusLed_4 = TaurusLed(self.groupBox)
self.taurusLed_4.setObjectName(_fromUtf8("taurusLed_4"))
self.gridLayout.addWidget(self.taurusLed_4, 2, 3, 1, 1)
self.taurusLabel_7 = TaurusLabel(self.groupBox)
self.taurusLabel_7.setObjectName(_fromUtf8("taurusLabel_7"))
self.gridLayout.addWidget(self.taurusLabel_7, 3, 0, 1, 1)
self.taurusLabel_8 = TaurusLabel(self.groupBox)
self.taurusLabel_8.setObjectName(_fromUtf8("taurusLabel_8"))
self.gridLayout.addWidget(self.taurusLabel_8, 3, 2, 1, 1)
self.taurusLed_5 = TaurusLed(self.groupBox)
self.taurusLed_5.setObjectName(_fromUtf8("taurusLed_5"))
self.gridLayout.addWidget(self.taurusLed_5, 3, 3, 1, 1)
self.taurusLabel_9 = TaurusLabel(self.groupBox)
self.taurusLabel_9.setObjectName(_fromUtf8("taurusLabel_9"))
self.gridLayout.addWidget(self.taurusLabel_9, 4, 0, 1, 1)
self.taurusLabel_10 = TaurusLabel(self.groupBox)
self.taurusLabel_10.setObjectName(_fromUtf8("taurusLabel_10"))
self.gridLayout.addWidget(self.taurusLabel_10, 4, 2, 1, 1)
self.taurusLed_6 = TaurusLed(self.groupBox)
self.taurusLed_6.setObjectName(_fromUtf8("taurusLed_6"))
self.gridLayout.addWidget(self.taurusLed_6, 4, 3, 1, 1)
self.taurusLabel_11 = TaurusLabel(self.groupBox)
self.taurusLabel_11.setObjectName(_fromUtf8("taurusLabel_11"))
self.gridLayout.addWidget(self.taurusLabel_11, 5, 0, 1, 1)
self.taurusLabel_12 = TaurusLabel(self.groupBox)
self.taurusLabel_12.setObjectName(_fromUtf8("taurusLabel_12"))
self.gridLayout.addWidget(self.taurusLabel_12, 5, 2, 1, 1)
self.taurusLed_7 = TaurusLed(self.groupBox)
self.taurusLed_7.setObjectName(_fromUtf8("taurusLed_7"))
self.gridLayout.addWidget(self.taurusLed_7, 5, 3, 1, 1)
self.gridLayout_2.addWidget(self.groupBox, 0, 2, 1, 1)
self.groupBox_2 = QtGui.QGroupBox(Form)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.gridLayout_3 = QtGui.QGridLayout(self.groupBox_2)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.taurusLabel_13 = TaurusLabel(self.groupBox_2)
self.taurusLabel_13.setObjectName(_fromUtf8("taurusLabel_13"))
self.gridLayout_3.addWidget(self.taurusLabel_13, 0, 0, 1, 1)
self.taurusLabel_14 = TaurusLabel(self.groupBox_2)
self.taurusLabel_14.setObjectName(_fromUtf8("taurusLabel_14"))
self.gridLayout_3.addWidget(self.taurusLabel_14, 0, 2, 1, 1)
self.taurusLed_8 = TaurusLed(self.groupBox_2)
self.taurusLed_8.setObjectName(_fromUtf8("taurusLed_8"))
self.gridLayout_3.addWidget(self.taurusLed_8, 0, 3, 1, 1)
self.taurusLabel_15 = TaurusLabel(self.groupBox_2)
self.taurusLabel_15.setObjectName(_fromUtf8("taurusLabel_15"))
self.gridLayout_3.addWidget(self.taurusLabel_15, 1, 0, 1, 1)
self.taurusLabel_16 = TaurusLabel(self.groupBox_2)
self.taurusLabel_16.setObjectName(_fromUtf8("taurusLabel_16"))
self.gridLayout_3.addWidget(self.taurusLabel_16, 1, 2, 1, 1)
self.taurusLed_9 = TaurusLed(self.groupBox_2)
self.taurusLed_9.setObjectName(_fromUtf8("taurusLed_9"))
self.gridLayout_3.addWidget(self.taurusLed_9, 1, 3, 1, 1)
self.taurusLabel_17 = TaurusLabel(self.groupBox_2)
self.taurusLabel_17.setObjectName(_fromUtf8("taurusLabel_17"))
self.gridLayout_3.addWidget(self.taurusLabel_17, 2, 0, 1, 1)
self.taurusLabel_18 = TaurusLabel(self.groupBox_2)
self.taurusLabel_18.setObjectName(_fromUtf8("taurusLabel_18"))
self.gridLayout_3.addWidget(self.taurusLabel_18, 2, 2, 1, 1)
self.taurusLed_10 = TaurusLed(self.groupBox_2)
self.taurusLed_10.setObjectName(_fromUtf8("taurusLed_10"))
self.gridLayout_3.addWidget(self.taurusLed_10, 2, 3, 1, 1)
self.taurusLabel_19 = TaurusLabel(self.groupBox_2)
self.taurusLabel_19.setObjectName(_fromUtf8("taurusLabel_19"))
self.gridLayout_3.addWidget(self.taurusLabel_19, 3, 0, 1, 1)
self.taurusLabel_20 = TaurusLabel(self.groupBox_2)
self.taurusLabel_20.setObjectName(_fromUtf8("taurusLabel_20"))
self.gridLayout_3.addWidget(self.taurusLabel_20, 3, 2, 1, 1)
self.taurusLed_11 = TaurusLed(self.groupBox_2)
self.taurusLed_11.setObjectName(_fromUtf8("taurusLed_11"))
self.gridLayout_3.addWidget(self.taurusLed_11, 3, 3, 1, 1)
self.taurusLabel_21 = TaurusLabel(self.groupBox_2)
self.taurusLabel_21.setObjectName(_fromUtf8("taurusLabel_21"))
self.gridLayout_3.addWidget(self.taurusLabel_21, 4, 0, 1, 1)
self.taurusLabel_22 = TaurusLabel(self.groupBox_2)
self.taurusLabel_22.setObjectName(_fromUtf8("taurusLabel_22"))
self.gridLayout_3.addWidget(self.taurusLabel_22, 4, 2, 1, 1)
self.taurusLed_12 = TaurusLed(self.groupBox_2)
self.taurusLed_12.setObjectName(_fromUtf8("taurusLed_12"))
self.gridLayout_3.addWidget(self.taurusLed_12, 4, 3, 1, 1)
self.taurusLabel_23 = TaurusLabel(self.groupBox_2)
self.taurusLabel_23.setObjectName(_fromUtf8("taurusLabel_23"))
self.gridLayout_3.addWidget(self.taurusLabel_23, 5, 0, 1, 1)
self.taurusLabel_24 = TaurusLabel(self.groupBox_2)
self.taurusLabel_24.setObjectName(_fromUtf8("taurusLabel_24"))
self.gridLayout_3.addWidget(self.taurusLabel_24, 5, 2, 1, 1)
self.taurusLed_13 = TaurusLed(self.groupBox_2)
self.taurusLed_13.setObjectName(_fromUtf8("taurusLed_13"))
self.gridLayout_3.addWidget(self.taurusLed_13, 5, 3, 1, 1)
self.gridLayout_2.addWidget(self.groupBox_2, 0, 3, 1, 1)
self.groupBox_4 = QtGui.QGroupBox(Form)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.gridLayout_5 = QtGui.QGridLayout(self.groupBox_4)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.taurusLabel_26 = TaurusLabel(self.groupBox_4)
self.taurusLabel_26.setObjectName(_fromUtf8("taurusLabel_26"))
self.gridLayout_5.addWidget(self.taurusLabel_26, 0, 0, 1, 3)
self.taurusLed_14 = TaurusLed(self.groupBox_4)
self.taurusLed_14.setObjectName(_fromUtf8("taurusLed_14"))
self.gridLayout_5.addWidget(self.taurusLed_14, 1, 0, 1, 1)
self.taurusLabel_29 = TaurusLabel(self.groupBox_4)
self.taurusLabel_29.setObjectName(_fromUtf8("taurusLabel_29"))
self.gridLayout_5.addWidget(self.taurusLabel_29, 2, 0, 1, 1)
self.taurusLabel_30 = TaurusLabel(self.groupBox_4)
self.taurusLabel_30.setObjectName(_fromUtf8("taurusLabel_30"))
self.gridLayout_5.addWidget(self.taurusLabel_30, 2, 1, 1, 1)
self.taurusValueLineEdit_2 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_2.setObjectName(_fromUtf8("taurusValueLineEdit_2"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_2, 2, 2, 1, 1)
self.taurusLabel_33 = TaurusLabel(self.groupBox_4)
self.taurusLabel_33.setObjectName(_fromUtf8("taurusLabel_33"))
self.gridLayout_5.addWidget(self.taurusLabel_33, 3, 0, 1, 1)
self.taurusLabel_34 = TaurusLabel(self.groupBox_4)
self.taurusLabel_34.setObjectName(_fromUtf8("taurusLabel_34"))
self.gridLayout_5.addWidget(self.taurusLabel_34, 3, 1, 1, 1)
self.taurusValueLineEdit_4 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_4.setObjectName(_fromUtf8("taurusValueLineEdit_4"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_4, 3, 2, 1, 1)
self.taurusLabel_37 = TaurusLabel(self.groupBox_4)
self.taurusLabel_37.setObjectName(_fromUtf8("taurusLabel_37"))
self.gridLayout_5.addWidget(self.taurusLabel_37, 4, 0, 1, 1)
self.taurusLabel_38 = TaurusLabel(self.groupBox_4)
self.taurusLabel_38.setObjectName(_fromUtf8("taurusLabel_38"))
self.gridLayout_5.addWidget(self.taurusLabel_38, 4, 1, 1, 1)
self.taurusValueLineEdit_6 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_6.setObjectName(_fromUtf8("taurusValueLineEdit_6"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_6, 4, 2, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.taurusCommandButton_2 = TaurusCommandButton(self.groupBox_4)
self.taurusCommandButton_2.setObjectName(_fromUtf8("taurusCommandButton_2"))
self.horizontalLayout_2.addWidget(self.taurusCommandButton_2)
self.cfgMg2 = QtGui.QToolButton(self.groupBox_4)
self.cfgMg2.setObjectName(_fromUtf8("cfgMg2"))
self.horizontalLayout_2.addWidget(self.cfgMg2)
self.horizontalLayout_2.setStretch(0, 1)
self.gridLayout_5.addLayout(self.horizontalLayout_2, 1, 1, 1, 2)
self.gridLayout_2.addWidget(self.groupBox_4, 1, 3, 1, 1)
self.groupBox_3 = QtGui.QGroupBox(Form)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.gridLayout_4 = QtGui.QGridLayout(self.groupBox_3)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.taurusLabel_25 = TaurusLabel(self.groupBox_3)
self.taurusLabel_25.setObjectName(_fromUtf8("taurusLabel_25"))
self.gridLayout_4.addWidget(self.taurusLabel_25, 0, 0, 1, 3)
self.taurusLabel_27 = TaurusLabel(self.groupBox_3)
self.taurusLabel_27.setObjectName(_fromUtf8("taurusLabel_27"))
self.gridLayout_4.addWidget(self.taurusLabel_27, 2, 1, 1, 1)
self.taurusLabel_28 = TaurusLabel(self.groupBox_3)
self.taurusLabel_28.setObjectName(_fromUtf8("taurusLabel_28"))
self.gridLayout_4.addWidget(self.taurusLabel_28, 2, 0, 1, 1)
self.taurusValueLineEdit = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit.setObjectName(_fromUtf8("taurusValueLineEdit"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit, 2, 2, 1, 1)
self.taurusLed = TaurusLed(self.groupBox_3)
self.taurusLed.setObjectName(_fromUtf8("taurusLed"))
self.gridLayout_4.addWidget(self.taurusLed, 1, 0, 1, 1)
self.taurusLabel_31 = TaurusLabel(self.groupBox_3)
self.taurusLabel_31.setObjectName(_fromUtf8("taurusLabel_31"))
self.gridLayout_4.addWidget(self.taurusLabel_31, 3, 0, 1, 1)
self.taurusLabel_32 = TaurusLabel(self.groupBox_3)
self.taurusLabel_32.setObjectName(_fromUtf8("taurusLabel_32"))
self.gridLayout_4.addWidget(self.taurusLabel_32, 3, 1, 1, 1)
self.taurusValueLineEdit_3 = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit_3.setObjectName(_fromUtf8("taurusValueLineEdit_3"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit_3, 3, 2, 1, 1)
self.taurusLabel_35 = TaurusLabel(self.groupBox_3)
self.taurusLabel_35.setObjectName(_fromUtf8("taurusLabel_35"))
self.gridLayout_4.addWidget(self.taurusLabel_35, 4, 0, 1, 1)
self.taurusLabel_36 = TaurusLabel(self.groupBox_3)
self.taurusLabel_36.setObjectName(_fromUtf8("taurusLabel_36"))
self.gridLayout_4.addWidget(self.taurusLabel_36, 4, 1, 1, 1)
self.taurusValueLineEdit_5 = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit_5.setObjectName(_fromUtf8("taurusValueLineEdit_5"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit_5, 4, 2, 1, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.taurusCommandButton = TaurusCommandButton(self.groupBox_3)
self.taurusCommandButton.setObjectName(_fromUtf8("taurusCommandButton"))
self.horizontalLayout_3.addWidget(self.taurusCommandButton)
self.cfgMg1 = QtGui.QToolButton(self.groupBox_3)
self.cfgMg1.setObjectName(_fromUtf8("cfgMg1"))
self.horizontalLayout_3.addWidget(self.cfgMg1)
self.gridLayout_4.addLayout(self.horizontalLayout_3, 1, 1, 1, 2)
self.gridLayout_2.addWidget(self.groupBox_3, 1, 2, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Form", "CTs of CTRL1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_2.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_2.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_3.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_3.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_4.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_3.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_5.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_5.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_6.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_4.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_7.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_7.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_8.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_5.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_9.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_9.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_10.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_6.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_11.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_11.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_12.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_7.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/state", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "CTs of CTRL2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_13.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_13.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_14.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_8.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_15.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_15.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_16.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_9.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_17.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_17.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_18.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_10.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_19.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_19.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_20.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_11.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_21.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_21.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_22.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_12.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_23.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_23.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_24.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_13.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/state", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_4.setTitle(QtGui.QApplication.translate("Form", "MG2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_26.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/elementlist", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_26.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_14.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_29.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_29.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_30.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_2.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_33.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_33.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_34.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_4.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_37.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_37.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_38.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_6.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton_2.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton_2.setCommand(QtGui.QApplication.translate("Form", "start", None, QtGui.QApplication.UnicodeUTF8))
self.cfgMg2.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "MG1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_25.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/elementlist", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_25.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_27.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_28.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_28.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_31.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_31.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_32.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_3.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_35.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_35.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_36.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_5.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton.setCommand(QtGui.QApplication.translate("Form", "start", None, QtGui.QApplication.UnicodeUTF8))
self.cfgMg1.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
from taurus.qt.qtgui.display import TaurusLabel, TaurusLed
from taurus.qt.qtgui.input import TaurusValueLineEdit
from taurus.qt.qtgui.button import TaurusCommandButton
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
| lgpl-3.0 | 3,599,267,286,467,090,400 | 79.329446 | 168 | 0.726926 | false |
CityGrid/arsenal | server/arsenalweb/views/all_audit.py | 1 | 4827 | '''Arsenal audit UI.'''
# Copyright 2015 CityGrid Media, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from pyramid.view import view_config
from arsenalweb.views import (
_api_get,
get_authenticated_user,
get_nav_urls,
get_pag_params,
site_layout,
)
LOG = logging.getLogger(__name__)
@view_config(route_name='data_centers_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='hardware_profiles_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='ip_addresses_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='network_interfaces_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='node_groups_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='nodes_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='operating_systems_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='statuses_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
@view_config(route_name='tags_audit', permission='view', renderer='arsenalweb:templates/all_audit.pt')
def view_all_audit(request):
'''Handle requests for the overall object type audit UI route.'''
page_title_type = 'objects/'
auth_user = get_authenticated_user(request)
(perpage, offset) = get_pag_params(request)
meta = {
'data_centers_audit': {
'page_type': 'Data Centers',
'object_type': 'data_centers',
},
'hardware_profiles_audit': {
'page_type': 'Hardware Profiles',
'object_type': 'hardware_profiles',
},
'ip_addresses_audit': {
'page_type': 'IpAddress',
'object_type': 'ip_addresses',
},
'network_interfaces_audit': {
'page_type': 'NetworkInterface',
'object_type': 'network_interfaces',
},
'nodes_audit': {
'page_type': 'Node',
'object_type': 'nodes',
},
'node_groups_audit': {
'page_type': 'Node Group',
'object_type': 'node_groups',
},
'operating_systems_audit': {
'page_type': 'Operating Systems',
'object_type': 'operating_systems',
},
'statuses_audit': {
'page_type': 'Status',
'object_type': 'statuses',
},
'tags_audit': {
'page_type': 'Tags',
'object_type': 'tags',
},
}
params = meta[request.matched_route.name]
page_title_name = '{0}_audit'.format(params['object_type'])
uri = '/api/{0}_audit'.format(params['object_type'])
payload = {}
for k in request.GET:
payload[k] = request.GET[k]
# Force the UI to 50 results per page
if not perpage:
perpage = 50
payload['perpage'] = perpage
LOG.info('UI requesting data from API={0},payload={1}'.format(uri, payload))
resp = _api_get(request, uri, payload)
total = 0
objects_audit = []
if resp:
total = resp['meta']['total']
objects_audit = resp['results']
nav_urls = get_nav_urls(request.path, offset, perpage, total, payload)
# Used by the columns menu to determine what to show/hide.
column_selectors = [
{'name': 'created', 'pretty_name': 'Date Created'},
{'name': 'field', 'pretty_name': 'Field'},
{'name': 'new_value', 'pretty_name': 'New Value'},
{'name': 'node_audit_id', 'pretty_name': 'Audit ID'},
{'name': 'object_id', 'pretty_name': '{0} ID'.format(params['page_type'])},
{'name': 'old_value', 'pretty_name': 'Old Value'},
{'name': 'updated_by', 'pretty_name': 'Updated By'},
]
return {
'au': auth_user,
'column_selectors': column_selectors,
'layout': site_layout('max'),
'nav_urls': nav_urls,
'objects_audit': objects_audit,
'offset': offset,
'page_title_name': page_title_name,
'page_title_type': page_title_type,
'params': params,
'perpage': perpage,
'total': total,
}
| apache-2.0 | 3,563,715,030,307,404,300 | 35.293233 | 116 | 0.610317 | false |
ZeitOnline/briefkasten | application/briefkasten/views.py | 1 | 4612 | # -*- coding: utf-8 -*-
import pkg_resources
import colander
from pyramid.httpexceptions import HTTPFound
from pyramid.renderers import get_renderer
from briefkasten import _, is_equal
title = "ZEIT ONLINE Briefkasten"
version = pkg_resources.get_distribution("briefkasten").version
class _FieldStorage(colander.SchemaType):
def deserialize(self, node, cstruct):
if cstruct in (colander.null, None, '', b''):
return colander.null
# weak attempt at duck-typing
if not hasattr(cstruct, 'file'):
raise colander.Invalid(node, "%s is not a FieldStorage instance" % cstruct)
return cstruct
class DropboxSchema(colander.MappingSchema):
message = colander.SchemaNode(
colander.String(),
title=_(u'Anonymous submission to the editors'),
missing=None)
upload = colander.SchemaNode(
_FieldStorage(),
missing=None
)
testing_secret = colander.SchemaNode(
colander.String(),
missing=u'')
dropbox_schema = DropboxSchema()
def defaults(request):
return dict(
static_url=request.static_url('briefkasten:static/'),
base_url=request.registry.settings.get('appserver_root_url', '/'),
master=get_renderer('%s:templates/master.pt' % request.registry.settings.get(
'theme_package', 'briefkasten')).implementation().macros['master'],
version=version,
title=title)
def dropbox_form(request):
""" generates a dropbox uid and renders the submission form with a signed version of that id"""
from briefkasten import generate_post_token
token = generate_post_token(secret=request.registry.settings['post_secret'])
return dict(
action=request.route_url('dropbox_form_submit', token=token),
fileupload_url=request.route_url('dropbox_fileupload', token=token),
**defaults(request))
def dropbox_fileupload(dropbox, request):
""" accepts a single file upload and adds it to the dropbox as attachment"""
attachment = request.POST['attachment']
attached = dropbox.add_attachment(attachment)
return dict(
files=[dict(
name=attached,
type=attachment.type,
)]
)
def dropbox_submission(dropbox, request):
""" handles the form submission, redirects to the dropbox's status page."""
try:
data = dropbox_schema.deserialize(request.POST)
except Exception:
return HTTPFound(location=request.route_url('dropbox_form'))
# set the message
dropbox.message = data.get('message')
# recognize submission from watchdog
if 'testing_secret' in dropbox.settings:
dropbox.from_watchdog = is_equal(
dropbox.settings['test_submission_secret'],
data.pop('testing_secret', ''))
# a non-js client might have uploaded an attachment via the form's fileupload field:
if data.get('upload') is not None:
dropbox.add_attachment(data['upload'])
# now we can call the process method
dropbox.submit()
drop_url = request.route_url('dropbox_view', drop_id=dropbox.drop_id)
print("Created dropbox %s" % drop_url)
return HTTPFound(location=drop_url)
def dropbox_submitted(dropbox, request):
appstruct = defaults(request)
appstruct.update(
title='%s - %s' % (title, dropbox.status),
drop_id=dropbox.drop_id,
status_code=dropbox.status[0],
status_int=dropbox.status_int,
status=dropbox.status,
replies=dropbox.replies)
return appstruct
class DropboxReplySchema(colander.MappingSchema):
reply = colander.SchemaNode(colander.String())
author = colander.SchemaNode(colander.String())
dropboxreply_schema = DropboxReplySchema()
def dropbox_editor_view(dropbox, request):
appstruct = defaults(request)
appstruct.update(
title='%s - %s' % (title, dropbox.status),
drop_id=dropbox.drop_id,
status=dropbox.status,
replies=dropbox.replies,
message=None,
action=request.url,
)
return appstruct
def dropbox_reply_submitted(dropbox, request):
try:
data = DropboxReplySchema().deserialize(request.POST)
except Exception:
appstruct = defaults(request)
appstruct.update(
title='%s - %s' % (title, dropbox.status),
action=request.url,
message=u'Alle Felder müssen ausgefüllt werden.',
drop_id=dropbox.drop_id,
)
return appstruct
dropbox.add_reply(data)
return HTTPFound(location=request.route_url('dropbox_view', drop_id=dropbox.drop_id))
| bsd-3-clause | 6,311,432,539,316,876,000 | 31.013889 | 99 | 0.662473 | false |
CoffeRobot/amazon_challenge_motion | src/amazon_challenge_motion/bt_motion.py | 1 | 18421 | #!/usr/bin/env python
# motion_utils
#
# Created on: April 13, 2015
# Authors: Francisco Vina
# fevb <at> kth.se
#
# Copyright (c) 2015, Francisco Vina, CVAP, KTH
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of KTH nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL KTH BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import rospy
import moveit_commander
import numpy as np
import amazon_challenge_bt_actions.msg
import actionlib
from std_msgs.msg import String
from pr2_controllers_msgs.msg import Pr2GripperCommand
import copy
import random
from moveit_commander import PlanningSceneInterface
import tf
from simtrack_nodes.srv import SwitchObjects
from amazon_challenge_grasping.srv import BaseMove, BaseMoveRequest, BaseMoveResponse
from std_srvs.srv import Empty, EmptyRequest
from geometry_msgs.msg import PoseStamped
class BTMotion:
def __init__(self, name):
# create messages that are used to publish feedback/result
self._feedback = amazon_challenge_bt_actions.msg.BTFeedback()
self._result = amazon_challenge_bt_actions.msg.BTResult()
self._action_name = name
self._as = actionlib.SimpleActionServer(self._action_name, amazon_challenge_bt_actions.msg.BTAction, execute_cb=self.execute_cb, auto_start = False)
self.pub_posed = rospy.Publisher('arm_posed', String, queue_size=10)
self.pub_rate = rospy.Rate(30)
self._planning_scene = PlanningSceneInterface()
# get ROS parameters
rospy.loginfo('Getting parameters...')
while not rospy.is_shutdown():
try:
self._base_move_params = rospy.get_param('/base_move')
self._timeout = rospy.get_param(name + '/timeout')
self._sim = rospy.get_param(name + '/sim')
self._base_pos_dict = rospy.get_param('/base_pos_dict')
self._left_arm_joint_pos_dict = rospy.get_param('/left_arm_joint_pos_dict')
self._right_arm_joint_pos_dict = rospy.get_param('/right_arm_joint_pos_dict')
break
except:
rospy.sleep(random.uniform(0,2))
continue
self._exit = False
while not rospy.is_shutdown():
try:
self._robot = moveit_commander.RobotCommander()
self._left_arm = self._robot.get_group('left_arm')
self._right_arm = self._robot.get_group('right_arm')
self._arms = self._robot.get_group('arms')
self._torso = self._robot.get_group('torso')
self._head = self._robot.get_group('head')
self._arms_dict = {'left_arm': self._left_arm, 'right_arm': self._right_arm}
break
except:
rospy.sleep(random.uniform(0,2))
continue
self._tf_listener = tf.TransformListener()
self._next_task_sub = rospy.Subscriber("/amazon_next_task", String, self.get_task)
self._shelf_pose_sub = rospy.Subscriber("/pubShelfSep", PoseStamped, self.get_shelf_pose)
self._got_shelf_pose = False
self._l_gripper_pub = rospy.Publisher('/l_gripper_controller/command', Pr2GripperCommand)
while not rospy.is_shutdown():
try:
self._tool_size = rospy.get_param('/tool_size', [0.16, 0.02, 0.04])
self._contest = rospy.get_param('/contest', True)
break
except:
rospy.sleep(random.uniform(0,1))
continue
if self._contest:
self._length_tool = 0.18 + self._tool_size[0]
else:
self._length_tool = 0.216 + self._tool_size[0]
self._as.start()
rospy.loginfo('['+rospy.get_name()+']: ready!')
def get_shelf_pose(self, msg):
self._shelf_pose = msg
self._got_shelf_pose = True
def get_bm_srv(self):
while not rospy.is_shutdown():
try:
rospy.wait_for_service('/base_move_server/move', 5.0)
rospy.wait_for_service('/base_move_server/preempt', 5.0)
break
except:
rospy.loginfo('[' + rospy.get_name() + ']: waiting for base move server')
continue
self._bm_move_srv = rospy.ServiceProxy('/base_move_server/move', BaseMove)
self._bm_preempt_srv = rospy.ServiceProxy('/base_move_server/preempt', Empty)
def timer_callback(self, event):
self._timer_started = True
rospy.logerr('[' + rospy.get_name() + ']: TIMED OUT!')
self._planning_scene.remove_attached_object('l_wrist_roll_link', 'grasped_object')
self._planning_scene.remove_world_object('grasped_object')
# pull the base back 60 cm
self._left_arm.stop()
self._right_arm.stop()
r = rospy.Rate(1.0)
while not self._got_shelf_pose:
rospy.loginfo('[' + rospy.get_name() + ']: waiting for shelf pose')
r.sleep()
base_pos_goal = [-1.42, -self._shelf_pose.pose.position.y, 0.0, 0.0, 0.0, 0.0]
self.get_bm_srv()
self._bm_preempt_srv.call(EmptyRequest())
while not self.go_base_pos_async(base_pos_goal):
rospy.sleep(1.0)
left_arm_joint_pos_goal = copy.deepcopy(self._left_arm_joint_pos_dict['start'])
right_arm_joint_pos_goal = copy.deepcopy(self._right_arm_joint_pos_dict['start'])
joint_pos_goal = left_arm_joint_pos_goal + right_arm_joint_pos_goal
self._arms.set_joint_value_target(joint_pos_goal)
self._arms.go()
self._exit = True
def execute_exit(self):
if self._exit:
self._success = False
self.set_status('FAILURE')
self._timer.shutdown()
return True
return False
def execute_cb(self, goal):
print 'bt motion execute callback'
def shutdown_simtrack(self):
# get simtrack switch objects service
while not rospy.is_shutdown():
try:
rospy.wait_for_service('/simtrack/switch_objects', 10.0)
break
except:
rospy.loginfo('[' + rospy.get_name() + ']: waiting for simtrack switch object service')
continue
simtrack_switch_objects_srv = rospy.ServiceProxy('/simtrack/switch_objects', SwitchObjects)
simtrack_switch_objects_srv.call()
def init_as(self):
self._planning_scene.remove_attached_object('l_wrist_roll_link', 'grasped_object')
self._planning_scene.remove_world_object('grasped_object')
self._timer_started = False
self._exit=False
self._timer = rospy.Timer(rospy.Duration(self._timeout), self.timer_callback, oneshot=True)
self.shutdown_simtrack()
rospy.sleep(2.0)
def get_task(self, msg):
text = msg.data
text = text.replace('[','')
text = text.replace(']','')
words = text.split(',')
self._bin = words[0]
self._item = words[1]
def get_row(self):
'''
For setting the torso height and arm pose
'''
while not rospy.is_shutdown():
try:
if self._bin=='bin_A' or self._bin=='bin_B' or self._bin=='bin_C':
return 'row_1'
elif self._bin=='bin_D' or self._bin=='bin_E' or self._bin=='bin_F':
return 'row_2'
elif self._bin=='bin_G' or self._bin=='bin_H' or self._bin=='bin_I':
return 'row_3'
elif self._bin=='bin_J' or self._bin=='bin_K' or self._bin=='bin_L':
return 'row_4'
except:
pass
def get_column(self):
'''
For setting the base pose
'''
while not rospy.is_shutdown():
try:
if self._bin=='bin_A' or self._bin=='bin_D' or self._bin=='bin_G' or self._bin=='bin_J':
return 'column_1'
elif self._bin=='bin_B' or self._bin=='bin_E' or self._bin=='bin_H' or self._bin=='bin_K':
return 'column_2'
elif self._bin=='bin_C' or self._bin=='bin_F' or self._bin=='bin_I' or self._bin=='bin_L':
return 'column_3'
except:
pass
def go_joint_goal_async(self, group, joint_pos_goal, normalize_angles=False):
q_goal = np.array(joint_pos_goal)
if normalize_angles:
q_goal = self.normalize_angles(joint_pos_goal)
group.set_joint_value_target(joint_pos_goal)
if not group.go(wait=False):
return False
q_now = np.array(group.get_current_joint_values())
if normalize_angles:
q_now = self.normalize_angles(q_now)
q_tol = group.get_goal_joint_tolerance()
if group.get_name()=='left_arm' or group.get_name()=='right_arm' or group.get_name()=='arms' or group.get_name()=='head':
q_tol = 0.04
elif group.get_name()=='torso':
q_tol = 0.003
t_print = rospy.Time.now()
r = rospy.Rate(1.0)
        # check for preemption while the arm hasn't reached the goal configuration
while np.max(np.abs(q_goal-q_now)) > q_tol and not rospy.is_shutdown():
if self.execute_exit():
return False
q_now = np.array(group.get_current_joint_values())
if normalize_angles:
q_now = self.normalize_angles(q_now)
# check that preempt has not been requested by the client
if self._as.is_preempt_requested():
#HERE THE CODE TO EXECUTE WHEN THE BEHAVIOR TREE DOES HALT THE ACTION
group.stop()
rospy.loginfo('action halted')
self._as.set_preempted()
self._exit = True
if self.execute_exit():
return False
if (rospy.Time.now()-t_print).to_sec()>3.0:
t_print = rospy.Time.now()
rospy.loginfo('[' + rospy.get_name() + ']: executing action')
#HERE THE CODE TO EXECUTE AS LONG AS THE BEHAVIOR TREE DOES NOT HALT THE ACTION
r.sleep()
if rospy.is_shutdown():
return False
return True
def normalize_angles(self, q):
'''
normalize angles to -pi, pi
'''
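        # For illustration: np.mod maps an input of 3*pi/2 to 3*pi/2, which is
        # greater than pi, so it becomes -(2*pi - 3*pi/2) = -pi/2, i.e.
        # normalize_angles(np.array([3*np.pi/2])) is approximately [-pi/2].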
q_normalized = np.mod(q, 2*np.pi)
for i in xrange(np.size(q)):
if q_normalized[i] > np.pi:
q_normalized[i] = -(2*np.pi - q_normalized[i])
return q_normalized
def go_base_pos_async(self, base_pos_goal):
angle = base_pos_goal[5]
pos = base_pos_goal[0:2]
r = rospy.Rate(20.0)
req = BaseMoveRequest()
req.x = pos[0]
req.y = pos[1]
req.theta = angle
self.get_bm_srv()
res = self._bm_move_srv.call(req)
if self.execute_exit():
return False
# check that preempt has not been requested by the client
if self._as.is_preempt_requested():
#HERE THE CODE TO EXECUTE WHEN THE BEHAVIOR TREE DOES HALT THE ACTION
rospy.loginfo('action halted while moving base')
self._as.set_preempted()
self._exit = True
if self.execute_exit():
return False
return res.result
def go_base_moveit_group_pos_async(self, base_pos_goal, group, joint_pos_goal, normalize_angles=False):
angle = base_pos_goal[5]
pos = base_pos_goal[0:2]
r = rospy.Rate(20.0)
q_goal = np.array(joint_pos_goal)
if normalize_angles:
q_goal = self.normalize_angles(joint_pos_goal)
group.set_joint_value_target(joint_pos_goal)
group.go(wait=False)
q_now = np.array(group.get_current_joint_values())
if normalize_angles:
q_now = self.normalize_angles(q_now)
q_tol = group.get_goal_joint_tolerance()
if group.get_name()=='left_arm' or group.get_name()=='right_arm' or group.get_name()=='arms' or group.get_name()=='head':
q_tol = 0.04
elif group.get_name()=='torso':
q_tol = 0.003
t_print = rospy.Time.now()
req = BaseMoveRequest()
req.x = pos[0]
req.y = pos[1]
req.theta = angle
self.get_bm_srv()
res = self._bm_move_srv.call(req)
if self.execute_exit():
return False
# check that preempt has not been requested by the client
if self._as.is_preempt_requested():
#HERE THE CODE TO EXECUTE WHEN THE BEHAVIOR TREE DOES HALT THE ACTION
rospy.loginfo('action halted while moving base')
self._as.set_preempted()
self._exit = True
if self.execute_exit():
return False
        # check for preemption while the arm hasn't reached the goal configuration
while np.max(np.abs(q_goal-q_now)) > q_tol and not rospy.is_shutdown():
if self.execute_exit():
return False
q_now = np.array(group.get_current_joint_values())
if normalize_angles:
q_now = self.normalize_angles(q_now)
# check that preempt has not been requested by the client
if self._as.is_preempt_requested():
#HERE THE CODE TO EXECUTE WHEN THE BEHAVIOR TREE DOES HALT THE ACTION
group.stop()
rospy.loginfo('action halted')
self._as.set_preempted()
self._exit = True
if self.execute_exit():
return False
if (rospy.Time.now()-t_print).to_sec()>3.0:
t_print = rospy.Time.now()
rospy.loginfo('[' + rospy.get_name() + ']: executing action')
#HERE THE CODE TO EXECUTE AS LONG AS THE BEHAVIOR TREE DOES NOT HALT THE ACTION
r.sleep()
if rospy.is_shutdown():
return False
return res.result
def request_detection(self):
client = actionlib.SimpleActionClient('amazon_detector', amazon_challenge_bt_actions.msg.DetectorAction)
# Waits until the action server has started up and started
# listening for goals.
rospy.loginfo('Start Waiting')
client.wait_for_server()
# Creates a goal to send to the action server.
goal = amazon_challenge_bt_actions.msg.DetectorGoal(parameter=1)
# Sends the goal to the action server.
client.send_goal(goal)
rospy.loginfo('Goal Sent')
# Waits for the server to finish performing the action.
client.wait_for_result()
def set_status(self,status):
if status == 'SUCCESS':
self.pub_posed.publish("SUCCESS")
rospy.sleep(1)
self._feedback.status = 1
self._result.status = self._feedback.status
rospy.loginfo('Action %s: Succeeded' % self._action_name)
self._as.set_succeeded(self._result)
elif status == 'FAILURE':
self._feedback.status = 2
self._result.status = self._feedback.status
rospy.loginfo('Action %s: Failed' % self._action_name)
self._as.set_succeeded(self._result)
else:
rospy.logerr('Action %s: has a wrong return status' % self._action_name)
def open_l_gripper(self):
gripper_command_msg = Pr2GripperCommand()
gripper_command_msg.max_effort = 40.0
gripper_command_msg.position = 10.0
r = rospy.Rate(10.0)
t_init = rospy.Time.now()
while (rospy.Time.now()-t_init).to_sec()<3.0 and not rospy.is_shutdown():
if self.execute_exit():
return False
self._l_gripper_pub.publish(gripper_command_msg)
# check that preempt has not been requested by the client
if self._as.is_preempt_requested():
#HERE THE CODE TO EXECUTE WHEN THE BEHAVIOR TREE DOES HALT THE ACTION
rospy.loginfo('action halted while opening gripper')
self._as.set_preempted()
self._exit = True
if self.execute_exit():
return False
r.sleep()
return True
def move_l_arm_z(self, z_desired):
'''
        computes a straight-line cartesian path in the z direction
        :param z_desired: desired z of the tool tip w.r.t. odom_combined
:return:
'''
waypoints = []
# waypoints.append(self._left_arm.get_current_pose().pose)
wpose = copy.deepcopy(self._left_arm.get_current_pose().pose)
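        # Presumably adding _length_tool converts the desired tool-tip height
        # into a goal for the arm's end-effector frame, so z_desired can be
        # expressed for the tool tip rather than the wrist.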
wpose.position.z = z_desired + self._length_tool
waypoints.append(copy.deepcopy(wpose))
(plan, fraction) = self._left_arm.compute_cartesian_path(waypoints, 0.05, 0.0)
# TODO make this asynchronous
self._left_arm.execute(plan)
return True
| bsd-3-clause | 7,456,780,908,091,122,000 | 33.239777 | 156 | 0.575267 | false |
SaschaMester/delicium | gpu/command_buffer/build_gles2_cmd_buffer.py | 1 | 350071 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""code generator for GLES2 command buffers."""
import itertools
import os
import os.path
import sys
import re
import platform
from optparse import OptionParser
from subprocess import call
_SIZE_OF_UINT32 = 4
_SIZE_OF_COMMAND_HEADER = 4
_FIRST_SPECIFIC_COMMAND_ID = 256
_LICENSE = """// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"""
_DO_NOT_EDIT_WARNING = """// This file is auto-generated from
// gpu/command_buffer/build_gles2_cmd_buffer.py
// It's formatted by clang-format using chromium coding style:
// clang-format -i -style=chromium filename
// DO NOT EDIT!
"""
# This string is copied directly out of the gl2.h file from GLES2.0
#
# Edits:
#
# *) Any argument that is a resourceID has been changed to GLid<Type>.
# (not pointer arguments) and if it's allowed to be zero it's GLidZero<Type>
# If it's allowed to not exist it's GLidBind<Type>
#
# *) All GLenums have been changed to GLenumTypeOfEnum
#
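# As an illustration (not a verbatim line), a prototype written with these
# conventions in "cmd_buffer_functions.txt" might look like:
#
#   GL_APICALL void GL_APIENTRY glBindBuffer (
#       GLenumBufferTarget target, GLidBindBuffer buffer);
#
# where GLenumBufferTarget selects the 'BufferTarget' named type defined below
# and GLidBindBuffer marks a resource ID that is allowed to not exist.
#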
_GL_TYPES = {
'GLenum': 'unsigned int',
'GLboolean': 'unsigned char',
'GLbitfield': 'unsigned int',
'GLbyte': 'signed char',
'GLshort': 'short',
'GLint': 'int',
'GLsizei': 'int',
'GLubyte': 'unsigned char',
'GLushort': 'unsigned short',
'GLuint': 'unsigned int',
'GLfloat': 'float',
'GLclampf': 'float',
'GLvoid': 'void',
'GLfixed': 'int',
'GLclampx': 'int'
}
_GL_TYPES_32 = {
'GLintptr': 'long int',
'GLsizeiptr': 'long int'
}
_GL_TYPES_64 = {
'GLintptr': 'long long int',
'GLsizeiptr': 'long long int'
}
# Capabilities selected with glEnable
_CAPABILITY_FLAGS = [
{'name': 'blend'},
{'name': 'cull_face'},
{'name': 'depth_test', 'state_flag': 'framebuffer_state_.clear_state_dirty'},
{'name': 'dither', 'default': True},
{'name': 'polygon_offset_fill'},
{'name': 'sample_alpha_to_coverage'},
{'name': 'sample_coverage'},
{'name': 'scissor_test'},
{'name': 'stencil_test',
'state_flag': 'framebuffer_state_.clear_state_dirty'},
{'name': 'rasterizer_discard', 'es3': True},
{'name': 'primitive_restart_fixed_index', 'es3': True},
]
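# Note: each entry's 'name' is upper-cased and prefixed with "GL_" to form the
# glEnable/glDisable enum, e.g. 'cull_face' maps to GL_CULL_FACE (see the
# 'Capability' entry in _NAMED_TYPE_INFO below). The optional 'state_flag'
# appears to name a decoder dirty-state flag to set when the capability
# changes.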
_STATES = {
'ClearColor': {
'type': 'Normal',
'func': 'ClearColor',
'enum': 'GL_COLOR_CLEAR_VALUE',
'states': [
{'name': 'color_clear_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'ClearDepthf': {
'type': 'Normal',
'func': 'ClearDepth',
'enum': 'GL_DEPTH_CLEAR_VALUE',
'states': [
{'name': 'depth_clear', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'ColorMask': {
'type': 'Normal',
'func': 'ColorMask',
'enum': 'GL_COLOR_WRITEMASK',
'states': [
{
'name': 'color_mask_red',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_green',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_blue',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_alpha',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'state_flag': 'framebuffer_state_.clear_state_dirty',
},
'ClearStencil': {
'type': 'Normal',
'func': 'ClearStencil',
'enum': 'GL_STENCIL_CLEAR_VALUE',
'states': [
{'name': 'stencil_clear', 'type': 'GLint', 'default': '0'},
],
},
'BlendColor': {
'type': 'Normal',
'func': 'BlendColor',
'enum': 'GL_BLEND_COLOR',
'states': [
{'name': 'blend_color_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'BlendEquation': {
'type': 'SrcDst',
'func': 'BlendEquationSeparate',
'states': [
{
'name': 'blend_equation_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_RGB',
'default': 'GL_FUNC_ADD',
},
{
'name': 'blend_equation_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_ALPHA',
'default': 'GL_FUNC_ADD',
},
],
},
'BlendFunc': {
'type': 'SrcDst',
'func': 'BlendFuncSeparate',
'states': [
{
'name': 'blend_source_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_RGB',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_RGB',
'default': 'GL_ZERO',
},
{
'name': 'blend_source_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_ALPHA',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_ALPHA',
'default': 'GL_ZERO',
},
],
},
'PolygonOffset': {
'type': 'Normal',
'func': 'PolygonOffset',
'states': [
{
'name': 'polygon_offset_factor',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_FACTOR',
'default': '0.0f',
},
{
'name': 'polygon_offset_units',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_UNITS',
'default': '0.0f',
},
],
},
'CullFace': {
'type': 'Normal',
'func': 'CullFace',
'enum': 'GL_CULL_FACE_MODE',
'states': [
{
'name': 'cull_mode',
'type': 'GLenum',
'default': 'GL_BACK',
},
],
},
'FrontFace': {
'type': 'Normal',
'func': 'FrontFace',
'enum': 'GL_FRONT_FACE',
'states': [{'name': 'front_face', 'type': 'GLenum', 'default': 'GL_CCW'}],
},
'DepthFunc': {
'type': 'Normal',
'func': 'DepthFunc',
'enum': 'GL_DEPTH_FUNC',
'states': [{'name': 'depth_func', 'type': 'GLenum', 'default': 'GL_LESS'}],
},
'DepthRange': {
'type': 'Normal',
'func': 'DepthRange',
'enum': 'GL_DEPTH_RANGE',
'states': [
{'name': 'z_near', 'type': 'GLclampf', 'default': '0.0f'},
{'name': 'z_far', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'SampleCoverage': {
'type': 'Normal',
'func': 'SampleCoverage',
'states': [
{
'name': 'sample_coverage_value',
'type': 'GLclampf',
'enum': 'GL_SAMPLE_COVERAGE_VALUE',
'default': '1.0f',
},
{
'name': 'sample_coverage_invert',
'type': 'GLboolean',
'enum': 'GL_SAMPLE_COVERAGE_INVERT',
'default': 'false',
},
],
},
'StencilMask': {
'type': 'FrontBack',
'func': 'StencilMaskSeparate',
'state_flag': 'framebuffer_state_.clear_state_dirty',
'states': [
{
'name': 'stencil_front_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
{
'name': 'stencil_back_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
],
},
'StencilOp': {
'type': 'FrontBack',
'func': 'StencilOpSeparate',
'states': [
{
'name': 'stencil_front_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
],
},
'StencilFunc': {
'type': 'FrontBack',
'func': 'StencilFuncSeparate',
'states': [
{
'name': 'stencil_front_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_front_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_REF',
'default': '0',
},
{
'name': 'stencil_front_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_back_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_BACK_REF',
'default': '0',
},
{
'name': 'stencil_back_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
],
},
'Hint': {
'type': 'NamedParameter',
'func': 'Hint',
'states': [
{
'name': 'hint_generate_mipmap',
'type': 'GLenum',
'enum': 'GL_GENERATE_MIPMAP_HINT',
'default': 'GL_DONT_CARE',
'gl_version_flag': '!is_desktop_core_profile'
},
{
'name': 'hint_fragment_shader_derivative',
'type': 'GLenum',
'enum': 'GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES',
'default': 'GL_DONT_CARE',
'extension_flag': 'oes_standard_derivatives'
}
],
},
'PixelStore': {
'type': 'NamedParameter',
'func': 'PixelStorei',
'states': [
{
'name': 'pack_alignment',
'type': 'GLint',
'enum': 'GL_PACK_ALIGNMENT',
'default': '4'
},
{
'name': 'unpack_alignment',
'type': 'GLint',
'enum': 'GL_UNPACK_ALIGNMENT',
'default': '4'
}
],
},
  # TODO: Consider implementing these states
# GL_ACTIVE_TEXTURE
'LineWidth': {
'type': 'Normal',
'func': 'LineWidth',
'enum': 'GL_LINE_WIDTH',
'states': [
{
'name': 'line_width',
'type': 'GLfloat',
'default': '1.0f',
'range_checks': [{'check': "<= 0.0f", 'test_value': "0.0f"}],
'nan_check': True,
}],
},
'DepthMask': {
'type': 'Normal',
'func': 'DepthMask',
'enum': 'GL_DEPTH_WRITEMASK',
'states': [
{
'name': 'depth_mask',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'state_flag': 'framebuffer_state_.clear_state_dirty',
},
'Scissor': {
'type': 'Normal',
'func': 'Scissor',
'enum': 'GL_SCISSOR_BOX',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'scissor_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'scissor_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'scissor_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'scissor_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
'Viewport': {
'type': 'Normal',
'func': 'Viewport',
'enum': 'GL_VIEWPORT',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'viewport_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'viewport_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'viewport_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'viewport_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
'MatrixValuesCHROMIUM': {
'type': 'NamedParameter',
'func': 'MatrixLoadfEXT',
'states': [
{ 'enum': 'GL_PATH_MODELVIEW_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_MODELVIEW_CHROMIUM',
'name': 'modelview_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
{ 'enum': 'GL_PATH_PROJECTION_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_PROJECTION_CHROMIUM',
'name': 'projection_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
],
},
}
# Named type info object represents a named type that is used in OpenGL call
# arguments. Each named type defines a set of valid OpenGL call arguments. The
# named types are used in 'cmd_buffer_functions.txt'.
# type: The actual GL type of the named type.
# valid: The list of values that are valid for both the client and the service.
# valid_es3: The list of values that are valid in OpenGL ES 3, but not ES 2.
# invalid: Examples of invalid values for the type. At least these values
# should be tested to be invalid.
# deprecated_es3: The list of values that are valid in OpenGL ES 2, but
# deprecated in ES 3.
# is_complete: The list of valid values of the type is final and will not be
# modified during runtime.
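# For example, the 'BlitFilter' entry below marks GL_NEAREST and GL_LINEAR as
# valid for both the client and the service, and lists GL_LINEAR_MIPMAP_LINEAR
# as an invalid value that should at least be tested to be rejected.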
_NAMED_TYPE_INFO = {
'BlitFilter': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
'invalid': [
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'FrameBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'valid_es3': [
'GL_DRAW_FRAMEBUFFER' ,
'GL_READ_FRAMEBUFFER' ,
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'InvalidateFrameBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'invalid': [
'GL_DRAW_FRAMEBUFFER' ,
'GL_READ_FRAMEBUFFER' ,
],
},
'RenderBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER',
],
'invalid': [
'GL_FRAMEBUFFER',
],
},
'BufferTarget': {
'type': 'GLenum',
'valid': [
'GL_ARRAY_BUFFER',
'GL_ELEMENT_ARRAY_BUFFER',
],
'valid_es3': [
'GL_COPY_READ_BUFFER',
'GL_COPY_WRITE_BUFFER',
'GL_PIXEL_PACK_BUFFER',
'GL_PIXEL_UNPACK_BUFFER',
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'IndexedBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'MapBufferAccess': {
'type': 'GLenum',
'valid': [
'GL_MAP_READ_BIT',
'GL_MAP_WRITE_BIT',
'GL_MAP_INVALIDATE_RANGE_BIT',
'GL_MAP_INVALIDATE_BUFFER_BIT',
'GL_MAP_FLUSH_EXPLICIT_BIT',
'GL_MAP_UNSYNCHRONIZED_BIT',
],
'invalid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
],
},
'Bufferiv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
'GL_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferuiv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
'GL_DEPTH',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfi': {
'type': 'GLenum',
'valid': [
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'BufferUsage': {
'type': 'GLenum',
'valid': [
'GL_STREAM_DRAW',
'GL_STATIC_DRAW',
'GL_DYNAMIC_DRAW',
],
'valid_es3': [
'GL_STREAM_READ',
'GL_STREAM_COPY',
'GL_STATIC_READ',
'GL_STATIC_COPY',
'GL_DYNAMIC_READ',
'GL_DYNAMIC_COPY',
],
'invalid': [
'GL_NONE',
],
},
'CompressedTextureFormat': {
'type': 'GLenum',
'valid': [
],
'valid_es3': [
'GL_COMPRESSED_R11_EAC',
'GL_COMPRESSED_SIGNED_R11_EAC',
'GL_COMPRESSED_RG11_EAC',
'GL_COMPRESSED_SIGNED_RG11_EAC',
'GL_COMPRESSED_RGB8_ETC2',
'GL_COMPRESSED_SRGB8_ETC2',
'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_RGBA8_ETC2_EAC',
'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
],
},
'GLState': {
'type': 'GLenum',
'valid': [
      # NOTE: State and Capability entries are added later.
'GL_ACTIVE_TEXTURE',
'GL_ALIASED_LINE_WIDTH_RANGE',
'GL_ALIASED_POINT_SIZE_RANGE',
'GL_ALPHA_BITS',
'GL_ARRAY_BUFFER_BINDING',
'GL_BLUE_BITS',
'GL_COMPRESSED_TEXTURE_FORMATS',
'GL_CURRENT_PROGRAM',
'GL_DEPTH_BITS',
'GL_DEPTH_RANGE',
'GL_ELEMENT_ARRAY_BUFFER_BINDING',
'GL_FRAMEBUFFER_BINDING',
'GL_GENERATE_MIPMAP_HINT',
'GL_GREEN_BITS',
'GL_IMPLEMENTATION_COLOR_READ_FORMAT',
'GL_IMPLEMENTATION_COLOR_READ_TYPE',
'GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS',
'GL_MAX_CUBE_MAP_TEXTURE_SIZE',
'GL_MAX_FRAGMENT_UNIFORM_VECTORS',
'GL_MAX_RENDERBUFFER_SIZE',
'GL_MAX_TEXTURE_IMAGE_UNITS',
'GL_MAX_TEXTURE_SIZE',
'GL_MAX_VARYING_VECTORS',
'GL_MAX_VERTEX_ATTRIBS',
'GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS',
'GL_MAX_VERTEX_UNIFORM_VECTORS',
'GL_MAX_VIEWPORT_DIMS',
'GL_NUM_COMPRESSED_TEXTURE_FORMATS',
'GL_NUM_SHADER_BINARY_FORMATS',
'GL_PACK_ALIGNMENT',
'GL_RED_BITS',
'GL_RENDERBUFFER_BINDING',
'GL_SAMPLE_BUFFERS',
'GL_SAMPLE_COVERAGE_INVERT',
'GL_SAMPLE_COVERAGE_VALUE',
'GL_SAMPLES',
'GL_SCISSOR_BOX',
'GL_SHADER_BINARY_FORMATS',
'GL_SHADER_COMPILER',
'GL_SUBPIXEL_BITS',
'GL_STENCIL_BITS',
'GL_TEXTURE_BINDING_2D',
'GL_TEXTURE_BINDING_CUBE_MAP',
'GL_UNPACK_ALIGNMENT',
'GL_BIND_GENERATES_RESOURCE_CHROMIUM',
# we can add this because we emulate it if the driver does not support it.
'GL_VERTEX_ARRAY_BINDING_OES',
'GL_VIEWPORT',
],
'valid_es3': [
'GL_COPY_READ_BUFFER_BINDING',
'GL_COPY_WRITE_BUFFER_BINDING',
'GL_DRAW_BUFFER0',
'GL_DRAW_BUFFER1',
'GL_DRAW_BUFFER2',
'GL_DRAW_BUFFER3',
'GL_DRAW_BUFFER4',
'GL_DRAW_BUFFER5',
'GL_DRAW_BUFFER6',
'GL_DRAW_BUFFER7',
'GL_DRAW_BUFFER8',
'GL_DRAW_BUFFER9',
'GL_DRAW_BUFFER10',
'GL_DRAW_BUFFER11',
'GL_DRAW_BUFFER12',
'GL_DRAW_BUFFER13',
'GL_DRAW_BUFFER14',
'GL_DRAW_BUFFER15',
'GL_DRAW_FRAMEBUFFER_BINDING',
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
'GL_MAJOR_VERSION',
'GL_MAX_3D_TEXTURE_SIZE',
'GL_MAX_ARRAY_TEXTURE_LAYERS',
'GL_MAX_COLOR_ATTACHMENTS',
'GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_COMBINED_UNIFORM_BLOCKS',
'GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS',
'GL_MAX_DRAW_BUFFERS',
'GL_MAX_ELEMENT_INDEX',
'GL_MAX_ELEMENTS_INDICES',
'GL_MAX_ELEMENTS_VERTICES',
'GL_MAX_FRAGMENT_INPUT_COMPONENTS',
'GL_MAX_FRAGMENT_UNIFORM_BLOCKS',
'GL_MAX_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_PROGRAM_TEXEL_OFFSET',
'GL_MAX_SAMPLES',
'GL_MAX_SERVER_WAIT_TIMEOUT',
'GL_MAX_TEXTURE_LOD_BIAS',
'GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS',
'GL_MAX_UNIFORM_BLOCK_SIZE',
'GL_MAX_UNIFORM_BUFFER_BINDINGS',
'GL_MAX_VARYING_COMPONENTS',
'GL_MAX_VERTEX_OUTPUT_COMPONENTS',
'GL_MAX_VERTEX_UNIFORM_BLOCKS',
'GL_MAX_VERTEX_UNIFORM_COMPONENTS',
'GL_MIN_PROGRAM_TEXEL_OFFSET',
'GL_MINOR_VERSION',
'GL_NUM_EXTENSIONS',
'GL_NUM_PROGRAM_BINARY_FORMATS',
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_PIXEL_PACK_BUFFER_BINDING',
'GL_PIXEL_UNPACK_BUFFER_BINDING',
'GL_PROGRAM_BINARY_FORMATS',
'GL_READ_BUFFER',
'GL_READ_FRAMEBUFFER_BINDING',
'GL_SAMPLER_BINDING',
'GL_TEXTURE_BINDING_2D_ARRAY',
'GL_TEXTURE_BINDING_3D',
'GL_TRANSFORM_FEEDBACK_BINDING',
'GL_TRANSFORM_FEEDBACK_ACTIVE',
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_PAUSED',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_SKIP_IMAGES',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
# GL_VERTEX_ARRAY_BINDING is the same as GL_VERTEX_ARRAY_BINDING_OES
# 'GL_VERTEX_ARRAY_BINDING',
],
'invalid': [
'GL_FOG_HINT',
],
},
'IndexedGLState': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
],
'invalid': [
'GL_FOG_HINT',
],
},
'GetTexParamTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_2D_ARRAY',
'GL_TEXTURE_3D',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'TextureTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP_POSITIVE_X',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_X',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Y',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Y',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Z',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Z',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'Texture3DTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_2D',
]
},
'TextureBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_1D',
'GL_TEXTURE_3D',
],
},
'TransformFeedbackBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK',
],
'invalid': [
'GL_TEXTURE_2D',
],
},
'TransformFeedbackPrimitiveMode': {
'type': 'GLenum',
'valid': [
'GL_POINTS',
'GL_LINES',
'GL_TRIANGLES',
],
'invalid': [
'GL_LINE_LOOP',
],
},
'ShaderType': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_SHADER',
'GL_FRAGMENT_SHADER',
],
'invalid': [
'GL_GEOMETRY_SHADER',
],
},
'FaceType': {
'type': 'GLenum',
'valid': [
'GL_FRONT',
'GL_BACK',
'GL_FRONT_AND_BACK',
],
},
'FaceMode': {
'type': 'GLenum',
'valid': [
'GL_CW',
'GL_CCW',
],
},
'CmpFunction': {
'type': 'GLenum',
'valid': [
'GL_NEVER',
'GL_LESS',
'GL_EQUAL',
'GL_LEQUAL',
'GL_GREATER',
'GL_NOTEQUAL',
'GL_GEQUAL',
'GL_ALWAYS',
],
},
'Equation': {
'type': 'GLenum',
'valid': [
'GL_FUNC_ADD',
'GL_FUNC_SUBTRACT',
'GL_FUNC_REVERSE_SUBTRACT',
],
'valid_es3': [
'GL_MIN',
'GL_MAX',
],
'invalid': [
'GL_NONE',
],
},
'SrcBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
'GL_SRC_ALPHA_SATURATE',
],
},
'DstBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
],
},
'Capability': {
'type': 'GLenum',
'valid': ["GL_%s" % cap['name'].upper() for cap in _CAPABILITY_FLAGS
if 'es3' not in cap or cap['es3'] != True],
'valid_es3': ["GL_%s" % cap['name'].upper() for cap in _CAPABILITY_FLAGS
if 'es3' in cap and cap['es3'] == True],
'invalid': [
'GL_CLIP_PLANE0',
'GL_POINT_SPRITE',
],
},
'DrawMode': {
'type': 'GLenum',
'valid': [
'GL_POINTS',
'GL_LINE_STRIP',
'GL_LINE_LOOP',
'GL_LINES',
'GL_TRIANGLE_STRIP',
'GL_TRIANGLE_FAN',
'GL_TRIANGLES',
],
'invalid': [
'GL_QUADS',
'GL_POLYGON',
],
},
'IndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
],
'valid_es3': [
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'GetMaxIndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'Attachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_ATTACHMENT0',
'GL_DEPTH_ATTACHMENT',
'GL_STENCIL_ATTACHMENT',
],
'valid_es3': [
'GL_DEPTH_STENCIL_ATTACHMENT',
],
},
'BackbufferAttachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_EXT',
'GL_DEPTH_EXT',
'GL_STENCIL_EXT',
],
},
'BufferParameter': {
'type': 'GLenum',
'valid': [
'GL_BUFFER_SIZE',
'GL_BUFFER_USAGE',
],
'valid_es3': [
'GL_BUFFER_ACCESS_FLAGS',
'GL_BUFFER_MAPPED',
'GL_BUFFER_MAP_LENGTH',
'GL_BUFFER_MAP_OFFSET',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'BufferMode': {
'type': 'GLenum',
'valid': [
'GL_INTERLEAVED_ATTRIBS',
'GL_SEPARATE_ATTRIBS',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'FrameBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE',
],
'valid_es3': [
'GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER',
],
},
'MatrixMode': {
'type': 'GLenum',
'valid': [
'GL_PATH_PROJECTION_CHROMIUM',
'GL_PATH_MODELVIEW_CHROMIUM',
],
},
'ProgramParameter': {
'type': 'GLenum',
'valid': [
'GL_DELETE_STATUS',
'GL_LINK_STATUS',
'GL_VALIDATE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_ATTACHED_SHADERS',
'GL_ACTIVE_ATTRIBUTES',
'GL_ACTIVE_ATTRIBUTE_MAX_LENGTH',
'GL_ACTIVE_UNIFORMS',
'GL_ACTIVE_UNIFORM_MAX_LENGTH',
],
'valid_es3': [
'GL_ACTIVE_UNIFORM_BLOCKS',
'GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH',
'GL_TRANSFORM_FEEDBACK_BUFFER_MODE',
'GL_TRANSFORM_FEEDBACK_VARYINGS',
'GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH',
],
'invalid': [
'GL_PROGRAM_BINARY_RETRIEVABLE_HINT', # not supported in Chromium.
],
},
'QueryObjectParameter': {
'type': 'GLenum',
'valid': [
'GL_QUERY_RESULT_EXT',
'GL_QUERY_RESULT_AVAILABLE_EXT',
],
},
'QueryParameter': {
'type': 'GLenum',
'valid': [
'GL_CURRENT_QUERY_EXT',
],
},
'QueryTarget': {
'type': 'GLenum',
'valid': [
'GL_ANY_SAMPLES_PASSED_EXT',
'GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT',
'GL_COMMANDS_ISSUED_CHROMIUM',
'GL_LATENCY_QUERY_CHROMIUM',
'GL_ASYNC_PIXEL_UNPACK_COMPLETED_CHROMIUM',
'GL_ASYNC_PIXEL_PACK_COMPLETED_CHROMIUM',
'GL_COMMANDS_COMPLETED_CHROMIUM',
],
},
'RenderBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER_RED_SIZE',
'GL_RENDERBUFFER_GREEN_SIZE',
'GL_RENDERBUFFER_BLUE_SIZE',
'GL_RENDERBUFFER_ALPHA_SIZE',
'GL_RENDERBUFFER_DEPTH_SIZE',
'GL_RENDERBUFFER_STENCIL_SIZE',
'GL_RENDERBUFFER_WIDTH',
'GL_RENDERBUFFER_HEIGHT',
'GL_RENDERBUFFER_INTERNAL_FORMAT',
],
'valid_es3': [
'GL_RENDERBUFFER_SAMPLES',
],
},
'InternalFormatParameter': {
'type': 'GLenum',
'valid': [
'GL_NUM_SAMPLE_COUNTS',
'GL_SAMPLES',
],
},
'SamplerParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
'GL_TEXTURE_WRAP_R',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_COMPARE_FUNC',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'ShaderParameter': {
'type': 'GLenum',
'valid': [
'GL_SHADER_TYPE',
'GL_DELETE_STATUS',
'GL_COMPILE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_SHADER_SOURCE_LENGTH',
'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
],
},
'ShaderPrecision': {
'type': 'GLenum',
'valid': [
'GL_LOW_FLOAT',
'GL_MEDIUM_FLOAT',
'GL_HIGH_FLOAT',
'GL_LOW_INT',
'GL_MEDIUM_INT',
'GL_HIGH_INT',
],
},
'StringType': {
'type': 'GLenum',
'valid': [
'GL_VENDOR',
'GL_RENDERER',
'GL_VERSION',
'GL_SHADING_LANGUAGE_VERSION',
'GL_EXTENSIONS',
],
},
'TextureParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_POOL_CHROMIUM',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
],
'valid_es3': [
'GL_TEXTURE_BASE_LEVEL',
'GL_TEXTURE_COMPARE_FUNC',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_IMMUTABLE_FORMAT',
'GL_TEXTURE_IMMUTABLE_LEVELS',
'GL_TEXTURE_MAX_LEVEL',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_WRAP_R',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'TexturePool': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_POOL_MANAGED_CHROMIUM',
'GL_TEXTURE_POOL_UNMANAGED_CHROMIUM',
],
},
'TextureWrapMode': {
'type': 'GLenum',
'valid': [
'GL_CLAMP_TO_EDGE',
'GL_MIRRORED_REPEAT',
'GL_REPEAT',
],
},
'TextureMinFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
'GL_NEAREST_MIPMAP_NEAREST',
'GL_LINEAR_MIPMAP_NEAREST',
'GL_NEAREST_MIPMAP_LINEAR',
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'TextureMagFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
},
'TextureCompareFunc': {
'type': 'GLenum',
'valid': [
'GL_LEQUAL',
'GL_GEQUAL',
'GL_LESS',
'GL_GREATER',
'GL_EQUAL',
'GL_NOTEQUAL',
'GL_ALWAYS',
'GL_NEVER',
],
},
'TextureCompareMode': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_COMPARE_REF_TO_TEXTURE',
],
},
'TextureUsage': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_FRAMEBUFFER_ATTACHMENT_ANGLE',
],
},
'VertexAttribute': {
'type': 'GLenum',
'valid': [
# some enum that the decoder actually passes through to GL needs
# to be the first listed here since it's used in unit tests.
'GL_VERTEX_ATTRIB_ARRAY_NORMALIZED',
'GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING',
'GL_VERTEX_ATTRIB_ARRAY_ENABLED',
'GL_VERTEX_ATTRIB_ARRAY_SIZE',
'GL_VERTEX_ATTRIB_ARRAY_STRIDE',
'GL_VERTEX_ATTRIB_ARRAY_TYPE',
'GL_CURRENT_VERTEX_ATTRIB',
],
'valid_es3': [
'GL_VERTEX_ATTRIB_ARRAY_INTEGER',
'GL_VERTEX_ATTRIB_ARRAY_DIVISOR',
],
},
'VertexPointer': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_ATTRIB_ARRAY_POINTER',
],
},
'HintTarget': {
'type': 'GLenum',
'valid': [
'GL_GENERATE_MIPMAP_HINT',
],
'valid_es3': [
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
],
'invalid': [
'GL_PERSPECTIVE_CORRECTION_HINT',
],
},
'HintMode': {
'type': 'GLenum',
'valid': [
'GL_FASTEST',
'GL_NICEST',
'GL_DONT_CARE',
],
},
'PixelStore': {
'type': 'GLenum',
'valid': [
'GL_PACK_ALIGNMENT',
'GL_UNPACK_ALIGNMENT',
],
'valid_es3': [
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
'GL_UNPACK_SKIP_IMAGES',
],
'invalid': [
'GL_PACK_SWAP_BYTES',
'GL_UNPACK_SWAP_BYTES',
],
},
'PixelStoreAlignment': {
'type': 'GLint',
'valid': [
'1',
'2',
'4',
'8',
],
'invalid': [
'3',
'9',
],
},
'ReadPixelFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RGBA_INTEGER',
],
'deprecated_es3': [
'GL_ALPHA',
'GL_RGB',
],
},
'PixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'valid_es3': [
'GL_BYTE',
'GL_UNSIGNED_SHORT',
'GL_SHORT',
'GL_UNSIGNED_INT',
'GL_INT',
'GL_HALF_FLOAT',
'GL_FLOAT',
'GL_UNSIGNED_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_10F_11F_11F_REV',
'GL_UNSIGNED_INT_5_9_9_9_REV',
'GL_UNSIGNED_INT_24_8',
'GL_FLOAT_32_UNSIGNED_INT_24_8_REV',
],
'invalid': [
'GL_UNSIGNED_BYTE_3_3_2',
],
},
'ReadPixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'invalid': [
'GL_SHORT',
],
'valid_es3': [
'GL_UNSIGNED_INT',
'GL_INT',
'GL_FLOAT',
],
'deprecated_es3': [
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
},
'RenderBufferFormat': {
'type': 'GLenum',
'valid': [
'GL_RGBA4',
'GL_RGB565',
'GL_RGB5_A1',
'GL_DEPTH_COMPONENT16',
'GL_STENCIL_INDEX8',
],
'valid_es3': [
'GL_R8',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGB10_A2',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
},
'ShaderBinaryFormat': {
'type': 'GLenum',
'valid': [
],
},
'StencilOp': {
'type': 'GLenum',
'valid': [
'GL_KEEP',
'GL_ZERO',
'GL_REPLACE',
'GL_INCR',
'GL_INCR_WRAP',
'GL_DECR',
'GL_DECR_WRAP',
'GL_INVERT',
],
},
'TextureFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RED',
'GL_RED_INTEGER',
'GL_RG',
'GL_RG_INTEGER',
'GL_RGB_INTEGER',
'GL_RGBA_INTEGER',
'GL_DEPTH_COMPONENT',
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_SRGB8',
'GL_RGB565',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB5_A1',
'GL_RGBA4',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
# The DEPTH/STENCIL formats are not supported in CopyTexImage2D.
# We will reject them dynamically in GPU command buffer.
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormatStorage': {
'type': 'GLenum',
'valid': [
'GL_RGB565',
'GL_RGBA4',
'GL_RGB5_A1',
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_RGB8_OES',
'GL_RGBA8_OES',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_SRGB8',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
'GL_COMPRESSED_R11_EAC',
'GL_COMPRESSED_SIGNED_R11_EAC',
'GL_COMPRESSED_RG11_EAC',
'GL_COMPRESSED_SIGNED_RG11_EAC',
'GL_COMPRESSED_RGB8_ETC2',
'GL_COMPRESSED_SRGB8_ETC2',
'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_RGBA8_ETC2_EAC',
'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
],
'deprecated_es3': [
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_ALPHA16F_EXT',
'GL_LUMINANCE16F_EXT',
'GL_LUMINANCE_ALPHA16F_EXT',
'GL_ALPHA32F_EXT',
'GL_LUMINANCE32F_EXT',
'GL_LUMINANCE_ALPHA32F_EXT',
],
},
'ImageInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_RGB',
'GL_RGB_YUV_420_CHROMIUM',
'GL_RGBA',
],
},
'ImageUsage': {
'type': 'GLenum',
'valid': [
'GL_MAP_CHROMIUM',
'GL_SCANOUT_CHROMIUM'
],
},
'ValueBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_SUBSCRIBED_VALUES_BUFFER_CHROMIUM',
],
},
'SubscriptionTarget': {
'type': 'GLenum',
'valid': [
'GL_MOUSE_POSITION_CHROMIUM',
],
},
'UniformParameter': {
'type': 'GLenum',
'valid': [
'GL_UNIFORM_SIZE',
'GL_UNIFORM_TYPE',
'GL_UNIFORM_NAME_LENGTH',
'GL_UNIFORM_BLOCK_INDEX',
'GL_UNIFORM_OFFSET',
'GL_UNIFORM_ARRAY_STRIDE',
'GL_UNIFORM_MATRIX_STRIDE',
'GL_UNIFORM_IS_ROW_MAJOR',
],
'invalid': [
'GL_UNIFORM_BLOCK_NAME_LENGTH',
],
},
'UniformBlockParameter': {
'type': 'GLenum',
'valid': [
'GL_UNIFORM_BLOCK_BINDING',
'GL_UNIFORM_BLOCK_DATA_SIZE',
'GL_UNIFORM_BLOCK_NAME_LENGTH',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES',
'GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER',
'GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER',
],
'invalid': [
'GL_NEAREST',
],
},
'VertexAttribType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
# 'GL_FIXED', // This is not available on Desktop GL.
'GL_FLOAT',
],
'valid_es3': [
'GL_INT',
'GL_UNSIGNED_INT',
'GL_HALF_FLOAT',
'GL_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_2_10_10_10_REV',
],
'invalid': [
'GL_DOUBLE',
],
},
'VertexAttribIType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
'GL_INT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_FLOAT',
'GL_DOUBLE',
],
},
'TextureBorder': {
'type': 'GLint',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'VertexAttribSize': {
'type': 'GLint',
'valid': [
'1',
'2',
'3',
'4',
],
'invalid': [
'0',
'5',
],
},
'ZeroOnly': {
'type': 'GLint',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'FalseOnly': {
'type': 'GLboolean',
'is_complete': True,
'valid': [
'false',
],
'invalid': [
'true',
],
},
'ResetStatus': {
'type': 'GLenum',
'valid': [
'GL_GUILTY_CONTEXT_RESET_ARB',
'GL_INNOCENT_CONTEXT_RESET_ARB',
'GL_UNKNOWN_CONTEXT_RESET_ARB',
],
},
'SyncCondition': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SYNC_GPU_COMMANDS_COMPLETE',
],
'invalid': [
'0',
],
},
'SyncFlags': {
'type': 'GLbitfield',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'SyncFlushFlags': {
'type': 'GLbitfield',
'valid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
'0',
],
'invalid': [
'0xFFFFFFFF',
],
},
'SyncParameter': {
'type': 'GLenum',
'valid': [
'GL_SYNC_STATUS', # This needs to be the 1st; all others are cached.
'GL_OBJECT_TYPE',
'GL_SYNC_CONDITION',
'GL_SYNC_FLAGS',
],
'invalid': [
'GL_SYNC_FENCE',
],
},
}
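# How these named types are consumed (an assumption inferred from the cmd_args
# strings below): an argument spelled e.g. 'GLenumDrawMode mode' or
# 'GLbitfieldSyncFlushFlags flags' pairs the base GL type with the matching
# entry above, so the generated validation code and unit tests draw on that
# entry's 'valid'/'invalid' values.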
# This table specifies the different pepper interfaces that are supported for
# GL commands. 'dev' is true if it's a dev interface.
_PEPPER_INTERFACES = [
{'name': '', 'dev': False},
{'name': 'InstancedArrays', 'dev': False},
{'name': 'FramebufferBlit', 'dev': False},
{'name': 'FramebufferMultisample', 'dev': False},
{'name': 'ChromiumEnableFeature', 'dev': False},
{'name': 'ChromiumMapSub', 'dev': False},
{'name': 'Query', 'dev': False},
{'name': 'VertexArrayObject', 'dev': False},
{'name': 'DrawBuffers', 'dev': True},
]
# A function info object specifies the type and other special data for the
# command that will be generated. A base function info object is generated by
# parsing the "cmd_buffer_functions.txt", one for each function in the
# file. These function info objects can be augmented and their values can be
# overridden by adding an object to the table below.
#
# Must match function names specified in "cmd_buffer_functions.txt".
#
# cmd_comment: A comment added to the cmd format.
# type: defines which handler will be used to generate code.
# decoder_func: defines which function to call in the decoder to execute the
# corresponding GL command. If not specified the GL command will
# be called directly.
# gl_test_func: GL function that is expected to be called when testing.
# cmd_args: The arguments to use for the command. This overrides generating
# them based on the GL function arguments.
# gen_cmd: Whether or not this function generates a command. Default = True.
# data_transfer_methods: Array of methods that are used for transferring the
# pointer data. Possible values: 'immediate', 'shm', 'bucket'.
# The default is 'immediate' if the command has one pointer
# argument, otherwise 'shm'. One command is generated for each
# transfer method. Affects only commands which are not of type
# 'HandWritten', 'GETn' or 'GLcharN'.
# Note: the command arguments that affect this are the final args,
# taking cmd_args override into consideration.
# impl_func: Whether or not to generate the GLES2Implementation part of this
# command.
# impl_decl: Whether or not to generate the GLES2Implementation declaration
# for this command.
# needs_size: If True a data_size field is added to the command.
# count: The number of units per element. For PUTn or PUT types.
# use_count_func: If True the actual data count needs to be computed; the count
# argument specifies the maximum count.
# unit_test: If False no service side unit test will be generated.
# client_test: If False no client side unit test will be generated.
# expectation: If False the unit test will have no expected calls.
# gen_func: Name of function that generates GL resource for corresponding
# bind function.
# states: array of states that get set by this function corresponding to
# the given arguments
# state_flag: name of flag that is set to true when function is called.
# no_gl: no GL function is called.
# valid_args: A dictionary of argument indices to args to use in unit tests
# when they can not be automatically determined.
# pepper_interface: The pepper interface that is used for this extension
# pepper_name: The name of the function as exposed to pepper.
# pepper_args: A string representing the argument list (what would appear in
# C/C++ between the parentheses for the function declaration)
# that the Pepper API expects for this function. Use this only if
# the stable Pepper API differs from the GLES2 argument list.
# invalid_test: False if no invalid test needed.
# shadowed: True = the value is shadowed so no glGetXXX call will be made.
# first_element_only: For PUT types, True if only the first element of an
# array is used and we end up calling the single value
# corresponding function. eg. TexParameteriv -> TexParameteri
# extension: Function is an extension to GL and should not be exposed to
# pepper unless pepper_interface is defined.
# extension_flag: Function is an extension and should be enabled only when
# the corresponding feature info flag is enabled. Implies
# 'extension': True.
# not_shared: For GENn types, True if objects can't be shared between contexts
# unsafe: True = no validation is implemented on the service side and the
# command is only available with --enable-unsafe-es3-apis.
# id_mapping: A list of resource type names whose client side IDs need to be
# mapped to service side IDs. This is only used for unsafe APIs.
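#
# A minimal illustrative entry (hypothetical; 'FooBarCHROMIUM' is not a real
# function in "cmd_buffer_functions.txt") showing how several keys combine:
#
#   'FooBarCHROMIUM': {
#     'type': 'PUT',                        # handled by the PUT type handler
#     'count': 4,                           # four units per element
#     'decoder_func': 'DoFooBarCHROMIUM',   # decoder calls this, not GL
#     'unit_test': False,                   # skip the service side unit test
#     'extension': True,
#     'chromium': True,
#   },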
_FUNCTION_INFO = {
'ActiveTexture': {
'decoder_func': 'DoActiveTexture',
'unit_test': False,
'impl_func': False,
'client_test': False,
},
'AttachShader': {'decoder_func': 'DoAttachShader'},
'BindAttribLocation': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
},
'BindBuffer': {
'type': 'Bind',
'decoder_func': 'DoBindBuffer',
'gen_func': 'GenBuffersARB',
},
'BindBufferBase': {
'type': 'Bind',
'id_mapping': [ 'Buffer' ],
'gen_func': 'GenBuffersARB',
'unsafe': True,
},
'BindBufferRange': {
'type': 'Bind',
'id_mapping': [ 'Buffer' ],
'gen_func': 'GenBuffersARB',
'valid_args': {
'3': '4',
'4': '4'
},
'unsafe': True,
},
'BindFramebuffer': {
'type': 'Bind',
'decoder_func': 'DoBindFramebuffer',
'gl_test_func': 'glBindFramebufferEXT',
'gen_func': 'GenFramebuffersEXT',
'trace_level': 1,
},
'BindRenderbuffer': {
'type': 'Bind',
'decoder_func': 'DoBindRenderbuffer',
'gl_test_func': 'glBindRenderbufferEXT',
'gen_func': 'GenRenderbuffersEXT',
},
'BindSampler': {
'type': 'Bind',
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'BindTexture': {
'type': 'Bind',
'decoder_func': 'DoBindTexture',
'gen_func': 'GenTextures',
# TODO(gman): remove this once client side caching works.
'client_test': False,
'trace_level': 2,
},
'BindTransformFeedback': {
'type': 'Bind',
'id_mapping': [ 'TransformFeedback' ],
'unsafe': True,
},
'BlitFramebufferCHROMIUM': {
'decoder_func': 'DoBlitFramebufferCHROMIUM',
'unit_test': False,
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferBlit',
'pepper_name': 'BlitFramebufferEXT',
'defer_reads': True,
'defer_draws': True,
'trace_level': 1,
},
'BufferData': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
},
'BufferSubData': {
'type': 'Data',
'client_test': False,
'decoder_func': 'DoBufferSubData',
'data_transfer_methods': ['shm'],
'trace_level': 2,
},
'CheckFramebufferStatus': {
'type': 'Is',
'decoder_func': 'DoCheckFramebufferStatus',
'gl_test_func': 'glCheckFramebufferStatusEXT',
'error_value': 'GL_FRAMEBUFFER_UNSUPPORTED',
'result': ['GLenum'],
},
'Clear': {
'decoder_func': 'DoClear',
'defer_draws': True,
'trace_level': 2,
},
'ClearBufferiv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferuiv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferfv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferfi': {
'unsafe': True,
'trace_level': 2,
},
'ClearColor': {
'type': 'StateSet',
'state': 'ClearColor',
},
'ClearDepthf': {
'type': 'StateSet',
'state': 'ClearDepthf',
'decoder_func': 'glClearDepth',
'gl_test_func': 'glClearDepth',
'valid_args': {
'0': '0.5f'
},
},
'ClientWaitSync': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint timeout_0, GLuint timeout_1, GLenum* result',
'unsafe': True,
'result': ['GLenum'],
'trace_level': 2,
},
'ColorMask': {
'type': 'StateSet',
'state': 'ColorMask',
'no_gl': True,
'expectation': False,
},
'ConsumeTextureCHROMIUM': {
'decoder_func': 'DoConsumeTextureCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 2,
},
'CopyBufferSubData': {
'unsafe': True,
},
'CreateAndConsumeTextureCHROMIUM': {
'decoder_func': 'DoCreateAndConsumeTextureCHROMIUM',
'impl_func': False,
'type': 'HandWritten',
'data_transfer_methods': ['immediate'],
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 2,
},
'GenValuebuffersCHROMIUM': {
'type': 'GENn',
'gl_test_func': 'glGenValuebuffersCHROMIUM',
'resource_type': 'Valuebuffer',
'resource_types': 'Valuebuffers',
'unit_test': False,
'extension': True,
'chromium': True,
},
'DeleteValuebuffersCHROMIUM': {
'type': 'DELn',
'gl_test_func': 'glDeleteValuebuffersCHROMIUM',
'resource_type': 'Valuebuffer',
'resource_types': 'Valuebuffers',
'unit_test': False,
'extension': True,
'chromium': True,
},
'IsValuebufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoIsValuebufferCHROMIUM',
'expectation': False,
'extension': True,
'chromium': True,
},
'BindValuebufferCHROMIUM': {
'type': 'Bind',
'decoder_func': 'DoBindValueBufferCHROMIUM',
'gen_func': 'GenValueBuffersCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'SubscribeValueCHROMIUM': {
'decoder_func': 'DoSubscribeValueCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'PopulateSubscribedValuesCHROMIUM': {
'decoder_func': 'DoPopulateSubscribedValuesCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'UniformValuebufferCHROMIUM': {
'decoder_func': 'DoUniformValueBufferCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'ClearStencil': {
'type': 'StateSet',
'state': 'ClearStencil',
},
'EnableFeatureCHROMIUM': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'decoder_func': 'DoEnableFeatureCHROMIUM',
'expectation': False,
'cmd_args': 'GLuint bucket_id, GLint* result',
'result': ['GLint'],
'extension': True,
'chromium': True,
'pepper_interface': 'ChromiumEnableFeature',
},
'CompileShader': {'decoder_func': 'DoCompileShader', 'unit_test': False},
'CompressedTexImage2D': {
'type': 'Manual',
'data_transfer_methods': ['bucket', 'shm'],
'trace_level': 1,
},
'CompressedTexSubImage2D': {
'type': 'Data',
'data_transfer_methods': ['bucket', 'shm'],
'decoder_func': 'DoCompressedTexSubImage2D',
'trace_level': 1,
},
'CopyTexImage2D': {
'decoder_func': 'DoCopyTexImage2D',
'unit_test': False,
'defer_reads': True,
'trace_level': 1,
},
'CopyTexSubImage2D': {
'decoder_func': 'DoCopyTexSubImage2D',
'defer_reads': True,
'trace_level': 1,
},
'CompressedTexImage3D': {
'type': 'Manual',
'data_transfer_methods': ['bucket', 'shm'],
'unsafe': True,
'trace_level': 1,
},
'CompressedTexSubImage3D': {
'type': 'Data',
'data_transfer_methods': ['bucket', 'shm'],
'decoder_func': 'DoCompressedTexSubImage3D',
'unsafe': True,
'trace_level': 1,
},
'CopyTexSubImage3D': {
'defer_reads': True,
'unsafe': True,
'trace_level': 1,
},
'CreateImageCHROMIUM': {
'type': 'Manual',
'cmd_args':
'ClientBuffer buffer, GLsizei width, GLsizei height, '
'GLenum internalformat',
'result': ['GLuint'],
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'DestroyImageCHROMIUM': {
'type': 'Manual',
'client_test': False,
'gen_cmd': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'CreateGpuMemoryBufferImageCHROMIUM': {
'type': 'Manual',
'cmd_args':
'GLsizei width, GLsizei height, GLenum internalformat, GLenum usage',
'result': ['GLuint'],
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'CreateProgram': {
'type': 'Create',
'client_test': False,
},
'CreateShader': {
'type': 'Create',
'client_test': False,
},
'BlendColor': {
'type': 'StateSet',
'state': 'BlendColor',
},
'BlendEquation': {
'type': 'StateSetRGBAlpha',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendEquationSeparate': {
'type': 'StateSet',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendFunc': {
'type': 'StateSetRGBAlpha',
'state': 'BlendFunc',
},
'BlendFuncSeparate': {
'type': 'StateSet',
'state': 'BlendFunc',
},
'BlendBarrierKHR': {
'gl_test_func': 'glBlendBarrierKHR',
'extension': True,
'extension_flag': 'blend_equation_advanced',
'client_test': False,
},
'SampleCoverage': {'decoder_func': 'DoSampleCoverage'},
'StencilFunc': {
'type': 'StateSetFrontBack',
'state': 'StencilFunc',
},
'StencilFuncSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilFunc',
},
'StencilOp': {
'type': 'StateSetFrontBack',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'StencilOpSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'Hint': {
'type': 'StateSetNamedParameter',
'state': 'Hint',
},
'CullFace': {'type': 'StateSet', 'state': 'CullFace'},
'FrontFace': {'type': 'StateSet', 'state': 'FrontFace'},
'DepthFunc': {'type': 'StateSet', 'state': 'DepthFunc'},
'LineWidth': {
'type': 'StateSet',
'state': 'LineWidth',
'valid_args': {
'0': '0.5f'
},
},
'PolygonOffset': {
'type': 'StateSet',
'state': 'PolygonOffset',
},
'DeleteBuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'DeleteFramebuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
'trace_level': 2,
},
'DeleteProgram': { 'type': 'Delete' },
'DeleteRenderbuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
'trace_level': 2,
},
'DeleteSamplers': {
'type': 'DELn',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'unsafe': True,
},
'DeleteShader': { 'type': 'Delete' },
'DeleteSync': {
'type': 'Delete',
'cmd_args': 'GLuint sync',
'resource_type': 'Sync',
'unsafe': True,
},
'DeleteTextures': {
'type': 'DELn',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'DeleteTransformFeedbacks': {
'type': 'DELn',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'unsafe': True,
},
'DepthRangef': {
'decoder_func': 'DoDepthRangef',
'gl_test_func': 'glDepthRange',
},
'DepthMask': {
'type': 'StateSet',
'state': 'DepthMask',
'no_gl': True,
'expectation': False,
},
'DetachShader': {'decoder_func': 'DoDetachShader'},
'Disable': {
'decoder_func': 'DoDisable',
'impl_func': False,
'client_test': False,
},
'DisableVertexAttribArray': {
'decoder_func': 'DoDisableVertexAttribArray',
'impl_decl': False,
},
'DrawArrays': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count',
'defer_draws': True,
'trace_level': 2,
},
'DrawElements': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset',
'client_test': False,
'defer_draws': True,
'trace_level': 2,
},
'DrawRangeElements': {
'type': 'Manual',
'gen_cmd': 'False',
'unsafe': True,
},
'Enable': {
'decoder_func': 'DoEnable',
'impl_func': False,
'client_test': False,
},
'EnableVertexAttribArray': {
'decoder_func': 'DoEnableVertexAttribArray',
'impl_decl': False,
},
'FenceSync': {
'type': 'Create',
'client_test': False,
'unsafe': True,
'trace_level': 1,
},
'Finish': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoFinish',
'defer_reads': True,
'trace_level': 1,
},
'Flush': {
'impl_func': False,
'decoder_func': 'DoFlush',
'trace_level': 1,
},
'FramebufferRenderbuffer': {
'decoder_func': 'DoFramebufferRenderbuffer',
'gl_test_func': 'glFramebufferRenderbufferEXT',
'trace_level': 1,
},
'FramebufferTexture2D': {
'decoder_func': 'DoFramebufferTexture2D',
'gl_test_func': 'glFramebufferTexture2DEXT',
'trace_level': 1,
},
'FramebufferTexture2DMultisampleEXT': {
'decoder_func': 'DoFramebufferTexture2DMultisample',
'gl_test_func': 'glFramebufferTexture2DMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'FramebufferTextureLayer': {
'decoder_func': 'DoFramebufferTextureLayer',
'unsafe': True,
'trace_level': 1,
},
'GenerateMipmap': {
'decoder_func': 'DoGenerateMipmap',
'gl_test_func': 'glGenerateMipmapEXT',
'trace_level': 1,
},
'GenBuffers': {
'type': 'GENn',
'gl_test_func': 'glGenBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'GenMailboxCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
},
'GenFramebuffers': {
'type': 'GENn',
'gl_test_func': 'glGenFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
},
'GenRenderbuffers': {
'type': 'GENn', 'gl_test_func': 'glGenRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
},
'GenSamplers': {
'type': 'GENn',
'gl_test_func': 'glGenSamplers',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'unsafe': True,
},
'GenTextures': {
'type': 'GENn',
'gl_test_func': 'glGenTextures',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'GenTransformFeedbacks': {
'type': 'GENn',
'gl_test_func': 'glGenTransformFeedbacks',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'unsafe': True,
},
'GetActiveAttrib': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniform': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniformBlockiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'unsafe': True,
},
'GetActiveUniformBlockName': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': ['int32_t'],
'unsafe': True,
},
'GetActiveUniformsiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t indices_bucket_id, GLenum pname, '
'GLint* params',
'result': ['SizedResult<GLint>'],
'unsafe': True,
},
'GetAttachedShaders': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLidProgram program, void* result, uint32_t result_size',
'result': ['SizedResult<GLuint>'],
},
'GetAttribLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
},
'GetFragDataLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
'unsafe': True,
},
'GetBooleanv': {
'type': 'GETn',
'result': ['SizedResult<GLboolean>'],
'decoder_func': 'DoGetBooleanv',
'gl_test_func': 'glGetBooleanv',
},
'GetBufferParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetBufferParameteriv',
'expectation': False,
'shadowed': True,
},
'GetError': {
'type': 'Is',
'decoder_func': 'GetErrorState()->GetGLError',
'impl_func': False,
'result': ['GLenum'],
'client_test': False,
},
'GetFloatv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'decoder_func': 'DoGetFloatv',
'gl_test_func': 'glGetFloatv',
},
'GetFramebufferAttachmentParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetFramebufferAttachmentParameteriv',
'gl_test_func': 'glGetFramebufferAttachmentParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetGraphicsResetStatusKHR': {
'extension': True,
'client_test': False,
'gen_cmd': False,
'trace_level': 1,
},
'GetInteger64v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'client_test': False,
'decoder_func': 'DoGetInteger64v',
'unsafe': True
},
'GetIntegerv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetIntegerv',
'client_test': False,
},
'GetInteger64i_v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'client_test': False,
'unsafe': True
},
'GetIntegeri_v': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'client_test': False,
'unsafe': True
},
'GetInternalformativ': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'cmd_args':
'GLenumRenderBufferTarget target, GLenumRenderBufferFormat format, '
'GLenumInternalFormatParameter pname, GLint* params',
'unsafe': True,
},
'GetMaxValueInBufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoGetMaxValueInBufferCHROMIUM',
'result': ['GLuint'],
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
'impl_func': False,
},
'GetProgramiv': {
'type': 'GETn',
'decoder_func': 'DoGetProgramiv',
'result': ['SizedResult<GLint>'],
'expectation': False,
},
'GetProgramInfoCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': [
'uint32_t link_status',
'uint32_t num_attribs',
'uint32_t num_uniforms',
],
},
'GetProgramInfoLog': {
'type': 'STRn',
'expectation': False,
},
'GetRenderbufferParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetRenderbufferParameteriv',
'gl_test_func': 'glGetRenderbufferParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetSamplerParameterfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'GetSamplerParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'GetShaderiv': {
'type': 'GETn',
'decoder_func': 'DoGetShaderiv',
'result': ['SizedResult<GLint>'],
},
'GetShaderInfoLog': {
'type': 'STRn',
'get_len_func': 'glGetShaderiv',
'get_len_enum': 'GL_INFO_LOG_LENGTH',
'unit_test': False,
},
'GetShaderPrecisionFormat': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLenumShaderType shadertype, GLenumShaderPrecision precisiontype, '
'void* result',
'result': [
'int32_t success',
'int32_t min_range',
'int32_t max_range',
'int32_t precision',
],
},
'GetShaderSource': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_SHADER_SOURCE_LENGTH',
'unit_test': False,
'client_test': False,
},
'GetString': {
'type': 'Custom',
'client_test': False,
'cmd_args': 'GLenumStringType name, uint32_t bucket_id',
},
'GetSynciv': {
'type': 'GETn',
'cmd_args': 'GLuint sync, GLenumSyncParameter pname, void* values',
'result': ['SizedResult<GLint>'],
'id_mapping': ['Sync'],
'unsafe': True,
},
'GetTexParameterfv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameterfv',
'result': ['SizedResult<GLfloat>']
},
'GetTexParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameteriv',
'result': ['SizedResult<GLint>']
},
'GetTranslatedShaderSourceANGLE': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
'unit_test': False,
'extension': True,
},
'GetUniformBlockIndex': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLuint* index',
'result': ['GLuint'],
'error_return': 'GL_INVALID_INDEX',
'unsafe': True,
},
'GetUniformBlocksCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetUniformsES3CHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetTransformFeedbackVarying': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
'unsafe': True,
},
'GetTransformFeedbackVaryingsCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetUniformfv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLfloat>'],
},
'GetUniformiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
},
'GetUniformuiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'unsafe': True,
},
'GetUniformIndices': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'cmd_args': 'GLidProgram program, uint32_t names_bucket_id, '
'GLuint* indices',
'unsafe': True,
},
'GetUniformLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1, # http://www.opengl.org/sdk/docs/man/xhtml/glGetUniformLocation.xml
},
'GetVertexAttribfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribfv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribiv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribIiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribIiv',
'expectation': False,
'client_test': False,
'unsafe': True,
},
'GetVertexAttribIuiv': {
'type': 'GETn',
'result': ['SizedResult<GLuint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribIuiv',
'expectation': False,
'client_test': False,
'unsafe': True,
},
'GetVertexAttribPointerv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'client_test': False,
},
'InvalidateFramebuffer': {
'type': 'PUTn',
'count': 1,
'client_test': False,
'unit_test': False,
'unsafe': True,
},
'InvalidateSubFramebuffer': {
'type': 'PUTn',
'count': 1,
'client_test': False,
'unit_test': False,
'unsafe': True,
},
'IsBuffer': {
'type': 'Is',
'decoder_func': 'DoIsBuffer',
'expectation': False,
},
'IsEnabled': {
'type': 'Is',
'decoder_func': 'DoIsEnabled',
'client_test': False,
'impl_func': False,
'expectation': False,
},
'IsFramebuffer': {
'type': 'Is',
'decoder_func': 'DoIsFramebuffer',
'expectation': False,
},
'IsProgram': {
'type': 'Is',
'decoder_func': 'DoIsProgram',
'expectation': False,
},
'IsRenderbuffer': {
'type': 'Is',
'decoder_func': 'DoIsRenderbuffer',
'expectation': False,
},
'IsShader': {
'type': 'Is',
'decoder_func': 'DoIsShader',
'expectation': False,
},
'IsSampler': {
'type': 'Is',
'id_mapping': [ 'Sampler' ],
'expectation': False,
'unsafe': True,
},
'IsSync': {
'type': 'Is',
'id_mapping': [ 'Sync' ],
'cmd_args': 'GLuint sync',
'expectation': False,
'unsafe': True,
},
'IsTexture': {
'type': 'Is',
'decoder_func': 'DoIsTexture',
'expectation': False,
},
'IsTransformFeedback': {
'type': 'Is',
'id_mapping': [ 'TransformFeedback' ],
'expectation': False,
'unsafe': True,
},
'LinkProgram': {
'decoder_func': 'DoLinkProgram',
'impl_func': False,
'trace_level': 1,
},
'MapBufferCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'chromium': True,
'client_test': False,
'trace_level': 1,
},
'MapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_sub_image",
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapBufferRange': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLenumBufferTarget target, GLintptrNotNegative offset, '
'GLsizeiptr size, GLbitfieldMapBufferAccess access, '
'uint32_t data_shm_id, uint32_t data_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset',
'unsafe': True,
'result': ['uint32_t'],
'trace_level': 1,
},
'PauseTransformFeedback': {
'unsafe': True,
},
'PixelStorei': {'type': 'Manual'},
'PostSubBufferCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'ProduceTextureCHROMIUM': {
'decoder_func': 'DoProduceTextureCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 1,
},
'ProduceTextureDirectCHROMIUM': {
'decoder_func': 'DoProduceTextureDirectCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 1,
},
'RenderbufferStorage': {
'decoder_func': 'DoRenderbufferStorage',
'gl_test_func': 'glRenderbufferStorageEXT',
'expectation': False,
'trace_level': 1,
},
'RenderbufferStorageMultisampleCHROMIUM': {
'cmd_comment':
'// GL_CHROMIUM_framebuffer_multisample\n',
'decoder_func': 'DoRenderbufferStorageMultisampleCHROMIUM',
'gl_test_func': 'glRenderbufferStorageMultisampleCHROMIUM',
'expectation': False,
'unit_test': False,
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferMultisample',
'pepper_name': 'RenderbufferStorageMultisampleEXT',
'trace_level': 1,
},
'RenderbufferStorageMultisampleEXT': {
'cmd_comment':
'// GL_EXT_multisampled_render_to_texture\n',
'decoder_func': 'DoRenderbufferStorageMultisampleEXT',
'gl_test_func': 'glRenderbufferStorageMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'ReadBuffer': {
'unsafe': True,
'trace_level': 1,
},
'ReadPixels': {
'cmd_comment':
'// ReadPixels has the result separated from the pixel buffer so that\n'
'// it is easier to specify the result going to some specific place\n'
'// that exactly fits the rectangle of pixels.\n',
'type': 'Custom',
'data_transfer_methods': ['shm'],
'impl_func': False,
'client_test': False,
'cmd_args':
'GLint x, GLint y, GLsizei width, GLsizei height, '
'GLenumReadPixelFormat format, GLenumReadPixelType type, '
'uint32_t pixels_shm_id, uint32_t pixels_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset, '
'GLboolean async',
'result': ['uint32_t'],
'defer_reads': True,
'trace_level': 1,
},
'ReleaseShaderCompiler': {
'decoder_func': 'DoReleaseShaderCompiler',
'unit_test': False,
},
'ResumeTransformFeedback': {
'unsafe': True,
},
'SamplerParameterf': {
'valid_args': {
'2': 'GL_NEAREST'
},
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameterf',
'decoder_func': 'DoSamplerParameterfv',
'first_element_only': True,
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameteri': {
'valid_args': {
'2': 'GL_NEAREST'
},
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameteri',
'decoder_func': 'DoSamplerParameteriv',
'first_element_only': True,
'unsafe': True,
},
'ShaderBinary': {
'type': 'Custom',
'client_test': False,
},
'ShaderSource': {
'type': 'PUTSTR',
'decoder_func': 'DoShaderSource',
'expectation': False,
'data_transfer_methods': ['bucket'],
'cmd_args':
'GLuint shader, const char** str',
'pepper_args':
'GLuint shader, GLsizei count, const char** str, const GLint* length',
},
'StencilMask': {
'type': 'StateSetFrontBack',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'StencilMaskSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'SwapBuffers': {
'impl_func': False,
'decoder_func': 'DoSwapBuffers',
'unit_test': False,
'client_test': False,
'extension': True,
'trace_level': 1,
},
'SwapInterval': {
'impl_func': False,
'decoder_func': 'DoSwapInterval',
'unit_test': False,
'client_test': False,
'extension': True,
'trace_level': 1,
},
'TexImage2D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
},
'TexImage3D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'unsafe': True,
'trace_level': 2,
},
'TexParameterf': {
'decoder_func': 'DoTexParameterf',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameteri': {
'decoder_func': 'DoTexParameteri',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameterfv',
'gl_test_func': 'glTexParameterf',
'first_element_only': True,
},
'TexParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameteriv',
'gl_test_func': 'glTexParameteri',
'first_element_only': True,
},
'TexStorage3D': {
'unsafe': True,
'trace_level': 2,
},
'TexSubImage2D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal'
},
'TexSubImage3D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, GLint zoffset, '
'GLsizei width, GLsizei height, GLsizei depth, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal',
'unsafe': True,
},
'TransformFeedbackVaryings': {
'type': 'PUTSTR',
'data_transfer_methods': ['bucket'],
'decoder_func': 'DoTransformFeedbackVaryings',
'cmd_args':
'GLuint program, const char** varyings, GLenum buffermode',
'unsafe': True,
},
'Uniform1f': {'type': 'PUTXn', 'count': 1},
'Uniform1fv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1fv',
},
'Uniform1i': {'decoder_func': 'DoUniform1i', 'unit_test': False},
'Uniform1iv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1iv',
'unit_test': False,
},
'Uniform1ui': {
'type': 'PUTXn',
'count': 1,
'unsafe': True,
},
'Uniform1uiv': {
'type': 'PUTn',
'count': 1,
'unsafe': True,
},
'Uniform2i': {'type': 'PUTXn', 'count': 2},
'Uniform2f': {'type': 'PUTXn', 'count': 2},
'Uniform2fv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2fv',
},
'Uniform2iv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2iv',
},
'Uniform2ui': {
'type': 'PUTXn',
'count': 2,
'unsafe': True,
},
'Uniform2uiv': {
'type': 'PUTn',
'count': 2,
'unsafe': True,
},
'Uniform3i': {'type': 'PUTXn', 'count': 3},
'Uniform3f': {'type': 'PUTXn', 'count': 3},
'Uniform3fv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3fv',
},
'Uniform3iv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3iv',
},
'Uniform3ui': {
'type': 'PUTXn',
'count': 3,
'unsafe': True,
},
'Uniform3uiv': {
'type': 'PUTn',
'count': 3,
'unsafe': True,
},
'Uniform4i': {'type': 'PUTXn', 'count': 4},
'Uniform4f': {'type': 'PUTXn', 'count': 4},
'Uniform4fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4fv',
},
'Uniform4iv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4iv',
},
'Uniform4ui': {
'type': 'PUTXn',
'count': 4,
'unsafe': True,
},
'Uniform4uiv': {
'type': 'PUTn',
'count': 4,
'unsafe': True,
},
'UniformMatrix2fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniformMatrix2fv',
},
'UniformMatrix2x3fv': {
'type': 'PUTn',
'count': 6,
'unsafe': True,
},
'UniformMatrix2x4fv': {
'type': 'PUTn',
'count': 8,
'unsafe': True,
},
'UniformMatrix3fv': {
'type': 'PUTn',
'count': 9,
'decoder_func': 'DoUniformMatrix3fv',
},
'UniformMatrix3x2fv': {
'type': 'PUTn',
'count': 6,
'unsafe': True,
},
'UniformMatrix3x4fv': {
'type': 'PUTn',
'count': 12,
'unsafe': True,
},
'UniformMatrix4fv': {
'type': 'PUTn',
'count': 16,
'decoder_func': 'DoUniformMatrix4fv',
},
'UniformMatrix4x2fv': {
'type': 'PUTn',
'count': 8,
'unsafe': True,
},
'UniformMatrix4x3fv': {
'type': 'PUTn',
'count': 12,
'unsafe': True,
},
'UniformBlockBinding': {
'type': 'Custom',
'impl_func': False,
'unsafe': True,
},
'UnmapBufferCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'chromium': True,
'client_test': False,
'trace_level': 1,
},
'UnmapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UnmapBuffer': {
'type': 'Custom',
'unsafe': True,
'trace_level': 1,
},
'UnmapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_sub_image",
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UseProgram': {
'type': 'Bind',
'decoder_func': 'DoUseProgram',
},
'ValidateProgram': {'decoder_func': 'DoValidateProgram'},
'VertexAttrib1f': {'decoder_func': 'DoVertexAttrib1f'},
'VertexAttrib1fv': {
'type': 'PUT',
'count': 1,
'decoder_func': 'DoVertexAttrib1fv',
},
'VertexAttrib2f': {'decoder_func': 'DoVertexAttrib2f'},
'VertexAttrib2fv': {
'type': 'PUT',
'count': 2,
'decoder_func': 'DoVertexAttrib2fv',
},
'VertexAttrib3f': {'decoder_func': 'DoVertexAttrib3f'},
'VertexAttrib3fv': {
'type': 'PUT',
'count': 3,
'decoder_func': 'DoVertexAttrib3fv',
},
'VertexAttrib4f': {'decoder_func': 'DoVertexAttrib4f'},
'VertexAttrib4fv': {
'type': 'PUT',
'count': 4,
'decoder_func': 'DoVertexAttrib4fv',
},
'VertexAttribI4i': {
'unsafe': True,
'decoder_func': 'DoVertexAttribI4i',
},
'VertexAttribI4iv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'decoder_func': 'DoVertexAttribI4iv',
},
'VertexAttribI4ui': {
'unsafe': True,
'decoder_func': 'DoVertexAttribI4ui',
},
'VertexAttribI4uiv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'decoder_func': 'DoVertexAttribI4uiv',
},
'VertexAttribIPointer': {
'type': 'Manual',
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribIType type, GLsizei stride, '
'GLuint offset',
'client_test': False,
'unsafe': True,
},
'VertexAttribPointer': {
'type': 'Manual',
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribType type, GLboolean normalized, '
'GLsizei stride, GLuint offset',
'client_test': False,
},
'WaitSync': {
'type': 'Custom',
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint timeout_0, GLuint timeout_1',
'impl_func': False,
'client_test': False,
'unsafe': True,
'trace_level': 1,
},
'Scissor': {
'type': 'StateSet',
'state': 'Scissor',
},
'Viewport': {
'decoder_func': 'DoViewport',
},
'ResizeCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'GetRequestableExtensionsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'uint32_t bucket_id',
'extension': True,
'chromium': True,
},
'RequestExtensionCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'uint32_t bucket_id',
'extension': True,
'chromium': True,
},
'RateLimitOffscreenContextCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'CreateStreamTextureCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'TexImageIOSurface2DCHROMIUM': {
'decoder_func': 'DoTexImageIOSurface2DCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'CopyTextureCHROMIUM': {
'decoder_func': 'DoCopyTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'chromium': True,
'trace_level': 2,
},
'CopySubTextureCHROMIUM': {
'decoder_func': 'DoCopySubTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'chromium': True,
'trace_level': 2,
},
'CompressedCopyTextureCHROMIUM': {
'decoder_func': 'DoCompressedCopyTextureCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'TexStorage2DEXT': {
'unit_test': False,
'extension': True,
'decoder_func': 'DoTexStorage2DEXT',
'trace_level': 2,
},
'DrawArraysInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count, '
'GLsizei primcount',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'DrawBuffersEXT': {
'type': 'PUTn',
'decoder_func': 'DoDrawBuffersEXT',
'count': 1,
'client_test': False,
'unit_test': False,
# could use 'extension_flag': 'ext_draw_buffers' but currently expected to
# work without.
'extension': True,
'pepper_interface': 'DrawBuffers',
'trace_level': 2,
},
'DrawElementsInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset, GLsizei primcount',
'extension': True,
'unit_test': False,
'client_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'VertexAttribDivisorANGLE': {
'type': 'Manual',
'cmd_args': 'GLuint index, GLuint divisor',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
},
'GenQueriesEXT': {
'type': 'GENn',
'gl_test_func': 'glGenQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'not_shared': 'True',
'extension': "occlusion_query_EXT",
},
'DeleteQueriesEXT': {
'type': 'DELn',
'gl_test_func': 'glDeleteQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'IsQueryEXT': {
'gen_cmd': False,
'client_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLidQuery id, void* sync_data',
'data_transfer_methods': ['shm'],
'gl_test_func': 'glBeginQuery',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginTransformFeedback': {
'unsafe': True,
},
'EndQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLuint submit_count',
'gl_test_func': 'glEndnQuery',
'client_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'EndTransformFeedback': {
'unsafe': True,
},
'GetQueryivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'GetQueryObjectuivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryObjectuiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BindUniformLocationCHROMIUM': {
'type': 'GLchar',
'extension': True,
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindUniformLocationCHROMIUM',
},
'InsertEventMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoInsertEventMarkerEXT',
'expectation': False,
'extension': True,
},
'PushGroupMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoPushGroupMarkerEXT',
'expectation': False,
'extension': True,
},
'PopGroupMarkerEXT': {
'decoder_func': 'DoPopGroupMarkerEXT',
'expectation': False,
'extension': True,
'impl_func': False,
},
'GenVertexArraysOES': {
'type': 'GENn',
'extension': True,
'gl_test_func': 'glGenVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'BindVertexArrayOES': {
'type': 'Bind',
'extension': True,
'gl_test_func': 'glBindVertexArrayOES',
'decoder_func': 'DoBindVertexArrayOES',
'gen_func': 'GenVertexArraysOES',
'unit_test': False,
'client_test': False,
'pepper_interface': 'VertexArrayObject',
},
'DeleteVertexArraysOES': {
'type': 'DELn',
'extension': True,
'gl_test_func': 'glDeleteVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'IsVertexArrayOES': {
'type': 'Is',
'extension': True,
'gl_test_func': 'glIsVertexArrayOES',
'decoder_func': 'DoIsVertexArrayOES',
'expectation': False,
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'BindTexImage2DCHROMIUM': {
'decoder_func': 'DoBindTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
'chromium': True,
},
'ReleaseTexImage2DCHROMIUM': {
'decoder_func': 'DoReleaseTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
'chromium': True,
},
'ShallowFinishCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'ShallowFlushCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': "CHROMIUM_miscellaneous",
'chromium': True,
'client_test': False,
},
'OrderingBarrierCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'TraceBeginCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint category_bucket_id, GLuint name_bucket_id',
'extension': True,
'chromium': True,
},
'TraceEndCHROMIUM': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoTraceEndCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'AsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLintTextureInternalFormat internalformat, '
'GLsizei width, GLsizei height, '
'GLintTextureBorder border, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, '
'uint32_t async_upload_token, '
'void* sync_data',
'extension': True,
'chromium': True,
'trace_level': 2,
},
'AsyncTexSubImage2DCHROMIUM': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* data, '
'uint32_t async_upload_token, '
'void* sync_data',
'extension': True,
'chromium': True,
'trace_level': 2,
},
'WaitAsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'client_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'WaitAllAsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'client_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'DiscardFramebufferEXT': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoDiscardFramebufferEXT',
'unit_test': False,
'client_test': False,
'extension_flag': 'ext_discard_framebuffer',
'trace_level': 2,
},
'LoseContextCHROMIUM': {
'decoder_func': 'DoLoseContextCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'InsertSyncPointCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'extension': "CHROMIUM_sync_point",
'chromium': True,
'trace_level': 1,
},
'WaitSyncPointCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'extension': "CHROMIUM_sync_point",
'chromium': True,
'trace_level': 1,
},
'DiscardBackbufferCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'extension': True,
'chromium': True,
'trace_level': 2,
},
'ScheduleOverlayPlaneCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'MatrixLoadfCHROMIUM': {
'type': 'PUT',
'count': 16,
'data_type': 'GLfloat',
'decoder_func': 'DoMatrixLoadfCHROMIUM',
'gl_test_func': 'glMatrixLoadfEXT',
'chromium': True,
'extension': True,
'extension_flag': 'chromium_path_rendering',
},
'MatrixLoadIdentityCHROMIUM': {
'decoder_func': 'DoMatrixLoadIdentityCHROMIUM',
'gl_test_func': 'glMatrixLoadIdentityEXT',
'chromium': True,
'extension': True,
'extension_flag': 'chromium_path_rendering',
},
}
def Grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks"""
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
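# For example, Grouper(3, 'ABCDEFG', 'x') yields ('A', 'B', 'C'),
# ('D', 'E', 'F'), ('G', 'x', 'x') -- the standard grouper recipe.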
def SplitWords(input_string):
"""Split by '_' if found, otherwise split at uppercase/numeric chars.
Will split "some_TEXT" into ["some", "TEXT"], "CamelCase" into ["Camel",
"Case"], and "Vector3" into ["Vector", "3"].
"""
  if '_' in input_string:
# 'some_TEXT_' -> 'some TEXT'
return input_string.replace('_', ' ').strip().split()
else:
if re.search('[A-Z]', input_string) and re.search('[a-z]', input_string):
# mixed case.
# look for capitalization to cut input_strings
# 'SomeText' -> 'Some Text'
input_string = re.sub('([A-Z])', r' \1', input_string).strip()
# 'Vector3' -> 'Vector 3'
input_string = re.sub('([^0-9])([0-9])', r'\1 \2', input_string)
return input_string.split()
def ToUnderscore(input_string):
"""converts CamelCase to camel_case."""
words = SplitWords(input_string)
return '_'.join([word.lower() for word in words])
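# For example, ToUnderscore('BindBufferRange') returns 'bind_buffer_range' and
# ToUnderscore('Vector3') returns 'vector_3'.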
def CachedStateName(item):
if item.get('cached', False):
return 'cached_' + item['name']
return item['name']
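# For example, CachedStateName({'name': 'line_width', 'cached': True}) returns
# 'cached_line_width'; uncached items keep their plain name.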
def ToGLExtensionString(extension_flag):
"""Returns GL-type extension string of a extension flag."""
if extension_flag == "oes_compressed_etc1_rgb8_texture":
return "OES_compressed_ETC1_RGB8_texture" # Fixup inconsitency with rgb8,
# unfortunate.
uppercase_words = [ 'img', 'ext', 'arb', 'chromium', 'oes', 'amd', 'bgra8888',
'egl', 'atc', 'etc1', 'angle']
parts = extension_flag.split('_')
return "_".join(
[part.upper() if part in uppercase_words else part for part in parts])
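# For example, ToGLExtensionString('chromium_framebuffer_multisample') returns
# 'CHROMIUM_framebuffer_multisample'.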
def ToCamelCase(input_string):
"""converts ABC_underscore_case to ABCUnderscoreCase."""
return ''.join(w[0].upper() + w[1:] for w in input_string.split('_'))
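# For example, ToCamelCase('bind_buffer_range') returns 'BindBufferRange'.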
def GetGLGetTypeConversion(result_type, value_type, value):
"""Makes a gl compatible type conversion string for accessing state variables.
Useful when accessing state variables through glGetXXX calls.
  glGet documentation (for example, the manual pages):
[...] If glGetIntegerv is called, [...] most floating-point values are
rounded to the nearest integer value. [...]
Args:
result_type: the gl type to be obtained
value_type: the GL type of the state variable
value: the name of the state variable
Returns:
String that converts the state variable to desired GL type according to GL
rules.
"""
if result_type == 'GLint':
if value_type == 'GLfloat':
return 'static_cast<GLint>(round(%s))' % value
return 'static_cast<%s>(%s)' % (result_type, value)
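# For example, GetGLGetTypeConversion('GLint', 'GLfloat', 'line_width') returns
# 'static_cast<GLint>(round(line_width))', matching the glGetIntegerv rounding
# rule quoted in the docstring.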
class CWriter(object):
"""Writes to a file formatting it for Google's style guidelines."""
def __init__(self, filename):
self.filename = filename
self.content = []
def Write(self, string):
"""Writes a string to a file spliting if it's > 80 characters."""
lines = string.splitlines()
num_lines = len(lines)
for ii in range(0, num_lines):
self.content.append(lines[ii])
if ii < (num_lines - 1) or string[-1] == '\n':
self.content.append('\n')
def Close(self):
"""Close the file."""
content = "".join(self.content)
write_file = True
if os.path.exists(self.filename):
      old_file = open(self.filename, "rb")
      old_content = old_file.read()
      old_file.close()
if content == old_content:
write_file = False
if write_file:
file = open(self.filename, "wb")
file.write(content)
file.close()
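# Typical usage (an illustrative sketch; the file name is hypothetical):
#
#   writer = CWriter('gles2_cmd_format_autogen.h')
#   writer.Write("// generated code\n")
#   writer.Close()   # only rewrites the file when the content changed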
class CHeaderWriter(CWriter):
"""Writes a C Header file."""
_non_alnum_re = re.compile(r'[^a-zA-Z0-9]')
def __init__(self, filename, file_comment = None):
CWriter.__init__(self, filename)
base = os.path.abspath(filename)
while os.path.basename(base) != 'src':
new_base = os.path.dirname(base)
assert new_base != base # Prevent infinite loop.
base = new_base
hpath = os.path.relpath(filename, base)
self.guard = self._non_alnum_re.sub('_', hpath).upper() + '_'
self.Write(_LICENSE)
self.Write(_DO_NOT_EDIT_WARNING)
    if file_comment is not None:
self.Write(file_comment)
self.Write("#ifndef %s\n" % self.guard)
self.Write("#define %s\n\n" % self.guard)
def Close(self):
self.Write("#endif // %s\n\n" % self.guard)
CWriter.Close(self)
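# For illustration (assumed checkout layout): a header written to
# src/gpu/command_buffer/common/foo_autogen.h gets the include guard
# GPU_COMMAND_BUFFER_COMMON_FOO_AUTOGEN_H_, derived from its path relative to
# the 'src' directory.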
class TypeHandler(object):
"""This class emits code for a particular type of function."""
_remove_expected_call_re = re.compile(r' EXPECT_CALL.*?;\n', re.S)
def __init__(self):
pass
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
if func.GetInfo('needs_size') and not func.name.endswith('Bucket'):
func.AddCmdArg(DataSizeArgument('data_size'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return func.num_pointer_args >= 1
def WriteStruct(self, func, file):
"""Writes a structure that matches the arguments to a function."""
comment = func.GetInfo('cmd_comment')
    if comment is not None:
file.Write(comment)
file.Write("struct %s {\n" % func.name)
file.Write(" typedef %s ValueType;\n" % func.name)
file.Write(" static const CommandId kCmdId = k%s;\n" % func.name)
func.WriteCmdArgFlag(file)
func.WriteCmdFlag(file)
file.Write("\n")
result = func.GetInfo('result')
    if result is not None:
if len(result) == 1:
file.Write(" typedef %s Result;\n\n" % result[0])
else:
file.Write(" struct Result {\n")
for line in result:
file.Write(" %s;\n" % line)
file.Write(" };\n\n")
func.WriteCmdComputeSize(file)
func.WriteCmdSetHeader(file)
func.WriteCmdInit(file)
func.WriteCmdSet(file)
file.Write(" gpu::CommandHeader header;\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s %s;\n" % (arg.cmd_type, arg.name))
consts = func.GetCmdConstants()
for const in consts:
file.Write(" static const %s %s = %s;\n" %
(const.cmd_type, const.name, const.GetConstantValue()))
file.Write("};\n")
file.Write("\n")
size = len(args) * _SIZE_OF_UINT32 + _SIZE_OF_COMMAND_HEADER
file.Write("static_assert(sizeof(%s) == %d,\n" % (func.name, size))
file.Write(" \"size of %s should be %d\");\n" %
(func.name, size))
file.Write("static_assert(offsetof(%s, header) == 0,\n" % func.name)
file.Write(" \"offset of %s header should be 0\");\n" %
func.name)
offset = _SIZE_OF_COMMAND_HEADER
for arg in args:
file.Write("static_assert(offsetof(%s, %s) == %d,\n" %
(func.name, arg.name, offset))
file.Write(" \"offset of %s %s should be %d\");\n" %
(func.name, arg.name, offset))
offset += _SIZE_OF_UINT32
    if result is not None and len(result) > 1:
      offset = 0
for line in result:
parts = line.split()
name = parts[-1]
check = """
static_assert(offsetof(%(cmd_name)s::Result, %(field_name)s) == %(offset)d,
"offset of %(cmd_name)s Result %(field_name)s should be "
"%(offset)d");
"""
file.Write((check.strip() + "\n") % {
'cmd_name': func.name,
'field_name': name,
'offset': offset,
})
offset += _SIZE_OF_UINT32
file.Write("\n")
def WriteHandlerImplementation(self, func, file):
"""Writes the handler implementation for this command."""
if func.IsUnsafe() and func.GetInfo('id_mapping'):
code_no_gen = """ if (!group_->Get%(type)sServiceId(
%(var)s, &%(service_var)s)) {
LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "%(func)s", "invalid %(var)s id");
return error::kNoError;
}
"""
code_gen = """ if (!group_->Get%(type)sServiceId(
%(var)s, &%(service_var)s)) {
if (!group_->bind_generates_resource()) {
LOCAL_SET_GL_ERROR(
GL_INVALID_OPERATION, "%(func)s", "invalid %(var)s id");
return error::kNoError;
}
GLuint client_id = %(var)s;
gl%(gen_func)s(1, &%(service_var)s);
Create%(type)s(client_id, %(service_var)s);
}
"""
gen_func = func.GetInfo('gen_func')
for id_type in func.GetInfo('id_mapping'):
service_var = id_type.lower()
if id_type == 'Sync':
service_var = "service_%s" % service_var
file.Write(" GLsync %s = 0;\n" % service_var)
if gen_func and id_type in gen_func:
file.Write(code_gen % { 'type': id_type,
'var': id_type.lower(),
'service_var': service_var,
'func': func.GetGLFunctionName(),
'gen_func': gen_func })
else:
file.Write(code_no_gen % { 'type': id_type,
'var': id_type.lower(),
'service_var': service_var,
'func': func.GetGLFunctionName() })
args = []
for arg in func.GetOriginalArgs():
if arg.type == "GLsync":
args.append("service_%s" % arg.name)
elif arg.name.endswith("size") and arg.type == "GLsizei":
args.append("num_%s" % func.GetLastOriginalArg().name)
elif arg.name == "length":
args.append("nullptr")
else:
args.append(arg.name)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), ", ".join(args)))
def WriteCmdSizeTest(self, func, file):
"""Writes the size test for a command."""
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def WriteFormatTest(self, func, file):
"""Writes a format test for a command."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(");\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
func.type_handler.WriteCmdSizeTest(func, file)
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd));\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Writes a format test for an immediate version of a command."""
pass
def WriteBucketFormatTest(self, func, file):
"""Writes a format test for a bucket version of a command."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Writes the code to set data_size used in validation"""
pass
def WriteImmediateCmdSizeTest(self, func, file):
"""Writes a size test for an immediate version of a command."""
file.Write(" // TODO(gman): Compute correct size.\n")
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def __WriteIdMapping(self, func, file):
"""Writes client side / service side ID mapping."""
if not func.IsUnsafe() or not func.GetInfo('id_mapping'):
return
for id_type in func.GetInfo('id_mapping'):
file.Write(" group_->Get%sServiceId(%s, &%s);\n" %
(id_type, id_type.lower(), id_type.lower()))
  def WriteImmediateHandlerImplementation(self, func, file):
"""Writes the handler impl for the immediate version of a command."""
self.__WriteIdMapping(func, file)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
  def WriteBucketHandlerImplementation(self, func, file):
"""Writes the handler impl for the bucket version of a command."""
self.__WriteIdMapping(func, file)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteServiceHandlerFunctionHeader(self, func, file):
"""Writes function header for service implementation handlers."""
file.Write("""error::Error GLES2DecoderImpl::Handle%(name)s(
uint32_t immediate_data_size, const void* cmd_data) {
""" % {'name': func.name})
if func.IsUnsafe():
file.Write("""if (!unsafe_es3_apis_enabled())
return error::kUnknownCommand;
""")
file.Write("""const gles2::cmds::%(name)s& c =
*static_cast<const gles2::cmds::%(name)s*>(cmd_data);
(void)c;
""" % {'name': func.name})
def WriteServiceImplementation(self, func, file):
"""Writes the service implementation for a command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
if len(func.GetOriginalArgs()) > 0:
last_arg = func.GetLastOriginalArg()
all_but_last_arg = func.GetOriginalArgs()[:-1]
for arg in all_but_last_arg:
arg.WriteGetCode(file)
self.WriteGetDataSizeCode(func, file)
last_arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateServiceImplementation(self, func, file):
"""Writes the service implementation for an immediate version of command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
for arg in func.GetOriginalArgs():
if arg.IsPointer():
self.WriteGetDataSizeCode(func, file)
arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteBucketServiceImplementation(self, func, file):
"""Writes the service implementation for a bucket version of command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
for arg in func.GetCmdArgs():
arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteHandlerExtensionCheck(self, func, file):
if func.GetInfo('extension_flag'):
file.Write(" if (!features().%s) {\n" % func.GetInfo('extension_flag'))
file.Write(" LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, \"gl%s\","
" \"function not available\");\n" % func.original_name)
file.Write(" return error::kNoError;")
file.Write(" }\n\n")
def WriteHandlerDeferReadWrite(self, func, file):
"""Writes the code to handle deferring reads or writes."""
defer_draws = func.GetInfo('defer_draws')
defer_reads = func.GetInfo('defer_reads')
if defer_draws or defer_reads:
file.Write(" error::Error error;\n")
if defer_draws:
file.Write(" error = WillAccessBoundFramebufferForDraw();\n")
file.Write(" if (error != error::kNoError)\n")
file.Write(" return error;\n")
if defer_reads:
file.Write(" error = WillAccessBoundFramebufferForRead();\n")
file.Write(" if (error != error::kNoError)\n")
file.Write(" return error;\n")
def WriteValidUnitTest(self, func, file, test, *extras):
"""Writes a valid unit test for the service implementation."""
if func.GetInfo('expectation') == False:
test = self._remove_expected_call_re.sub('', test)
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
gl_arg_strings = [
arg.GetValidGLArg(func) \
for arg in func.GetOriginalArgs()
]
gl_func_name = func.GetGLTestFunctionName()
vars = {
'name':name,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
}
for extra in extras:
vars.update(extra)
old_test = ""
while (old_test != test):
old_test = test
test = test % vars
file.Write(test % vars)
def WriteInvalidUnitTest(self, func, file, test, *extras):
"""Writes an invalid unit test for the service implementation."""
if func.IsUnsafe():
return
for invalid_arg_index, invalid_arg in enumerate(func.GetOriginalArgs()):
# Service implementation does not test constants, as they are not part of
# the call in the service side.
if invalid_arg.IsConstant():
continue
num_invalid_values = invalid_arg.GetNumInvalidValues(func)
for value_index in range(0, num_invalid_values):
arg_strings = []
parse_result = "kNoError"
gl_error = None
for arg in func.GetOriginalArgs():
if arg.IsConstant():
continue
if invalid_arg is arg:
(arg_string, parse_result, gl_error) = arg.GetInvalidArg(
value_index)
else:
arg_string = arg.GetValidArg(func)
arg_strings.append(arg_string)
gl_arg_strings = []
for arg in func.GetOriginalArgs():
gl_arg_strings.append("_")
gl_func_name = func.GetGLTestFunctionName()
gl_error_test = ''
        if gl_error is not None:
gl_error_test = '\n EXPECT_EQ(%s, GetGLError());' % gl_error
vars = {
'name': func.name,
'arg_index': invalid_arg_index,
'value_index': value_index,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'all_but_last_args': ", ".join(arg_strings[:-1]),
'gl_args': ", ".join(gl_arg_strings),
'parse_result': parse_result,
'gl_error_test': gl_error_test,
}
for extra in extras:
vars.update(extra)
file.Write(test % vars)
def WriteServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
if func.name == 'Enable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, true);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
elif func.name == 'Disable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, false);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, valid_test, *extras)
if not func.IsUnsafe():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for an immediate command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteImmediateValidationCode(self, func, file):
"""Writes the validation code for an immediate version of a command."""
pass
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a bucket command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteBucketValidationCode(self, func, file):
"""Writes the validation code for a bucket version of a command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteGLES2ImplementationDeclaration(self, func, file):
"""Writes the GLES2 Implemention declaration."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteGLES2CLibImplementation(self, func, file):
file.Write("%s GLES2%s(%s) {\n" %
(func.return_type, func.name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(" %sgles2::GetGLContext()->%s(%s);\n" %
(result_string, func.original_name,
func.MakeOriginalArgString("")))
file.Write("}\n")
def WriteGLES2Header(self, func, file):
"""Writes a re-write macro for GLES"""
file.Write("#define gl%s GLES2_GET_FUN(%s)\n" %(func.name, func.name))
def WriteClientGLCallLog(self, func, file):
"""Writes a logging macro for the client side code."""
comma = ""
if len(func.GetOriginalArgs()):
comma = " << "
file.Write(
' GPU_CLIENT_LOG("[" << GetLogPrefix() << "] gl%s("%s%s << ")");\n' %
(func.original_name, comma, func.MakeLogArgString()))
def WriteClientGLReturnLog(self, func, file):
"""Writes the return value logging code."""
if func.return_type != "void":
file.Write(' GPU_CLIENT_LOG("return:" << result)\n')
def WriteGLES2ImplementationHeader(self, func, file):
"""Writes the GLES2 Implemention."""
self.WriteGLES2ImplementationDeclaration(func, file)
def WriteGLES2TraceImplementationHeader(self, func, file):
"""Writes the GLES2 Trace Implemention header."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2TraceImplementation(self, func, file):
"""Writes the GLES2 Trace Implemention."""
file.Write("%s GLES2TraceImplementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(' TRACE_EVENT_BINARY_EFFICIENT0("gpu", "GLES2Trace::%s");\n' %
func.name)
file.Write(" %sgl_->%s(%s);\n" %
(result_string, func.name, func.MakeOriginalArgString("")))
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
gen_cmd = func.GetInfo('gen_cmd')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True) and
(gen_cmd == None or gen_cmd == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteClientGLCallLog(func, file)
func.WriteDestinationInitalizationValidation(file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeHelperArgString("")))
file.Write(" CheckGLError();\n")
self.WriteClientGLReturnLog(func, file)
file.Write("}\n")
file.Write("\n")
def WriteGLES2InterfaceHeader(self, func, file):
"""Writes the GLES2 Interface."""
file.Write("virtual %s %s(%s) = 0;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteMojoGLES2ImplHeader(self, func, file):
"""Writes the Mojo GLES2 implementation header."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteMojoGLES2Impl(self, func, file):
"""Writes the Mojo GLES2 implementation."""
file.Write("%s MojoGLES2Impl::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
extensions = ["CHROMIUM_sync_point", "CHROMIUM_texture_mailbox",
"CHROMIUM_sub_image", "CHROMIUM_miscellaneous",
"occlusion_query_EXT", "CHROMIUM_image",
"CHROMIUM_copy_texture",
"CHROMIUM_pixel_transfer_buffer_object"]
if func.IsCoreGLFunction() or func.GetInfo("extension") in extensions:
file.Write("MojoGLES2MakeCurrent(context_);");
func_return = "gl" + func.original_name + "(" + \
func.MakeOriginalArgString("") + ");"
if func.return_type == "void":
file.Write(func_return);
else:
file.Write("return " + func_return);
else:
file.Write("NOTREACHED() << \"Unimplemented %s.\";\n" %
func.original_name);
if func.return_type != "void":
file.Write("return 0;")
file.Write("}")
def WriteGLES2InterfaceStub(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2InterfaceStubImpl(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
args = func.GetOriginalArgs()
arg_string = ", ".join(
["%s /* %s */" % (arg.type, arg.name) for arg in args])
file.Write("%s GLES2InterfaceStub::%s(%s) {\n" %
(func.return_type, func.original_name, arg_string))
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test')
if (func.can_auto_generate and
(client_test == None or client_test == True)):
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [arg for arg in func.GetOriginalArgs() if arg.IsConstant()]
if constants:
code = """
TEST_F(GLES2ImplementationTest, %(name)sInvalidConstantArg%(invalid_index)d) {
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
else:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
})
else:
if client_test != False:
file.Write("// TODO(zmo): Implement unit test for %s\n" % func.name)
def WriteDestinationInitalizationValidation(self, func, file):
"""Writes the client side destintion initialization validation."""
for arg in func.GetOriginalArgs():
arg.WriteDestinationInitalizationValidation(file, func)
def WriteTraceEvent(self, func, file):
file.Write(' TRACE_EVENT0("gpu", "GLES2Implementation::%s");\n' %
func.original_name)
def WriteImmediateCmdComputeSize(self, func, file):
"""Writes the size computation code for the immediate version of a cmd."""
file.Write(" static uint32_t ComputeSize(uint32_t size_in_bytes) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + // NOLINT\n")
file.Write(" RoundSizeToMultipleOfEntries(size_in_bytes));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Writes the SetHeader function for the immediate version of a cmd."""
file.Write(" void SetHeader(uint32_t size_in_bytes) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(size_in_bytes);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Writes the Init function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteImmediateCmdSet(self, func, file):
"""Writes the Set function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteCmdHelper(self, func, file):
"""Writes the cmd helper definition for a cmd."""
code = """ void %(name)s(%(typed_args)s) {
gles2::cmds::%(name)s* c = GetCmdSpace<gles2::cmds::%(name)s>();
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
def WriteImmediateCmdHelper(self, func, file):
"""Writes the cmd helper definition for the immediate version of a cmd."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t s = 0; // TODO(gman): compute correct size
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(s);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
class StateSetHandler(TypeHandler):
"""Handler for commands that simply set state."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
for ndx,item in enumerate(states):
code = []
if 'range_checks' in item:
for range_check in item['range_checks']:
code.append("%s %s" % (args[ndx].name, range_check['check']))
if 'nan_check' in item:
# Drivers might generate an INVALID_VALUE error when a value is set
# to NaN. This is allowed behavior under GLES 3.0 section 2.1.1 or
# OpenGL 4.5 section 2.3.4.1 - providing NaN allows undefined results.
# Make this behavior consistent within Chromium, and avoid leaking GL
# errors by generating the error in the command buffer instead of
# letting the GL driver generate it.
code.append("std::isnan(%s)" % args[ndx].name)
if len(code):
file.Write(" if (%s) {\n" % " ||\n ".join(code))
file.Write(
' LOCAL_SET_GL_ERROR(GL_INVALID_VALUE,'
' "%s", "%s out of range");\n' %
(func.name, args[ndx].name))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for ndx,item in enumerate(states):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
for ndx,item in enumerate(states):
if item.get('cached', False):
file.Write(" state_.%s = %s;\n" %
(CachedStateName(item), args[ndx].name))
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
TypeHandler.WriteServiceUnitTest(self, func, file, *extras)
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
for ndx,item in enumerate(states):
if 'range_checks' in item:
for check_ndx, range_check in enumerate(item['range_checks']):
valid_test = """
TEST_P(%(test_name)s, %(name)sInvalidValue%(ndx)d_%(check_ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = range_check['test_value']
vars = {
'name': name,
'ndx': ndx,
'check_ndx': check_ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
vars.update(extra)
file.Write(valid_test % vars)
if 'nan_check' in item:
valid_test = """
TEST_P(%(test_name)s, %(name)sNaNValue%(ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = 'nanf("")'
vars = {
'name': name,
'ndx': ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
vars.update(extra)
file.Write(valid_test % vars)
class StateSetRGBAlphaHandler(TypeHandler):
"""Handler for commands that simply set state that have rgb/alpha."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx % num_args].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for ndx, item in enumerate(states):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx % num_args].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackSeparateHandler(TypeHandler):
"""Handler for commands that simply set state that have front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
face = args[0].name
num_args = len(args)
file.Write(" bool changed = false;\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
code = []
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx + 1].name))
file.Write(" changed |= %s;\n" % " ||\n ".join(code))
file.Write(" }\n")
file.Write(" if (changed) {\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx + 1].name))
file.Write(" }\n")
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackHandler(TypeHandler):
"""Handler for commands that simply set state that set both front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetNamedParameter(TypeHandler):
"""Handler for commands that set a state chosen with an enum parameter."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overridden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
assert num_args == 2
file.Write(" switch (%s) {\n" % args[0].name)
for state in states:
file.Write(" case %s:\n" % state['enum'])
file.Write(" if (state_.%s != %s) {\n" %
(state['name'], args[1].name))
file.Write(" state_.%s = %s;\n" % (state['name'], args[1].name))
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
file.Write(" break;\n")
file.Write(" default:\n")
file.Write(" NOTREACHED();\n")
file.Write(" }\n")
class CustomHandler(TypeHandler):
"""Handler for commands that are auto-generated but require minor tweaks."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(
" uint32_t total_size = 0; // TODO(gman): get correct size.\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
class TodoHandler(CustomHandler):
"""Handle for commands that are not yet implemented."""
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" SetGLError("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
file.Write("\n")
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" LOCAL_SET_GL_ERROR("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
class HandWrittenHandler(CustomHandler):
"""Handler for comands where everything must be written by hand."""
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
CustomHandler.InitFunction(self, func)
func.can_auto_generate = False
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
# If specified explicitly, force the data transfer method.
if func.GetInfo('data_transfer_methods'):
return True
return False
def WriteStruct(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteDocs(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteBucketFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
class ManualHandler(CustomHandler):
"""Handler for commands who's handlers must be written by hand."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if (func.name == 'CompressedTexImage2DBucket' or
func.name == 'CompressedTexImage3DBucket'):
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
else:
CustomHandler.InitFunction(self, func)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.GetInfo('impl_func'):
super(ManualHandler, self).WriteGLES2Implementation(func, file)
def WriteGLES2ImplementationHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
CustomHandler.WriteImmediateCmdGetTotalSize(self, func, file)
class DataHandler(TypeHandler):
"""Handler for glBufferData, glBufferSubData, glTexImage*D, glTexSubImage*D,
     glCompressedTexImage*D, glCompressedTexSubImage*D."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if (func.name == 'CompressedTexSubImage2DBucket' or
func.name == 'CompressedTexSubImage3DBucket'):
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
name = func.name
if name.endswith("Immediate"):
name = name[0:-9]
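      # Strip the 9-character 'Immediate' suffix so the checks below see the
      # base command name.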
if name == 'BufferData' or name == 'BufferSubData':
file.Write(" uint32_t data_size = size;\n")
elif (name == 'CompressedTexImage2D' or
name == 'CompressedTexSubImage2D' or
name == 'CompressedTexImage3D' or
name == 'CompressedTexSubImage3D'):
file.Write(" uint32_t data_size = imageSize;\n")
elif (name == 'CompressedTexSubImage2DBucket' or
name == 'CompressedTexSubImage3DBucket'):
file.Write(" Bucket* bucket = GetBucket(c.bucket_id);\n")
file.Write(" uint32_t data_size = bucket->size();\n")
file.Write(" GLsizei imageSize = data_size;\n")
elif name == 'TexImage2D' or name == 'TexSubImage2D':
code = """ uint32_t data_size;
if (!GLES2Util::ComputeImageDataSize(
width, height, format, type, unpack_alignment_, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
else:
file.Write(
"// uint32_t data_size = 0; // TODO(gman): get correct size!\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSizeTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" EXPECT_EQ(sizeof(cmd), total_size);\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Remove this exception.
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
return
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
if ((not func.name == 'CompressedTexSubImage2DBucket') and
(not func.name == 'CompressedTexSubImage3DBucket')):
      TypeHandler.WriteBucketServiceImplementation(self, func, file)
class BindHandler(TypeHandler):
"""Handler for glBind___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
if len(func.GetOriginalArgs()) == 1:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(kNewServiceId));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(kNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_type': func.GetOriginalArgs()[0].resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_,
%(gl_func_name)s(%(gl_args_with_new_id)s));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args_with_new_id)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
gl_args_with_new_id = []
args_with_new_id = []
for arg in func.GetOriginalArgs():
if hasattr(arg, 'resource_type'):
gl_args_with_new_id.append('kNewServiceId')
args_with_new_id.append('kNewClientId')
else:
gl_args_with_new_id.append(arg.GetValidGLArg(func))
args_with_new_id.append(arg.GetValidArg(func))
self.WriteValidUnitTest(func, file, valid_test, {
'args_with_new_id': ", ".join(args_with_new_id),
'gl_args_with_new_id': ", ".join(gl_args_with_new_id),
'resource_type': func.GetResourceIdArg().resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ if (Is%(type)sReservedId(%(id)s)) {
SetGLError(GL_INVALID_OPERATION, "%(name)s\", \"%(id)s reserved id");
return;
}
%(name)sHelper(%(arg_string)s);
CheckGLError();
}
"""
name_arg = func.GetResourceIdArg()
file.Write(code % {
'name': func.name,
'arg_string': func.MakeOriginalArgString(""),
'id': name_arg.name,
'type': name_arg.resource_type,
'lc_type': name_arg.resource_type.lower(),
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == False:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));"""
if not func.IsUnsafe():
code += """
ClearCommands();
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());"""
code += """
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
class GENnHandler(TypeHandler):
"""Handler for glGen___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.name, func.GetLastOriginalArg().name))
def WriteImmediateHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.IsUnsafe():
file.Write(""" for (GLsizei ii = 0; ii < n; ++ii) {
if (group_->Get%(resource_name)sServiceId(%(last_arg_name)s[ii], NULL)) {
return error::kInvalidArguments;
}
}
scoped_ptr<GLuint[]> service_ids(new GLuint[n]);
gl%(func_name)s(n, service_ids.get());
for (GLsizei ii = 0; ii < n; ++ii) {
group_->Add%(resource_name)sId(%(last_arg_name)s[ii], service_ids[ii]);
}
""" % { 'func_name': func.original_name,
'last_arg_name': func.GetLastOriginalArg().name,
'resource_name': func.GetInfo('resource_type') })
else:
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
log_code = (""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});""" % func.GetOriginalArgs()[1].name)
args = {
'log_code': log_code,
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_types': func.GetInfo('resource_types'),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
not_shared = func.GetInfo('not_shared')
if not_shared:
alloc_code = (
""" IdAllocator* id_allocator = GetIdAllocator(id_namespaces::k%s);
for (GLsizei ii = 0; ii < n; ++ii)
%s[ii] = id_allocator->AllocateID();""" %
(func.GetInfo('resource_types'), func.GetOriginalArgs()[1].name))
else:
alloc_code = (""" GetIdHandler(id_namespaces::k%(resource_types)s)->
MakeIds(this, 0, %(args)s);""" % args)
args['alloc_code'] = alloc_code
code = """ GPU_CLIENT_SINGLE_THREAD_CHECK();
%(alloc_code)s
%(name)sHelper(%(args)s);
helper_->%(name)sImmediate(%(args)s);
if (share_group_->bind_generates_resource())
helper_->CommandBufferHelper::Flush();
%(log_code)s
CheckGLError();
}
"""
file.Write(code % args)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { 0, };
struct Cmds {
cmds::%(name)sImmediate gen;
GLuint data[2];
};
Cmds expected;
expected.gen.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(k%(types)sStartId, ids[0]);
EXPECT_EQ(k%(types)sStartId + 1, ids[1]);
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
GetSharedMemoryAs<GLuint*>()[0] = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
GLuint service_id;
EXPECT_TRUE(Get%(resource_name)sServiceId(kNewClientId, &service_id));
  EXPECT_EQ(kNewServiceId, service_id);
}
"""
else:
valid_test += """
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
}, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
GLuint temp = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
cmd->Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
GLuint service_id;
EXPECT_TRUE(Get%(resource_name)sServiceId(kNewClientId, &service_id));
EXPECT_EQ(kNewServiceId, service_id);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
}
"""
else:
valid_test += """
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
cmd->Init(1, &client_%(resource_name)s_id_);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
decoder_->set_unsafe_es3_apis_enabled(false);
}
"""
else:
invalid_test += """
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
}, *extras)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class CreateHandler(TypeHandler):
"""Handler for glCreate___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("client_id", 'uint32_t'))
def __GetResourceType(self, func):
if func.return_type == "GLsync":
return "Sync"
else:
return func.name[6:] # Create*
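  # e.g. 'CreateProgram' -> 'Program'; functions returning GLsync are tracked
  # under the 'Sync' resource type instead.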
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
%(id_type_cast)sEXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(Return(%(const_service_id)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
%(return_type)s service_id = 0;
EXPECT_TRUE(Get%(resource_type)sServiceId(kNewClientId, &service_id));
EXPECT_EQ(%(const_service_id)s, service_id);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_TRUE(Get%(resource_type)s(kNewClientId));
}
"""
comma = ""
cmd_arg_count = 0
for arg in func.GetOriginalArgs():
if not arg.IsConstant():
cmd_arg_count += 1
if cmd_arg_count:
comma = ", "
if func.return_type == 'GLsync':
id_type_cast = ("const GLsync kNewServiceIdGLuint = reinterpret_cast"
"<GLsync>(kNewServiceId);\n ")
const_service_id = "kNewServiceIdGLuint"
else:
id_type_cast = ""
const_service_id = "kNewServiceId"
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
'resource_type': self.__GetResourceType(func),
'return_type': func.return_type,
'id_type_cast': id_type_cast,
'const_service_id': const_service_id,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
if func.IsUnsafe():
code = """ uint32_t client_id = c.client_id;
%(return_type)s service_id = 0;
if (group_->Get%(resource_name)sServiceId(client_id, &service_id)) {
return error::kInvalidArguments;
}
service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
group_->Add%(resource_name)sId(client_id, service_id);
}
"""
else:
code = """ uint32_t client_id = c.client_id;
if (Get%(resource_name)s(client_id)) {
return error::kInvalidArguments;
}
%(return_type)s service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
Create%(resource_name)s(client_id, service_id%(gl_args_with_comma)s);
}
"""
file.Write(code % {
'resource_name': self.__GetResourceType(func),
'return_type': func.return_type,
'gl_func_name': func.GetGLFunctionName(),
'gl_args': func.MakeOriginalArgString(""),
'gl_args_with_comma': func.MakeOriginalArgString("", True) })
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" GLuint client_id;\n")
if func.return_type == "GLsync":
file.Write(
" GetIdHandler(id_namespaces::kSyncs)->\n")
else:
file.Write(
" GetIdHandler(id_namespaces::kProgramsAndShaders)->\n")
file.Write(" MakeIds(this, 0, 1, &client_id);\n")
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeCmdArgString("")))
file.Write(' GPU_CLIENT_LOG("returned " << client_id);\n')
file.Write(" CheckGLError();\n")
if func.return_type == "GLsync":
file.Write(" return reinterpret_cast<GLsync>(client_id);\n")
else:
file.Write(" return client_id;\n")
file.Write("}\n")
file.Write("\n")
class DeleteHandler(TypeHandler):
"""Handler for glDelete___ single resource type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.IsUnsafe():
TypeHandler.WriteServiceImplementation(self, func, file)
# HandleDeleteShader and HandleDeleteProgram are manually written.
pass
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(
" GPU_CLIENT_DCHECK(%s != 0);\n" % func.GetOriginalArgs()[-1].name)
file.Write(" %sHelper(%s);\n" %
(func.original_name, func.GetOriginalArgs()[-1].name))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
assert len(func.GetOriginalArgs()) == 1
arg = func.GetOriginalArgs()[0]
if func.IsUnsafe():
file.Write(""" %(arg_type)s service_id = 0;
if (group_->Get%(resource_type)sServiceId(%(arg_name)s, &service_id)) {
glDelete%(resource_type)s(service_id);
group_->Remove%(resource_type)sId(%(arg_name)s);
} else {
LOCAL_SET_GL_ERROR(
GL_INVALID_VALUE, "gl%(func_name)s", "unknown %(arg_name)s");
}
""" % { 'resource_type': func.GetInfo('resource_type'),
'arg_name': arg.name,
'arg_type': arg.type,
'func_name': func.original_name })
else:
file.Write(" %sHelper(%s);\n" % (func.original_name, arg.name))
class DELnHandler(TypeHandler):
"""Handler for glDelete___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { k%(types)sStartId, k%(types)sStartId + 1 };
struct Cmds {
cmds::%(name)sImmediate del;
GLuint data[2];
};
Cmds expected;
expected.del.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
GetSharedMemoryAs<GLuint*>()[0] = kInvalidClientId;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd.Init(1, &client_%(resource_name)s_id_);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
EXPECT_FALSE(Get%(upper_resource_name)sServiceId(
client_%(resource_name)s_id_, NULL));
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
}
"""
else:
valid_test += """
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
GLuint temp = kInvalidClientId;
cmd.Init(1, &temp);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
else:
invalid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
file.Write(" %sHelper(n, %s);\n" %
(func.name, func.GetLastOriginalArg().name))
  def WriteImmediateHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
if func.IsUnsafe():
file.Write(""" for (GLsizei ii = 0; ii < n; ++ii) {
GLuint service_id = 0;
if (group_->Get%(resource_type)sServiceId(
%(last_arg_name)s[ii], &service_id)) {
glDelete%(resource_type)ss(1, &service_id);
group_->Remove%(resource_type)sId(%(last_arg_name)s[ii]);
}
}
""" % { 'resource_type': func.GetInfo('resource_type'),
'last_arg_name': func.GetLastOriginalArg().name })
else:
file.Write(" %sHelper(n, %s);\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
args = {
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_type': func.GetInfo('resource_type').lower(),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});
""" % func.GetOriginalArgs()[1].name)
file.Write(""" GPU_CLIENT_DCHECK_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
DCHECK(%s[i] != 0);
}
});
""" % func.GetOriginalArgs()[1].name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ %(name)sHelper(%(args)s);
CheckGLError();
}
"""
file.Write(code % args)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class GETnHandler(TypeHandler):
"""Handler for GETn for glGetBooleanv, glGetFloatv, ... type functions."""
def __init__(self):
TypeHandler.__init__(self)
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
last_arg = func.GetLastOriginalArg()
# All except shm_id and shm_offset.
all_but_last_args = func.GetCmdArgs()[:-2]
for arg in all_but_last_args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
GLsizei num_values = 0;
GetNumValuesReturnedForGLGet(pname, &num_values);
Result* result = GetSharedMemoryAs<Result*>(
c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
Result::ComputeSize(num_values));
%(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : NULL;
"""
file.Write(code % {
'last_arg_type': last_arg.type,
'last_arg_name': last_arg.name,
'func_name': func.name,
})
func.WriteHandlerValidation(file)
code = """ // Check that the client initialized the result.
if (result->size != 0) {
return error::kInvalidArguments;
}
"""
shadowed = func.GetInfo('shadowed')
if not shadowed:
file.Write(' LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name)
file.Write(code)
func.WriteHandlerImplementation(file)
if shadowed:
code = """ result->SetNumResults(num_values);
return error::kNoError;
}
"""
else:
code = """ GLenum error = LOCAL_PEEK_GL_ERROR("%(func_name)s");
if (error == GL_NO_ERROR) {
result->SetNumResults(num_values);
}
return error::kNoError;
}
"""
file.Write(code % {'func_name': func.name})
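  # Sketch of the emitted handler, assuming glGetIntegerv (names illustrative):
  # the result->size check rejects a command whose result area was not
  # zero-initialized by the client, so a stale result slot cannot be reused.
  #
  #   typedef cmds::GetIntegerv::Result Result;
  #   GLsizei num_values = 0;
  #   GetNumValuesReturnedForGLGet(pname, &num_values);
  #   Result* result = GetSharedMemoryAs<Result*>(
  #       c.params_shm_id, c.params_shm_offset,
  #       Result::ComputeSize(num_values));
  #   GLint* params = result ? result->GetData() : NULL;
  #   ...
  #   if (result->size != 0) {
  #     return error::kInvalidArguments;
  #   }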
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
all_but_last_args = func.GetOriginalArgs()[:-1]
args = []
has_length_arg = False
for arg in all_but_last_args:
if arg.type == 'GLsync':
args.append('ToGLuint(%s)' % arg.name)
elif arg.name.endswith('size') and arg.type == 'GLsizei':
continue
elif arg.name == 'length':
has_length_arg = True
continue
else:
args.append(arg.name)
arg_string = ", ".join(args)
all_arg_string = (
", ".join([
"%s" % arg.name
for arg in func.GetOriginalArgs() if not arg.IsConstant()]))
self.WriteTraceEvent(func, file)
code = """ if (%(func_name)sHelper(%(all_arg_string)s)) {
return;
}
typedef cmds::%(func_name)s::Result Result;
Result* result = GetResultAs<Result*>();
if (!result) {
return;
}
result->SetNumResults(0);
helper_->%(func_name)s(%(arg_string)s,
GetResultShmId(), GetResultShmOffset());
WaitForCmd();
result->CopyResult(%(last_arg_name)s);
GPU_CLIENT_LOG_CODE_BLOCK({
for (int32_t i = 0; i < result->GetNumResults(); ++i) {
GPU_CLIENT_LOG(" " << i << ": " << result->GetData()[i]);
}
});"""
if has_length_arg:
code += """
if (length) {
*length = result->GetNumResults();
}"""
code += """
CheckGLError();
}
"""
file.Write(code % {
'func_name': func.name,
'arg_string': arg_string,
'all_arg_string': all_arg_string,
'last_arg_name': func.GetLastOriginalArg().name,
})
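  # The client stub written above is synchronous: it first gives the
  # hand-written <Name>Helper a chance to answer from locally cached state and
  # returns early when it does; otherwise it issues the command with the
  # shared-memory result slot, blocks in WaitForCmd(), and CopyResult()s the
  # values into the caller's pointer.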
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
typedef cmds::%(name)s::Result::Type ResultType;
ResultType result = 0;
Cmds expected;
ExpectedMemoryInfo result1 = GetExpectedResultMemory(
sizeof(uint32_t) + sizeof(ResultType));
expected.cmd.Init(%(cmd_args)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, SizedResultHelper<ResultType>(1)))
.RetiresOnSaturation();
gl_->%(name)s(%(args)s, &result);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(static_cast<ResultType>(1), result);
}
"""
first_cmd_arg = func.GetCmdArgs()[0].GetValidNonCachedClientSideCmdArg(func)
if not first_cmd_arg:
return
first_gl_arg = func.GetOriginalArgs()[0].GetValidNonCachedClientSideArg(
func)
cmd_arg_strings = [first_cmd_arg]
for arg in func.GetCmdArgs()[1:-2]:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = [first_gl_arg]
for arg in func.GetOriginalArgs()[1:-1]:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, GetError())
.WillOnce(Return(GL_NO_ERROR))
.WillOnce(Return(GL_NO_ERROR))
.RetiresOnSaturation();
SpecializedSetup<cmds::%(name)s, 0>(true);
typedef cmds::%(name)s::Result Result;
Result* result = static_cast<Result*>(shared_memory_address_);
EXPECT_CALL(*gl_, %(gl_func_name)s(%(local_gl_args)s));
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(decoder_->GetGLES2Util()->GLGetNumValuesReturned(
%(valid_pname)s),
result->GetNumResults());
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
gl_arg_strings = []
cmd_arg_strings = []
valid_pname = ''
for arg in func.GetOriginalArgs()[:-1]:
if arg.name == 'length':
gl_arg_value = 'nullptr'
elif arg.name.endswith('size'):
gl_arg_value = ("decoder_->GetGLES2Util()->GLGetNumValuesReturned(%s)" %
valid_pname)
elif arg.type == 'GLsync':
gl_arg_value = 'reinterpret_cast<GLsync>(kServiceSyncId)'
else:
gl_arg_value = arg.GetValidGLArg(func)
gl_arg_strings.append(gl_arg_value)
if arg.name == 'pname':
valid_pname = gl_arg_value
if arg.name.endswith('size') or arg.name == 'length':
continue
if arg.type == 'GLsync':
arg_value = 'client_sync_id_'
else:
arg_value = arg.GetValidArg(func)
cmd_arg_strings.append(arg_value)
if func.GetInfo('gl_test_func') == 'glGetIntegerv':
gl_arg_strings.append("_")
else:
gl_arg_strings.append("result->GetData()")
cmd_arg_strings.append("shared_memory_id_")
cmd_arg_strings.append("shared_memory_offset_")
self.WriteValidUnitTest(func, file, valid_test, {
'local_gl_args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'valid_pname': valid_pname,
}, *extras)
if not func.IsUnsafe():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s::Result* result =
static_cast<cmds::%(name)s::Result*>(shared_memory_address_);
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));
EXPECT_EQ(0u, result->size);%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
class ArrayArgTypeHandler(TypeHandler):
"""Base class for type handlers that handle args that are arrays"""
def __init__(self):
TypeHandler.__init__(self)
def GetArrayType(self, func):
"""Returns the type of the element in the element array being PUT to."""
for arg in func.GetOriginalArgs():
if arg.IsPointer():
element_type = arg.GetPointedType()
return element_type
# Special case: array type handler is used for a function that is forwarded
# to the actual array type implementation
element_type = func.GetOriginalArgs()[-1].type
assert all(arg.type == element_type \
for arg in func.GetOriginalArgs()[-self.GetArrayCount(func):])
return element_type
def GetArrayCount(self, func):
"""Returns the count of the elements in the array being PUT to."""
return func.GetInfo('count')
class PUTHandler(ArrayArgTypeHandler):
"""Handler for glTexParameter_v, glVertexAttrib_v functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
expected_call = "EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));"
if func.GetInfo("first_element_only"):
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()
]
gl_arg_strings[-1] = "*" + gl_arg_strings[-1]
expected_call = ("EXPECT_CALL(*gl_, %%(gl_func_name)s(%s));" %
", ".join(gl_arg_strings))
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
%(expected_call)s
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
extra = {
'data_type': self.GetArrayType(func),
'data_value': func.GetInfo('data_value') or '0',
'expected_call': expected_call,
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(gl_args)s, &temp[0]);
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s, %(data_ref)sreinterpret_cast<
%(data_type)s*>(ImmediateDataAddress(&cmd))));"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));"""
valid_test += """
}
"""
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
gl_any_strings = ["_"] * len(gl_arg_strings)
extra = {
'data_ref': ("*" if func.GetInfo('first_element_only') else ""),
'data_type': self.GetArrayType(func),
'data_count': self.GetArrayCount(func),
'data_value': func.GetInfo('data_value') or '0',
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();"""
if func.IsUnsafe():
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(1);
"""
else:
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
"""
invalid_test += """
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(all_but_last_args)s, &temp[0]);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));
decoder_->set_unsafe_es3_apis_enabled(false);
}
"""
else:
invalid_test += """
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));
%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!ComputeDataSize(1, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (self.GetArrayType(func), self.GetArrayCount(func)))
if func.IsImmediate():
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def __NeedsToCalcDataCount(self, func):
use_count_func = func.GetInfo('use_count_func')
return use_count_func != None and use_count_func != False
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if (impl_func != None and impl_func != True):
return;
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
if self.__NeedsToCalcDataCount(func):
file.Write(" size_t count = GLES2Util::Calc%sDataCount(%s);\n" %
(func.name, func.GetOriginalArgs()[0].name))
file.Write(" DCHECK_LE(count, %du);\n" % self.GetArrayCount(func))
else:
file.Write(" size_t count = %d;" % self.GetArrayCount(func))
file.Write(" for (size_t ii = 0; ii < count; ++ii)\n")
file.Write(' GPU_CLIENT_LOG("value[" << ii << "]: " << %s[ii]);\n' %
func.GetLastOriginalArg().name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test')
if (client_test != None and client_test != True):
return;
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count)d] = {0};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count)d];
};
for (int jj = 0; jj < %(count)d; ++jj) {
data[jj] = static_cast<%(type)s>(jj);
}
Cmds expected;
expected.cmd.Init(%(cmd_args)s, &data[0]);
gl_->%(name)s(%(args)s, &data[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()[0:-2]
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
file.Write(code % {
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize() {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * %d);\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" }\n")
file.Write("\n")
if self.__NeedsToCalcDataCount(func):
file.Write(" static uint32_t ComputeEffectiveDataSize(%s %s) {\n" %
(func.GetOriginalArgs()[0].type,
func.GetOriginalArgs()[0].name))
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * GLES2Util::Calc%sDataCount(%s));\n" %
(self.GetArrayType(func), func.original_name,
func.GetOriginalArgs()[0].name))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize() {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize());\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader() {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize());\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader();\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
if self.__NeedsToCalcDataCount(func):
file.Write(" _%s, ComputeEffectiveDataSize(%s));" %
(last_arg.name, func.GetOriginalArgs()[0].name))
file.Write("""
DCHECK_GE(ComputeDataSize(), ComputeEffectiveDataSize(%(arg)s));
char* pointer = reinterpret_cast<char*>(ImmediateDataAddress(this)) +
ComputeEffectiveDataSize(%(arg)s);
memset(pointer, 0, ComputeDataSize() - ComputeEffectiveDataSize(%(arg)s));
""" % { 'arg': func.GetOriginalArgs()[0].name, })
else:
file.Write(" _%s, ComputeDataSize());\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize();\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize();
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func)):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(",\n data);\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n"
% func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)),\n")
file.Write(" cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTnHandler(ArrayArgTypeHandler):
"""Handler for PUTn 'glUniform__v' type functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overridden from TypeHandler."""
ArrayArgTypeHandler.WriteServiceUnitTest(self, func, file, *extras)
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgsCountTooLarge) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
# hardcoded to match unit tests.
if count == 0:
# the location of the second element of the 2nd uniform.
# defined in GLES2DecoderBase::SetupShaderForUniform
gl_arg_strings.append("3")
arg_strings.append("ProgramManager::MakeFakeLocation(1, 1)")
elif count == 1:
# the number of elements that gl will be called with.
gl_arg_strings.append("3")
# the number of elements requested in the command.
arg_strings.append("5")
else:
gl_arg_strings.append(arg.GetValidGLArg(func))
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'gl_args': ", ".join(gl_arg_strings),
'args': ", ".join(arg_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overridden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s,
reinterpret_cast<%(data_type)s*>(ImmediateDataAddress(&cmd))));
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(args)s, &temp[0]);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));"""
valid_test += """
}
"""
gl_arg_strings = []
gl_any_strings = []
arg_strings = []
for arg in func.GetOriginalArgs()[0:-1]:
gl_arg_strings.append(arg.GetValidGLArg(func))
gl_any_strings.append("_")
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'data_type': self.GetArrayType(func),
'data_count': self.GetArrayCount(func),
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!ComputeDataSize(count, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (self.GetArrayType(func), self.GetArrayCount(func)))
if func.IsImmediate():
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
last_pointer_name = func.GetLastOriginalPointerArg().name
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < count; ++i) {
""")
values_str = ' << ", " << '.join(
["%s[%d + i * %d]" % (
last_pointer_name, ndx, self.GetArrayCount(func)) for ndx in range(
0, self.GetArrayCount(func))])
file.Write(' GPU_CLIENT_LOG(" " << i << ": " << %s);\n' % values_str)
file.Write(" }\n });\n")
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeInitString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count_param)d][%(count)d];
};
Cmds expected;
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for arg in func.GetCmdArgs():
if arg.name.endswith("_shm_id"):
cmd_arg_strings.append("&data[0][0]")
elif arg.name.endswith("_shm_offset"):
continue
else:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = []
count_param = 0
for arg in func.GetOriginalArgs():
if arg.IsPointer():
valid_value = "&data[0][0]"
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
file.Write(code % {
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'count_param': count_param,
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [
arg for arg in func.GetOriginalArgs()[0:-1] if arg.IsConstant()
]
if not constants:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)sInvalidConstantArg%(invalid_index)d) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
elif arg.IsPointer():
gl_arg_strings.append("&data[0][0]")
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
file.Write(code % {
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
'count_param': count_param,
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * %d * count); // NOLINT\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize(count)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei count) {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize(count));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" %
func.MakeTypedInitString("_"))
file.Write(" SetHeader(_count);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
pointer_arg = func.GetLastOriginalPointerArg()
file.Write(" _%s, ComputeDataSize(_count));\n" % pointer_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedInitString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" %
func.MakeInitString("_"))
file.Write(" const uint32_t size = ComputeSize(_count);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(count);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedInitString(""),
"args": func.MakeInitString("")
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
args = func.GetOriginalArgs()
count_param = 0
for arg in args:
if arg.name == "count":
count_param = int(arg.GetValidClientSideCmdArg(func))
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func) * count_param):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" const GLsizei kNumElements = %d;\n" % count_param)
file.Write(" const size_t kExpectedCmdSize =\n")
file.Write(" sizeof(cmd) + kNumElements * sizeof(%s) * %d;\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
for value, arg in enumerate(args):
if arg.IsPointer():
file.Write(",\n data")
elif arg.IsConstant():
continue
else:
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 1))
file.Write(");\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(kExpectedCmdSize, cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
if arg.IsPointer() or arg.IsConstant():
continue
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 1, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTSTRHandler(ArrayArgTypeHandler):
"""Handler for functions that pass a string array."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def __GetDataArg(self, func):
"""Return the argument that points to the 2D char arrays"""
for arg in func.GetOriginalArgs():
if arg.IsPointer2D():
return arg
return None
def __GetLengthArg(self, func):
"""Return the argument that holds length for each char array"""
for arg in func.GetOriginalArgs():
if arg.IsPointer() and not arg.IsPointer2D():
return arg
return None
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
data_arg = self.__GetDataArg(func)
length_arg = self.__GetLengthArg(func)
log_code_block = """ GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei ii = 0; ii < count; ++ii) {
if (%(data)s[ii]) {"""
if length_arg == None:
log_code_block += """
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");"""
else:
log_code_block += """
if (%(length)s && %(length)s[ii] >= 0) {
const std::string my_str(%(data)s[ii], %(length)s[ii]);
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << my_str << "\\n---");
} else {
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");
}"""
log_code_block += """
} else {
GPU_CLIENT_LOG(" " << ii << ": NULL");
}
}
});
"""
file.Write(log_code_block % {
'data': data_arg.name,
'length': length_arg.name if not length_arg == None else ''
})
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
bucket_args = []
for arg in func.GetOriginalArgs():
if arg.name == 'count' or arg == self.__GetLengthArg(func):
continue
if arg == self.__GetDataArg(func):
bucket_args.append('kResultBucketId')
else:
bucket_args.append(arg.name)
code_block = """
if (!PackStringsToBucket(count, %(data)s, %(length)s, "gl%(func_name)s")) {
return;
}
helper_->%(func_name)sBucket(%(bucket_args)s);
helper_->SetBucketSize(kResultBucketId, 0);
CheckGLError();
}
"""
file.Write(code_block % {
'data': data_arg.name,
'length': length_arg.name if not length_arg == None else 'NULL',
'func_name': func.name,
'bucket_args': ', '.join(bucket_args),
})
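  # Sketch of the generated flow, assuming glShaderSource: the strings are
  # packed into the shared result bucket, a single *Bucket command references
  # them, and the bucket is released afterwards.
  #
  #   if (!PackStringsToBucket(count, str, length, "glShaderSource")) {
  #     return;
  #   }
  #   helper_->ShaderSourceBucket(shader, kResultBucketId);
  #   helper_->SetBucketSize(kResultBucketId, 0);
  #   CheckGLError();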
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
const uint32 kBucketId = GLES2Implementation::kResultBucketId;
const char* kString1 = "happy";
const char* kString2 = "ending";
const size_t kString1Size = ::strlen(kString1) + 1;
const size_t kString2Size = ::strlen(kString2) + 1;
const size_t kHeaderSize = sizeof(GLint) * 3;
const size_t kSourceSize = kHeaderSize + kString1Size + kString2Size;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedString1Size =
transfer_buffer_->RoundToAlignment(kString1Size);
const size_t kPaddedString2Size =
transfer_buffer_->RoundToAlignment(kString2Size);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data1;
cmd::SetToken set_token2;
cmd::SetBucketData set_bucket_data2;
cmd::SetToken set_token3;
cmds::%(name)sBucket cmd_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedString1Size);
ExpectedMemoryInfo mem2 = GetExpectedMemory(kPaddedString2Size);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data1.Init(
kBucketId, kHeaderSize, kString1Size, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.set_bucket_data2.Init(
kBucketId, kHeaderSize + kString1Size, kString2Size, mem2.id,
mem2.offset);
expected.set_token3.Init(GetNextToken());
expected.cmd_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString1, kString2 };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
bucket_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
bucket_args.append('kBucketId')
elif arg == self.__GetLengthArg(func):
gl_args.append('NULL')
elif arg.name == 'count':
gl_args.append('2')
else:
gl_args.append(arg.GetValidClientSideArg(func))
bucket_args.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
if self.__GetLengthArg(func) == None:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)sWithLength) {
const uint32 kBucketId = GLES2Implementation::kResultBucketId;
const char* kString = "foobar******";
const size_t kStringSize = 6; // We only need "foobar".
const size_t kHeaderSize = sizeof(GLint) * 2;
const size_t kSourceSize = kHeaderSize + kStringSize + 1;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedStringSize =
transfer_buffer_->RoundToAlignment(kStringSize + 1);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data;
cmd::SetToken set_token2;
cmds::ShaderSourceBucket shader_source_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedStringSize);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data.Init(
kBucketId, kHeaderSize, kStringSize + 1, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.shader_source_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString };
const GLint kLength[] = { kStringSize };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
elif arg == self.__GetLengthArg(func):
gl_args.append('kLength')
elif arg.name == 'count':
gl_args.append('1')
else:
gl_args.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
cmd_args = []
cmd_args_with_invalid_id = []
gl_args = []
for index, arg in enumerate(func.GetOriginalArgs()):
if arg == self.__GetLengthArg(func):
gl_args.append('_')
elif arg.name == 'count':
gl_args.append('1')
elif arg == self.__GetDataArg(func):
cmd_args.append('kBucketId')
cmd_args_with_invalid_id.append('kBucketId')
gl_args.append('_')
elif index == 0: # Resource ID arg
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append('kInvalidClientId')
gl_args.append(arg.GetValidGLArg(func))
else:
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append(arg.GetValidArg(func))
gl_args.append(arg.GetValidGLArg(func))
test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));"""
if func.IsUnsafe():
test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
"""
test += """
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
'gl_args': ", ".join(gl_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
decoder_->set_unsafe_es3_apis_enabled(true);
cmds::%(name)s cmd;
// Test no bucket.
cmd.Init(%(cmd_args)s);
EXPECT_NE(error::kNoError, ExecuteCmd(cmd));
// Test invalid client.
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmd.Init(%(cmd_args_with_invalid_id)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
'cmd_args_with_invalid_id': ", ".join(cmd_args_with_invalid_id),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidHeader) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
const GLsizei kCount = static_cast<GLsizei>(arraysize(kSource));
const GLsizei kTests[] = {
kCount + 1,
0,
std::numeric_limits<GLsizei>::max(),
-1,
};
decoder_->set_unsafe_es3_apis_enabled(true);
for (size_t ii = 0; ii < arraysize(kTests); ++ii) {
SetBucketAsCStrings(kBucketId, 1, kSource, kTests[ii], kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidStringEnding) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kInvalidStrEnd = '*';
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kInvalidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
class PUTXnHandler(ArrayArgTypeHandler):
"""Handler for glUniform?f functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
code = """ %(type)s temp[%(count)s] = { %(values)s};"""
if func.IsUnsafe():
code += """
gl%(name)sv(%(location)s, 1, &temp[0]);
"""
else:
code += """
Do%(name)sv(%(location)s, 1, &temp[0]);
"""
values = ""
args = func.GetOriginalArgs()
count = int(self.GetArrayCount(func))
num_args = len(args)
for ii in range(count):
values += "%s, " % args[len(args) - count + ii].name
file.Write(code % {
'name': func.name,
'count': self.GetArrayCount(func),
'type': self.GetArrayType(func),
'location': args[0].name,
'args': func.MakeOriginalArgString(""),
'values': values,
})
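  # Sketch of what this emits, assuming glUniform4f: the scalar arguments are
  # gathered into a stack array and forwarded to the vector entry point
  # (glUniform4fv directly for unsafe ES3 functions, DoUniform4fv otherwise),
  # so only one real implementation per uniform type is needed.
  #
  #   GLfloat temp[4] = { x, y, z, w, };
  #   DoUniform4fv(location, 1, &temp[0]);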
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(name)sv(%(local_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
args = func.GetOriginalArgs()
local_args = "%s, 1, _" % args[0].GetValidGLArg(func)
self.WriteValidUnitTest(func, file, valid_test, {
'name': func.name,
'count': self.GetArrayCount(func),
'local_args': local_args,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(name)sv(_, _, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'name': func.GetInfo('name'),
'count': self.GetArrayCount(func),
})
class GLcharHandler(CustomHandler):
"""Handler for functions that pass a single string ."""
def __init__(self):
CustomHandler.__init__(self)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeSize(uint32_t data_size) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + data_size); // NOLINT\n")
file.Write(" }\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
code = """
void SetHeader(uint32_t data_size) {
header.SetCmdBySize<ValueType>(data_size);
}
"""
file.Write(code)
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
args = func.GetCmdArgs()
set_code = []
for arg in args:
set_code.append(" %s = _%s;" % (arg.name, arg.name))
code = """
void Init(%(typed_args)s, uint32_t _data_size) {
SetHeader(_data_size);
%(set_code)s
memcpy(ImmediateDataAddress(this), _%(last_arg)s, _data_size);
}
"""
file.Write(code % {
"typed_args": func.MakeTypedArgString("_"),
"set_code": "\n".join(set_code),
"last_arg": last_arg.name
})
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void* Set(void* cmd%s, uint32_t _data_size) {\n" %
func.MakeTypedCmdArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _data_size);\n" %
func.MakeCmdArgString("_"))
file.Write(" return NextImmediateCmdAddress<ValueType>("
"cmd, _data_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t data_size = strlen(name);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpace<gles2::cmds::%(name)s>(data_size);
if (c) {
c->Init(%(args)s, data_size);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
init_code = []
check_code = []
all_but_last_arg = func.GetCmdArgs()[:-1]
for value, arg in enumerate(all_but_last_arg):
init_code.append(" static_cast<%s>(%d)," % (arg.type, value + 11))
for value, arg in enumerate(all_but_last_arg):
check_code.append(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);" %
(arg.type, value + 11, arg.name))
code = """
TEST_F(GLES2FormatTest, %(func_name)s) {
cmds::%(func_name)s& cmd = *GetBufferAs<cmds::%(func_name)s>();
static const char* const test_str = \"test string\";
void* next_cmd = cmd.Set(
&cmd,
%(init_code)s
test_str,
strlen(test_str));
EXPECT_EQ(static_cast<uint32_t>(cmds::%(func_name)s::kCmdId),
cmd.header.command);
EXPECT_EQ(sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)),
cmd.header.size * 4u);
EXPECT_EQ(static_cast<char*>(next_cmd),
reinterpret_cast<char*>(&cmd) + sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)));
%(check_code)s
EXPECT_EQ(static_cast<uint32_t>(strlen(test_str)), cmd.data_size);
EXPECT_EQ(0, memcmp(test_str, ImmediateDataAddress(&cmd), strlen(test_str)));
CheckBytesWritten(
next_cmd,
sizeof(cmd) + RoundSizeToMultipleOfEntries(strlen(test_str)),
sizeof(cmd) + strlen(test_str));
}
"""
file.Write(code % {
'func_name': func.name,
'init_code': "\n".join(init_code),
'check_code': "\n".join(check_code),
})
class GLcharNHandler(CustomHandler):
"""Handler for functions that pass a single string with an optional len."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.cmd_args = []
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def AddBucketFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
file.Write("""
GLuint bucket_id = static_cast<GLuint>(c.%(bucket_id)s);
Bucket* bucket = GetBucket(bucket_id);
if (!bucket || bucket->size() == 0) {
return error::kInvalidArguments;
}
std::string str;
if (!bucket->GetAsString(&str)) {
return error::kInvalidArguments;
}
%(gl_func_name)s(0, str.c_str());
return error::kNoError;
}
""" % {
'name': func.name,
'gl_func_name': func.GetGLFunctionName(),
'bucket_id': func.cmd_args[0].name,
})
class IsHandler(TypeHandler):
"""Handler for glIs____ type and glGetError functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("result_shm_id", 'uint32_t'))
func.AddCmdArg(Argument("result_shm_offset", 'uint32_t'))
if func.GetInfo('result') == None:
func.AddInfo('result', ['uint32_t'])
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
comma = ""
if len(func.GetOriginalArgs()):
comma =", "
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgsBadSharedMemoryId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
invalid_test += """
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skInvalidSharedMemoryId, shared_memory_offset_);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
cmd.Init(%(args)s%(comma)sshared_memory_id_, kInvalidSharedMemoryOffset);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
invalid_test += """
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
args = func.GetOriginalArgs()
for arg in args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
Result* result_dst = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result_dst));
if (!result_dst) {
return error::kOutOfBounds;
}
"""
file.Write(code % {'func_name': func.name})
func.WriteHandlerValidation(file)
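# For unsafe (ES3) functions the single client id is translated to a service id;
# the Get<Type>ServiceId call is assumed to report whether the mapping exists,
# and that becomes the command's result.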
if func.IsUnsafe():
assert func.GetInfo('id_mapping')
assert len(func.GetInfo('id_mapping')) == 1
assert len(args) == 1
id_type = func.GetInfo('id_mapping')[0]
file.Write(" %s service_%s = 0;\n" % (args[0].type, id_type.lower()))
file.Write(" *result_dst = group_->Get%sServiceId(%s, &service_%s);\n" %
(id_type, id_type.lower(), id_type.lower()))
else:
file.Write(" *result_dst = %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if impl_func == None or impl_func == True:
error_value = func.GetInfo("error_value") or "GL_FALSE"
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteTraceEvent(func, file)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(" typedef cmds::%s::Result Result;\n" % func.name)
file.Write(" Result* result = GetResultAs<Result*>();\n")
file.Write(" if (!result) {\n")
file.Write(" return %s;\n" % error_value)
file.Write(" }\n")
file.Write(" *result = 0;\n")
assert len(func.GetOriginalArgs()) == 1
id_arg = func.GetOriginalArgs()[0]
if id_arg.type == 'GLsync':
arg_string = "ToGLuint(%s)" % func.MakeOriginalArgString("")
else:
arg_string = func.MakeOriginalArgString("")
file.Write(
" helper_->%s(%s, GetResultShmId(), GetResultShmOffset());\n" %
(func.name, arg_string))
file.Write(" WaitForCmd();\n")
file.Write(" %s result_value = *result" % func.return_type)
if func.return_type == "GLboolean":
file.Write(" != 0")
file.Write(';\n GPU_CLIENT_LOG("returned " << result_value);\n')
file.Write(" CheckGLError();\n")
file.Write(" return result_value;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == None or client_test == True:
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
ExpectedMemoryInfo result1 =
GetExpectedResultMemory(sizeof(cmds::%(name)s::Result));
expected.cmd.Init(%(cmd_id_value)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, uint32_t(GL_TRUE)))
.RetiresOnSaturation();
GLboolean result = gl_->%(name)s(%(gl_id_value)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_TRUE(result);
}
"""
args = func.GetOriginalArgs()
assert len(args) == 1
file.Write(code % {
'name': func.name,
'cmd_id_value': args[0].GetValidClientSideCmdArg(func),
'gl_id_value': args[0].GetValidClientSideArg(func) })
class STRnHandler(TypeHandler):
"""Handler for GetProgramInfoLog, GetShaderInfoLog, GetShaderSource, and
GetTranslatedShaderSourceANGLE."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
# remove all but the first cmd args.
cmd_args = func.GetCmdArgs()
func.ClearCmdArgs()
func.AddCmdArg(cmd_args[0])
# add on a bucket id.
func.AddCmdArg(Argument('bucket_id', 'uint32_t'))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
code_1 = """%(return_type)s GLES2Implementation::%(func_name)s(%(args)s) {
GPU_CLIENT_SINGLE_THREAD_CHECK();
"""
code_2 = """ GPU_CLIENT_LOG("[" << GetLogPrefix()
<< "] gl%(func_name)s" << "("
<< %(arg0)s << ", "
<< %(arg1)s << ", "
<< static_cast<void*>(%(arg2)s) << ", "
<< static_cast<void*>(%(arg3)s) << ")");
helper_->SetBucketSize(kResultBucketId, 0);
helper_->%(func_name)s(%(id_name)s, kResultBucketId);
std::string str;
GLsizei max_size = 0;
if (GetBucketAsString(kResultBucketId, &str)) {
if (bufsize > 0) {
max_size =
std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size());
memcpy(%(dest_name)s, str.c_str(), max_size);
%(dest_name)s[max_size] = '\\0';
GPU_CLIENT_LOG("------\\n" << %(dest_name)s << "\\n------");
}
}
if (%(length_name)s != NULL) {
*%(length_name)s = max_size;
}
CheckGLError();
}
"""
args = func.GetOriginalArgs()
str_args = {
'return_type': func.return_type,
'func_name': func.original_name,
'args': func.MakeTypedOriginalArgString(""),
'id_name': args[0].name,
'bufsize_name': args[1].name,
'length_name': args[2].name,
'dest_name': args[3].name,
'arg0': args[0].name,
'arg1': args[1].name,
'arg2': args[2].name,
'arg3': args[3].name,
}
file.Write(code_1 % str_args)
func.WriteDestinationInitalizationValidation(file)
file.Write(code_2 % str_args)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
const char* kInfo = "hello";
const uint32_t kBucketId = 123;
SpecializedSetup<cmds::%(name)s, 0>(true);
%(expect_len_code)s
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(DoAll(SetArgumentPointee<2>(strlen(kInfo)),
SetArrayArgument<3>(kInfo, kInfo + strlen(kInfo) + 1)));
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
CommonDecoder::Bucket* bucket = decoder_->GetBucket(kBucketId);
ASSERT_TRUE(bucket != NULL);
EXPECT_EQ(strlen(kInfo) + 1, bucket->size());
EXPECT_EQ(0, memcmp(bucket->GetData(0, bucket->size()), kInfo,
bucket->size()));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
id_name = args[0].GetValidGLArg(func)
get_len_func = func.GetInfo('get_len_func')
get_len_enum = func.GetInfo('get_len_enum')
sub = {
'id_name': id_name,
'get_len_func': get_len_func,
'get_len_enum': get_len_enum,
'gl_args': '%s, strlen(kInfo) + 1, _, _' %
args[0].GetValidGLArg(func),
'args': '%s, kBucketId' % args[0].GetValidArg(func),
'expect_len_code': '',
}
if get_len_func and get_len_func[0:2] == 'gl':
sub['expect_len_code'] = (
" EXPECT_CALL(*gl_, %s(%s, %s, _))\n"
" .WillOnce(SetArgumentPointee<2>(strlen(kInfo) + 1));") % (
get_len_func[2:], id_name, get_len_enum)
self.WriteValidUnitTest(func, file, valid_test, sub, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _, _, _))
.Times(0);
cmds::%(name)s cmd;
cmd.Init(kInvalidClientId, kBucketId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
class NamedType(object):
"""A class that represents a type of an argument in a client function.
A type of an argument that is to be passed through in the command buffer
command. Currently used only for the arguments that are specificly named in
the 'cmd_buffer_functions.txt' file, mostly enums.
"""
def __init__(self, info):
assert not 'is_complete' in info or info['is_complete'] == True
self.info = info
self.valid = info['valid']
if 'invalid' in info:
self.invalid = info['invalid']
else:
self.invalid = []
if 'valid_es3' in info:
self.valid_es3 = info['valid_es3']
else:
self.valid_es3 = []
if 'deprecated_es3' in info:
self.deprecated_es3 = info['deprecated_es3']
else:
self.deprecated_es3 = []
def GetType(self):
return self.info['type']
def GetInvalidValues(self):
return self.invalid
def GetValidValues(self):
return self.valid
def GetValidValuesES3(self):
return self.valid_es3
def GetDeprecatedValuesES3(self):
return self.deprecated_es3
def IsConstant(self):
if not 'is_complete' in self.info:
return False
return len(self.GetValidValues()) == 1
def GetConstantValue(self):
return self.GetValidValues()[0]
class Argument(object):
"""A class that represents a function argument."""
cmd_type_map_ = {
'GLenum': 'uint32_t',
'GLint': 'int32_t',
'GLintptr': 'int32_t',
'GLsizei': 'int32_t',
'GLsizeiptr': 'int32_t',
'GLfloat': 'float',
'GLclampf': 'float',
}
need_validation_ = ['GLsizei*', 'GLboolean*', 'GLenum*', 'GLint*']
def __init__(self, name, type):
self.name = name
self.optional = type.endswith("Optional*")
if self.optional:
type = type[:-9] + "*"
self.type = type
if type in self.cmd_type_map_:
self.cmd_type = self.cmd_type_map_[type]
else:
self.cmd_type = 'uint32_t'
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def IsPointer2D(self):
"""Returns true if argument is a 2D pointer."""
return False
def IsConstant(self):
"""Returns true if the argument has only one valid value."""
return False
def AddCmdArgs(self, args):
"""Adds command arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def AddInitArgs(self, args):
"""Adds init arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
if self.IsPointer():
return 'nullptr'
index = func.GetOriginalArgs().index(self)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%d)" % (index + 1))
return str(index + 1)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
value = self.GetValidArg(func)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideArg(self, func):
"""Returns a valid value for this argument in a GL call.
Using the value will produce a command buffer service invocation.
Returns None if there is no such value."""
value = '123'
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideCmdArg(self, func):
"""Returns a valid value for this argument in a command buffer command.
Calling the GL function with the value returned by
GetValidNonCachedClientSideArg will result in a command buffer command
that contains the value returned by this function. """
return '123'
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 0
def GetInvalidArg(self, index):
"""returns an invalid value and expected parse result by index."""
return ("---ERROR0---", "---ERROR2---", None)
def GetLogArg(self):
"""Get argument appropriate for LOG macro."""
if self.type == 'GLboolean':
return 'GLES2Util::GetStringBool(%s)' % self.name
if self.type == 'GLenum':
return 'GLES2Util::GetStringEnum(%s)' % self.name
return self.name
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
if self.type == 'GLsync':
my_type = 'GLuint'
else:
my_type = self.type
file.Write(" %s %s = static_cast<%s>(c.%s);\n" %
(my_type, self.name, my_type, self.name))
def WriteValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteClientSideValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteDestinationInitalizationValidation(self, file, func):
"""Writes the client side destintion initialization validation."""
pass
def WriteDestinationInitalizationValidatationIfNeeded(self, file, func):
"""Writes the client side destintion initialization validation if needed."""
parts = self.type.split(" ")
if len(parts) > 1:
return
if parts[0] in self.need_validation_:
file.Write(
" GPU_CLIENT_VALIDATE_DESTINATION_%sINITALIZATION(%s, %s);\n" %
("OPTIONAL_" if self.optional else "", self.type[:-1], self.name))
def WriteGetAddress(self, file):
"""Writes the code to get the address this argument refers to."""
pass
def GetImmediateVersion(self):
"""Gets the immediate version of this argument."""
return self
def GetBucketVersion(self):
"""Gets the bucket version of this argument."""
return self
class BoolArgument(Argument):
"""class for GLboolean"""
def __init__(self, name, type):
Argument.__init__(self, name, 'GLboolean')
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
return 'true'
class UniformLocationArgument(Argument):
"""class for uniform locations."""
def __init__(self, name):
Argument.__init__(self, name, "GLint")
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
code = """ %s %s = static_cast<%s>(c.%s);
"""
file.Write(code % (self.type, self.name, self.type, self.name))
class DataSizeArgument(Argument):
"""class for data_size which Bucket commands do not need."""
def __init__(self, name):
Argument.__init__(self, name, "uint32_t")
def GetBucketVersion(self):
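# Bucket commands derive the data size from the bucket itself, so the explicit
# data_size argument is dropped from the bucket variant.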
return None
class SizeArgument(Argument):
"""class for GLsizei and GLsizeiptr."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def GetNumInvalidValues(self, func):
"""overridden from Argument."""
if func.IsImmediate():
return 0
return 1
def GetInvalidArg(self, index):
"""overridden from Argument."""
return ("-1", "kNoError", "GL_INVALID_VALUE")
def WriteValidationCode(self, file, func):
"""overridden from Argument."""
if func.IsUnsafe():
return
code = """ if (%(var_name)s < 0) {
LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return error::kNoError;
}
"""
file.Write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
def WriteClientSideValidationCode(self, file, func):
"""overridden from Argument."""
code = """ if (%(var_name)s < 0) {
SetGLError(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return;
}
"""
file.Write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
class SizeNotNegativeArgument(SizeArgument):
"""class for GLsizeiNotNegative. It's NEVER allowed to be negative"""
def __init__(self, name, type, gl_type):
SizeArgument.__init__(self, name, gl_type)
def GetInvalidArg(self, index):
"""overridden from SizeArgument."""
return ("-1", "kOutOfBounds", "GL_NO_ERROR")
def WriteValidationCode(self, file, func):
"""overridden from SizeArgument."""
pass
class EnumBaseArgument(Argument):
"""Base class for EnumArgument, IntArgument, BitfieldArgument, and
ValidatedBoolArgument."""
def __init__(self, name, gl_type, type, gl_error):
Argument.__init__(self, name, gl_type)
self.local_type = type
self.gl_error = gl_error
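# The declared type looks like '<gl_type><Name>' (e.g. 'GLenumTextureTarget');
# stripping the base GL type leaves the key used to look up valid/invalid
# values in _NAMED_TYPE_INFO.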
name = type[len(gl_type):]
self.type_name = name
self.named_type = NamedType(_NAMED_TYPE_INFO[name])
def IsConstant(self):
return self.named_type.IsConstant()
def GetConstantValue(self):
return self.named_type.GetConstantValue()
def WriteValidationCode(self, file, func):
if func.IsUnsafe():
return
if self.named_type.IsConstant():
return
file.Write(" if (!validators_->%s.IsValid(%s)) {\n" %
(ToUnderscore(self.type_name), self.name))
if self.gl_error == "GL_INVALID_ENUM":
file.Write(
" LOCAL_SET_GL_ERROR_INVALID_ENUM(\"gl%s\", %s, \"%s\");\n" %
(func.original_name, self.name, self.name))
else:
file.Write(
" LOCAL_SET_GL_ERROR(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
def WriteClientSideValidationCode(self, file, func):
if not self.named_type.IsConstant():
return
file.Write(" if (%s != %s) {" % (self.name,
self.GetConstantValue()))
file.Write(
" SetGLError(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
if func.return_type == "void":
file.Write(" return;\n")
else:
file.Write(" return %s;\n" % func.GetErrorReturnString())
file.Write(" }\n")
def GetValidArg(self, func):
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
num_valid = len(valid)
return valid[0]
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
num_valid = len(valid)
return valid[0]
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return len(self.named_type.GetInvalidValues())
def GetInvalidArg(self, index):
"""returns an invalid value by index."""
invalid = self.named_type.GetInvalidValues()
if invalid:
num_invalid = len(invalid)
if index >= num_invalid:
index = num_invalid - 1
return (invalid[index], "kNoError", self.gl_error)
return ("---ERROR1---", "kNoError", self.gl_error)
class EnumArgument(EnumBaseArgument):
"""A class that represents a GLenum argument"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLenum", type, "GL_INVALID_ENUM")
def GetLogArg(self):
"""Overridden from Argument."""
return ("GLES2Util::GetString%s(%s)" %
(self.type_name, self.name))
class IntArgument(EnumBaseArgument):
"""A class for a GLint argument that can only accept specific values.
For example glTexImage2D takes a GLint for its internalformat
argument instead of a GLenum.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLint", type, "GL_INVALID_VALUE")
class ValidatedBoolArgument(EnumBaseArgument):
"""A class for a GLboolean argument that can only accept specific values.
For example glUniformMatrix takes a GLboolean for its transpose argument,
but it must be false.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLboolean", type, "GL_INVALID_VALUE")
def GetLogArg(self):
"""Overridden from Argument."""
return 'GLES2Util::GetStringBool(%s)' % self.name
class BitFieldArgument(EnumBaseArgument):
"""A class for a GLbitfield argument that can only accept specific values.
For example glFenceSync takes a GLbitfield for its flags argument, but it
must be 0.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLbitfield", type,
"GL_INVALID_VALUE")
class ImmediatePointerArgument(Argument):
"""A class that represents an immediate argument to a function.
An immediate argument is one where the data follows the command.
"""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def IsPointer(self):
return True
def GetPointedType(self):
match = re.match('(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetImmediateDataAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(" c, data_size, immediate_data_size);\n")
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
if self.optional:
return
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class PointerArgument(Argument):
"""A class that represents a pointer argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return self.type.count('*') == 2
def GetPointedType(self):
match = re.match('(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def GetValidArg(self, func):
"""Overridden from Argument."""
return "shared_memory_id_, shared_memory_offset_"
def GetValidGLArg(self, func):
"""Overridden from Argument."""
return "reinterpret_cast<%s>(shared_memory_address_)" % self.type
def GetNumInvalidValues(self, func):
"""Overridden from Argument."""
return 2
def GetInvalidArg(self, index):
"""Overridden from Argument."""
if index == 0:
return ("kInvalidSharedMemoryId, 0", "kOutOfBounds", None)
else:
return ("shared_memory_id_, kInvalidSharedMemoryOffset",
"kOutOfBounds", None)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
def AddCmdArgs(self, args):
"""Overridden from Argument."""
args.append(Argument("%s_shm_id" % self.name, 'uint32_t'))
args.append(Argument("%s_shm_offset" % self.name, 'uint32_t'))
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" c.%s_shm_id, c.%s_shm_offset, data_size);\n" %
(self.name, self.name))
def WriteGetAddress(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" %s_shm_id, %s_shm_offset, %s_size);\n" %
(self.name, self.name, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
if self.optional:
return
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return ImmediatePointerArgument(self.name, self.type)
def GetBucketVersion(self):
"""Overridden from Argument."""
if self.type.find('char') >= 0:
if self.IsPointer2D():
return InputStringArrayBucketArgument(self.name, self.type)
return InputStringBucketArgument(self.name, self.type)
return BucketPointerArgument(self.name, self.type)
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
class BucketPointerArgument(PointerArgument):
"""A class that represents an bucket argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = bucket->GetData(0, data_size);\n" %
(self.type, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
pass
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class InputStringBucketArgument(Argument):
"""A string input argument where the string is passed in a bucket."""
def __init__(self, name, type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return False
class InputStringArrayBucketArgument(Argument):
"""A string array input argument where the strings are passed in a bucket."""
def __init__(self, name, type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
self._original_name = name
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """
Bucket* bucket = GetBucket(c.%(name)s);
if (!bucket) {
return error::kInvalidArguments;
}
GLsizei count = 0;
std::vector<char*> strs;
std::vector<GLint> len;
if (!bucket->GetAsStrings(&count, &strs, &len)) {
return error::kInvalidArguments;
}
const char** %(original_name)s =
strs.size() > 0 ? const_cast<const char**>(&strs[0]) : NULL;
const GLint* length =
len.size() > 0 ? const_cast<const GLint*>(&len[0]) : NULL;
(void)length;
"""
file.Write(code % {
'name': self.name,
'original_name': self._original_name,
})
def GetValidArg(self, func):
return "kNameBucketId"
def GetValidGLArg(self, func):
return "_"
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return True
class ResourceIdArgument(Argument):
"""A class that represents a resource id argument to a function."""
def __init__(self, name, type):
match = re.match("(GLid\w+)", type)
self.resource_type = match.group(1)[4:]
if self.resource_type == "Sync":
type = type.replace(match.group(1), "GLsync")
else:
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
if self.type == "GLsync":
my_type = "GLuint"
else:
my_type = self.type
file.Write(" %s %s = c.%s;\n" % (my_type, self.name, self.name))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
if self.resource_type == "Sync":
return "reinterpret_cast<GLsync>(kService%sId)" % self.resource_type
return "kService%sId" % self.resource_type
class ResourceIdBindArgument(Argument):
"""Represents a resource id argument to a bind function."""
def __init__(self, name, type):
match = re.match("(GLidBind\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """ %(type)s %(name)s = c.%(name)s;
"""
file.Write(code % {'type': self.type, 'name': self.name})
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
class ResourceIdZeroArgument(Argument):
"""Represents a resource id argument to a function that can be zero."""
def __init__(self, name, type):
match = re.match("(GLidZero\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 1
def GetInvalidArg(self, index):
"""returns an invalid value by index."""
return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
class Function(object):
"""A class that represents a function."""
type_handlers = {
'': TypeHandler(),
'Bind': BindHandler(),
'Create': CreateHandler(),
'Custom': CustomHandler(),
'Data': DataHandler(),
'Delete': DeleteHandler(),
'DELn': DELnHandler(),
'GENn': GENnHandler(),
'GETn': GETnHandler(),
'GLchar': GLcharHandler(),
'GLcharN': GLcharNHandler(),
'HandWritten': HandWrittenHandler(),
'Is': IsHandler(),
'Manual': ManualHandler(),
'PUT': PUTHandler(),
'PUTn': PUTnHandler(),
'PUTSTR': PUTSTRHandler(),
'PUTXn': PUTXnHandler(),
'StateSet': StateSetHandler(),
'StateSetRGBAlpha': StateSetRGBAlphaHandler(),
'StateSetFrontBack': StateSetFrontBackHandler(),
'StateSetFrontBackSeparate': StateSetFrontBackSeparateHandler(),
'StateSetNamedParameter': StateSetNamedParameter(),
'STRn': STRnHandler(),
'Todo': TodoHandler(),
}
def __init__(self, name, info):
self.name = name
self.original_name = info['original_name']
self.original_args = self.ParseArgs(info['original_args'])
if 'cmd_args' in info:
self.args_for_cmds = self.ParseArgs(info['cmd_args'])
else:
self.args_for_cmds = self.original_args[:]
self.return_type = info['return_type']
if self.return_type != 'void':
self.return_arg = CreateArg(info['return_type'] + " result")
else:
self.return_arg = None
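# Functions with pointer arguments need a data transfer variant
# (shm/immediate/bucket) and cannot be auto-generated; also remember the last
# original pointer argument for later use by the type handlers.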
self.num_pointer_args = sum(
[1 for arg in self.args_for_cmds if arg.IsPointer()])
if self.num_pointer_args > 0:
for arg in reversed(self.original_args):
if arg.IsPointer():
self.last_original_pointer_arg = arg
break
else:
self.last_original_pointer_arg = None
self.info = info
self.type_handler = self.type_handlers[info['type']]
self.can_auto_generate = (self.num_pointer_args == 0 and
info['return_type'] == "void")
self.InitFunction()
def ParseArgs(self, arg_string):
"""Parses a function arg string."""
args = []
parts = arg_string.split(',')
for arg_string in parts:
arg = CreateArg(arg_string)
if arg:
args.append(arg)
return args
def IsType(self, type_name):
"""Returns true if function is a certain type."""
return self.info['type'] == type_name
def InitFunction(self):
"""Creates command args and calls the init function for the type handler.
Creates argument lists for command buffer commands, e.g. self.cmd_args and
self.init_args.
Calls the type function initialization.
Override to create different kind of command buffer command argument lists.
"""
self.cmd_args = []
for arg in self.args_for_cmds:
arg.AddCmdArgs(self.cmd_args)
self.init_args = []
for arg in self.args_for_cmds:
arg.AddInitArgs(self.init_args)
if self.return_arg:
self.init_args.append(self.return_arg)
self.type_handler.InitFunction(self)
def IsImmediate(self):
"""Returns whether the function is immediate data function or not."""
return False
def IsUnsafe(self):
"""Returns whether the function has service side validation or not."""
return self.GetInfo('unsafe', False)
def GetInfo(self, name, default = None):
"""Returns a value from the function info for this function."""
if name in self.info:
return self.info[name]
return default
def GetValidArg(self, arg):
"""Gets a valid argument value for the parameter arg from the function info
if one exists."""
try:
index = self.GetOriginalArgs().index(arg)
except ValueError:
return None
valid_args = self.GetInfo('valid_args')
if valid_args and str(index) in valid_args:
return valid_args[str(index)]
return None
def AddInfo(self, name, value):
"""Adds an info."""
self.info[name] = value
def IsExtension(self):
return self.GetInfo('extension') or self.GetInfo('extension_flag')
def IsCoreGLFunction(self):
return (not self.IsExtension() and
not self.GetInfo('pepper_interface') and
not self.IsUnsafe())
def InPepperInterface(self, interface):
ext = self.GetInfo('pepper_interface')
if not interface.GetName():
return self.IsCoreGLFunction()
return ext == interface.GetName()
def InAnyPepperExtension(self):
return self.IsCoreGLFunction() or self.GetInfo('pepper_interface')
def GetErrorReturnString(self):
if self.GetInfo("error_return"):
return self.GetInfo("error_return")
elif self.return_type == "GLboolean":
return "GL_FALSE"
elif "*" in self.return_type:
return "NULL"
return "0"
def GetGLFunctionName(self):
"""Gets the function to call to execute GL for this command."""
if self.GetInfo('decoder_func'):
return self.GetInfo('decoder_func')
return "gl%s" % self.original_name
def GetGLTestFunctionName(self):
gl_func_name = self.GetInfo('gl_test_func')
if gl_func_name == None:
gl_func_name = self.GetGLFunctionName()
if gl_func_name.startswith("gl"):
gl_func_name = gl_func_name[2:]
else:
gl_func_name = self.original_name
return gl_func_name
def GetDataTransferMethods(self):
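"""Gets the data transfer methods ('immediate', 'bucket', 'shm'); defaults to
'immediate' for single-pointer functions and 'shm' otherwise."""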
return self.GetInfo('data_transfer_methods',
['immediate' if self.num_pointer_args == 1 else 'shm'])
def AddCmdArg(self, arg):
"""Adds a cmd argument to this function."""
self.cmd_args.append(arg)
def GetCmdArgs(self):
"""Gets the command args for this function."""
return self.cmd_args
def ClearCmdArgs(self):
"""Clears the command args for this function."""
self.cmd_args = []
def GetCmdConstants(self):
"""Gets the constants for this function."""
return [arg for arg in self.args_for_cmds if arg.IsConstant()]
def GetInitArgs(self):
"""Gets the init args for this function."""
return self.init_args
def GetOriginalArgs(self):
"""Gets the original arguments to this function."""
return self.original_args
def GetLastOriginalArg(self):
"""Gets the last original argument to this function."""
return self.original_args[len(self.original_args) - 1]
def GetLastOriginalPointerArg(self):
return self.last_original_pointer_arg
def GetResourceIdArg(self):
for arg in self.original_args:
if hasattr(arg, 'resource_type'):
return arg
return None
def _MaybePrependComma(self, arg_string, add_comma):
"""Adds a comma if arg_string is not empty and add_comma is true."""
comma = ""
if add_comma and len(arg_string):
comma = ", "
return "%s%s" % (comma, arg_string)
def MakeTypedOriginalArgString(self, prefix, add_comma = False):
"""Gets a list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets the list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedHelperArgString(self, prefix, add_comma = False):
"""Gets a list of typed GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = ", ".join(
["%s %s%s" % (
arg.type,
prefix,
arg.name,
) for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
def MakeHelperArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets a list of GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name)
for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedPepperArgString(self, prefix):
"""Gets a list of arguments as they need to be for Pepper."""
if self.GetInfo("pepper_args"):
return self.GetInfo("pepper_args")
else:
return self.MakeTypedOriginalArgString(prefix, False)
def MapCTypeToPepperIdlType(self, ctype, is_for_return_type=False):
"""Converts a C type name to the corresponding Pepper IDL type."""
idltype = {
'char*': '[out] str_t',
'const GLchar* const*': '[out] cstr_t',
'const char*': 'cstr_t',
'const void*': 'mem_t',
'void*': '[out] mem_t',
'void**': '[out] mem_ptr_t',
}.get(ctype, ctype)
# We use "GLxxx_ptr_t" for "GLxxx*".
matched = re.match(r'(const )?(GL\w+)\*$', ctype)
if matched:
idltype = matched.group(2) + '_ptr_t'
if not matched.group(1):
idltype = '[out] ' + idltype
# If an in/out specifier is not specified yet, prepend [in].
if idltype[0] != '[':
idltype = '[in] ' + idltype
# Strip the in/out specifier for a return type.
if is_for_return_type:
idltype = re.sub(r'\[\w+\] ', '', idltype)
return idltype
def MakeTypedPepperIdlArgStrings(self):
"""Gets a list of arguments as they need to be for Pepper IDL."""
args = self.GetOriginalArgs()
return ["%s %s" % (self.MapCTypeToPepperIdlType(arg.type), arg.name)
for arg in args]
def GetPepperName(self):
if self.GetInfo("pepper_name"):
return self.GetInfo("pepper_name")
return self.name
def MakeTypedCmdArgString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeCmdArgString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedInitString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeInitString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeLogArgString(self):
"""Makes a string of the arguments for the LOG macros"""
args = self.GetOriginalArgs()
return ' << ", " << '.join([arg.GetLogArg() for arg in args])
def WriteCommandDescription(self, file):
"""Writes a description of the command."""
file.Write("//! Command that corresponds to gl%s.\n" % self.original_name)
def WriteHandlerValidation(self, file):
"""Writes validation code for the function."""
for arg in self.GetOriginalArgs():
arg.WriteValidationCode(file, self)
self.WriteValidationCode(file)
def WriteHandlerImplementation(self, file):
"""Writes the handler implementation for this command."""
self.type_handler.WriteHandlerImplementation(self, file)
def WriteValidationCode(self, file):
"""Writes the validation code for a command."""
pass
def WriteCmdFlag(self, file):
"""Writes the cmd cmd_flags constant."""
flags = []
# By default trace only at the highest level 3.
trace_level = int(self.GetInfo('trace_level', default = 3))
if trace_level not in xrange(0, 4):
raise KeyError("Unhandled trace_level: %d" % trace_level)
flags.append('CMD_FLAG_SET_TRACE_LEVEL(%d)' % trace_level)
if len(flags) > 0:
cmd_flags = ' | '.join(flags)
else:
cmd_flags = 0
file.Write(" static const uint8 cmd_flags = %s;\n" % cmd_flags)
def WriteCmdArgFlag(self, file):
"""Writes the cmd kArgFlags constant."""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kFixed;\n")
def WriteCmdComputeSize(self, file):
"""Writes the ComputeSize function for the command."""
file.Write(" static uint32_t ComputeSize() {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(ValueType)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdSetHeader(self, file):
"""Writes the cmd's SetHeader function."""
file.Write(" void SetHeader() {\n")
file.Write(" header.SetCmd<ValueType>();\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdInit(self, file):
"""Writes the cmd's Init function."""
file.Write(" void Init(%s) {\n" % self.MakeTypedCmdArgString("_"))
file.Write(" SetHeader();\n")
args = self.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteCmdSet(self, file):
"""Writes the cmd's Set function."""
copy_args = self.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
self.MakeTypedCmdArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextCmdAddress<ValueType>(cmd);\n")
file.Write(" }\n")
file.Write("\n")
def WriteStruct(self, file):
self.type_handler.WriteStruct(self, file)
def WriteDocs(self, file):
self.type_handler.WriteDocs(self, file)
def WriteCmdHelper(self, file):
"""Writes the cmd's helper."""
self.type_handler.WriteCmdHelper(self, file)
def WriteServiceImplementation(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceUnitTest(self, file, *extras)
def WriteGLES2CLibImplementation(self, file):
"""Writes the GLES2 C Lib Implemention."""
self.type_handler.WriteGLES2CLibImplementation(self, file)
def WriteGLES2InterfaceHeader(self, file):
"""Writes the GLES2 Interface declaration."""
self.type_handler.WriteGLES2InterfaceHeader(self, file)
def WriteMojoGLES2ImplHeader(self, file):
"""Writes the Mojo GLES2 implementation header declaration."""
self.type_handler.WriteMojoGLES2ImplHeader(self, file)
def WriteMojoGLES2Impl(self, file):
"""Writes the Mojo GLES2 implementation declaration."""
self.type_handler.WriteMojoGLES2Impl(self, file)
def WriteGLES2InterfaceStub(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStub(self, file)
def WriteGLES2InterfaceStubImpl(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStubImpl(self, file)
def WriteGLES2ImplementationHeader(self, file):
"""Writes the GLES2 Implemention declaration."""
self.type_handler.WriteGLES2ImplementationHeader(self, file)
def WriteGLES2Implementation(self, file):
"""Writes the GLES2 Implemention definition."""
self.type_handler.WriteGLES2Implementation(self, file)
def WriteGLES2TraceImplementationHeader(self, file):
"""Writes the GLES2 Trace Implemention declaration."""
self.type_handler.WriteGLES2TraceImplementationHeader(self, file)
def WriteGLES2TraceImplementation(self, file):
"""Writes the GLES2 Trace Implemention definition."""
self.type_handler.WriteGLES2TraceImplementation(self, file)
def WriteGLES2Header(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2Header(self, file)
def WriteGLES2ImplementationUnitTest(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2ImplementationUnitTest(self, file)
def WriteDestinationInitalizationValidation(self, file):
"""Writes the client side destintion initialization validation."""
self.type_handler.WriteDestinationInitalizationValidation(self, file)
def WriteFormatTest(self, file):
"""Writes the cmd's format test."""
self.type_handler.WriteFormatTest(self, file)
class PepperInterface(object):
"""A class that represents a function."""
def __init__(self, info):
self.name = info["name"]
self.dev = info["dev"]
def GetName(self):
return self.name
def GetInterfaceName(self):
upperint = ""
dev = ""
if self.name:
upperint = "_" + self.name.upper()
if self.dev:
dev = "_DEV"
return "PPB_OPENGLES2%s%s_INTERFACE" % (upperint, dev)
def GetInterfaceString(self):
dev = ""
if self.dev:
dev = "(Dev)"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
def GetStructName(self):
dev = ""
if self.dev:
dev = "_Dev"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
class ImmediateFunction(Function):
"""A class that represnets an immediate function command."""
def __init__(self, func):
Function.__init__(
self,
"%sImmediate" % func.name,
func.info)
def InitFunction(self):
# Override args in original_args and args_for_cmds with immediate versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def IsImmediate(self):
return True
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Immediate version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteImmediateServiceUnitTest(self, file, *extras)
def WriteValidationCode(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateValidationCode(self, file)
def WriteCmdArgFlag(self, file):
"""Overridden from Function"""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kAtLeastN;\n")
def WriteCmdComputeSize(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdComputeSize(self, file)
def WriteCmdSetHeader(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSetHeader(self, file)
def WriteCmdInit(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdInit(self, file)
def WriteCmdSet(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSet(self, file)
def WriteCmdHelper(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdHelper(self, file)
def WriteFormatTest(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateFormatTest(self, file)
class BucketFunction(Function):
"""A class that represnets a bucket version of a function command."""
def __init__(self, func):
Function.__init__(
self,
"%sBucket" % func.name,
func.info)
def InitFunction(self):
# Override args in original_args and args_for_cmds with bucket versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetBucketVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetBucketVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Bucket version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceUnitTest(self, file, *extras)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Overridden from Function"""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name[0:-10] if arg.name.endswith("_bucket_id")
else arg.name) for arg in args])
return super(BucketFunction, self)._MaybePrependComma(arg_string, add_comma)
def CreateArg(arg_string):
"""Creates an Argument."""
arg_parts = arg_string.split()
if len(arg_parts) == 1 and arg_parts[0] == 'void':
return None
# Is this a pointer argument?
elif arg_string.find('*') >= 0:
return PointerArgument(
arg_parts[-1],
" ".join(arg_parts[0:-1]))
# Is this a resource argument? Must come after pointer check.
elif arg_parts[0].startswith('GLidBind'):
return ResourceIdBindArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLidZero'):
return ResourceIdZeroArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLid'):
return ResourceIdArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLenum') and len(arg_parts[0]) > 6:
return EnumArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLbitfield') and len(arg_parts[0]) > 10:
return BitFieldArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean') and len(arg_parts[0]) > 9:
return ValidatedBoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean'):
return BoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLintUniformLocation'):
return UniformLocationArgument(arg_parts[-1])
elif (arg_parts[0].startswith('GLint') and len(arg_parts[0]) > 5 and
not arg_parts[0].startswith('GLintptr')):
return IntArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif (arg_parts[0].startswith('GLsizeiNotNegative') or
arg_parts[0].startswith('GLintptrNotNegative')):
return SizeNotNegativeArgument(arg_parts[-1],
" ".join(arg_parts[0:-1]),
arg_parts[0][0:-11])
elif arg_parts[0].startswith('GLsize'):
return SizeArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
else:
return Argument(arg_parts[-1], " ".join(arg_parts[0:-1]))
class GLGenerator(object):
"""A class to generate GL command buffers."""
_function_re = re.compile(r'GL_APICALL(.*?)GL_APIENTRY (.*?) \((.*?)\);')
def __init__(self, verbose):
self.original_functions = []
self.functions = []
self.verbose = verbose
self.errors = 0
self.pepper_interfaces = []
self.interface_info = {}
self.generated_cpp_filenames = []
for interface in _PEPPER_INTERFACES:
interface = PepperInterface(interface)
self.pepper_interfaces.append(interface)
self.interface_info[interface.GetName()] = interface
def AddFunction(self, func):
"""Adds a function."""
self.functions.append(func)
def GetFunctionInfo(self, name):
"""Gets a type info for the given function name."""
if name in _FUNCTION_INFO:
func_info = _FUNCTION_INFO[name].copy()
else:
func_info = {}
if not 'type' in func_info:
func_info['type'] = ''
return func_info
def Log(self, msg):
"""Prints something if verbose is true."""
if self.verbose:
print msg
def Error(self, msg):
"""Prints an error."""
print "Error: %s" % msg
self.errors += 1
def WriteLicense(self, file):
"""Writes the license."""
file.Write(_LICENSE)
def WriteNamespaceOpen(self, file):
"""Writes the code for the namespace."""
file.Write("namespace gpu {\n")
file.Write("namespace gles2 {\n")
file.Write("\n")
def WriteNamespaceClose(self, file):
"""Writes the code to close the namespace."""
file.Write("} // namespace gles2\n")
file.Write("} // namespace gpu\n")
file.Write("\n")
def ParseGLH(self, filename):
"""Parses the cmd_buffer_functions.txt file and extracts the functions"""
f = open(filename, "r")
functions = f.read()
f.close()
for line in functions.splitlines():
match = self._function_re.match(line)
if match:
func_name = match.group(2)[2:]
func_info = self.GetFunctionInfo(func_name)
if func_info['type'] == 'Noop':
continue
parsed_func_info = {
'original_name': func_name,
'original_args': match.group(3),
'return_type': match.group(1).strip(),
}
for k in parsed_func_info.keys():
if not k in func_info:
func_info[k] = parsed_func_info[k]
f = Function(func_name, func_info)
self.original_functions.append(f)
#for arg in f.GetOriginalArgs():
# if not isinstance(arg, EnumArgument) and arg.type == 'GLenum':
# self.Log("%s uses bare GLenum %s." % (func_name, arg.name))
gen_cmd = f.GetInfo('gen_cmd')
if gen_cmd == True or gen_cmd == None:
if f.type_handler.NeedsDataTransferFunction(f):
methods = f.GetDataTransferMethods()
if 'immediate' in methods:
self.AddFunction(ImmediateFunction(f))
if 'bucket' in methods:
self.AddFunction(BucketFunction(f))
if 'shm' in methods:
self.AddFunction(f)
else:
self.AddFunction(f)
self.Log("Auto Generated Functions : %d" %
len([f for f in self.functions if f.can_auto_generate or
(not f.IsType('') and not f.IsType('Custom') and
not f.IsType('Todo'))]))
funcs = [f for f in self.functions if not f.can_auto_generate and
(f.IsType('') or f.IsType('Custom') or f.IsType('Todo'))]
self.Log("Non Auto Generated Functions: %d" % len(funcs))
for f in funcs:
self.Log(" %-10s %-20s gl%s" % (f.info['type'], f.return_type, f.name))
def WriteCommandIds(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
file.Write("#define GLES2_COMMAND_LIST(OP) \\\n")
id = 256
for func in self.functions:
file.Write(" %-60s /* %d */ \\\n" %
("OP(%s)" % func.name, id))
id += 1
file.Write("\n")
file.Write("enum CommandId {\n")
file.Write(" kStartPoint = cmd::kLastCommonId, "
"// All GLES2 commands start after this.\n")
file.Write("#define GLES2_CMD_OP(name) k ## name,\n")
file.Write(" GLES2_COMMAND_LIST(GLES2_CMD_OP)\n")
file.Write("#undef GLES2_CMD_OP\n")
file.Write(" kNumCommands\n")
file.Write("};\n")
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteFormat(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
# Forward declaration of a few enums used in constant argument
# to avoid including GL header files.
enum_defines = {
'GL_SYNC_GPU_COMMANDS_COMPLETE': '0x9117',
'GL_SYNC_FLUSH_COMMANDS_BIT': '0x00000001',
}
file.Write('\n')
for enum in enum_defines:
file.Write("#define %s %s\n" % (enum, enum_defines[enum]))
file.Write('\n')
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteStruct(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteDocs(self, filename):
"""Writes the command buffer doc version of the commands"""
file = CWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteDocs(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteFormatTest(self, filename):
"""Writes the command buffer format test."""
file = CHeaderWriter(
filename,
"// This file contains unit tests for gles2 commmands\n"
"// It is included by gles2_cmd_format_test.cc\n"
"\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteFormatTest(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCmdHelperHeader(self, filename):
"""Writes the gles2 command helper."""
file = CHeaderWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteCmdHelper(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceContextStateHeader(self, filename):
"""Writes the service context state header."""
file = CHeaderWriter(
filename,
"// It is included by context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write(" bool cached_%s;\n" % capability['name'])
file.Write("};\n\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
if isinstance(item['default'], list):
file.Write("%s %s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
file.Write("%s %s;\n" % (item['type'], item['name']))
if item.get('cached', False):
if isinstance(item['default'], list):
file.Write("%s cached_%s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
file.Write("%s cached_%s;\n" % (item['type'], item['name']))
file.Write("\n")
file.Write("""
inline void SetDeviceCapabilityState(GLenum cap, bool enable) {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write("""\
case GL_%s:
""" % capability['name'].upper())
file.Write("""\
if (enable_flags.cached_%(name)s == enable &&
!ignore_cached_state)
return;
enable_flags.cached_%(name)s = enable;
break;
""" % capability)
file.Write("""\
default:
NOTREACHED();
return;
}
if (enable)
glEnable(cap);
else
glDisable(cap);
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteClientContextStateHeader(self, filename):
"""Writes the client context state header."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write("};\n\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteContextStateGetters(self, file, class_name):
"""Writes the state getters."""
for gl_type in ["GLint", "GLfloat"]:
file.Write("""
bool %s::GetStateAs%s(
GLenum pname, %s* params, GLsizei* num_written) const {
switch (pname) {
""" % (class_name, gl_type, gl_type))
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'enum' in state:
file.Write(" case %s:\n" % state['enum'])
file.Write(" *num_written = %d;\n" % len(state['states']))
file.Write(" if (params) {\n")
for ndx,item in enumerate(state['states']):
file.Write(" params[%d] = static_cast<%s>(%s);\n" %
(ndx, gl_type, item['name']))
file.Write(" }\n")
file.Write(" return true;\n")
else:
for item in state['states']:
file.Write(" case %s:\n" % item['enum'])
if isinstance(item['default'], list):
item_len = len(item['default'])
file.Write(" *num_written = %d;\n" % item_len)
file.Write(" if (params) {\n")
if item['type'] == gl_type:
file.Write(" memcpy(params, %s, sizeof(%s) * %d);\n" %
(item['name'], item['type'], item_len))
else:
file.Write(" for (size_t i = 0; i < %s; ++i) {\n" %
item_len)
file.Write(" params[i] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
"%s[i]" % item['name'])))
file.Write(" }\n");
else:
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(" params[0] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
item['name'])))
file.Write(" }\n")
file.Write(" return true;\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(
" params[0] = static_cast<%s>(enable_flags.%s);\n" %
(gl_type, capability['name']))
file.Write(" }\n")
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
def WriteServiceContextStateImpl(self, filename):
"""Writes the context state service implementation."""
file = CHeaderWriter(
filename,
"// It is included by context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
code.append("cached_%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write("ContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("void ContextState::Initialize() {\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
file.Write(" %s[%d] = %s;\n" % (item['name'], ndx, value))
else:
file.Write(" %s = %s;\n" % (item['name'], item['default']))
if item.get('cached', False):
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
file.Write(" cached_%s[%d] = %s;\n" % (item['name'], ndx, value))
else:
file.Write(" cached_%s = %s;\n" % (item['name'], item['default']))
file.Write("}\n")
file.Write("""
void ContextState::InitCapabilities(const ContextState* prev_state) const {
""")
def WriteCapabilities(test_prev, es3_caps):
for capability in _CAPABILITY_FLAGS:
capability_name = capability['name']
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3 and not es3_caps or not capability_es3 and es3_caps:
continue
if test_prev:
file.Write(""" if (prev_state->enable_flags.cached_%s !=
enable_flags.cached_%s) {\n""" %
(capability_name, capability_name))
file.Write(" EnableDisable(GL_%s, enable_flags.cached_%s);\n" %
(capability_name.upper(), capability_name))
if test_prev:
file.Write(" }")
file.Write(" if (prev_state) {")
WriteCapabilities(True, False)
file.Write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(True, True)
file.Write(" }\n")
file.Write(" } else {")
WriteCapabilities(False, False)
file.Write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(False, True)
file.Write(" }\n")
file.Write(" }")
file.Write("""}
void ContextState::InitState(const ContextState *prev_state) const {
""")
def WriteStates(test_prev):
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
if test_prev:
file.Write(" if (")
args = []
for place, item in enumerate(group):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
file.Write(' ||\n')
file.Write("(%s != prev_state->%s)" % (item_name, item_name))
if test_prev:
file.Write(")\n")
file.Write(
" gl%s(%s, %s);\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
elif state['type'] == 'NamedParameter':
for item in state['states']:
item_name = CachedStateName(item)
if 'extension_flag' in item:
file.Write(" if (feature_info_->feature_flags().%s) {\n " %
item['extension_flag'])
if test_prev:
if isinstance(item['default'], list):
file.Write(" if (memcmp(prev_state->%s, %s, "
"sizeof(%s) * %d)) {\n" %
(item_name, item_name, item['type'],
len(item['default'])))
else:
file.Write(" if (prev_state->%s != %s) {\n " %
(item_name, item_name))
if 'gl_version_flag' in item:
item_name = item['gl_version_flag']
inverted = ''
if item_name[0] == '!':
inverted = '!'
item_name = item_name[1:]
file.Write(" if (%sfeature_info_->gl_version_info().%s) {\n" %
(inverted, item_name))
file.Write(" gl%s(%s, %s);\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
item['name']))
if 'gl_version_flag' in item:
file.Write(" }\n")
if test_prev:
if 'extension_flag' in item:
file.Write(" ")
file.Write(" }")
if 'extension_flag' in item:
file.Write(" }")
else:
if 'extension_flag' in state:
file.Write(" if (feature_info_->feature_flags().%s)\n " %
state['extension_flag'])
if test_prev:
file.Write(" if (")
args = []
for place, item in enumerate(state['states']):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
file.Write(' ||\n')
file.Write("(%s != prev_state->%s)" %
(item_name, item_name))
if test_prev:
file.Write(" )\n")
file.Write(" gl%s(%s);\n" % (state['func'], ", ".join(args)))
file.Write(" if (prev_state) {")
WriteStates(True)
file.Write(" } else {")
WriteStates(False)
file.Write(" }")
file.Write("}\n")
file.Write("""bool ContextState::GetEnabled(GLenum cap) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" return enable_flags.%s;\n" % capability['name'])
file.Write(""" default:
NOTREACHED();
return false;
}
}
""")
self.WriteContextStateGetters(file, "ContextState")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteClientContextStateImpl(self, filename):
"""Writes the context state client side implementation."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write(
"ClientContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("""
bool ClientContextState::SetCapabilityState(
GLenum cap, bool enabled, bool* changed) {
*changed = false;
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(""" if (enable_flags.%(name)s != enabled) {
*changed = true;
enable_flags.%(name)s = enabled;
}
return true;
""" % capability)
file.Write(""" default:
return false;
}
}
""")
file.Write("""bool ClientContextState::GetEnabled(
GLenum cap, bool* enabled) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *enabled = enable_flags.%s;\n" % capability['name'])
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceImplementation(self, filename):
"""Writes the service decorder implementation."""
file = CHeaderWriter(
filename,
"// It is included by gles2_cmd_decoder.cc\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteServiceImplementation(file)
file.Write("""
bool GLES2DecoderImpl::SetCapabilityState(GLenum cap, bool enabled) {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
if 'state_flag' in capability:
file.Write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
%(state_flag)s = true;
}
return false;
""" % capability)
else:
file.Write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
state_.enable_flags.cached_%(name)s = enabled;
return true;
}
return false;
""" % capability)
file.Write(""" default:
NOTREACHED();
return false;
}
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUnitTests(self, filename):
"""Writes the service decorder unit tests."""
num_tests = len(self.functions)
FUNCTIONS_PER_FILE = 98 # hard code this so it doesn't change.
count = 0
for test_num in range(0, num_tests, FUNCTIONS_PER_FILE):
count += 1
name = filename % count
file = CHeaderWriter(
name,
"// It is included by gles2_cmd_decoder_unittest_%d.cc\n" % count)
test_name = 'GLES2DecoderTest%d' % count
end = test_num + FUNCTIONS_PER_FILE
if end > num_tests:
end = num_tests
for idx in range(test_num, end):
func = self.functions[idx]
# Do any filtering of the functions here, so that the functions
# will not move between the numbered files if filtering properties
# are changed.
if func.GetInfo('extension_flag'):
continue
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
if func.GetInfo('unit_test') == False:
file.Write("// TODO(gman): %s\n" % func.name)
else:
func.WriteServiceUnitTest(file, {
'test_name': test_name
})
file.Close()
self.generated_cpp_filenames.append(file.filename)
file = CHeaderWriter(
filename % 0,
"// It is included by gles2_cmd_decoder_unittest_base.cc\n")
file.Write(
"""void GLES2DecoderTestBase::SetupInitCapabilitiesExpectations(
bool es3_capable) {""")
for capability in _CAPABILITY_FLAGS:
capability_es3 = 'es3' in capability and capability['es3'] == True
if not capability_es3:
file.Write(" ExpectEnableDisable(GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
file.Write(" if (es3_capable) {")
for capability in _CAPABILITY_FLAGS:
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3:
file.Write(" ExpectEnableDisable(GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
file.Write(""" }
}
void GLES2DecoderTestBase::SetupInitStateExpectations() {
""")
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
args = []
for item in group:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
elif state['type'] == 'NamedParameter':
for item in state['states']:
if 'extension_flag' in item:
file.Write(" if (group_->feature_info()->feature_flags().%s) {\n" %
item['extension_flag'])
file.Write(" ")
expect_value = item['default']
if isinstance(expect_value, list):
# TODO: Currently we do not check array values.
expect_value = "_"
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
expect_value))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
if 'extension_flag' in item:
file.Write(" }\n")
else:
if 'extension_flag' in state:
file.Write(" if (group_->feature_info()->feature_flags().%s) {\n" %
state['extension_flag'])
file.Write(" ")
args = []
for item in state['states']:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
# TODO: Currently we do not check array values.
args = ["_" if isinstance(arg, list) else arg for arg in args]
file.Write(" EXPECT_CALL(*gl_, %s(%s))\n" %
(state['func'], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
if 'extension_flag' in state:
file.Write(" }\n")
file.Write("""}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUnitTestsForExtensions(self, filename):
"""Writes the service decorder unit tests for functions with extension_flag.
The functions are special in that they need a specific unit test
baseclass to turn on the extension.
"""
functions = [f for f in self.functions if f.GetInfo('extension_flag')]
file = CHeaderWriter(
filename,
"// It is included by gles2_cmd_decoder_unittest_extensions.cc\n")
for func in functions:
if True:
if func.GetInfo('unit_test') == False:
file.Write("// TODO(gman): %s\n" % func.name)
else:
extension = ToCamelCase(
ToGLExtensionString(func.GetInfo('extension_flag')))
func.WriteServiceUnitTest(file, {
'test_name': 'GLES2DecoderTestWith%s' % extension
})
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2Header(self, filename):
"""Writes the GLES2 header."""
file = CHeaderWriter(
filename,
"// This file contains Chromium-specific GLES2 declarations.\n\n")
for func in self.original_functions:
func.WriteGLES2Header(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2CLibImplementation(self, filename):
"""Writes the GLES2 c lib implementation."""
file = CHeaderWriter(
filename,
"// These functions emulate GLES2 over command buffers.\n")
for func in self.original_functions:
func.WriteGLES2CLibImplementation(file)
file.Write("""
namespace gles2 {
extern const NameToFunc g_gles2_function_table[] = {
""")
for func in self.original_functions:
file.Write(
' { "gl%s", reinterpret_cast<GLES2FunctionPointer>(gl%s), },\n' %
(func.name, func.name))
file.Write(""" { NULL, NULL, },
};
} // namespace gles2
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceHeader(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLES2ImplHeader(self, filename):
"""Writes the Mojo GLES2 implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface.h to declare the\n"
"// GL api functions.\n")
code = """
#include "gpu/command_buffer/client/gles2_interface.h"
#include "third_party/mojo/src/mojo/public/c/gles2/gles2.h"
namespace mojo {
class MojoGLES2Impl : public gpu::gles2::GLES2Interface {
public:
explicit MojoGLES2Impl(MojoGLES2Context context) {
context_ = context;
}
~MojoGLES2Impl() override {}
"""
file.Write(code);
for func in self.original_functions:
func.WriteMojoGLES2ImplHeader(file)
code = """
private:
MojoGLES2Context context_;
};
} // namespace mojo
"""
file.Write(code);
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLES2Impl(self, filename):
"""Writes the Mojo GLES2 implementation."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
code = """
#include "mojo/gpu/mojo_gles2_impl_autogen.h"
#include "base/logging.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_copy_texture.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_image.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_miscellaneous.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_pixel_transfer_buffer_object.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_sub_image.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_sync_point.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_texture_mailbox.h"
#include "third_party/mojo/src/mojo/public/c/gles2/gles2.h"
#include "third_party/mojo/src/mojo/public/c/gles2/occlusion_query_ext.h"
namespace mojo {
"""
file.Write(code);
for func in self.original_functions:
func.WriteMojoGLES2Impl(file)
code = """
} // namespace mojo
"""
file.Write(code);
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceStub(self, filename):
"""Writes the GLES2 interface stub header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.h.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStub(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceStubImpl(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.cc.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStubImpl(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ImplementationHeader(self, filename):
"""Writes the GLES2 Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2Implementation(self, filename):
"""Writes the GLES2 Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.cc to define the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2Implementation(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2TraceImplementationHeader(self, filename):
"""Writes the GLES2 Trace Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.h\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementationHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2TraceImplementation(self, filename):
"""Writes the GLES2 Trace Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.cc\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementation(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ImplementationUnitTests(self, filename):
"""Writes the GLES2 helper header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationUnitTest(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUtilsHeader(self, filename):
"""Writes the gles2 auto generated utility header."""
file = CHeaderWriter(filename)
for name in sorted(_NAMED_TYPE_INFO.keys()):
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
file.Write("ValueValidator<%s> %s;\n" %
(named_type.GetType(), ToUnderscore(name)))
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUtilsImplementation(self, filename):
"""Writes the gles2 auto generated utility implementation."""
file = CHeaderWriter(filename)
names = sorted(_NAMED_TYPE_INFO.keys())
for name in names:
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
if named_type.GetValidValues():
file.Write("static const %s valid_%s_table[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValues():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
if named_type.GetValidValuesES3():
file.Write("static const %s valid_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValuesES3():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
if named_type.GetDeprecatedValuesES3():
file.Write("static const %s deprecated_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetDeprecatedValuesES3():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
file.Write("Validators::Validators()")
pre = ' : '
for count, name in enumerate(names):
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
if named_type.GetValidValues():
code = """%(pre)s%(name)s(
valid_%(name)s_table, arraysize(valid_%(name)s_table))"""
else:
code = "%(pre)s%(name)s()"
file.Write(code % {
'name': ToUnderscore(name),
'pre': pre,
})
pre = ',\n '
file.Write(" {\n");
file.Write("}\n\n");
file.Write("void Validators::UpdateValuesES3() {\n")
for name in names:
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.GetDeprecatedValuesES3():
code = """ %(name)s.RemoveValues(
deprecated_%(name)s_table_es3, arraysize(deprecated_%(name)s_table_es3));
"""
file.Write(code % {
'name': ToUnderscore(name),
})
if named_type.GetValidValuesES3():
code = """ %(name)s.AddValues(
valid_%(name)s_table_es3, arraysize(valid_%(name)s_table_es3));
"""
file.Write(code % {
'name': ToUnderscore(name),
})
file.Write("}\n\n");
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCommonUtilsHeader(self, filename):
"""Writes the gles2 common utility header."""
file = CHeaderWriter(filename)
type_infos = sorted(_NAMED_TYPE_INFO.keys())
for type_info in type_infos:
if _NAMED_TYPE_INFO[type_info]['type'] == 'GLenum':
file.Write("static std::string GetString%s(uint32_t value);\n" %
type_info)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCommonUtilsImpl(self, filename):
"""Writes the gles2 common utility header."""
enum_re = re.compile(r'\#define\s+(GL_[a-zA-Z0-9_]+)\s+([0-9A-Fa-fx]+)')
dict = {}
for fname in ['third_party/khronos/GLES2/gl2.h',
'third_party/khronos/GLES2/gl2ext.h',
'third_party/khronos/GLES3/gl3.h',
'gpu/GLES2/gl2chromium.h',
'gpu/GLES2/gl2extchromium.h']:
lines = open(fname).readlines()
for line in lines:
m = enum_re.match(line)
if m:
name = m.group(1)
value = m.group(2)
if len(value) <= 10:
if not value in dict:
dict[value] = name
            # check whether our own _CHROMIUM macros conflict with khronos GL headers.
elif dict[value] != name and (name.endswith('_CHROMIUM') or
dict[value].endswith('_CHROMIUM')):
self.Error("code collision: %s and %s have the same code %s" %
(dict[value], name, value))
file = CHeaderWriter(filename)
file.Write("static const GLES2Util::EnumToString "
"enum_to_string_table[] = {\n")
for value in dict:
file.Write(' { %s, "%s", },\n' % (value, dict[value]))
file.Write("""};
const GLES2Util::EnumToString* const GLES2Util::enum_to_string_table_ =
enum_to_string_table;
const size_t GLES2Util::enum_to_string_table_len_ =
sizeof(enum_to_string_table) / sizeof(enum_to_string_table[0]);
""")
enums = sorted(_NAMED_TYPE_INFO.keys())
for enum in enums:
if _NAMED_TYPE_INFO[enum]['type'] == 'GLenum':
file.Write("std::string GLES2Util::GetString%s(uint32_t value) {\n" %
enum)
valid_list = _NAMED_TYPE_INFO[enum]['valid']
if 'valid_es3' in _NAMED_TYPE_INFO[enum]:
valid_list = valid_list + _NAMED_TYPE_INFO[enum]['valid_es3']
assert len(valid_list) == len(set(valid_list))
if len(valid_list) > 0:
file.Write(" static const EnumToString string_table[] = {\n")
for value in valid_list:
file.Write(' { %s, "%s" },\n' % (value, value))
file.Write(""" };
return GLES2Util::GetQualifiedEnumString(
string_table, arraysize(string_table), value);
}
""")
else:
file.Write(""" return GLES2Util::GetQualifiedEnumString(
NULL, 0, value);
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WritePepperGLES2Interface(self, filename, dev):
"""Writes the Pepper OpenGLES interface definition."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("label Chrome {\n")
file.Write(" M39 = 1.0\n")
file.Write("};\n\n")
if not dev:
# Declare GL types.
file.Write("[version=1.0]\n")
file.Write("describe {\n")
for gltype in ['GLbitfield', 'GLboolean', 'GLbyte', 'GLclampf',
'GLclampx', 'GLenum', 'GLfixed', 'GLfloat', 'GLint',
'GLintptr', 'GLshort', 'GLsizei', 'GLsizeiptr',
'GLubyte', 'GLuint', 'GLushort']:
file.Write(" %s;\n" % gltype)
file.Write(" %s_ptr_t;\n" % gltype)
file.Write("};\n\n")
# C level typedefs.
file.Write("#inline c\n")
file.Write("#include \"ppapi/c/pp_resource.h\"\n")
if dev:
file.Write("#include \"ppapi/c/ppb_opengles2.h\"\n\n")
else:
file.Write("\n#ifndef __gl2_h_\n")
for (k, v) in _GL_TYPES.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#ifdef _WIN64\n")
for (k, v) in _GL_TYPES_64.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#else\n")
for (k, v) in _GL_TYPES_32.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#endif // _WIN64\n")
file.Write("#endif // __gl2_h_\n\n")
file.Write("#endinl\n")
for interface in self.pepper_interfaces:
if interface.dev != dev:
continue
      # Historically, we have provided the OpenGLES2 interfaces in the struct
      # namespace. To avoid breaking code which uses the interface as
      # "struct OpenGLES2", we keep it in the struct namespace.
file.Write('\n[macro="%s", force_struct_namespace]\n' %
interface.GetInterfaceName())
file.Write("interface %s {\n" % interface.GetStructName())
for func in self.original_functions:
if not func.InPepperInterface(interface):
continue
ret_type = func.MapCTypeToPepperIdlType(func.return_type,
is_for_return_type=True)
func_prefix = " %s %s(" % (ret_type, func.GetPepperName())
file.Write(func_prefix)
file.Write("[in] PP_Resource context")
for arg in func.MakeTypedPepperIdlArgStrings():
file.Write(",\n" + " " * len(func_prefix) + arg)
file.Write(");\n")
file.Write("};\n\n")
file.Close()
def WritePepperGLES2Implementation(self, filename):
"""Writes the Pepper OpenGLES interface implementation."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#include \"ppapi/shared_impl/ppb_opengles2_shared.h\"\n\n")
file.Write("#include \"base/logging.h\"\n")
file.Write("#include \"gpu/command_buffer/client/gles2_implementation.h\"\n")
file.Write("#include \"ppapi/shared_impl/ppb_graphics_3d_shared.h\"\n")
file.Write("#include \"ppapi/thunk/enter.h\"\n\n")
file.Write("namespace ppapi {\n\n")
file.Write("namespace {\n\n")
file.Write("typedef thunk::EnterResource<thunk::PPB_Graphics3D_API>"
" Enter3D;\n\n")
file.Write("gpu::gles2::GLES2Implementation* ToGles2Impl(Enter3D*"
" enter) {\n")
file.Write(" DCHECK(enter);\n")
file.Write(" DCHECK(enter->succeeded());\n")
file.Write(" return static_cast<PPB_Graphics3D_Shared*>(enter->object())->"
"gles2_impl();\n");
file.Write("}\n\n");
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
original_arg = func.MakeTypedPepperArgString("")
context_arg = "PP_Resource context_id"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
file.Write("%s %s(%s) {\n" %
(func.return_type, func.GetPepperName(), arg))
file.Write(" Enter3D enter(context_id, true);\n")
file.Write(" if (enter.succeeded()) {\n")
return_str = "" if func.return_type == "void" else "return "
file.Write(" %sToGles2Impl(&enter)->%s(%s);\n" %
(return_str, func.original_name,
func.MakeOriginalArgString("")))
file.Write(" }")
if func.return_type == "void":
file.Write("\n")
else:
file.Write(" else {\n")
file.Write(" return %s;\n" % func.GetErrorReturnString())
file.Write(" }\n")
file.Write("}\n\n")
file.Write("} // namespace\n")
for interface in self.pepper_interfaces:
file.Write("const %s* PPB_OpenGLES2_Shared::Get%sInterface() {\n" %
(interface.GetStructName(), interface.GetName()))
file.Write(" static const struct %s "
"ppb_opengles2 = {\n" % interface.GetStructName())
file.Write(" &")
file.Write(",\n &".join(
f.GetPepperName() for f in self.original_functions
if f.InPepperInterface(interface)))
file.Write("\n")
file.Write(" };\n")
file.Write(" return &ppb_opengles2;\n")
file.Write("}\n")
file.Write("} // namespace ppapi\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ToPPAPIBridge(self, filename):
"""Connects GLES2 helper library to PPB_OpenGLES2 interface"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#ifndef GL_GLEXT_PROTOTYPES\n")
file.Write("#define GL_GLEXT_PROTOTYPES\n")
file.Write("#endif\n")
file.Write("#include <GLES2/gl2.h>\n")
file.Write("#include <GLES2/gl2ext.h>\n")
file.Write("#include \"ppapi/lib/gl/gles2/gl2ext_ppapi.h\"\n\n")
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
interface = self.interface_info[func.GetInfo('pepper_interface') or '']
file.Write("%s GL_APIENTRY gl%s(%s) {\n" %
(func.return_type, func.GetPepperName(),
func.MakeTypedPepperArgString("")))
return_str = "" if func.return_type == "void" else "return "
interface_str = "glGet%sInterfacePPAPI()" % interface.GetName()
original_arg = func.MakeOriginalArgString("")
context_arg = "glGetCurrentContextPPAPI()"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
if interface.GetName():
file.Write(" const struct %s* ext = %s;\n" %
(interface.GetStructName(), interface_str))
file.Write(" if (ext)\n")
file.Write(" %sext->%s(%s);\n" %
(return_str, func.GetPepperName(), arg))
if return_str:
file.Write(" %s0;\n" % return_str)
else:
file.Write(" %s%s->%s(%s);\n" %
(return_str, interface_str, func.GetPepperName(), arg))
file.Write("}\n\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLCallVisitor(self, filename):
"""Provides the GL implementation for mojo"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
for func in self.original_functions:
if not func.IsCoreGLFunction():
continue
file.Write("VISIT_GL_CALL(%s, %s, (%s), (%s))\n" %
(func.name, func.return_type,
func.MakeTypedOriginalArgString(""),
func.MakeOriginalArgString("")))
file.Close()
self.generated_cpp_filenames.append(file.filename)
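  # For illustration only (this comment is not produced by the generator itself): a
  # core function such as "void glActiveTexture(GLenum texture)" would be emitted
  # roughly as
  #   VISIT_GL_CALL(ActiveTexture, void, (GLenum texture), (texture))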
def WriteMojoGLCallVisitorForExtension(self, filename, extension):
"""Provides the GL implementation for mojo for a particular extension"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
for func in self.original_functions:
if func.GetInfo("extension") != extension:
continue
file.Write("VISIT_GL_CALL(%s, %s, (%s), (%s))\n" %
(func.name, func.return_type,
func.MakeTypedOriginalArgString(""),
func.MakeOriginalArgString("")))
file.Close()
self.generated_cpp_filenames.append(file.filename)
def Format(generated_files):
formatter = "clang-format"
if platform.system() == "Windows":
formatter += ".bat"
for filename in generated_files:
call([formatter, "-i", "-style=chromium", filename])
def main(argv):
"""This is the main function."""
parser = OptionParser()
parser.add_option(
"--output-dir",
help="base directory for resulting files, under chrome/src. default is "
"empty. Use this if you want the result stored under gen.")
parser.add_option(
"-v", "--verbose", action="store_true",
help="prints more output.")
(options, args) = parser.parse_args(args=argv)
  # Add in states and capabilities to GLState
gl_state_valid = _NAMED_TYPE_INFO['GLState']['valid']
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'extension_flag' in state:
continue
if 'enum' in state:
if not state['enum'] in gl_state_valid:
gl_state_valid.append(state['enum'])
else:
for item in state['states']:
if 'extension_flag' in item:
continue
if not item['enum'] in gl_state_valid:
gl_state_valid.append(item['enum'])
for capability in _CAPABILITY_FLAGS:
valid_value = "GL_%s" % capability['name'].upper()
if not valid_value in gl_state_valid:
gl_state_valid.append(valid_value)
# This script lives under gpu/command_buffer, cd to base directory.
os.chdir(os.path.dirname(__file__) + "/../..")
base_dir = os.getcwd()
gen = GLGenerator(options.verbose)
gen.ParseGLH("gpu/command_buffer/cmd_buffer_functions.txt")
# Support generating files under gen/
if options.output_dir != None:
os.chdir(options.output_dir)
gen.WritePepperGLES2Interface("ppapi/api/ppb_opengles2.idl", False)
gen.WritePepperGLES2Interface("ppapi/api/dev/ppb_opengles2ext_dev.idl", True)
gen.WriteGLES2ToPPAPIBridge("ppapi/lib/gl/gles2/gles2.c")
gen.WritePepperGLES2Implementation(
"ppapi/shared_impl/ppb_opengles2_shared.cc")
os.chdir(base_dir)
gen.WriteCommandIds("gpu/command_buffer/common/gles2_cmd_ids_autogen.h")
gen.WriteFormat("gpu/command_buffer/common/gles2_cmd_format_autogen.h")
gen.WriteFormatTest(
"gpu/command_buffer/common/gles2_cmd_format_test_autogen.h")
gen.WriteGLES2InterfaceHeader(
"gpu/command_buffer/client/gles2_interface_autogen.h")
gen.WriteMojoGLES2ImplHeader(
"mojo/gpu/mojo_gles2_impl_autogen.h")
gen.WriteMojoGLES2Impl(
"mojo/gpu/mojo_gles2_impl_autogen.cc")
gen.WriteGLES2InterfaceStub(
"gpu/command_buffer/client/gles2_interface_stub_autogen.h")
gen.WriteGLES2InterfaceStubImpl(
"gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h")
gen.WriteGLES2ImplementationHeader(
"gpu/command_buffer/client/gles2_implementation_autogen.h")
gen.WriteGLES2Implementation(
"gpu/command_buffer/client/gles2_implementation_impl_autogen.h")
gen.WriteGLES2ImplementationUnitTests(
"gpu/command_buffer/client/gles2_implementation_unittest_autogen.h")
gen.WriteGLES2TraceImplementationHeader(
"gpu/command_buffer/client/gles2_trace_implementation_autogen.h")
gen.WriteGLES2TraceImplementation(
"gpu/command_buffer/client/gles2_trace_implementation_impl_autogen.h")
gen.WriteGLES2CLibImplementation(
"gpu/command_buffer/client/gles2_c_lib_autogen.h")
gen.WriteCmdHelperHeader(
"gpu/command_buffer/client/gles2_cmd_helper_autogen.h")
gen.WriteServiceImplementation(
"gpu/command_buffer/service/gles2_cmd_decoder_autogen.h")
gen.WriteServiceContextStateHeader(
"gpu/command_buffer/service/context_state_autogen.h")
gen.WriteServiceContextStateImpl(
"gpu/command_buffer/service/context_state_impl_autogen.h")
gen.WriteClientContextStateHeader(
"gpu/command_buffer/client/client_context_state_autogen.h")
gen.WriteClientContextStateImpl(
"gpu/command_buffer/client/client_context_state_impl_autogen.h")
gen.WriteServiceUnitTests(
"gpu/command_buffer/service/gles2_cmd_decoder_unittest_%d_autogen.h")
gen.WriteServiceUnitTestsForExtensions(
"gpu/command_buffer/service/"
"gles2_cmd_decoder_unittest_extensions_autogen.h")
gen.WriteServiceUtilsHeader(
"gpu/command_buffer/service/gles2_cmd_validation_autogen.h")
gen.WriteServiceUtilsImplementation(
"gpu/command_buffer/service/"
"gles2_cmd_validation_implementation_autogen.h")
gen.WriteCommonUtilsHeader(
"gpu/command_buffer/common/gles2_cmd_utils_autogen.h")
gen.WriteCommonUtilsImpl(
"gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h")
gen.WriteGLES2Header("gpu/GLES2/gl2chromium_autogen.h")
mojo_gles2_prefix = ("third_party/mojo/src/mojo/public/c/gles2/"
"gles2_call_visitor")
gen.WriteMojoGLCallVisitor(mojo_gles2_prefix + "_autogen.h")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_texture_mailbox_autogen.h",
"CHROMIUM_texture_mailbox")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_sync_point_autogen.h",
"CHROMIUM_sync_point")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_sub_image_autogen.h",
"CHROMIUM_sub_image")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_miscellaneous_autogen.h",
"CHROMIUM_miscellaneous")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_occlusion_query_ext_autogen.h",
"occlusion_query_EXT")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_image_autogen.h",
"CHROMIUM_image")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_copy_texture_autogen.h",
"CHROMIUM_copy_texture")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_pixel_transfer_buffer_object_autogen.h",
"CHROMIUM_pixel_transfer_buffer_object")
Format(gen.generated_cpp_filenames)
if gen.errors > 0:
print "%d errors" % gen.errors
return 1
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause | -6,201,626,395,720,655,000 | 30.532246 | 91 | 0.593157 | false |
thushear/MLInAction | ml/word2vec.py | 1 | 1733 | # import logging
# logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
#
# from gensim import corpora
#
# documents = ["Human machine interface for lab abc computer applications",
# "A survey of user opinion of computer system response time",
# "The EPS user interface management system",
# "System and human system engineering testing of EPS",
# "Relation of user perceived response time to error measurement",
# "The generation of random binary unordered trees",
# "The intersection graph of paths in trees",
# "Graph minors IV Widths of trees and well quasi ordering",
# "Graph minors A survey" ]
#
# stoplist = set('for a of the and to in'.split())
# texts = [ [word for word in doc.lower().split() if word not in stoplist] for doc in documents]
#
# from collections import defaultdict
# frequency = defaultdict(int)
# for text in texts:
# for token in text:
# frequency[token] += 1
#
# texts = [[token for token in text if frequency[token] > 1] for text in texts]
#
# from pprint import pprint
# pprint(texts)
#
# dictionary = corpora.Dictionary(texts)
# dictionary.save('/tmp/deerwester.dict')
# print(dictionary)
# pprint(dictionary.token2id)
# new_doc = 'human computer interaction minors human minors'
# new_vec = dictionary.doc2bow(new_doc.lower().split())
# pprint(new_vec)
import gensim,logging
from pprint import pprint
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
sentences = [['first', 'sentence'],['second','sentence']]
model = gensim.models.Word2Vec(sentences,min_count=1)
pprint(model)
pprint(model.wv.vocab)
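# A small follow-up sketch (added for illustration; it assumes the KeyedVectors API
# already used above via model.wv, i.e. a reasonably recent gensim):
pprint(model.wv['sentence'])                # learned vector for the token 'sentence'
pprint(model.wv.most_similar('first'))      # nearest neighbours in this tiny corpus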
| apache-2.0 | 635,369,198,885,061,200 | 36.673913 | 96 | 0.680323 | false |
pczhaoyun/wolf | wolf/spiders/wolves/kuyi.py | 1 | 1132 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import urlparse
from scrapy import log
from scrapy.http import Request
from base.base_wolf import Base_Wolf
class Wolf(Base_Wolf):
def __init__(self, *args, **kwargs):
super(Wolf, self).__init__(*args, **kwargs)
self.name = 'kuyi'
self.seed_urls = [
'http://www.kuyi.tv/forum-index-fid-1.htm',
]
self.base_url = 'http://www.kuyi.tv/'
self.rule['follow'] = re.compile(r'thread-index-fid-\d+-tid-\d+.htm')
self.anchor['desc'] = "//*[@class='bg1 border post']"
def get_resource(self, item, response, tree):
item = super(Wolf, self).get_resource(item, response, tree)
resource = tree.xpath("//*[@class='attachlist']//a/@href")
downloads = [r.replace('dialog', 'download') for r in resource if 'attach-dialog' in r]
if len(downloads):
return self.download_bt(item, [Request(d, cookies=self.cookiejar._cookies,) for d in downloads])
else:
self.log("No Resource DropItem %s" % item['source'], level=log.WARNING)
return None
| apache-2.0 | 3,645,572,560,154,432,000 | 34.375 | 108 | 0.595406 | false |
jhunkeler/cbc | cbc/cli/remote_purge.py | 1 | 1921 | #!/usr/bin/env python
'''
DANGER. THIS WILL WIPE AN ENTIRE ANACONDA.ORG REPOSITORY CHANNEL.
YOU HAVE BEEN WARNED.
'''
import argparse
from subprocess import check_output, STDOUT
def choose(answer):
answer = answer.upper()
if answer == 'Y' or answer == 'YES':
return True
return False
def prompt_user(channel):
    message = 'You are about to REMOVE every package inside of: {0}'.format(channel)
message_length = len(message)
print('!' * message_length)
print(message)
print('!' * message_length)
print('')
print('Continue? (y/N) ', end='')
answer = input()
print('')
return choose(answer)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('channel', help='Name of channel to be purged of its contents')
args = parser.parse_args()
channel = args.channel
show_command = 'conda-server channel --show {0}'.format(channel).split()
show_output = check_output(show_command, stderr=STDOUT)
show_output = show_output.decode()
found = []
for line in show_output.splitlines():
line = line.lstrip()
if not line:
continue
if not line.startswith('+'):
continue
line = line.replace('+', '').lstrip()
package = '/'.join(line.split('/')[:2])
found.append(package)
if found:
print("Packages to remove:")
for pkg in found:
print(pkg)
if not prompt_user(channel):
print('Operation aborted by user...')
exit(0)
print('')
for pkg in found:
purge_command = 'conda-server remove -f {0}'.format(pkg).split()
print("Removing [{0:>10s}] :: {1:>10s}".format(channel, pkg))
purge_output = check_output(purge_command, stderr=STDOUT)
else:
print("No packages in channel: {0}".format(channel))
if __name__ == '__main__':
main()
| bsd-3-clause | -7,318,447,129,900,656,000 | 25.315068 | 87 | 0.588235 | false |
uclouvain/OSIS-Louvain | base/tests/factories/organization.py | 1 | 1749 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import factory
from base.models.enums import organization_type
from osis_common.utils.datetime import get_tzinfo
class OrganizationFactory(factory.DjangoModelFactory):
class Meta:
model = 'base.Organization'
external_id = factory.Faker('text', max_nb_chars=100)
changed = factory.Faker('date_time_this_month', tzinfo=get_tzinfo())
is_current_partner = True
type = factory.Iterator(organization_type.ORGANIZATION_TYPE, getter=lambda c: c[0])
| agpl-3.0 | 8,396,260,698,860,064,000 | 42.7 | 87 | 0.671053 | false |
Jumpscale/jumpscale_core8 | lib/JumpScale/tools/openwrt/base.py | 1 | 1470 | import functools
class BaseService:
def __init__(self, wrt):
self._wrt = wrt
self._package = None
@property
def package(self):
if self._package is None:
self._package = self._wrt.get(self.PACKAGE)
return self._package
class BaseServiceSection:
def __new__(cls, *args, **kwargs):
exposed_fields = cls.EXPOSED_FIELDS \
if hasattr(cls, 'EXPOSED_FIELDS') else []
for field in exposed_fields:
prop = property(
functools.partial(cls._get, field=field),
functools.partial(cls._set, field=field)
)
setattr(cls, field, prop)
exposed_bool_fields = cls.EXPOSED_BOOLEAN_FIELDS \
if hasattr(cls, 'EXPOSED_BOOLEAN_FIELDS') else []
for field in exposed_bool_fields:
prop = property(
functools.partial(cls._getb, field=field),
functools.partial(cls._set, field=field)
)
setattr(cls, field, prop)
        # object.__new__ accepts no extra arguments; the constructor arguments are
        # handled by __init__ below.
        return super(BaseServiceSection, cls).__new__(cls)
def _get(self, field):
return self.section.get(field)
def _getb(self, field):
return self.section.getBool(field, True)
def _set(self, value, field):
self.section[field] = value
def __init__(self, section):
self._section = section
@property
def section(self):
return self._section
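# Usage sketch (illustration only -- the wrapped section object and the field names
# are hypothetical; they depend on the UCI config being exposed):
#
#     class WirelessSection(BaseServiceSection):
#         EXPOSED_FIELDS = ["ssid", "channel"]
#         EXPOSED_BOOLEAN_FIELDS = ["disabled"]
#
#     wifi = WirelessSection(section)
#     wifi.ssid = "guest"            # proxied to section["ssid"]
#     enabled = not wifi.disabled    # read via section.getBool("disabled", True)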
| apache-2.0 | -5,721,080,247,273,872,000 | 24.344828 | 75 | 0.564626 | false |
slightlynybbled/tk_tools | tests/test_ButtonGrid.py | 1 | 1266 | import tkinter as tk
import pytest
from tk_tools import ButtonGrid
from tests.test_basic import root
@pytest.fixture
def btn_grid_3col(root):
eg = ButtonGrid(root, 3, headers=['a', 'b', 'c'])
yield eg
eg._redraw()
def test_creation_with_header(root):
ButtonGrid(root, 3, headers=['a', 'b', 'c'])
def test_header_len_doesnt_match_cols(root):
with pytest.raises(ValueError):
ButtonGrid(root, 2, headers=['a', 'b', 'c'])
def test_add_row(btn_grid_3col):
data = [
('1', lambda: print('1')),
('2', lambda: print('2')),
('3', lambda: print('3')),
]
btn_grid_3col.add_row(data)
def test_add_row_wrong_format(btn_grid_3col):
data = ['1', '2', '3']
with pytest.raises(ValueError):
btn_grid_3col.add_row(data)
def test_add_row_len_doesnt_match_cols(btn_grid_3col):
data = ['1', '2', '3', '4']
with pytest.raises(ValueError):
btn_grid_3col.add_row(data)
def test_remove_row(btn_grid_3col):
data = [
('1', lambda: print('1')),
('2', lambda: print('2')),
('3', lambda: print('3')),
]
btn_grid_3col.add_row(data)
btn_grid_3col.add_row(data)
btn_grid_3col.add_row(data)
# remove row 1
btn_grid_3col.remove_row(1)
| mit | -2,284,509,175,240,753,200 | 20.1 | 54 | 0.580569 | false |
hasteur/g13bot_tools_new | tests/eventstreams_tests.py | 1 | 7693 | # -*- coding: utf-8 -*-
"""Tests for the eventstreams module."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
from types import FunctionType
import mock
from pywikibot.comms.eventstreams import EventStreams
from pywikibot import config
from pywikibot.family import WikimediaFamily
from tests.aspects import unittest, TestCase, DefaultSiteTestCase
@mock.patch('pywikibot.comms.eventstreams.EventSource', new=mock.MagicMock())
class TestEventStreamsUrlTests(TestCase):
"""Url tests for eventstreams module."""
sites = {
'de.wp': {
'family': 'wikipedia',
'code': 'de',
'hostname': 'de.wikipedia.org',
},
'en.wq': {
'family': 'wikiquote',
'code': 'en',
'hostname': 'en.wikiquote.org',
},
}
def test_url_parameter(self, key):
"""Test EventStreams with given url."""
e = EventStreams(url=self.sites[key]['hostname'])
self.assertEqual(e._url, self.sites[key]['hostname'])
self.assertEqual(e._url, e.url)
self.assertEqual(e._url, e.sse_kwargs.get('url'))
self.assertIsNone(e._total)
self.assertIsNone(e._stream)
def test_url_from_site(self, key):
"""Test EventStreams with url from site."""
site = self.get_site(key)
stream = 'recentchanges'
e = EventStreams(site=site, stream=stream)
self.assertEqual(
e._url, 'https://stream.wikimedia.org/v2/stream/' + stream)
self.assertEqual(e._url, e.url)
self.assertEqual(e._url, e.sse_kwargs.get('url'))
self.assertIsNone(e._total)
self.assertEqual(e._stream, stream)
@mock.patch('pywikibot.comms.eventstreams.EventSource', new=mock.MagicMock())
class TestEventStreamsStreamTests(DefaultSiteTestCase):
"""Stream tests for eventstreams module."""
def test_url_with_stream(self):
"""Test EventStreams with url from default site."""
site = self.get_site()
fam = site.family
if not isinstance(fam, WikimediaFamily):
self.skipTest(
"Family '{0}' of site '{1}' is not a WikimediaFamily."
.format(fam, site))
stream = 'recentchanges'
e = EventStreams(stream=stream)
self.assertEqual(
e._url, 'https://stream.wikimedia.org/v2/stream/' + stream)
self.assertEqual(e._url, e.url)
self.assertEqual(e._url, e.sse_kwargs.get('url'))
self.assertIsNone(e._total)
self.assertEqual(e._stream, stream)
def test_url_missing_stream(self):
"""Test EventStreams with url from site with missing stream."""
with self.assertRaises(NotImplementedError):
EventStreams()
class TestEventStreamsSettingTests(TestCase):
"""Setting tests for eventstreams module."""
dry = True
def setUp(self):
"""Set up unit test."""
super(TestEventStreamsSettingTests, self).setUp()
with mock.patch('pywikibot.comms.eventstreams.EventSource'):
self.es = EventStreams(url='dummy url')
def test_maximum_items(self):
"""Test EventStreams total value."""
total = 4711
self.es.set_maximum_items(total)
self.assertEqual(self.es._total, total)
def test_timeout_setting(self):
"""Test EventStreams timeout value."""
self.assertEqual(self.es.sse_kwargs.get('timeout'),
config.socket_timeout)
def test_filter_function_settings(self):
"""Test EventStreams filter function settings."""
def foo():
"""Dummy function."""
return True
self.es.register_filter(foo)
self.assertEqual(self.es.filter['all'][0], foo)
self.assertEqual(self.es.filter['any'], [])
self.assertEqual(self.es.filter['none'], [])
self.es.register_filter(foo, ftype='none')
self.assertEqual(self.es.filter['all'][0], foo)
self.assertEqual(self.es.filter['any'], [])
self.assertEqual(self.es.filter['none'][0], foo)
self.es.register_filter(foo, ftype='any')
self.assertEqual(self.es.filter['all'][0], foo)
self.assertEqual(self.es.filter['any'][0], foo)
self.assertEqual(self.es.filter['none'][0], foo)
def test_filter_function_settings_fail(self):
"""Test EventStreams failing filter function settings."""
with self.assertRaises(TypeError):
self.es.register_filter('test')
def test_filter_settings(self):
"""Test EventStreams filter settings."""
self.es.register_filter(foo='bar')
self.assertIsInstance(self.es.filter['all'][0], FunctionType)
self.es.register_filter(bar='baz')
self.assertEqual(len(self.es.filter['all']), 2)
class TestEventStreamsFilterTests(TestCase):
"""Filter tests for eventstreams module."""
dry = True
data = {'foo': True, 'bar': 'baz'}
def setUp(self):
"""Set up unit test."""
super(TestEventStreamsFilterTests, self).setUp()
with mock.patch('pywikibot.comms.eventstreams.EventSource'):
self.es = EventStreams(url='dummy url')
def test_filter_function_all(self):
"""Test EventStreams filter all function."""
self.es.register_filter(lambda x: True)
self.assertTrue(self.es.streamfilter(self.data))
self.es.register_filter(lambda x: False)
self.assertFalse(self.es.streamfilter(self.data))
def test_filter_function_any(self):
"""Test EventStreams filter any function."""
self.es.register_filter(lambda x: True, ftype='any')
self.assertTrue(self.es.streamfilter(self.data))
self.es.register_filter(lambda x: False, ftype='any')
self.assertTrue(self.es.streamfilter(self.data))
def test_filter_function_none(self):
"""Test EventStreams filter none function."""
self.es.register_filter(lambda x: False, ftype='none')
self.assertTrue(self.es.streamfilter(self.data))
self.es.register_filter(lambda x: True, ftype='none')
self.assertFalse(self.es.streamfilter(self.data))
def _test_filter(self, none_type, all_type, any_type, result):
"""Test a single fixed filter."""
self.es.filter = {'all': [], 'any': [], 'none': []}
self.es.register_filter(lambda x: none_type, ftype='none')
self.es.register_filter(lambda x: all_type, ftype='all')
if any_type is not None:
self.es.register_filter(lambda x: any_type, ftype='any')
self.assertEqual(self.es.streamfilter(self.data), result,
'Test EventStreams filter mixed function failed for\n'
"'none': {0}, 'all': {1}, 'any': {2}\n"
'(expected {3}, given {4})'
.format(none_type, all_type, any_type,
result, not result))
def test_filter_mixed_function(self):
"""Test EventStreams filter mixed function."""
for none_type in (False, True):
for all_type in (False, True):
for any_type in (False, True, None):
if none_type is False and all_type is True and (
any_type is None or any_type is True):
result = True
else:
result = False
self._test_filter(none_type, all_type, any_type, result)
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
except SystemExit:
pass
| mit | -6,757,897,212,735,869,000 | 35.117371 | 79 | 0.602106 | false |
daknuett/python3-nf | py/nf/integral/base/rect/line.py | 1 | 1526 | from ..do.line import integrate_numbers_rect
from ....util.do.iterators import WidthIterator, StepIterator
def integrate_steps(f, rnge, steps):
"""
Approximate the integral of ``f``
in the range ``rnge`` using a decomposition
with ``steps`` subintervalls.
``f`` must return ``float`` or ``int``.
``rnge`` must be ``range`` or a ``tuple`` containing
``start`` and ``stop``
Algorithm: *Riemann upper sum*
"""
if(isinstance(rnge, range)):
start = rnge.start
stop = rnge.stop
    elif(isinstance(rnge, (list, tuple))):
if(len(rnge) != 2):
raise ValueError("rnge must have length 2: (start, stop)")
start, stop = rnge
else:
raise TypeError("rnge must be range or list or tuple")
width = (stop - start) / steps
return integrate_numbers_rect(StepIterator(start, stop, width), WidthIterator(width), f, 0)
def integrate_width(f, rnge, width):
"""
Approximate the integral of ``f``
in the range ``rnge`` using a decomposition
with subintervalls with ``width`` length.
``f`` must return ``float`` or ``int``.
``rnge`` must be ``range`` or a ``tuple`` containing
``start`` and ``stop``
Algorithm: *Riemann upper sum*
"""
if(isinstance(rnge, range)):
start = rnge.start
stop = rnge.stop
    elif(isinstance(rnge, (list, tuple))):
if(len(rnge) != 2):
raise ValueError("rnge must have length 2: (start, stop)")
start, stop = rnge
else:
raise TypeError("rnge must be range or list or tuple")
return integrate_numbers_rect(StepIterator(start, stop, width), WidthIterator(width), f, 0)
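# Usage sketch (illustration only; ``f`` may be any callable returning a number):
#
#     integrate_steps(lambda x: x * x, range(0, 2), 1000)    # ~8/3 with 1000 subintervals
#     integrate_width(lambda x: x * x, range(0, 2), 0.002)   # same integral, fixed strip width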
| agpl-3.0 | -7,385,630,607,984,555,000 | 25.310345 | 92 | 0.671691 | false |
seebass/drf-tools | drf_tools/views.py | 1 | 8766 | from datetime import datetime
import logging
from rest_framework import status
from rest_framework.mixins import RetrieveModelMixin, ListModelMixin, DestroyModelMixin
from rest_framework.parsers import MultiPartParser
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.views import APIView
from rest_framework.viewsets import GenericViewSet
from rest_framework.exceptions import ParseError
import drf_hal_json
from drf_hal_json.views import HalCreateModelMixin
from drf_nested_fields.views import CustomFieldsMixin, copy_meta_attributes
from drf_nested_routing.views import CreateNestedModelMixin, UpdateNestedModelMixin
from drf_tools import utils
from drf_tools.serializers import HalNestedFieldsModelSerializer, CsvSerializer, XlsxSerializer
logger = logging.getLogger(__name__)
def _add_parent_to_hal_request_data(request, parentKey):
if not drf_hal_json.is_hal_content_type(request.content_type):
return
links = request.data.get('_links')
if links and parentKey in links:
return
if not links:
links = {}
request.data['_links'] = links
uriSplit = request.build_absolute_uri().split('/')
if request.method == 'PUT':
uriSplit = uriSplit[:-3] # in case of PUT the id must be removed as well
else:
uriSplit = uriSplit[:-2]
links[parentKey] = '/'.join(uriSplit) + '/'
class RestLoggingMixin(object):
"""Provides full logging of requests and responses"""
def finalize_response(self, request, response, *args, **kwargs):
if logger.isEnabledFor(logging.DEBUG):
logger.debug("{} {}".format(response.status_code, response.data))
return super(RestLoggingMixin, self).finalize_response(request, response, *args, **kwargs)
def initial(self, request, *args, **kwargs):
if logger.isEnabledFor(logging.DEBUG):
logger.debug("{} {} {} {}".format(request.method, request.path, request.query_params, request.data))
super(RestLoggingMixin, self).initial(request, *args, **kwargs)
class DefaultSerializerMixin(object):
"""
    If a view has no serializer_class specified, this mixin takes care of creating a default serializer_class that
    inherits from HalNestedFieldsModelSerializer
"""
def get_serializer_class(self):
if not self.serializer_class:
class DefaultSerializer(HalNestedFieldsModelSerializer):
class Meta:
model = self.queryset.model
fields = '__all__'
self.serializer_class = DefaultSerializer
return self.serializer_class
class HalNoLinksMixin(ListModelMixin):
"""
    For responses with a large amount of data, link generation can be switched off via the query param 'no_links'.
    Instead of links, simple ids are returned
"""
def get_serializer_class(self):
no_links = extract_boolean_from_query_params(self.get_serializer_context().get('request'), "no_links")
if not no_links:
return super(HalNoLinksMixin, self).get_serializer_class()
self.always_included_fields = ["id"]
serializer_class = super(HalNoLinksMixin, self).get_serializer_class()
class HalNoLinksSerializer(serializer_class):
serializer_related_field = PrimaryKeyRelatedField
class Meta:
pass
copy_meta_attributes(serializer_class.Meta, Meta)
@staticmethod
def _is_link_field(field):
return False
@staticmethod
def _get_links_serializer(model_cls, link_field_names):
return None
return HalNoLinksSerializer
class CreateModelMixin(CreateNestedModelMixin, HalCreateModelMixin):
"""
Parents of nested resources are automatically added to the request content, so that they don't have to be defined twice
(url and request content)
"""
def _add_parent_to_request_data(self, request, parentKey, parentId):
_add_parent_to_hal_request_data(request, parentKey)
class ReadModelMixin(HalNoLinksMixin, CustomFieldsMixin, RetrieveModelMixin, ListModelMixin):
always_included_fields = ["id", api_settings.URL_FIELD_NAME]
class UpdateModelMixin(UpdateNestedModelMixin):
"""
    In addition to the default django behaviour, this checks whether the resource exists and returns 404 if it does not,
    instead of creating that resource
Parents of nested resources are automatically added to the request content, so that they don't have to be defined twice
(url and request content)
"""
def update(self, request, *args, **kwargs):
instance = self.get_object()
if instance is None:
return Response("Resource with the given id/pk does not exist.", status=status.HTTP_404_NOT_FOUND)
return super(UpdateModelMixin, self).update(request, *args, **kwargs)
def _add_parent_to_request_data(self, request, parentKey, parentId):
_add_parent_to_hal_request_data(request, parentKey)
class PartialUpdateOnlyMixin(object):
def partial_update(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
return Response(serializer.data)
@staticmethod
def perform_update(serializer):
serializer.save()
class BaseViewSet(RestLoggingMixin, DefaultSerializerMixin, GenericViewSet):
pass
class ModelViewSet(CreateModelMixin, ReadModelMixin, UpdateModelMixin, DestroyModelMixin, BaseViewSet):
pass
class FileUploadView(RestLoggingMixin, APIView):
parser_classes = (MultiPartParser,)
renderer_classes = (JSONRenderer,)
def _get_file_and_name(self, request):
file = self._get_file_from_request(request)
return file.file, file.name
def _get_file_bytes_and_name(self, request):
file = self._get_file_from_request(request)
return file.read(), file.name
@staticmethod
def _get_file_from_request(request):
in_memory_upload_file = request.data.get('file')
if not in_memory_upload_file or not in_memory_upload_file.file:
            raise ValueError("Multipart content must contain file.")
return in_memory_upload_file
class XlsxImportView(FileUploadView):
default_sheet_name = None
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
def _get_xlsx_content_as_list_and_file_info(self, request):
file_bytes, filename = self._get_file_bytes_and_name(request)
sheetName = request.query_params.get('sheetName') or self.default_sheet_name
return XlsxSerializer.deserialize(file_bytes, sheetName), filename, file_bytes
class CsvImportView(FileUploadView):
media_type = 'text/csv'
def _get_csv_content_as_list_and_file_info(self, request):
file_bytes, filename = self._get_file_bytes_and_name(request)
return CsvSerializer.deserialize(file_bytes), filename, file_bytes
def extract_int_from_query_params(request, key):
value = request.query_params.get(key)
if value:
try:
value = int(value)
except ValueError:
raise ParseError("Type of parameter '{}' must be 'int'".format(key))
return value
def extract_datetime_from_query_params(request, key):
value = request.query_params.get(key)
if value:
try:
value = datetime.strptime(value, utils.DATETIME_FORMAT_ISO)
except ValueError:
raise ParseError(
"Value of parameter '{}' has wrong format. Use '{}' instead".format(key, utils.DATETIME_FORMAT_ISO))
return value
def extract_enum_from_query_params(request, key, enum_type):
value = request.query_params.get(key)
choices = [context.value for context in enum_type]
if value:
if value.upper() not in choices:
raise ParseError("Value of query-parameter '{}' must be one out of {}".format(key, choices))
return enum_type[value.upper()]
return value
def extract_boolean_from_query_params(request, key):
value = request.query_params.get(key)
if not value:
return None
return value == 'true'
def get_instance_from_params(request, key, model_cls, optional=False):
value = extract_int_from_query_params(request, key)
if not value:
if not optional:
raise ParseError("Query-parameter '{}' must be set".format(key))
else:
return None
return model_cls.objects.get(id=value)
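# Editor's sketch (not part of the original module), illustrating how these
# pieces are meant to be combined; "Project" and the URL below are illustrative
# assumptions, not names from this package:
#
#   class ProjectViewSet(ModelViewSet):
#       queryset = Project.objects.all()   # hypothetical Django model
#       serializer_class = None            # DefaultSerializerMixin builds a HAL serializer
#
#   GET /projects/?no_links=true           # plain ids instead of HAL links (HalNoLinksMixin)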
| mit | 4,416,811,976,637,126,700 | 33.924303 | 126 | 0.689824 | false |
Triv90/Heat | heat/openstack/common/version.py | 1 | 3271 |
# Copyright 2012 OpenStack LLC
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utilities for consuming the version from pkg_resources.
"""
import pkg_resources
class VersionInfo(object):
def __init__(self, package):
"""Object that understands versioning for a package
:param package: name of the python package, such as glance, or
python-glanceclient
"""
self.package = package
self.release = None
self.version = None
self._cached_version = None
def _get_version_from_pkg_resources(self):
"""Get the version of the package from the pkg_resources record
associated with the package."""
try:
requirement = pkg_resources.Requirement.parse(self.package)
provider = pkg_resources.get_provider(requirement)
return provider.version
except pkg_resources.DistributionNotFound:
            # The most likely cause for this is running tests in a tree
# produced from a tarball where the package itself has not been
# installed into anything. Check for a PKG-INFO file.
from heat.openstack.common import setup
return setup.get_version_from_pkg_info(self.package)
def release_string(self):
"""Return the full version of the package including suffixes indicating
VCS status.
"""
if self.release is None:
self.release = self._get_version_from_pkg_resources()
return self.release
def version_string(self):
"""Return the short version minus any alpha/beta tags."""
if self.version is None:
parts = []
for part in self.release_string().split('.'):
if part[0].isdigit():
parts.append(part)
else:
break
self.version = ".".join(parts)
return self.version
# Compatibility functions
canonical_version_string = version_string
version_string_with_vcs = release_string
def cached_version_string(self, prefix=""):
"""Generate an object which will expand in a string context to
        the results of version_string(). We do this so that we don't
call into pkg_resources every time we start up a program when
passing version information into the CONF constructor, but
rather only do the calculation when and if a version is requested
"""
if not self._cached_version:
self._cached_version = "%s%s" % (prefix,
self.version_string())
return self._cached_version
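# Editor's usage sketch (not part of the original module); assumes the "heat"
# package metadata is installed so pkg_resources can resolve it:
if __name__ == "__main__":
    info = VersionInfo('heat')
    print(info.release_string())             # full version, including VCS suffixes
    print(info.version_string())             # short version without alpha/beta tags
    print(info.cached_version_string('v'))   # prefixed string, computed once and cached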
| apache-2.0 | 3,530,254,261,941,602,000 | 37.034884 | 79 | 0.631611 | false |
she11c0de/cubes | tests/test_browser.py | 1 | 9674 | import unittest
from cubes.browser import *
from cubes.errors import *
from .common import CubesTestCaseBase
class CutsTestCase(CubesTestCaseBase):
def setUp(self):
super(CutsTestCase, self).setUp()
self.workspace = self.create_workspace(model="browser_test.json")
self.cube = self.workspace.cube("transactions")
self.dim_date = self.cube.dimension("date")
def test_cut_depth(self):
dim = self.cube.dimension("date")
self.assertEqual(1, PointCut(dim, [1]).level_depth())
self.assertEqual(3, PointCut(dim, [1, 1, 1]).level_depth())
self.assertEqual(1, RangeCut(dim, [1], [1]).level_depth())
self.assertEqual(3, RangeCut(dim, [1, 1, 1], [1]).level_depth())
self.assertEqual(1, SetCut(dim, [[1], [1]]).level_depth())
self.assertEqual(3, SetCut(dim, [[1], [1], [1, 1, 1]]).level_depth())
def test_cut_from_dict(self):
# d = {"type":"point", "path":[2010]}
# self.assertRaises(Exception, cubes.cut_from_dict, d)
d = {"type": "point", "path": [2010], "dimension": "date",
"level_depth": 1, "hierarchy": None, "invert": False,
"hidden": False}
cut = cut_from_dict(d)
tcut = PointCut("date", [2010])
self.assertEqual(tcut, cut)
self.assertEqual(dict(d), tcut.to_dict())
self._assert_invert(d, cut, tcut)
d = {"type": "range", "from": [2010], "to": [2012, 10], "dimension":
"date", "level_depth": 2, "hierarchy": None, "invert": False,
"hidden": False}
cut = cut_from_dict(d)
tcut = RangeCut("date", [2010], [2012, 10])
self.assertEqual(tcut, cut)
self.assertEqual(dict(d), tcut.to_dict())
self._assert_invert(d, cut, tcut)
d = {"type": "set", "paths": [[2010], [2012, 10]], "dimension": "date",
"level_depth": 2, "hierarchy": None, "invert": False,
"hidden": False}
cut = cut_from_dict(d)
tcut = SetCut("date", [[2010], [2012, 10]])
self.assertEqual(tcut, cut)
self.assertEqual(dict(d), tcut.to_dict())
self._assert_invert(d, cut, tcut)
self.assertRaises(ArgumentError, cut_from_dict, {"type": "xxx"})
def _assert_invert(self, d, cut, tcut):
cut.invert = True
tcut.invert = True
d["invert"] = True
self.assertEqual(tcut, cut)
self.assertEqual(dict(d), tcut.to_dict())
class StringConversionsTestCase(unittest.TestCase):
def test_cut_string_conversions(self):
cut = PointCut("foo", ["10"])
self.assertEqual("foo:10", str(cut))
self.assertEqual(cut, cut_from_string("foo:10"))
cut = PointCut("foo", ["123_abc_", "10", "_"])
self.assertEqual("foo:123_abc_,10,_", str(cut))
self.assertEqual(cut, cut_from_string("foo:123_abc_,10,_"))
cut = PointCut("foo", ["123_ abc_"])
self.assertEqual(r"foo:123_ abc_", str(cut))
self.assertEqual(cut, cut_from_string("foo:123_ abc_"))
cut = PointCut("foo", ["a-b"])
self.assertEqual("foo:a\-b", str(cut))
self.assertEqual(cut, cut_from_string("foo:a\-b"))
cut = PointCut("foo", ["a+b"])
self.assertEqual("foo:a+b", str(cut))
self.assertEqual(cut, cut_from_string("foo:a+b"))
def test_special_characters(self):
self.assertEqual('\\:q\\-we,a\\\\sd\\;,100',
string_from_path([":q-we", "a\\sd;", 100]))
def test_string_from_path(self):
self.assertEqual('qwe,asd,100',
string_from_path(["qwe", "asd", 100]))
self.assertEqual('', string_from_path([]))
self.assertEqual('', string_from_path(None))
def test_path_from_string(self):
self.assertEqual(["qwe", "asd", "100"],
path_from_string('qwe,asd,100'))
self.assertEqual([], path_from_string(''))
self.assertEqual([], path_from_string(None))
def test_set_cut_string(self):
cut = SetCut("foo", [["1"], ["2", "3"], ["qwe", "asd", "100"]])
self.assertEqual("foo:1;2,3;qwe,asd,100", str(cut))
self.assertEqual(cut, cut_from_string("foo:1;2,3;qwe,asd,100"))
# single-element SetCuts cannot go round trip, they become point cuts
cut = SetCut("foo", [["a+b"]])
self.assertEqual("foo:a+b", str(cut))
self.assertEqual(PointCut("foo", ["a+b"]), cut_from_string("foo:a+b"))
cut = SetCut("foo", [["a-b"]])
self.assertEqual("foo:a\-b", str(cut))
self.assertEqual(PointCut("foo", ["a-b"]), cut_from_string("foo:a\-b"))
def test_range_cut_string(self):
cut = RangeCut("date", ["2010"], ["2011"])
self.assertEqual("date:2010-2011", str(cut))
self.assertEqual(cut, cut_from_string("date:2010-2011"))
cut = RangeCut("date", ["2010"], None)
self.assertEqual("date:2010-", str(cut))
cut = cut_from_string("date:2010-")
if cut.to_path:
self.fail('there should be no to path, is: %s' % (cut.to_path, ))
cut = RangeCut("date", None, ["2010"])
self.assertEqual("date:-2010", str(cut))
cut = cut_from_string("date:-2010")
if cut.from_path:
self.fail('there should be no from path is: %s' % (cut.from_path, ))
cut = RangeCut("date", ["2010", "11", "12"], ["2011", "2", "3"])
self.assertEqual("date:2010,11,12-2011,2,3", str(cut))
self.assertEqual(cut, cut_from_string("date:2010,11,12-2011,2,3"))
cut = RangeCut("foo", ["a+b"], ["1"])
self.assertEqual("foo:a+b-1", str(cut))
self.assertEqual(cut, cut_from_string("foo:a+b-1"))
cut = RangeCut("foo", ["a-b"], ["1"])
self.assertEqual(r"foo:a\-b-1", str(cut))
self.assertEqual(cut, cut_from_string(r"foo:a\-b-1"))
def test_hierarchy_cut(self):
cut = PointCut("date", ["10"], "dqmy")
self.assertEqual("date@dqmy:10", str(cut))
self.assertEqual(cut, cut_from_string("date@dqmy:10"))
class BrowserTestCase(CubesTestCaseBase):
def setUp(self):
super(BrowserTestCase, self).setUp()
self.workspace = self.create_workspace(model="model.json")
self.cube = self.workspace.cube("contracts")
class AggregationBrowserTestCase(BrowserTestCase):
def setUp(self):
super(AggregationBrowserTestCase, self).setUp()
self.browser = AggregationBrowser(self.cube)
def test_cutting(self):
full_cube = Cell(self.cube)
self.assertEqual(self.cube, full_cube.cube)
self.assertEqual(0, len(full_cube.cuts))
cell = full_cube.slice(PointCut("date", [2010]))
self.assertEqual(1, len(cell.cuts))
cell = cell.slice(PointCut("supplier", [1234]))
cell = cell.slice(PointCut("cpv", [50, 20]))
self.assertEqual(3, len(cell.cuts))
self.assertEqual(self.cube, cell.cube)
# Adding existing slice should result in changing the slice properties
cell = cell.slice(PointCut("date", [2011]))
self.assertEqual(3, len(cell.cuts))
def test_multi_slice(self):
full_cube = Cell(self.cube)
cuts_list = (
PointCut("date", [2010]),
PointCut("cpv", [50, 20]),
PointCut("supplier", [1234]))
cell_list = full_cube.multi_slice(cuts_list)
self.assertEqual(3, len(cell_list.cuts))
self.assertRaises(CubesError, full_cube.multi_slice, {})
def test_get_cell_dimension_cut(self):
full_cube = Cell(self.cube)
cell = full_cube.slice(PointCut("date", [2010]))
cell = cell.slice(PointCut("supplier", [1234]))
cut = cell.cut_for_dimension("date")
self.assertEqual(str(cut.dimension), "date")
self.assertRaises(NoSuchDimensionError, cell.cut_for_dimension, "someunknown")
cut = cell.cut_for_dimension("cpv")
self.assertEqual(cut, None)
def test_hierarchy_path(self):
dim = self.cube.dimension("cpv")
hier = dim.hierarchy()
levels = hier.levels_for_path([])
self.assertEqual(len(levels), 0)
levels = hier.levels_for_path(None)
self.assertEqual(len(levels), 0)
levels = hier.levels_for_path([1, 2, 3, 4])
self.assertEqual(len(levels), 4)
names = [level.name for level in levels]
self.assertEqual(names, ['division', 'group', 'class', 'category'])
self.assertRaises(HierarchyError, hier.levels_for_path,
[1, 2, 3, 4, 5, 6, 7, 8])
def test_hierarchy_drilldown_levels(self):
dim = self.cube.dimension("cpv")
hier = dim.hierarchy()
levels = hier.levels_for_path([], drilldown=True)
self.assertEqual(len(levels), 1)
self.assertEqual(levels[0].name, 'division')
levels = hier.levels_for_path(None, drilldown=True)
self.assertEqual(len(levels), 1)
self.assertEqual(levels[0].name, 'division')
def test_slice_drilldown(self):
cut = PointCut("date", [])
original_cell = Cell(self.cube, [cut])
cell = original_cell.drilldown("date", 2010)
self.assertEqual([2010], cell.cut_for_dimension("date").path)
cell = cell.drilldown("date", 1)
self.assertEqual([2010, 1], cell.cut_for_dimension("date").path)
cell = cell.drilldown("date", 2)
self.assertEqual([2010, 1, 2], cell.cut_for_dimension("date").path)
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(AggregationBrowserTestCase))
suite.addTest(unittest.makeSuite(CellsAndCutsTestCase))
return suite
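# Editor's note (not part of the original tests): the cut-string syntax that the
# assertions above exercise, for quick reference:
#   cut_from_string("foo:10")         -> PointCut("foo", ["10"])
#   cut_from_string("date:2010-2011") -> RangeCut("date", ["2010"], ["2011"])
#   cut_from_string("foo:1;2,3")      -> SetCut("foo", [["1"], ["2", "3"]])
#   cut_from_string("date@dqmy:10")   -> PointCut on the "dqmy" hierarchy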
| mit | -7,718,235,660,885,802,000 | 36.207692 | 86 | 0.580112 | false |
EricACooper/perfrunner | perfrunner/tests/__init__.py | 1 | 7421 | import exceptions as exc
import glob
import shutil
import sys
import time
from logger import logger
from perfrunner.helpers.cbmonitor import CbAgent
from perfrunner.helpers.experiments import ExperimentHelper
from perfrunner.helpers.memcached import MemcachedHelper
from perfrunner.helpers.metrics import MetricHelper
from perfrunner.helpers.misc import log_phase, target_hash, pretty_dict
from perfrunner.helpers.monitor import Monitor
from perfrunner.helpers.remote import RemoteHelper
from perfrunner.helpers.reporter import Reporter
from perfrunner.helpers.rest import RestHelper, SyncGatewayRequestHelper
from perfrunner.helpers.worker import WorkerManager
from perfrunner.settings import TargetSettings
class TargetIterator(object):
def __init__(self, cluster_spec, test_config, prefix=None):
self.cluster_spec = cluster_spec
self.test_config = test_config
self.prefix = prefix
def __iter__(self):
password = self.test_config.bucket.password
prefix = self.prefix
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
if self.prefix is None:
prefix = target_hash(master.split(':')[0])
yield TargetSettings(master, bucket, password, prefix)
class PerfTest(object):
COLLECTORS = {}
MONITORING_DELAY = 10
def __init__(self, cluster_spec, test_config, verbose, experiment=None):
self.cluster_spec = cluster_spec
self.test_config = test_config
self.target_iterator = TargetIterator(cluster_spec, test_config)
self.memcached = MemcachedHelper(test_config)
self.monitor = Monitor(cluster_spec)
self.rest = RestHelper(cluster_spec)
self.remote = RemoteHelper(cluster_spec, test_config, verbose)
if experiment:
self.experiment = ExperimentHelper(experiment,
cluster_spec, test_config)
self.master_node = cluster_spec.yield_masters().next()
if self.remote and self.remote.gateways:
self.build = SyncGatewayRequestHelper().get_version(
self.remote.gateways[0]
)
else:
self.build = self.rest.get_version(self.master_node)
self.cbagent = CbAgent(self, verbose=verbose)
self.metric_helper = MetricHelper(self)
self.reporter = Reporter(self)
self.reports = {}
self.snapshots = []
self.master_events = []
if self.test_config.test_case.use_workers:
self.worker_manager = WorkerManager(cluster_spec, test_config)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.test_config.test_case.use_workers:
self.worker_manager.terminate()
if exc_type != exc.KeyboardInterrupt and '--nodebug' not in sys.argv:
self.debug()
self.check_core_dumps()
for master in self.cluster_spec.yield_masters():
if not self.rest.is_balanced(master):
logger.interrupt('Rebalance failed')
self.check_failover(master)
def check_failover(self, master):
if hasattr(self, 'rebalance_settings'):
if self.rebalance_settings.failover or \
self.rebalance_settings.graceful_failover:
return
num_failovers = self.rest.get_failover_counter(master)
if num_failovers:
logger.interrupt(
'Failover happened {} time(s)'.format(num_failovers)
)
def check_core_dumps(self):
dumps_per_host = self.remote.detect_core_dumps()
core_dumps = {
host: dumps for host, dumps in dumps_per_host.items() if dumps
}
if core_dumps:
logger.interrupt(pretty_dict(core_dumps))
def compact_bucket(self):
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
self.rest.trigger_bucket_compaction(master, bucket)
time.sleep(self.MONITORING_DELAY)
for master in self.cluster_spec.yield_masters():
self.monitor.monitor_task(master, 'bucket_compaction')
def wait_for_persistence(self):
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
self.monitor.monitor_disk_queues(master, bucket)
self.monitor.monitor_tap_queues(master, bucket)
self.monitor.monitor_upr_queues(master, bucket)
def load(self, load_settings=None, target_iterator=None):
if load_settings is None:
load_settings = self.test_config.load_settings
if target_iterator is None:
target_iterator = self.target_iterator
if self.test_config.spatial_settings:
load_settings.spatial = self.test_config.spatial_settings
log_phase('load phase', load_settings)
self.worker_manager.run_workload(load_settings, target_iterator)
self.worker_manager.wait_for_workers()
def hot_load(self):
hot_load_settings = self.test_config.hot_load_settings
if self.test_config.spatial_settings:
hot_load_settings.spatial = self.test_config.spatial_settings
log_phase('hot load phase', hot_load_settings)
self.worker_manager.run_workload(hot_load_settings,
self.target_iterator)
self.worker_manager.wait_for_workers()
def access(self, access_settings=None):
if access_settings is None:
access_settings = self.test_config.access_settings
if self.test_config.spatial_settings:
access_settings.spatial = self.test_config.spatial_settings
log_phase('access phase', access_settings)
self.worker_manager.run_workload(access_settings, self.target_iterator)
self.worker_manager.wait_for_workers()
def access_bg(self, access_settings=None):
if access_settings is None:
access_settings = self.test_config.access_settings
if self.test_config.spatial_settings:
access_settings.spatial = self.test_config.spatial_settings
log_phase('access phase in background', access_settings)
access_settings.index_type = self.test_config.index_settings.index_type
access_settings.ddocs = getattr(self, 'ddocs', None)
self.worker_manager.run_workload(access_settings, self.target_iterator,
timer=access_settings.time)
def timer(self):
access_settings = self.test_config.access_settings
logger.info('Running phase for {} seconds'.format(access_settings.time))
time.sleep(access_settings.time)
def debug(self):
self.remote.collect_info()
for hostname in self.cluster_spec.yield_hostnames():
for fname in glob.glob('{}/*.zip'.format(hostname)):
shutil.move(fname, '{}.zip'.format(hostname))
self.reporter.save_web_logs()
if self.test_config.cluster.run_cbq:
self.remote.collect_cbq_logs()
for hostname in self.cluster_spec.yield_hostnames():
for fname in glob.glob('{}/cbq.log'.format(hostname)):
shutil.move(fname, '{}-cbq.log'.format(hostname))
| apache-2.0 | 6,741,527,375,062,040,000 | 38.473404 | 80 | 0.640614 | false |
heromod/migrid | mig/webserver/webserver.py | 1 | 1966 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# webserver - [insert a few words of module description on this line]
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Simple test CGI server"""
import sys
import CGIHTTPServer
import BaseHTTPServer
import SocketServer
class Handler(CGIHTTPServer.CGIHTTPRequestHandler):
cgi_directories = ['/cgi-bin']
class ThreadingServer(SocketServer.ThreadingMixIn,
BaseHTTPServer.HTTPServer):
pass
class ForkingServer(SocketServer.ForkingMixIn,
BaseHTTPServer.HTTPServer):
pass
# Listen address
IP = '127.0.0.1'
PORT = 8080
print 'Serving at %s port %d' % (IP, PORT)
print 'before attr override: have fork: %s' % Handler.have_fork
Handler.have_fork = False
print 'after attr override: have fork: %s' % Handler.have_fork
# server = BaseHTTPServer.HTTPServer((IP, PORT), Handler)
# server.serve_forever()
# server = ThreadingServer((IP,PORT), Handler)
server = ForkingServer((IP, PORT), Handler)
print 'server attr: have fork: %s'\
% server.RequestHandlerClass.have_fork
try:
while True:
sys.stdout.flush()
server.handle_request()
except KeyboardInterrupt:
print 'Server killed'
| gpl-2.0 | -8,052,238,269,252,702,000 | 23.886076 | 81 | 0.722787 | false |
phalladar/SCOTUSHaikus | haikufinder/__init__.py | 1 | 10233 | # A hack to find "haikus" in English text. For the purposes of this program
# a "haiku" is one or more complete sentences that, together, can be broken
# into groups of 5, 7, and 5 syllables. Each candidate haiku line, and then
# the entire haiku, has to make it through a few heuristics to filter out
# constructions that are likely to scan awkwardly (like verb phrases split
# across lines). Since this code doesn't really try to understand the texts,
# it might throw away a few legitimate phrases, and it certainly lets through
# some bad ones.
#
# Any improvements would be welcomed.
#
# License:
#
# Copyright (c) 2009, Jonathan Feinberg <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import with_statement
import nltk
import re
import pickle
import gzip
import os.path
import sys
def file(relpath):
return os.path.join(os.path.dirname(__file__), relpath)
def read_alternates(which):
with open(file('data/awkward_%s'%which), 'r') as baddies:
return '|'.join([e.strip() for e in baddies.readlines() if len(e.strip()) > 0])
single_line_filters = [
re.compile(r'^[a-z][^.?!;:]+([.?!;:]+[^.?!;:]+)+$', re.IGNORECASE),
re.compile(r'[.?!;:]+\s+[\'"]?[A-Za-z]+(?:\'[a-z]+)?$'),
]
single_line_filters.append(re.compile(r'^(?:%s)\b'%read_alternates('starts')))
single_line_filters.append(re.compile(r'\b(?:%s)$'%read_alternates('ends'), re.IGNORECASE))
first_word_comma = re.compile(r'^\s*[a-z]\w*,')
with open(file('data/awkward_breaks'), 'r') as breaks:
alts = '|'.join([r'\b%s\b' % ('\n'.join(e.strip().split())) for e in breaks.readlines() if len(e.strip()) > 0]
+ ['[^\'".?!;:,]\n[a-z]+(?:\'[a-z]+)?[".?!;:]+.',
'"\S+\n\S+"',
])
break_filter = re.compile(alts, re.IGNORECASE)
# load the syllable-count dictionary
with open(file('cmudict/cmudict.pickle'), 'rb') as p:
syllables = pickle.load(p)
with open(file('cmudict/custom.dict'), 'r') as p:
for line in p.readlines():
(word, count) = line.split()
syllables[word] = int(count)
# Use the NLTK to determine sentence boundaries.
sentence_tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
number_syllables = (
# 0 1 2 3 4 5 6 7 8 9
2, 1, 1, 1, 1, 1, 1, 2, 1, 1,
1, 3, 1, 2, 2, 2, 2, 3, 2, 2,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
3, 4, 4, 4, 4, 4, 4, 5, 4, 4,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
2, 3, 3, 3, 3, 3, 3, 4, 3, 3,
)
has_digit = re.compile(r'\d')
ordinal = re.compile(r'^(\d\d?)(?:rd|th|st)$', re.IGNORECASE)
too_many_digits = re.compile('\d\d\d')
short_time = re.compile(r'^([1-2]?[0-9])(?:[ap]m)$',re.IGNORECASE)
time = re.compile(r'^([1-2]?[0-9]):(\d\d)([ap]m)?$',re.IGNORECASE)
word_shapes = (
re.compile(r'^[^a-z0-9\$@]*([-@&_0-9a-z\+]+(?:\'[a-z]+)?)[^a-z0-9]*$', re.IGNORECASE),
re.compile(r'^[^\$]*(\$\d+(?:\.\d{1,2})?)[^a-z0-9]*$', re.IGNORECASE),
re.compile(r'^[^a-z0-9]*([1-2]?[0-9]:\d\d(\s*[ap]m)?)[^a-z0-9]*$', re.IGNORECASE),
)
class Nope(Exception):
pass
class TooShort(Exception):
pass
class LineSyllablizer:
def __init__(self, line, unknown_word_handler=None):
self.words = line.split()
self.index = 0
self.lines = []
self.unknown_word_handler = unknown_word_handler
def _count_chunk_syllables(self, chunk):
if has_digit.search(chunk):
return number_syllables[int(chunk)]
else:
return syllables[chunk]
def _count_syllables(self, word, splitter=re.compile(r'(?<=\D)(?=\d)|(?<=\d)(?=\D)')):
"Raises KeyError, Nope"
if not word or len(word) == 0:
return 0
if 'http:' in word:
raise Nope
if '0' == word[0] and len(word) > 1:
return 1 + self._count_syllables(word[1:]) # oh seven
if '$' == word[0]:
return 2 + self._count_syllables(word[1:]) # 13 dollars
if '@' == word[0]:
return 1 + self._count_syllables(word[1:]) # user name
if '&' in word and len(word) > 1:
return 1 + sum(self._count_syllables(w) for w in word.split('&'))
if '-' in word:
return sum(self._count_syllables(w) for w in word.split('-'))
if '_' in word:
return sum(self._count_syllables(w) for w in word.split('_'))
if not has_digit.search(word):
return syllables[word]
if too_many_digits.search(word):
raise Nope
m = short_time.match(word)
if m:
return 2 + number_syllables[int(m.group(1))]
m = time.match(word)
if m:
if m.group(2) == '00':
minutes = 2
else:
minutes = number_syllables[int(m.group(2))]
partial = number_syllables[int(m.group(1))] + minutes
if m.group(3):
return 2 + partial
return partial
m = ordinal.match(word)
if m:
return number_syllables[int(m.group(1))]
count = 0
start = 0
for m in splitter.finditer(word):
boundary = m.start()
count += self._count_chunk_syllables(word[start:boundary])
start = boundary
count += self._count_chunk_syllables(word[start:])
return count
def clean(self, word):
for shape in word_shapes:
m = shape.match(word)
if m:
return m.group(1).upper()
return None
def count_syllables(self):
si = 0
syllable_count = 0
try:
for word in self.words:
syllable_count += self._count_syllables(self.clean(word))
except KeyError:
print("I don't know '%s'"%word)
return -1
except Nope:
print("I can't do '%s'"%word)
return -1
return syllable_count
def seek(self, n):
si = self.index
syllable_count = 0
try:
while syllable_count < n:
word = self.clean(self.words[self.index])
syllable_count += self._count_syllables(word)
self.index += 1
except KeyError:
if word and self.unknown_word_handler:
self.unknown_word_handler(word)
raise Nope
except IndexError:
raise TooShort
if syllable_count > n:
raise Nope
line = ' '.join(self.words[si:self.index])
for f in single_line_filters:
if f.search(line):
raise Nope
self.lines.append(line)
def seek_eol(self):
if self.index != len(self.words):
raise Nope
def bad_split(self, n):
return awkward_in_front_without_punct_before.search(self.lines[n]) and not self.lines[n - 1][-1] in (',', ';', '-')
def find_haiku(self):
self.seek(5)
self.seek(7)
self.seek(5)
self.seek_eol()
if first_word_comma.search(self.lines[1]) or first_word_comma.search(self.lines[2]):
raise Nope
if break_filter.search('\n'.join(self.lines)):
raise Nope
return self.lines
class HaikuFinder:
def __init__(self, text, unknown_word_handler=None):
self.lines = sentence_tokenizer.tokenize(text)
self.unknown_word_handler = unknown_word_handler
def find_haikus(self):
haikus = []
line_index = 0
line_count = len(self.lines)
while line_index < line_count:
offset = 0
line = ""
while line_index + offset < line_count:
line = "%s %s" % (line, self.lines[line_index + offset])
try:
haikus.append(LineSyllablizer(line, self.unknown_word_handler).find_haiku())
break
except Nope:
break
except TooShort:
offset += 1
line_index += 1
return haikus
@classmethod
def add_word(cls, word, syllable_count):
syllables[word.upper()] = syllable_count
def find_haikus(text, unknown_word_handler=None):
return HaikuFinder(text, unknown_word_handler).find_haikus()
def count_syllables(text):
return LineSyllablizer(text).count_syllables()
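# Editor's usage sketch (not part of the original module). find_haikus() returns
# a list of three-line groups (5/7/5); count_syllables() returns -1 when a word
# is missing from the bundled CMU dictionary.
if __name__ == "__main__":
    sample = ("The west wind whispered, and touched the eyelids of spring. "
              "Her eyes, Primroses.")
    for haiku in find_haikus(sample):
        print("\n".join(haiku))
    print(count_syllables("the west wind whispered"))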
| bsd-3-clause | -299,711,272,710,562,400 | 36.346715 | 123 | 0.552722 | false |
Desenho-2-2017/Ecom_merci | products/viewsets.py | 1 | 8123 | from rest_framework.viewsets import ModelViewSet
from .models import (
ProductCategory,
Product
)
from .serializers import (
ProductCategorySerializerDefault,
ProductCategorySerializerPOST,
ProductSerializerDefault,
ProductSerializerPOST
)
from .permissions import (
ProductPermissions
)
class ProductCategoryViewSet(ModelViewSet):
"""
API endpoint that allows category to be
viewed, created, deleted or edited.
"""
queryset = ProductCategory.objects.all()
serializer_class = ProductCategorySerializerDefault
permission_classes = (ProductPermissions,)
def get_serializer_class(self):
if self.action == 'create':
return ProductCategorySerializerPOST
return ProductCategorySerializerDefault
def list(self, request):
"""
API endpoint that allows category to be viewed.
---
Response example:
Return a list of:
```
{
"id": "integer",
"category_name": "string",
"father_category": "string"
}
```
"""
response = super(ProductCategoryViewSet, self).list(request)
return response
def create(self, request):
"""
API endpoint that allows category to be created.
---
Body example:
"father_category" is optional
```
{
"category_name": "string",
"father_category": "string"
}
```
Response example:
```
{
"pk": "integer",
"category_name": "string",
"father_category": "string"
}
```
"""
response = super(ProductCategoryViewSet, self).create(request)
return response
def destroy(self, request, pk=None):
"""
API endpoint that allows category to be deleted.
Receive the ID of product category.
"""
response = super(ProductCategoryViewSet, self).destroy(request, pk)
return response
def retrieve(self, request, pk=None):
"""
API endpoint that allows the return\
of a product category through the method Get.
Receive the ID of product category.
---
Response example:
```
{
"id": "integer",
"category_name": "string",
"father_category": "string"
}
```
"""
response = super(ProductCategoryViewSet, self).retrieve(request, pk)
return response
def partial_update(self, request, pk=None, **kwargs):
"""
API endpoint that allows partial update of category.
---
Parameters:
Category ID and a JSON with one or more attributes of category.
Example:
```
{
"category_name": "string"
}
```
"""
response = super(ProductCategoryViewSet, self).\
partial_update(request, pk, **kwargs)
return response
def update(self, request, pk=None, **kwargs):
"""
API endpoint that allows category to be updated.
Parameters:
Cart ID and a JSON with all with all required attributes.
Example:
```
{
"category_name": "string"
}
```
"""
response = super(ProductCategoryViewSet, self).update(
request, pk, **kwargs
)
return response
class ProductViewSet(ModelViewSet):
"""
API endpoint that allows products to be\
viewed, created, deleted or edited.
"""
queryset = Product.objects.all()
serializer_class = ProductSerializerDefault
permission_classes = (ProductPermissions,)
def get_serializer_class(self):
if self.action == 'create':
return ProductSerializerPOST
return ProductSerializerDefault
def list(self, request):
"""
API endpoint that allows product to be viewed.
---
Response example:
Return a list of:
```
{
"id": "integer",
"product_name": "string",
"stock_quantity": "integer",
"price": "integer",
"weight": "integer",
"width": "integer",
"height": "integer",
"product_type": "string",
"illustration": "path",
"category_id": "integer"
}
```
"""
response = super(ProductViewSet, self).list(request)
return response
def create(self, request):
"""
API endpoint that allows product to be created.
Body example:
```
{
"stock_quantity": "integer",
"product_name": "string",
"price": "integer",
"width": "integer",
"height": "integer",
"category_id": "string",
"product_type": "string",
"illustration": "string",
"weight": "integer"
}
```
---
Response example:
```
{
"id": "integer",
"product_name": "string",
"stock_quantity": "integer",
"price": "integer",
"weight": "integer",
"width": "integer",
"height": "integer",
"product_type": "string",
"illustration": "path",
"category_id": "integer"
}
```
"""
response = super(ProductViewSet, self).create(request)
return response
def destroy(self, request, pk=None):
"""
API endpoint that allows product to be deleted.
"""
response = super(ProductViewSet, self).destroy(request, pk)
return response
def retrieve(self, request, pk=None):
"""
        API endpoint that allows a product to be\
        retrieved through the GET method.
Receive the ID of product.
---
Response example:
```
{
"id": "integer",
"product_name": "string",
"stock_quantity": "integer",
"price": "integer",
"weight": "integer",
"width": "integer",
"height": "integer",
"product_type": "string",
"illustration": "path",
"category_id": "integer"
}
```
"""
response = super(ProductViewSet, self).retrieve(request, pk)
return response
def partial_update(self, request, pk=None, **kwargs):
"""
        API endpoint that allows partial update of a product.
---
Parameters:
        Product ID and a JSON with one or more attributes of product.
        Full update:
```
{
"stock_quantity": "integer",
"product_name": "string",
"price": "integer",
"width": "integer",
"height": "integer",
"category_id": "string",
"product_type": "string",
"illustration": "string",
"weight": "integer"
}
```
---
        Partial update:
```
{
"stock_quantity": "integer",
"product_name": "string",
"price": "integer",
"illustration": "string",
"weight": "integer"
}
```
"""
response = super(ProductViewSet, self).\
partial_update(request, pk, **kwargs)
return response
def update(self, request, pk=None, **kwargs):
"""
        API endpoint that allows product to be updated.
---
Parameters:
Product ID and a JSON with all required attributes.
```
{
"stock_quantity": "integer",
"product_name": "string",
"price": "integer",
"category_id": "string",
"illustration": "string"
}
```
"""
response = super(ProductViewSet, self).update(
request, pk, **kwargs
)
return response
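# Editor's wiring sketch (not part of the original file): these viewsets are
# normally exposed through a DRF router in the project's urls.py; the URL
# prefixes below are illustrative assumptions.
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'product-categories', ProductCategoryViewSet)
router.register(r'products', ProductViewSet)
# urlpatterns = router.urls  (this assignment would live in urls.py)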
| mit | 1,955,034,141,973,025,500 | 26.442568 | 76 | 0.508187 | false |
Williams224/davinci-scripts | ksteta3pi/PotentialBackgrounds/MC_12_12103450_MagUp.py.py | 2 | 5770 | #-- GAUDI jobOptions generated on Fri Jul 24 17:08:37 2015
#-- Contains event types :
#-- 12103450 - 52 files - 1001933 events - 308.74 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-127420
#-- StepId : 127420
#-- StepName : Reco14c for MC - 2012
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p10
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r207
#-- Visible : Y
#-- Processing Pass Step-127160
#-- StepId : 127160
#-- StepName : Stripping21-NoPrescalingFlagged for Sim08 - MU - Implicit merging.
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v36r1p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping21-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DV-RedoCaloPID-Stripping21.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : sim-20141210-1-vc-mu100
#-- ExtraPackages : AppConfig.v3r205
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000001_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000002_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000003_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000004_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000005_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000006_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000007_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000008_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000009_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000010_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000011_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000012_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000013_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000014_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000015_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000016_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000017_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000018_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000019_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000020_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000021_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000022_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000023_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000024_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000025_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000026_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000027_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000028_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000029_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000030_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000031_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000032_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000033_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000034_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000035_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000036_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000037_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000038_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000039_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000040_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000041_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000042_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000043_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000044_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000045_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000046_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000047_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000048_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000049_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000050_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000051_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044124/0000/00044124_00000052_2.AllStreams.dst'
], clear=True)
| mit | 3,266,887,118,599,757,000 | 64.568182 | 268 | 0.783709 | false |
hudalao/Langevin_dynamics | travis_pypi_setup.py | 1 | 3763 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update encrypted deploy password in Travis config file
"""
from __future__ import print_function
import base64
import json
import os
from getpass import getpass
import yaml
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
try:
from urllib import urlopen
except:
from urllib.request import urlopen
GITHUB_REPO = 'hudalao/Langevin_dynamics'
TRAVIS_CONFIG_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '.travis.yml')
def load_key(pubkey):
"""Load public RSA key, with work-around for keys using
incorrect header/footer format.
Read more about RSA encryption with cryptography:
https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/
"""
try:
return load_pem_public_key(pubkey.encode(), default_backend())
except ValueError:
# workaround for https://github.com/travis-ci/travis-api/issues/196
pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END')
return load_pem_public_key(pubkey.encode(), default_backend())
def encrypt(pubkey, password):
"""Encrypt password using given RSA public key and encode it with base64.
The encrypted password can only be decrypted by someone with the
private key (in this case, only Travis).
"""
key = load_key(pubkey)
encrypted_password = key.encrypt(password, PKCS1v15())
return base64.b64encode(encrypted_password)
def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read().decode())
if 'key' not in data:
errmsg = "Could not find public key for repo: {}.\n".format(repo)
errmsg += "Have you already added your GitHub repo to Travis?"
raise ValueError(errmsg)
return data['key']
def prepend_line(filepath, line):
"""Rewrite a file adding a line to its beginning.
"""
with open(filepath) as f:
lines = f.readlines()
lines.insert(0, line)
with open(filepath, 'w') as f:
f.writelines(lines)
def load_yaml_config(filepath):
with open(filepath) as f:
return yaml.load(f)
def save_yaml_config(filepath, config):
with open(filepath, 'w') as f:
yaml.dump(config, f, default_flow_style=False)
def update_travis_deploy_password(encrypted_password):
"""Update the deploy section of the .travis.yml file
to use the given encrypted password.
"""
config = load_yaml_config(TRAVIS_CONFIG_FILE)
config['deploy']['password'] = dict(secure=encrypted_password)
save_yaml_config(TRAVIS_CONFIG_FILE, config)
line = ('# This file was autogenerated and will overwrite'
' each time you run travis_pypi_setup.py\n')
prepend_line(TRAVIS_CONFIG_FILE, line)
def main(args):
public_key = fetch_public_key(args.repo)
password = args.password or getpass('PyPI password: ')
update_travis_deploy_password(encrypt(public_key, password.encode()))
print("Wrote encrypted password to .travis.yml -- you're ready to deploy")
if '__main__' == __name__:
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--repo', default=GITHUB_REPO,
help='GitHub repo (default: %s)' % GITHUB_REPO)
parser.add_argument('--password',
help='PyPI password (will prompt if not provided)')
args = parser.parse_args()
main(args)
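# Typical invocation (editor's note, not part of the original script):
#   python travis_pypi_setup.py --repo GITHUB_USER/REPO --password PYPI_PASSWORD
# Without --password the script prompts for it interactively via getpass.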
| mit | 1,190,196,389,545,195,300 | 29.844262 | 79 | 0.679777 | false |
AmineChikhaoui/nixops | nixops/resources/s3_bucket.py | 1 | 6521 | # -*- coding: utf-8 -*-
# Automatic provisioning of AWS S3 buckets.
import time
import botocore
import boto3
import nixops.util
import nixops.resources
import nixops.ec2_utils
class S3BucketDefinition(nixops.resources.ResourceDefinition):
"""Definition of an S3 bucket."""
@classmethod
def get_type(cls):
return "s3-bucket"
@classmethod
def get_resource_type(cls):
return "s3Buckets"
def __init__(self, xml, config={}):
nixops.resources.ResourceDefinition.__init__(self, xml, config)
self.bucket_name = xml.find("attrs/attr[@name='name']/string").get("value")
self.region = xml.find("attrs/attr[@name='region']/string").get("value")
self.access_key_id = xml.find("attrs/attr[@name='accessKeyId']/string").get("value")
self.policy = xml.find("attrs/attr[@name='policy']/string").get("value")
self.website_enabled = self.config["website"]["enabled"]
self.website_suffix = self.config["website"]["suffix"]
self.website_error_document = self.config["website"]["errorDocument"]
def show_type(self):
return "{0} [{1}]".format(self.get_type(), self.region)
class S3BucketState(nixops.resources.ResourceState):
"""State of an S3 bucket."""
state = nixops.util.attr_property("state", nixops.resources.ResourceState.MISSING, int)
bucket_name = nixops.util.attr_property("ec2.bucketName", None)
access_key_id = nixops.util.attr_property("ec2.accessKeyId", None)
region = nixops.util.attr_property("ec2.region", None)
@classmethod
def get_type(cls):
return "s3-bucket"
def __init__(self, depl, name, id):
nixops.resources.ResourceState.__init__(self, depl, name, id)
self._conn = None
def show_type(self):
s = super(S3BucketState, self).show_type()
if self.region: s = "{0} [{1}]".format(s, self.region)
return s
@property
def resource_id(self):
return self.bucket_name
def get_definition_prefix(self):
return "resources.s3Buckets."
def connect(self):
if self._conn: return
(access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id)
self._conn = boto3.session.Session(region_name=self.region if self.region != "US" else "us-east-1",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key)
def create(self, defn, check, allow_reboot, allow_recreate):
self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id()
if not self.access_key_id:
raise Exception("please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID")
if len(defn.bucket_name) > 63:
raise Exception("bucket name ‘{0}’ is longer than 63 characters.".format(defn.bucket_name))
self.connect()
s3client = self._conn.client('s3')
if check or self.state != self.UP:
self.log("creating S3 bucket ‘{0}’...".format(defn.bucket_name))
try:
ACL = 'private' # ..or: public-read, public-read-write, authenticated-read
s3loc = region_to_s3_location(defn.region)
if s3loc == "US":
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name)
else:
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name,
CreateBucketConfiguration = {
'LocationConstraint': s3loc
})
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketAlreadyOwnedByYou": raise
with self.depl._db:
self.state = self.UP
self.bucket_name = defn.bucket_name
self.region = defn.region
if defn.policy:
self.log("setting S3 bucket policy on ‘{0}’...".format(defn.bucket_name))
s3client.put_bucket_policy(Bucket = defn.bucket_name,
Policy = defn.policy.strip())
else:
try:
s3client.delete_bucket_policy(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
# This seems not to happen - despite docs indicating it should:
# [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise # (204 : Bucket didn't have any policy to delete)
if not defn.website_enabled:
try:
s3client.delete_bucket_website(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise
else:
website_config = { 'IndexDocument': { 'Suffix': defn.website_suffix } }
if defn.website_error_document != "":
website_config['ErrorDocument'] = { 'Key': defn.website_error_document}
s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config)
def destroy(self, wipe=False):
if self.state == self.UP:
self.connect()
try:
self.log("destroying S3 bucket ‘{0}’...".format(self.bucket_name))
bucket = self._conn.resource('s3').Bucket(self.bucket_name)
try:
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketNotEmpty": raise
if not self.depl.logger.confirm("are you sure you want to destroy S3 bucket ‘{0}’?".format(self.bucket_name)): return False
bucket.objects.all().delete()
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "NoSuchBucket": raise
return True
def region_to_s3_location(region):
# S3 location names are identical to EC2 regions, except for
# us-east-1 and eu-west-1.
if region == "eu-west-1": return "EU"
elif region == "us-east-1": return "US"
else: return region
| lgpl-3.0 | 7,310,174,858,576,745,000 | 39.861635 | 143 | 0.576266 | false |
leppa/home-assistant | homeassistant/components/rfxtrx/light.py | 1 | 2679 | """Support for RFXtrx lights."""
import logging
import RFXtrx as rfxtrxmod
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
Light,
)
from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_validation as cv
from . import (
CONF_AUTOMATIC_ADD,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_SIGNAL_REPETITIONS,
DEFAULT_SIGNAL_REPETITIONS,
RECEIVED_EVT_SUBSCRIBERS,
RfxtrxDevice,
apply_received_command,
get_devices_from_config,
get_new_device,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
}
)
},
vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean,
vol.Optional(
CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS
): vol.Coerce(int),
}
)
SUPPORT_RFXTRX = SUPPORT_BRIGHTNESS
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the RFXtrx platform."""
lights = get_devices_from_config(config, RfxtrxLight)
add_entities(lights)
def light_update(event):
"""Handle light updates from the RFXtrx gateway."""
if (
not isinstance(event.device, rfxtrxmod.LightingDevice)
or not event.device.known_to_be_dimmable
):
return
new_device = get_new_device(event, config, RfxtrxLight)
if new_device:
add_entities([new_device])
apply_received_command(event)
# Subscribe to main RFXtrx events
if light_update not in RECEIVED_EVT_SUBSCRIBERS:
RECEIVED_EVT_SUBSCRIBERS.append(light_update)
class RfxtrxLight(RfxtrxDevice, Light):
"""Representation of a RFXtrx light."""
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_RFXTRX
def turn_on(self, **kwargs):
"""Turn the light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
if brightness is None:
self._brightness = 255
self._send_command("turn_on")
else:
self._brightness = brightness
_brightness = brightness * 100 // 255
self._send_command("dim", _brightness)
| apache-2.0 | -6,649,617,365,527,922,000 | 26.618557 | 77 | 0.62486 | false |
chenghao/haoAdmin | service/sys/SysMenuService.py | 1 | 4894 | # coding:utf-8
__author__ = "gaunt"
from models import (SysMenuDao)
def get_user_menu(auths):
"""
    Get the main menu tree for the given set of permissions.
:param auths:
:return:
"""
menus = SysMenuDao.get_all_menu()
    # Keep only the menus the user is authorized to see
self_menus = []
[self_menus.append(m) for m in menus if not m["authority"] or m["authority"] in auths]
    # Collect the parent menu ids
ids = []
[ids.append(menu["parent_id"]) for menu in self_menus if menu["parent_id"] not in ids]
    # Drop empty menus
menus = []
[menus.append(m) for m in self_menus if m["menu_url"] or (not m["menu_url"] and m["menu_id"] in ids)]
    # Build the tree-shaped menu
tree_menus = _build_tree_menu(menus, -1)
return tree_menus
def _build_tree_menu(menus, parent_id):
tree_menus = []
for menu in menus:
if parent_id == menu["parent_id"]:
name = menu["menu_name"]
icon = menu["menu_icon"]
url = menu["menu_url"] if menu["menu_url"] else "javascript:;"
sub_menus = _build_tree_menu(menus, menu["menu_id"])
tree_menus.append({"name": name, "icon": icon, "url": url, "subMenus": sub_menus})
return tree_menus
def get_menu_list(keyword):
"""
    Get the menu list, optionally filtered by keyword.
:param keyword:
:return:
"""
menus = SysMenuDao.get_all_menu()
    # Set the parent node name
for menu in menus:
bo = True
for m in menus:
if menu["parent_id"] == m["menu_id"]:
menu["parent_name"] = m["menu_name"]
bo = False
break
if bo:
menu["parent_name"] = ""
    # Filter the results by keyword
menu_list = []
if keyword:
[menu_list.append(m) for m in menus if keyword in m["menu_name"] or (m["parent_name"] and keyword in m["parent_name"])]
else:
menu_list = menus
    # Sort
menu_dict = {}
list_dict = {}
for m in menu_list:
menu_id_key = "menu{}".format(m["menu_id"])
parent_id_key = "menu{}".format(m["parent_id"])
if m["parent_id"] == -1:
menu_dict[menu_id_key] = m
else:
if len(list_dict) == 0:
list_dict[parent_id_key] = [m]
else:
if parent_id_key in list_dict.keys():
list = list_dict[parent_id_key]
list.append(m)
list_dict[parent_id_key] = list
else:
list_dict[parent_id_key] = [m]
lists = []
    # no top-level parents matched, only child menus did
if len(menu_dict) == 0 and len(list_dict) > 0:
for key in list_dict:
[lists.append(l) for l in list_dict[key]]
    # both parents and children matched
for key in menu_dict.keys():
lists.append(menu_dict[key])
        ll = list_dict.get(key, [])  # a parent may have no children
[lists.append(l) for l in ll]
return lists
def del_menu(menu_id):
"""
    Delete a menu by its id.
:param menu_id:
:return:
"""
    # first check that the menu exists
menu = SysMenuDao.get(menu_id)
if menu is None:
return "该菜单不存在"
    # fetch the child menus of this menu
childs = SysMenuDao.get_child_menu(menu_id)
if len(childs) > 0:
return "该菜单还有子菜单,不能删除"
    # delete the menu
SysMenuDao.del_menu(menu_id)
def add_menu(parent_id, menu_name, menu_url, menu_icon, sort_number, authority):
"""
    Create a new menu.
:param parent_id:
:param menu_name:
:param menu_url:
:param menu_icon:
:param sort_number:
:param authority:
:return:
"""
    # check that the parent id exists
if parent_id and parent_id != -1:
menu = SysMenuDao.get(parent_id)
if menu is None:
return "该父级ID不存在"
    # save
data = {"parent_id": parent_id, "menu_name": menu_name, "menu_url": menu_url, "menu_icon": menu_icon,
"sort_number": sort_number, "authority": authority}
i = SysMenuDao.add_menu(data)
if i <= 0:
return "保存菜单失败"
def up_menu(menu_id, parent_id, menu_name, menu_url, menu_icon, sort_number, authority):
"""
    Update an existing menu.
:param menu_id:
:param parent_id:
:param menu_name:
:param menu_url:
:param menu_icon:
:param sort_number:
:param authority:
:return:
"""
    # check that this menu exists
menu = SysMenuDao.get(menu_id)
if menu is None:
return "该菜单不存在"
    # check that the parent id exists
if parent_id and parent_id != -1:
menu = SysMenuDao.get(parent_id)
if menu is None:
return "该父级ID不存在"
    # save
data = {"menu_id": menu_id, "parent_id": parent_id, "menu_name": menu_name, "menu_url": menu_url,
"menu_icon": menu_icon, "sort_number": sort_number, "authority": authority}
i = SysMenuDao.up_menu(data)
if i <= 0:
return "修改菜单失败"
| apache-2.0 | 7,500,983,009,860,910,000 | 24.393258 | 127 | 0.536947 | false |
lsaffre/lino-welfare | lino_welfare/modlib/cbss/fixtures/purposes.py | 1 | 9137 | # -*- coding: UTF-8 -*-
# Copyright 2012 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""
Fills the cbss.Purposes table with the official values defined in
`lijst_hoedanigheidscodes.pdf
<http://www.bcss.fgov.be/binaries/documentation/fr/documentation/general/lijst_hoedanigheidscodes.pdf>`__.
"""
from lino.api.dd import babel_values
from lino.core.utils import resolve_model
PURPOSES = u"""
1 10 INDEMNISATION AUX VICTIMES | VERGOEDING AAN SLACHTOFFERS
1 20 RENTES DES AYANTS DROIT | RENTEN AAN RECHTHEBBENDEN
1 30 PAIEMENTS À DES TIERS & AUTRES CORRESPONDANTS | BETALINGEN AAN DERDEN EN ANDERE CORRESPONDENTEN
1 40 TRAVAILLEURS ASSIMILÉS - CARRIÈRE INCOMPLÈTE | GELIJKGESTELDE WERKNEMERS - ONVOLLEDIGE LOOPBAAN
1 50 Débiteurs | debiteuren
5 1 PREMIER CONTACT | EERSTE CONTACT
5 10 DOSSIER ESTIMATION | RAMINGDOSSIER
5 20 DOSSIER ATTRIBUTION | TOEKENNINGSDOSSIER
5 30 ATTRIBUTION GRAPA | TOEKENNING IGO
5 31 COHABITANT GRAPA | SAMENWONENDE IGO
5 100 DOSSIER PAIEMENT | BETALINGSDOSSIER
5 110 DOSSIER PAIEMENT RESIDUAIRE | BETALINGSDOSSIER RESIDUAIR RECHT
5 150 DOSSIER CADASTRE | DOSSIER KADASTER
5 500 Dossier échange bilatéral | Dossier bilaterale uitwisseling
6 10 INDEMNISATION AUX VICTIMES | VERGOEDING AAN SLACHTOFFERS
6 20 RENTES DES AYANTS DROIT | RENTEN AAN RECHTHEBBENDEN
6 30 PAIEMENTS À DES TIERS & AUTRES CORRESPONDANTS | BETALINGEN AAN DERDEN EN ANDERE CORRESPONDENTEN
6 40 TRAVAILLEURS ASSIMILÉS - CARRIÈRE INCOMPLÈTE | GELIJKGESTELDE WERKNEMERS - ONVOLLEDIGE LOOPBAAN
6 50 FEMME ENCEINTE ECART2E | VERWIJDERDE ZWANGERE VROUW
7 101 BÉNÉFICIAIRE | RECHTHEBBENDE
7 102 ALLOCATAIRE TYPE 1 | BIJSLAGTREKKENDE TYPE 1
7 103 ALLOCATAIRE TYPE 2 | BIJSLAGTREKKENDE TYPE 2
7 104 ENFANT BÉNÉFICIAIRE | RECHTGEVEND KIND
7 105 TIERCE PERSONNE TYPE 1 | DERDE PERSOON TYPE 1
7 106 TIERCE PERSONNE TYPE 2 | DERDE PERSOON TYPE 2
7 107 PERSONNE EN RECHERCHE | PERSOON IN ONDERZOEK
9 2 AYANT DROIT À UNE INTERVENTION MAJORÉE DE L’ASSURANCE SOINS DE SANTÉ (TITULAIRE OU BÉNÉFICIAIRE) | RECHTHEBBENDE OP VERHOOGDE TUSSENKOMST IN HET KADER VAN DE GEZONDHEIDSZORG (TITULARIS OF GERECHTIGDE)
9 12 AYANT DROIT À UNE INTERVENTION MAJORÉE DE L’ASSURANCE SOINS DE SANTÉ (PERSONNE À CHARGE) | RECHTHEBBENDE OP VERHOOGDE TUSSENKOMST IN HET KADER VAN DE GEZONDHEIDSZORG (PERSOON TEN LASTE)
10 10 OUVRIER | ARBEIDER
11 1 ASSURABILITÉ SOINS DE SANTÉ | VERZEKERBAARHEID GENEESKUNDIGE VERZORGING
11 2 PERSONNE AVEC DOSSIER INDEMNITÉ | PERSOON MET DOSSIER ARBEIDSONGESCHIKTHEID
12 2 MEMBRE DU PERSONNEL | PERSONEELSLID
12 10 SALARIÉ | WERKNEMER
12 30 DIMONA | DIMONA
12 40 ENQUETE EMPLOYEUR | ONDERZOEK WERKGEVER
13 10 TRAVAILLEUR | WERKNEMER
13 30 DIMONA | DIMONA
14 10 SALARIÉ AVEC TENUE DE COMPTE PENSION | WERKNEMER MET PENSIOENREKENING
14 21 SALARIÉ SANS TENUE DE COMPTE PENSION | WERKNEMER ZONDER PENSIOENREKENING
14 30 TRAVAILLEUR POUR QUI UNE DÉCLARATION DIMONA A ÉTÉ FAITE | WERKNEMER VOOR WIE EEN DIMONA-AANGIFTE WERD VERRICHT
14 50 Carrière fonctionnaires, employées contractuels | Loopbaan ambtenaren, contractuele werknemers
15 1 DOSSIER EN EXAMEN | DOSSIER IN ONDERZOEK
15 2 STATUT SOCIAL DE TRAVAILLEUR INDÉPENDANT | SOCIAAL STATUUT VAN ZELFSTANDIGE
15 3 BÉNÉFICIAIRE DES ALLOCATIONS FAMILIALES SECTEUR INDÉPENDANTS | RECHTGEVEND KIND OP GEZINSBIJSLAG SECTOR ZELFSTANDIGEN
15 6 (EX-)CONJOINT DE L’INDÉPENDANT, AYANT DROIT DANS LE STATUT SOCIAL DES INDÉPENDANTS | (EX-)PARTNER VAN DE ZELFSTANDIGE, RECHTHEBBENDE IN HET SOCIAAL STATUUT DER ZELFSTANDIGEN
15 7 ACTEUR QUI PEUT INFLUENCER LA DETERMINATION DU DROIT AUX PRESTATIONS FAMILIALES | ACTOR DIE EEN INVLOED KAN UITOEFENEN OP HET BEPALEN VAN HET RECHT OP GEZINSBIJSLAG
15 8 ALLOCATAIRE (PRESTATIONS FAMILIALES) | BIJSLAGTREKKENDE (GEZINSBIJSLAG)
16 1 PERSONNE HANDICAPÉE (ALLOCATION) | PERSOON MET EEN HANDICAP (TEGEMOETKOMING)
16 2 ENFANT HANDICAPÉ | KIND MET EEN HANDICAP
16 3 RECONNAISSANCE MEDICALE | MEDISCHE ERKENNING
16 4 PERSONNE AVEC LAQUELLE LA PERSONNE HANDICAPÉE FORME UN MÉNAGE | PERSOON MET WIE DE PERSOON MET EEN HANDICAP EEN GEZIN VORMT
16 5 DOSSIER BENEFICIAIRE D’UNE ALLOCATION D’INTEGRATION / ALLOCATION DE REMPLACEMENT DE REVENU | DOSSIER GERECHTIGDE OP INKOMENSVERVANGENDE TEGEMOETKOMING / INTEGRATIETEGEMOETKOMING
16 6 PERSONNE FAISANT PARTIE DU MENAGE D’UNE PERSONNE HANDICAPEE BENEFICIANT D’UNE ALLOCATION D’INTEGRATION / ALLOCATION DE REMPLACEMENT DE REVENU | PERSOON DIE EEN HUISHOUDEN VORMT MET DE PERSOON MET EEN HANDICAP IN HET KADER VAN EEN DOSSIER INKOMENSVERVANGENDE TEGEMOETKOMING / INTEGRATIETEGEMOETKOMING
16 7 DOSSIER BENEFICIAIRE D’UNE ALLOCATION D’AIDE AUX PERSONNES AGEES | DOSSIER GERECHTIGDE OP TEGEMOETKOMING VOOR HULP AAN BEJAARDEN
16 8 PERSONNE FAISANT PARTIE DU MENAGE D’UNE PERSONNE HANDICAPEE BENEFICIANT D’UNE ALLOCATION D’AIDE AUX PERSONNES AGEES | PERSOON DIE HUISHOUDEN VORMT MET DE PERSOON MET EEN HANDICAP IN HET KADER VAN DE TEGEMOETKOMING VOOR HULP AAN BEJAARDEN
18 1 CHÔMEUR CONTRÔLÉ | GECONTROLEERDE WERKLOZE
18 2 TRAVAILLEUR EN INTERRUPTION DE CARRIÈRE | WERKNEMER IN LOOPBAANONDERBREKING
18 3 TRAVAILLEUR VICTIME D’UNE FERMETURE D’ENTREPRISE | WERKNEMER SLACHTOFFER VAN EEN SLUITING VAN EEN ONDERNEMING
18 4 DEMANDEUR D’EMPLOI | WERKZOEKENDE
18 5 MEMBRE DU PERSONNEL ONEM | PERSONEELSLID RVA
19 1 DOSSIER ACTIF | ACTIEF DOSSIER
19 2 DOSSIER INACTIF | NIET-ACTIEF DOSSIER
19 10 MÉDECIN | GENEESHEER
19 20 PHARMACIEN | APOTHEKER
19 21 ASSISTANT-PHARMACEUTICA-TECHNIQUE | FARMACEUTISCH-TECHNISCH ASSISTENT
19 30 DENTISTE | TANDARTS
19 40 ACCOUCHEUSE | VROEDVROUW
19 41 INFIRMIER | VERPLEGER
19 42 AIDE-SOIGNANT | ZORGKUNDIGE
19 50 KINÉSITHÉRAPEUTE | KINESITHERAPEUT
19 51 ORTHESISTE | ORTHESIST
19 54 PROTHESISTE | PROTHESIST
19 57 AUDIOLOGUE | AUDIOLOOG
19 58 LOGOPÈDES | LOGOPEDISTEN
19 59 ORTHOPTISTES | ORTHOPTISTEN
19 60 PODOLOGUE | PODOLOOG
19 61 ORTHOPÉDISTES | ORTHOPEDISTEN
19 62 BANDAGISTES | BANDAGISTEN
19 63 DISPENSATEURS D’IMPLANTS | VERSTREKKERS VAN IMPLANTATEN
19 64 ERGOTHÉRAPEUTE | ERGOTHERAPEUT
19 65 DIÉTÉTICIEN | DIETIST
19 66 OPTICIENS | OPTICIENS
19 67 AUDICIENS | AUDICIENS
19 68 PHARMACIENS BIOLOGISTES | APOTHEKERS-BIOLOGEN
19 69 TECHNOLOGUE DE LABORATOIRE | LABORATORIUMTECHNOLOOG
19 70 PUÉRICULTRICE | KINDERVERZORGSTER
19 71 TECHNOLOGUE EN IMAGERIE MEDICALE | TECHNOLOOG MEDISCHE BEELDVORMING
19 80 AMBULANCIER (TRANSPORT NON-URGENT DE PATIENT) | AMBULANCIER (NIET-DRINGEND PATIENTENVERVOER )
19 99 PROFESSIONNEL DE SANTE POTENTIEL | POTENTIELE GEZONDHEIDSZORGBEOEFENAAR
* 902 Inscription provisoire | Voorlopige inschrijving | Provisorische Einschreibung
* 999 NISS remplacé | INSZ vervangen | NISS ersetzt
* 0 Inscription définitive | Definitieve inschrijvinG | Definitive Einschreibung
17 1 Dossier en examen | Dossier in onderzoek | Akte in Untersuchung
17 2 Revenu d’intégration | Leefloon | Eingliederungseinkommen
17 3 Équivalent revenu d’intégration | Equivalent leefloon | Gleichgestelltes Eingliederungseinkommen
17 4 Autre aide | Andere hulp | Sonstige Hilfe
17 5 Cohabitant | Inwonende | Mitbewohner
17 6 Personne occupée par le biais d’un CPAS | Persoon tewerkgesteld via OCMW | Durch das ÖSHZ beschäftigte Person
17 7 Médiation collective de dettes / accompagnement budgétaire | Collectieve schuldbemiddeling / budgetbegeleiding | Schuldnerberatung
17 8 Dossiers de service | Dienstendossier | Dienstakte
17 9 Autres formes d’accompagnement | Andere vormen van begeleiding | Sonstige Begleitungsformen
17 11 Encadrant | Encadrant | Begleiter
17 12 Participant | Deelnemer | Teilnehmer
17 20 Collaborateur en enquête | Medewerker in onderzoek | Mitarbeiter auf Probe
17 21 Collaborateur (définitif) | Medewerker (definitif) | Mitarbeiter (definitiv)
17 40 Bénéficiaire de l’allocation de chauffage | Begunstigde verwarmingstoelage | Heizkostenbeihilfe
"""
def objects():
Sector = resolve_model('cbss.Sector')
#~ for ln in SECTORS.splitlines():
#~ if ln:
#~ a = ln.split(None,1)
#~ labels = [s.strip() for s in a[1].split('|')]
#~ if len(labels) != 3:
#~ raise Exception("Line %r : labels is %r" %(ln,labels))
#~ if not labels[2]:
#~ labels[2] = labels[0]
#~ yield Sector(code=int(a[0]),**babel_values('name',fr=labels[0],nl=labels[1],de=labels[2]))
Purpose = resolve_model('cbss.Purpose')
for ln in PURPOSES.splitlines():
if ln:
a = ln.split(None, 2)
#~ assert a[0] in ('*', '17')
sc = a[0]
if sc == '*':
sc = None
else:
#~ sector = Sector.objects.get(code=int(sc))
sc = int(sc)
labels = [s.strip() for s in a[2].split('|')]
if len(labels) == 2:
labels.append(labels[0])
elif len(labels) != 3:
raise Exception("Line %r : labels is %r" % (ln, labels))
yield Purpose(sector_code=sc, code=int(a[1]), **babel_values('name', en=labels[0], fr=labels[0], nl=labels[1], de=labels[2]))
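# Illustrative note (added): each non-empty PURPOSES line is split into at most
# three fields, e.g. "17 2 Revenu d'intégration | Leefloon | Eingliederungseinkommen"
# gives sector_code=17, code=2 and the '|'-separated labels used as the fr/nl/de
# names (en falls back to the French label); a '*' sector code is stored as None
# and a missing third label falls back to the first one.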
| agpl-3.0 | 7,014,070,598,673,712,000 | 57.2 | 304 | 0.764439 | false |
ross/carbonate | tests/test_sync.py | 1 | 3076 | import unittest
import os
import whisper
import time
import random
from carbonate.sync import heal_metric
class SyncTest(unittest.TestCase):
db = "db.wsp"
@classmethod
def setUpClass(cls):
cls._removedb()
@classmethod
def _removedb(cls):
try:
if os.path.exists(cls.db):
os.unlink(cls.db)
except (IOError, OSError):
pass
def test_heal_empty(self):
testdb = "test-%s" % self.db
self._removedb()
try:
os.unlink(testdb)
except (IOError, OSError):
pass
schema = [(1, 20)]
emptyData = []
self._createdb(self.db, schema)
self._createdb(testdb, schema, emptyData)
heal_metric(self.db, testdb)
original_data = whisper.fetch(self.db, 0)
filled_data = whisper.fetch(testdb, 0)
self.assertEqual(original_data, filled_data)
# Heal again, should still be equal
heal_metric(self.db, testdb)
filled_data = whisper.fetch(testdb, 0)
self.assertEqual(original_data, filled_data)
def test_heal_target_corrupt(self):
testdb = "/dev/null"
self._removedb()
schema = [(1, 20)]
self._createdb(self.db, schema)
original_data = whisper.fetch(self.db, 0)
# This should log complaints but exit successfully as it cannot
# heal its target /dev/null
heal_metric(self.db, testdb)
data = whisper.fetch(self.db, 0)
self.assertEqual(original_data, data)
def test_heal_target_missing(self):
testdb = "test-%s" % self.db
try:
os.unlink(testdb)
except (IOError, OSError):
pass
self._removedb()
schema = [(1, 20)]
self._createdb(self.db, schema)
original_data = whisper.fetch(self.db, 0)
        # The target whisper file does not exist yet; heal_metric should
        # create it and fill it with the source data
heal_metric(self.db, testdb)
data = whisper.fetch(testdb, 0)
self.assertEqual(original_data, data)
def test_heal_source_corrupt(self):
testdb = "/dev/null"
self._removedb()
schema = [(1, 20)]
self._createdb(self.db, schema)
original_data = whisper.fetch(self.db, 0)
# This should log complaints but exit successfully as it cannot
# read from the source /dev/null
heal_metric(testdb, self.db)
data = whisper.fetch(self.db, 0)
self.assertEqual(original_data, data)
def _createdb(self, wsp, schema=[(1, 20)], data=None):
whisper.create(wsp, schema)
if data is None:
tn = time.time() - 20
data = []
for i in range(20):
data.append((tn + 1 + i, random.random() * 10))
whisper.update_many(wsp, data)
return data
@classmethod
def tearDownClass(cls):
try:
cls._removedb()
os.unlink("test-%s" % cls.db)
except (IOError, OSError):
pass
| mit | 5,415,602,980,656,477,000 | 24.848739 | 71 | 0.564694 | false |
jtrobec/pants | tests/python/pants_test/tasks/test_listtargets.py | 1 | 5412 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from textwrap import dedent
from pants.backend.core.tasks.listtargets import ListTargets
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.repository import Repository
from pants.backend.jvm.scala_artifact import ScalaArtifact
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.target import Target
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class BaseListTargetsTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return ListTargets
class ListTargetsTestEmpty(BaseListTargetsTest):
def test_list_all_empty(self):
self.assertEqual('', self.execute_task())
self.assertEqual('', self.execute_task(options={'sep': '###'}))
self.assertEqual([], self.execute_console_task())
class ListTargetsTest(BaseListTargetsTest):
@property
def alias_groups(self):
return BuildFileAliases(
targets={
'target': Target,
'java_library': JavaLibrary,
},
objects={
'pants': lambda x: x,
'artifact': Artifact,
'scala_artifact': ScalaArtifact,
'public': Repository(name='public',
url='http://maven.example.com',
push_db_basedir='/tmp'),
}
)
def setUp(self):
super(ListTargetsTest, self).setUp()
# Setup a BUILD tree for various list tests
class Lib(object):
def __init__(self, name, provides=False):
self.name = name
self.provides = dedent("""
artifact(
org='com.example',
name='{0}',
repo=public
)
""".format(name)).strip() if provides else 'None'
def create_library(path, *libs):
libs = libs or [Lib(os.path.basename(os.path.dirname(self.build_path(path))))]
for lib in libs:
target = "java_library(name='{name}', provides={provides}, sources=[])\n".format(
name=lib.name, provides=lib.provides)
self.add_to_build_file(path, target)
create_library('a')
create_library('a/b', Lib('b', provides=True))
create_library('a/b/c', Lib('c'), Lib('c2', provides=True), Lib('c3'))
create_library('a/b/d')
create_library('a/b/e', Lib('e1'))
self.add_to_build_file('f', dedent('''
target(
name='alias',
dependencies=[
'a/b/c:c3',
'a/b/d:d',
],
description = """
Exercises alias resolution.
Further description.
""",
)
'''))
def test_list_path(self):
self.assert_console_output('a/b:b', targets=[self.target('a/b')])
def test_list_siblings(self):
self.assert_console_output('a/b:b', targets=self.targets('a/b:'))
self.assert_console_output('a/b/c:c', 'a/b/c:c2', 'a/b/c:c3',
targets=self.targets('a/b/c/:'))
def test_list_descendants(self):
self.assert_console_output('a/b/c:c', 'a/b/c:c2', 'a/b/c:c3',
targets=self.targets('a/b/c/::'))
self.assert_console_output(
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
targets=self.targets('a/b::'))
def test_list_all(self):
self.assert_entries('\n',
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias')
self.assert_entries(', ',
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias',
options={'sep': ', '})
self.assert_console_output(
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias')
def test_list_provides(self):
self.assert_console_output(
'a/b:b com.example#b',
'a/b/c:c2 com.example#c2',
options={'provides': True})
def test_list_provides_customcols(self):
self.assert_console_output(
'/tmp a/b:b http://maven.example.com public com.example#b',
'/tmp a/b/c:c2 http://maven.example.com public com.example#c2',
options={'provides': True,
'provides_columns': 'push_db_basedir,address,repo_url,repo_name,artifact_id'}
)
def test_list_dedups(self):
targets = []
targets.extend(self.targets('a/b/d/::'))
targets.extend(self.target('f:alias').dependencies)
self.assertEquals(3, len(targets), "Expected a duplicate of a/b/d:d")
self.assert_console_output(
'a/b/c:c3',
'a/b/d:d',
targets=targets
)
def test_list_documented(self):
self.assert_console_output(
# Confirm empty listing
targets=[self.target('a/b')],
options={'documented': True},
)
self.assert_console_output(
dedent("""
f:alias
Exercises alias resolution.
Further description.
""").strip(),
options={'documented': True}
)
| apache-2.0 | 6,289,840,360,712,417,000 | 27.634921 | 94 | 0.566149 | false |
stratis-storage/stratisd | tests/client-dbus/tests/dbus_interface/filesystem/test_rename.py | 1 | 2948 | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test renaming a filesystem.
"""
# isort: LOCAL
from stratisd_client_dbus import (
Filesystem,
Manager,
ObjectManager,
Pool,
StratisdErrors,
filesystems,
get_object,
)
from stratisd_client_dbus._constants import TOP_OBJECT
from .._misc import SimTestCase, device_name_list
_DEVICE_STRATEGY = device_name_list(1)
class SetNameTestCase(SimTestCase):
"""
Set up a pool with a name and one filesystem.
"""
_POOLNAME = "deadpool"
_FSNAME = "fs"
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
super().setUp()
self._proxy = get_object(TOP_OBJECT)
((_, (pool_object_path, _)), _, _) = Manager.Methods.CreatePool(
self._proxy,
{
"name": self._POOLNAME,
"redundancy": (True, 0),
"devices": _DEVICE_STRATEGY(),
},
)
pool_object = get_object(pool_object_path)
((_, created), _, _) = Pool.Methods.CreateFilesystems(
pool_object, {"specs": [self._FSNAME]}
)
self._filesystem_object_path = created[0][0]
def test_null_mapping(self):
"""
Test rename to same name.
"""
filesystem = get_object(self._filesystem_object_path)
((is_some, result), return_code, _) = Filesystem.Methods.SetName(
filesystem, {"name": self._FSNAME}
)
self.assertEqual(return_code, StratisdErrors.OK)
self.assertFalse(is_some)
self.assertEqual(result, "0" * 32)
def test_new_name(self):
"""
Test rename to new name.
"""
filesystem = get_object(self._filesystem_object_path)
(result, return_code, _) = Filesystem.Methods.SetName(
filesystem, {"name": "new"}
)
self.assertEqual(return_code, StratisdErrors.OK)
self.assertTrue(result)
managed_objects = ObjectManager.Methods.GetManagedObjects(self._proxy, {})
(fs_object_path, _) = next(
filesystems(props={"Name": "new"}).search(managed_objects)
)
self.assertEqual(self._filesystem_object_path, fs_object_path)
fs_object_path = next(
filesystems(props={"Name": self._FSNAME}).search(managed_objects), None
)
self.assertIsNone(fs_object_path)
| mpl-2.0 | -495,794,747,404,045,700 | 29.391753 | 83 | 0.607191 | false |
borisd13/GridCompute | docs/source/conf.py | 1 | 9304 | # -*- coding: utf-8 -*-
#
# GridCompute documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 8 19:36:13 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from unittest.mock import MagicMock # to ignore modules that don't import well in RTD
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../source'))
sys.path.insert(0, os.path.abspath('../../source/admin'))
sys.path.insert(0, os.path.abspath('../../template/Shared_Folder/Settings/Applications/Random Counter'))
# -- General configuration ------------------------------------------------
# Ignore modules that cannot be imported by RTD
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return Mock()
MOCK_MODULES = ['tkinter', 'tkinter.filedialog', 'tkinter.font',
'tkinter.messagebox', 'tkinter.scrolledtext',
'tkinter.ttk', 'cx_Freeze']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinxcontrib.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GridCompute'
copyright = u'2014, Boris Dayma'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['global.rst']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'GridComputedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'GridCompute.tex', u'GridCompute Documentation',
u'Boris Dayma', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gridcompute', u'GridCompute Documentation',
[u'Boris Dayma'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'GridCompute', u'GridCompute Documentation',
u'Boris Dayma', 'GridCompute', 'GridCompute is a cross-platform tool that implements quickly distributed computing over a local grid.',
'Distributed Computing'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| gpl-3.0 | -6,566,207,551,344,717,000 | 31.760563 | 138 | 0.707975 | false |
koehlma/uv | uv/common.py | 1 | 3450 | # -*- coding: utf-8 -*-
# Copyright (C) 2016, Maximilian Köhl <[email protected]>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import platform
import sys
from collections import OrderedDict
is_py2 = sys.version_info[0] == 2
is_py3 = sys.version_info[0] == 3
is_pypy = platform.python_implementation().lower() == 'pypy'
is_cpython = platform.python_implementation().lower() == 'cpython'
def with_metaclass(meta, *bases):
class Metaclass(meta):
def __new__(cls, name, _, attributes):
return meta(name, bases, attributes)
return type.__new__(Metaclass, str('Metaclass'), (), {})
class _EnumerationMeta(type):
_members = []
_value_member_map = {}
def __prepare__(mcs, *args, **kwargs):
return OrderedDict()
def __new__(mcs, cls_name, cls_bases, attributes):
members = [(name, value) for name, value in attributes.items()
if not (hasattr(value, '__get__') or hasattr(value, '__set__') or
hasattr(value, '__delete__') or name.startswith('_'))]
for name, value in members:
attributes[name] = None
value_member_map = {}
attributes['_members'] = members
attributes['_value_member_map'] = value_member_map
cls = type.__new__(mcs, cls_name, cls_bases, attributes)
for name, value in members:
instance = super(_EnumerationMeta, mcs).__call__(cls, value)
instance.name = name
instance.value = value
value_member_map[value] = instance
setattr(cls, name, value_member_map[value])
return cls
def __call__(cls, value):
try:
return cls._value_member_map[value]
except KeyError:
raise ValueError(value)
def __iter__(cls):
return iter(cls._value_member_map.values())
try:
from enum import IntEnum
except ImportError:
class IntEnum(with_metaclass(_EnumerationMeta, int)):
def __repr__(self):
return '<{self.__class__.__name__}.{self.name}: {self}>'.format(self=self)
class Enumeration(IntEnum):
@classmethod
def get(cls, integer):
try:
return cls(integer)
except ValueError:
return integer
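# Minimal usage sketch (added for illustration; the enum below is made up):
#
#     class HandleType(Enumeration):
#         UNKNOWN = 0
#         TIMER = 1
#
#     HandleType(1) is HandleType.TIMER   # lookup by value
#     HandleType.get(42) == 42            # unknown values are returned unchanged
#
# The same definition works with the stdlib IntEnum and with the fallback
# _EnumerationMeta-based IntEnum above.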
try:
import builtins
except ImportError:
if isinstance(__builtins__, dict):
class _Builtins(object):
def __getattr__(self, item):
try:
return __builtins__[item]
except KeyError:
raise AttributeError()
builtins = _Builtins()
else:
builtins = __builtins__
is_posix = os.name == 'posix'
is_nt = os.name == 'nt'
is_linux = sys.platform.startswith('linux')
is_win32 = sys.platform == 'win32'
def dummy_callback(*arguments, **keywords): pass
| lgpl-3.0 | -7,197,550,074,827,018,000 | 30.354545 | 86 | 0.616411 | false |
smhilde/dhhd_project | dhhd/plan/migrations/0001_initial.py | 1 | 4011 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Customer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('name', models.CharField(max_length=100)),
('acct_num', models.IntegerField(verbose_name='account number')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('street_number', models.IntegerField()),
('street_name', models.CharField(max_length=250)),
('town', models.CharField(max_length=250)),
('state', models.CharField(max_length=250)),
('zip_code', models.IntegerField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Plan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('number', models.PositiveIntegerField(unique=True, verbose_name='plan number')),
('title', models.CharField(null=True, unique=True, max_length=1000)),
('area', models.PositiveIntegerField(verbose_name='square feet')),
('bed', models.FloatField(verbose_name='bedrooms')),
('bath', models.FloatField(verbose_name='bathrooms')),
('floor', models.PositiveIntegerField(default=1, verbose_name='number of floors')),
('garage', models.PositiveIntegerField(verbose_name='number of garages')),
('width', models.FloatField(null=True, verbose_name='house width')),
('depth', models.FloatField(null=True, verbose_name='house width')),
('height', models.FloatField(null=True, verbose_name='house height')),
('ceiling', models.FloatField(null=True, verbose_name='ceiling height')),
('price', models.FloatField(null=True)),
('pub_date', models.DateTimeField(verbose_name='date published')),
('customer', models.ManyToManyField(null=True, to='plan.Customer')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='SpecialFeature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('room', models.CharField(max_length=100)),
('feature', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('usrid', models.CharField(unique=True, max_length=100)),
('usrpwd', models.CharField(max_length=100)),
('plan', models.ManyToManyField(to='plan.Plan')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='plan',
name='feature',
field=models.ManyToManyField(null=True, to='plan.SpecialFeature'),
preserve_default=True,
),
migrations.AddField(
model_name='location',
name='plan',
field=models.ForeignKey(to='plan.Plan'),
preserve_default=True,
),
]
| apache-2.0 | -599,572,122,547,535,100 | 40.78125 | 114 | 0.529294 | false |
FriedrichK/python-edifact | edifact/segments/base.py | 1 | 2357 | # -*- coding: utf-8 -*-
"""This module provides the base for segment wrappers."""
import six
class Composite(object):
"""Part of a segment."""
_content = None
def __init__(self, index=0, max_length=3, required=False):
"""Constructor."""
self.index = index
self.max_length = max_length
self.required = required
@property
def content(self):
"""Get value."""
return self._content
@content.setter
def content(self, content):
"""Set content."""
if len(content) > self.max_length:
raise ValueError('trying to set content {0} for composite with maximum length {1}'.format(content, unicode(self.max_length)))
self._content = content
def __str__(self):
"""Return value."""
return self.content or u''
class SegmentMeta(type):
"""Meta class for segments."""
def __new__(cls, name, bases, attrs):
"""Create class."""
cleanup = []
# composites
composites = {}
for key, value in attrs.iteritems():
if isinstance(value, Composite):
composites[key] = value
cleanup.append(key)
attrs['_composites'] = composites
# cleanup
for key in cleanup:
del attrs[key]
# Meta
attrs['_meta'] = attrs.pop('Meta', None)
return super(SegmentMeta, cls).__new__(cls, name, bases, attrs)
class Segment(six.with_metaclass(SegmentMeta)):
"""Base class for segments."""
def __init__(self, una, **kwargs):
"""Constructor."""
self.una = una
for key, value in kwargs.iteritems():
if key not in self._composites:
raise IndexError('composite {0} not found'.format(key,))
self._composites[key].content = value
def __str__(self):
"""Return the string representation of this segment."""
ordered_composites = [unicode(composite) for composite in sorted(self._composites.values(), key=lambda x: x.index)]
return ''.join((
self._meta.identifier, # segment tag
self.una.data_element_separator, # segment tag separator
self.una.component_data_element_separator.join(ordered_composites), # composites
self.una.segment_terminator, # terminator
))
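# Minimal usage sketch (added for illustration; the segment class, composites and
# separator values are made up, not taken from this package):
#
#     class UNB(Segment):
#         syntax = Composite(index=0, max_length=4)
#         sender = Composite(index=1, max_length=35)
#
#         class Meta:
#             identifier = 'UNB'
#
#     str(UNB(una, syntax='UNOC', sender='SENDER1'))
#     # -> "UNB+UNOC:SENDER1'" assuming una uses '+', ':' and "'" as the data
#     #    element separator, component separator and segment terminator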
| apache-2.0 | -4,451,063,390,457,396,000 | 28.4625 | 137 | 0.574883 | false |
Zyell/home-assistant | homeassistant/components/light/__init__.py | 1 | 9429 | """
Provides functionality to interact with lights.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/light/
"""
import logging
import os
import csv
import voluptuous as vol
from homeassistant.components import (
group, discovery, wemo, wink, isy994,
zwave, insteon_hub, mysensors, tellstick, vera)
from homeassistant.config import load_yaml_config_file
from homeassistant.const import (
STATE_ON, SERVICE_TURN_ON, SERVICE_TURN_OFF, SERVICE_TOGGLE,
ATTR_ENTITY_ID)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
DOMAIN = "light"
SCAN_INTERVAL = 30
GROUP_NAME_ALL_LIGHTS = 'all lights'
ENTITY_ID_ALL_LIGHTS = group.ENTITY_ID_FORMAT.format('all_lights')
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Integer that represents transition time in seconds to make change.
ATTR_TRANSITION = "transition"
# Lists holding color values
ATTR_RGB_COLOR = "rgb_color"
ATTR_XY_COLOR = "xy_color"
ATTR_COLOR_TEMP = "color_temp"
# int with value 0 .. 255 representing brightness of the light.
ATTR_BRIGHTNESS = "brightness"
# String representing a profile (built-in ones or external defined).
ATTR_PROFILE = "profile"
# If the light should flash, can be FLASH_SHORT or FLASH_LONG.
ATTR_FLASH = "flash"
FLASH_SHORT = "short"
FLASH_LONG = "long"
# Apply an effect to the light, can be EFFECT_COLORLOOP.
ATTR_EFFECT = "effect"
EFFECT_COLORLOOP = "colorloop"
EFFECT_RANDOM = "random"
EFFECT_WHITE = "white"
LIGHT_PROFILES_FILE = "light_profiles.csv"
# Maps discovered services to their platforms.
DISCOVERY_PLATFORMS = {
wemo.DISCOVER_LIGHTS: 'wemo',
wink.DISCOVER_LIGHTS: 'wink',
insteon_hub.DISCOVER_LIGHTS: 'insteon_hub',
isy994.DISCOVER_LIGHTS: 'isy994',
discovery.SERVICE_HUE: 'hue',
zwave.DISCOVER_LIGHTS: 'zwave',
mysensors.DISCOVER_LIGHTS: 'mysensors',
tellstick.DISCOVER_LIGHTS: 'tellstick',
vera.DISCOVER_LIGHTS: 'vera',
}
PROP_TO_ATTR = {
'brightness': ATTR_BRIGHTNESS,
'color_temp': ATTR_COLOR_TEMP,
'rgb_color': ATTR_RGB_COLOR,
'xy_color': ATTR_XY_COLOR,
}
# Service call validation schemas
VALID_TRANSITION = vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900))
LIGHT_TURN_ON_SCHEMA = vol.Schema({
ATTR_ENTITY_ID: cv.entity_ids,
ATTR_PROFILE: str,
ATTR_TRANSITION: VALID_TRANSITION,
ATTR_BRIGHTNESS: cv.byte,
ATTR_RGB_COLOR: vol.All(vol.ExactSequence((cv.byte, cv.byte, cv.byte)),
vol.Coerce(tuple)),
ATTR_XY_COLOR: vol.All(vol.ExactSequence((cv.small_float, cv.small_float)),
vol.Coerce(tuple)),
ATTR_COLOR_TEMP: vol.All(int, vol.Range(min=154, max=500)),
ATTR_FLASH: vol.In([FLASH_SHORT, FLASH_LONG]),
ATTR_EFFECT: vol.In([EFFECT_COLORLOOP, EFFECT_RANDOM, EFFECT_WHITE]),
})
LIGHT_TURN_OFF_SCHEMA = vol.Schema({
ATTR_ENTITY_ID: cv.entity_ids,
ATTR_TRANSITION: VALID_TRANSITION,
})
LIGHT_TOGGLE_SCHEMA = vol.Schema({
ATTR_ENTITY_ID: cv.entity_ids,
ATTR_TRANSITION: VALID_TRANSITION,
})
PROFILE_SCHEMA = vol.Schema(
vol.ExactSequence((str, cv.small_float, cv.small_float, cv.byte))
)
_LOGGER = logging.getLogger(__name__)
def is_on(hass, entity_id=None):
"""Return if the lights are on based on the statemachine."""
entity_id = entity_id or ENTITY_ID_ALL_LIGHTS
return hass.states.is_state(entity_id, STATE_ON)
# pylint: disable=too-many-arguments
def turn_on(hass, entity_id=None, transition=None, brightness=None,
rgb_color=None, xy_color=None, color_temp=None, profile=None,
flash=None, effect=None):
"""Turn all or specified light on."""
data = {
key: value for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_PROFILE, profile),
(ATTR_TRANSITION, transition),
(ATTR_BRIGHTNESS, brightness),
(ATTR_RGB_COLOR, rgb_color),
(ATTR_XY_COLOR, xy_color),
(ATTR_COLOR_TEMP, color_temp),
(ATTR_FLASH, flash),
(ATTR_EFFECT, effect),
] if value is not None
}
hass.services.call(DOMAIN, SERVICE_TURN_ON, data)
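# Example (added for illustration; the entity id is made up):
#
#     turn_on(hass, entity_id='light.kitchen', brightness=128,
#             rgb_color=(255, 160, 0), transition=5)
#
# Only keywords that are not None are included in the service call data.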
def turn_off(hass, entity_id=None, transition=None):
"""Turn all or specified light off."""
data = {
key: value for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_TRANSITION, transition),
] if value is not None
}
hass.services.call(DOMAIN, SERVICE_TURN_OFF, data)
def toggle(hass, entity_id=None, transition=None):
"""Toggle all or specified light."""
data = {
key: value for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_TRANSITION, transition),
] if value is not None
}
hass.services.call(DOMAIN, SERVICE_TOGGLE, data)
# pylint: disable=too-many-branches, too-many-locals, too-many-statements
def setup(hass, config):
"""Expose light control via statemachine and services."""
component = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL, DISCOVERY_PLATFORMS,
GROUP_NAME_ALL_LIGHTS)
component.setup(config)
# Load built-in profiles and custom profiles
profile_paths = [os.path.join(os.path.dirname(__file__),
LIGHT_PROFILES_FILE),
hass.config.path(LIGHT_PROFILES_FILE)]
profiles = {}
for profile_path in profile_paths:
if not os.path.isfile(profile_path):
continue
with open(profile_path) as inp:
reader = csv.reader(inp)
# Skip the header
next(reader, None)
try:
for rec in reader:
profile, color_x, color_y, brightness = PROFILE_SCHEMA(rec)
profiles[profile] = (color_x, color_y, brightness)
except vol.MultipleInvalid as ex:
_LOGGER.error("Error parsing light profile from %s: %s",
profile_path, ex)
return False
def handle_light_service(service):
"""Hande a turn light on or off service call."""
# Get the validated data
params = service.data.copy()
# Convert the entity ids to valid light ids
target_lights = component.extract_from_service(service)
params.pop(ATTR_ENTITY_ID, None)
service_fun = None
if service.service == SERVICE_TURN_OFF:
service_fun = 'turn_off'
elif service.service == SERVICE_TOGGLE:
service_fun = 'toggle'
if service_fun:
for light in target_lights:
getattr(light, service_fun)(**params)
for light in target_lights:
if light.should_poll:
light.update_ha_state(True)
return
# Processing extra data for turn light on request.
profile = profiles.get(params.pop(ATTR_PROFILE, None))
if profile:
params.setdefault(ATTR_XY_COLOR, profile[:2])
params.setdefault(ATTR_BRIGHTNESS, profile[2])
for light in target_lights:
light.turn_on(**params)
for light in target_lights:
if light.should_poll:
light.update_ha_state(True)
# Listen for light on and light off service calls.
descriptions = load_yaml_config_file(
os.path.join(os.path.dirname(__file__), 'services.yaml'))
hass.services.register(DOMAIN, SERVICE_TURN_ON, handle_light_service,
descriptions.get(SERVICE_TURN_ON),
schema=LIGHT_TURN_ON_SCHEMA)
hass.services.register(DOMAIN, SERVICE_TURN_OFF, handle_light_service,
descriptions.get(SERVICE_TURN_OFF),
schema=LIGHT_TURN_OFF_SCHEMA)
hass.services.register(DOMAIN, SERVICE_TOGGLE, handle_light_service,
descriptions.get(SERVICE_TOGGLE),
schema=LIGHT_TOGGLE_SCHEMA)
return True
class Light(ToggleEntity):
"""Representation of a light."""
# pylint: disable=no-self-use
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return None
@property
def xy_color(self):
"""Return the XY color value [float, float]."""
return None
@property
def rgb_color(self):
"""Return the RGB color value [int, int, int]."""
return None
@property
def color_temp(self):
"""Return the CT color value in mireds."""
return None
@property
def state_attributes(self):
"""Return optional state attributes."""
data = {}
if self.is_on:
for prop, attr in PROP_TO_ATTR.items():
value = getattr(self, prop)
if value:
data[attr] = value
if ATTR_RGB_COLOR not in data and ATTR_XY_COLOR in data and \
ATTR_BRIGHTNESS in data:
data[ATTR_RGB_COLOR] = color_util.color_xy_brightness_to_RGB(
data[ATTR_XY_COLOR][0], data[ATTR_XY_COLOR][1],
data[ATTR_BRIGHTNESS])
return data
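# Minimal platform sketch (added for illustration, not a real platform): a light
# platform subclasses Light and fills in the ToggleEntity/Light API, e.g.
#
#     class DemoLight(Light):
#         def __init__(self):
#             self._state = False
#             self._brightness = 255
#
#         @property
#         def is_on(self):
#             return self._state
#
#         @property
#         def brightness(self):
#             return self._brightness
#
#         def turn_on(self, **kwargs):
#             self._brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
#             self._state = True
#
#         def turn_off(self, **kwargs):
#             self._state = False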
| mit | 90,728,029,092,372,830 | 30.747475 | 79 | 0.622972 | false |
excelly/xpy-ml | sdss_iii/proc_transform_data.py | 1 | 4304 | from ex import *
from ex.ioo.FITS import FITS
import multiprocessing as mp
import sdss.utils as utils
import sdss_iii.settings as settings
def usage():
print('''
transform the data into standard form
python proc_transform_data.py --input_files={input_files} --npixel=[500] --nproc=[1]
''')
sys.exit(1)
def Filter(sf, vf):
'''filter out bad objects
'''
n, dim = vf['spectrum'].shape
#### filter by bad pixels
sd = float32(1/np.sqrt(np.maximum(vf['invvar'], 1e-6)))
filt_pix = (sd > settings.bad_pixel_std_thresh).sum(1) < dim*settings.bad_pixel_num_thresh
#### filter by s2n
# stars
filt_star = sf['spec_cln'] == 1
n_star = filt_star.sum()
filt_star = reduce(AND, [filt_pix, filt_star, sf['s2n'] >= 3])
# galaxy
filt_gla = sf['spec_cln'] == 2
n_gla = filt_gla.sum()
filt_gla = reduce(AND, [filt_pix, filt_gla, sf['s2n'] >= 10])
# qso
filt_qso = sf['spec_cln'] == 3
n_qso = filt_qso.sum()
filt_qso = reduce(AND, [filt_pix, filt_qso, sf['s2n'] >= 10])
log.info('''
Selected
%d / %d stars
%d / %d galaxies
%d / %d quasars
''' % (filt_star.sum(), n_star,
filt_gla.sum(), n_gla,
filt_qso.sum(), n_qso))
return reduce(OR, [filt_star, filt_gla, filt_qso])
def ResampleSpectrum(y_np):
y, npixel = y_np
return utils.SplineResample(y, npixel)
def main(input_files, npixel=500, nproc=1):
input_files = ExpandWildcard(input_files)
MakeDir('./compact')
log.info("Transforming {0} SDSS-III files using {1} processes. Output=./compact/".format(len(input_files), nproc))
pool = mp.Pool(nproc)
for input_file in input_files:
output_file = "./compact/{0}.pkl".format(
SplitFilename(input_file)[0])
if os.path.exists(output_file):
log.info('Already processed {0}'.format(input_file))
continue
log.info("Processing %s -> %s" % (input_file,output_file))
fits = FITS(input_file)
vf = {'spectrum': FixEndian(fits.HDUs[0].data),
'invvar': FixEndian(fits.HDUs[4].data)}
log10_wl = FixEndian(fits.HDUs[3].data)
sf = dict([(name, FixEndian(fits.HDUs[1].data.field(name)))
for name in fits.HDUs[1].data.names])
del sf['length']
sf['mag'] = FixEndian(fits.HDUs[2].data)
sf['spec_cln'] = arr(EncodeList(
[c.strip().lower() for c in sf['class']],
settings.spec_cln_code.keys(),
settings.spec_cln_code.values()))
sf['PMF'] = utils.PMF_S2N(sf['plate'],sf['mjd'],sf['fiber'])
sf['stamp'] = zeros(len(vf['spectrum']), dtype = np.int64)
sf['stamp'][:] = fits.HDUs[1].header['stamp']
log.info("The following scalar features found: \n{0}".format(
sf.keys()))
filt = Filter(sf, vf)
for key in sf.keys():
sf[key] = sf[key][filt]
for key in vf.keys():
vf[key] = vf[key][filt]
log.info("%d / %d objects left after filtering" % (
filt.sum(), filt.size))
log.info('Resampling %d spectra %d -> %d...'%(
len(vf['spectrum']), vf['spectrum'].shape[1], npixel))
jobs = [(spec, npixel) for spec in vf['spectrum']]
spectrum = pool.map(ResampleSpectrum, jobs)
log.info('Resampling %d invvar...'%len(vf['invvar']))
jobs = [(iv, npixel) for iv in vf['invvar']]
invvar = pool.map(ResampleSpectrum, jobs)
log10_wl = linspace(log10_wl.min(), log10_wl.max(), npixel)
# from ex.plott import *
# h = figure();
# subplot(h,211); plot(vf['spectrum'][0])
# subplot(h,212); plot(spectrum[0])
# show()
vf['spectrum'] = spectrum
vf['invvar'] = invvar
log.info('Saving %s...'%output_file)
SavePickle(output_file, {'SF': sf, 'VF': vf,
'log10_wl': log10_wl})
fits.Close()
if __name__ == '__main__':
InitLog()
opts = CmdArgs(sys.argv[1:],
['nproc=','input_files=','npixel='],
usage)
input_files = opts.get('--input_files')
nproc = int(opts.get('--nproc', 1))
npixel = int(opts.get('--npixel', 500))
main(input_files, npixel, nproc)
| apache-2.0 | -8,651,753,374,808,732,000 | 31.360902 | 118 | 0.549024 | false |
ericholscher/djangoembed | oembed/sites.py | 1 | 8297 | import datetime
import re
import simplejson
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from oembed.constants import DEFAULT_OEMBED_TTL, MIN_OEMBED_TTL, RESOURCE_TYPES
from oembed.exceptions import AlreadyRegistered, NotRegistered, OEmbedMissingEndpoint, OEmbedException
from oembed.models import StoredOEmbed, StoredProvider
from oembed.providers import BaseProvider, DjangoProvider
from oembed.resources import OEmbedResource
from oembed.utils import fetch_url, relative_to_full
class ProviderSite(object):
def __init__(self):
self.clear()
def invalidate_providers(self):
self._populated = False
def clear(self):
self._registry = {}
self._registered_providers = []
self.invalidate_providers()
def register(self, provider_class):
"""
Registers a provider with the site.
"""
if not issubclass(provider_class, BaseProvider):
raise TypeError('%s is not a subclass of BaseProvider' % provider_class.__name__)
if provider_class in self._registered_providers:
raise AlreadyRegistered('%s is already registered' % provider_class.__name__)
if issubclass(provider_class, DjangoProvider):
# set up signal handler for cache invalidation
signals.post_save.connect(
self.invalidate_stored_oembeds,
sender=provider_class._meta.model
)
# don't build the regex yet - if not all urlconfs have been loaded
# and processed at this point, the DjangoProvider instances will fail
# when attempting to reverse urlpatterns that haven't been created.
# Rather, the regex-list will be populated once, on-demand.
self._registered_providers.append(provider_class)
# flag for re-population
self.invalidate_providers()
def unregister(self, provider_class):
"""
Unregisters a provider from the site.
"""
if not issubclass(provider_class, BaseProvider):
raise TypeError('%s must be a subclass of BaseProvider' % provider_class.__name__)
if provider_class not in self._registered_providers:
raise NotRegistered('%s is not registered' % provider_class.__name__)
self._registered_providers.remove(provider_class)
# flag for repopulation
self.invalidate_providers()
def populate(self):
"""
Populate the internal registry's dictionary with the regexes for each
provider instance
"""
self._registry = {}
for provider_class in self._registered_providers:
instance = provider_class()
self._registry[instance] = instance.regex
for stored_provider in StoredProvider.objects.active():
self._registry[stored_provider] = stored_provider.regex
self._populated = True
def ensure_populated(self):
"""
Ensure not only that the internal registry of Python-class providers is
populated, but also make sure the cached queryset of database-providers
is up-to-date
"""
if not self._populated:
self.populate()
def get_registry(self):
"""
Return a dictionary of {provider_instance: regex}
"""
self.ensure_populated()
return self._registry
def get_providers(self):
"""Provide a list of all oembed providers that are being used."""
return self.get_registry().keys()
def provider_for_url(self, url):
"""
Find the right provider for a URL
"""
for provider, regex in self.get_registry().items():
if re.match(regex, url) is not None:
return provider
raise OEmbedMissingEndpoint('No endpoint matches URL: %s' % url)
def invalidate_stored_oembeds(self, sender, instance, created, **kwargs):
"""
A hook for django-based oembed providers to delete any stored oembeds
"""
ctype = ContentType.objects.get_for_model(instance)
StoredOEmbed.objects.filter(
object_id=instance.pk,
content_type=ctype).delete()
def embed(self, url, **kwargs):
"""
The heart of the matter
"""
try:
# first figure out the provider
provider = self.provider_for_url(url)
except OEmbedMissingEndpoint:
raise
else:
try:
# check the database for a cached response, because of certain
# race conditions that exist with get_or_create(), do a filter
# lookup and just grab the first item
stored_match = StoredOEmbed.objects.filter(
match=url,
maxwidth=kwargs.get('maxwidth', None),
maxheight=kwargs.get('maxheight', None),
date_expires__gte=datetime.datetime.now())[0]
return OEmbedResource.create_json(stored_match.response_json)
except IndexError:
# query the endpoint and cache response in db
# prevent None from being passed in as a GET param
params = dict([(k, v) for k, v in kwargs.items() if v])
# request an oembed resource for the url
resource = provider.request_resource(url, **params)
try:
cache_age = int(resource.cache_age)
if cache_age < MIN_OEMBED_TTL:
cache_age = MIN_OEMBED_TTL
except:
cache_age = DEFAULT_OEMBED_TTL
date_expires = datetime.datetime.now() + datetime.timedelta(seconds=cache_age)
stored_oembed, created = StoredOEmbed.objects.get_or_create(
match=url,
maxwidth=kwargs.get('maxwidth', None),
maxheight=kwargs.get('maxheight', None))
stored_oembed.response_json = resource.json
stored_oembed.resource_type = resource.type
stored_oembed.date_expires = date_expires
if resource.content_object:
stored_oembed.content_object = resource.content_object
stored_oembed.save()
return resource
def autodiscover(self, url):
"""
Load up StoredProviders from url if it is an oembed scheme
"""
headers, response = fetch_url(url)
if headers['content-type'] in ('application/json', 'text/javascript'):
provider_data = simplejson.loads(response)
return self.store_providers(provider_data)
def store_providers(self, provider_data):
"""
Iterate over the returned json and try to sort out any new providers
"""
if not hasattr(provider_data, '__iter__'):
raise OEmbedException('Autodiscovered response not iterable')
provider_pks = []
for provider in provider_data:
if 'endpoint' not in provider or \
'matches' not in provider:
continue
resource_type = provider.get('type')
if resource_type not in RESOURCE_TYPES:
continue
stored_provider, created = StoredProvider.objects.get_or_create(
wildcard_regex=provider['matches']
)
if created:
stored_provider.endpoint_url = relative_to_full(
provider['endpoint'],
provider['matches']
)
stored_provider.resource_type = resource_type
stored_provider.save()
provider_pks.append(stored_provider.pk)
return StoredProvider.objects.filter(pk__in=provider_pks)
# just like django admin
site = ProviderSite()
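# Typical flow (added for illustration; the provider class and URL are made up):
#
#     site.register(MyVideoProvider)
#     resource = site.embed('http://example.com/video/123', maxwidth=640)
#     resource.type, resource.json
#
# embed() first looks for a fresh StoredOEmbed row and only queries the matching
# provider's endpoint on a cache miss, storing the response with its TTL.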
| mit | 7,125,871,521,904,948,000 | 36.542986 | 102 | 0.575148 | false |
ocelot-collab/ocelot | ocelot/adaptors/madx_old.py | 1 | 8302 | __author__ = 'Sergey Tomin'
from ocelot import *
def RFcavity(l, volt, lag, harmon):
    rf = Cavity(l=l, volt=volt)  # no element name is available here
rf.lag = lag
rf.harmon = harmon
return rf
class MadObj:
name = ""
type = ""
params = ""
def parse_params(self):
self.dic_p = {}
for param in self.params:
param = param.replace('=', ' ')
param = param.split()
self.dic_p[param[0]] = param[1]
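# Illustrative note (added): parse_params turns the raw attribute strings into a
# dict of strings, e.g. params = ["l=1.5", "angle = 0.05"] gives
# dic_p == {"l": "1.5", "angle": "0.05"}.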
def line_transform(file):
"""
replace ":=" by "="
replace '!' by '#'
    if there is no ";" at the end of a line, join it with the following lines
    convert all letters to lowercase
"""
lines = []
multiline = ''
for line in file:
#print line
#print line
line = line.lstrip()
if len(line) == 0:
continue
#print line
line = line.replace(':=', '=')
line = line.replace('!', '#')
#print line
multiline += line
#print len(multiline), multiline[0]
if multiline.find(";")<0 and multiline[0] != "#":
ind = line.find("#")
multiline = multiline[:ind]
continue
else:
line = multiline
multiline = ''
line = line.replace(';', '')
line = line.lower()
#print line
lines.append(line)
return lines
def find_objects(lines):
"""
    search for MAD objects: a line that contains a name followed by ":" defines an object
"""
mad_objs = []
for line in lines:
if ":" in line and line[0] != "#":
madObj = MadObj()
i = line.find("#")
line2 = line[:i]
words = line2.split(",")
temp = words[0].split()
madObj.type = temp[-1]
madObj.name = temp[0].replace(":", "")
madObj.params = words[1:]
mad_objs.append(madObj)
return mad_objs
def subs_objects(mad_objs):
for i, mo in enumerate(mad_objs):
for mo2 in mad_objs[i:]:
if mo.name == mo2.type:
mo2.type = mo.type
params = mo.params + mo2.params
mo2.params = params
return mad_objs
def parse_obj(mad_objs):
for mo in mad_objs:
#print mo.name, mo.type, mo.params
mo.parse_params()
#print mo.name, mo.type, mo.dic_p
return mad_objs
def replace_objects(lines, mad_objs):
new_lines = []
multy_line = ''
for line in lines:
if ":" in line and line[0] != "#":
i = line.find("#")
line2 = line[:i]
words = line2.split(",")
temp = words[0].split()
name = temp[0].replace(":", "")
for mo in mad_objs:
if name == mo.name:
line = ""
line = mo.name + " = " + mo.type + "("
for p in mo.dic_p:
line += p + " = " + mo.dic_p[p] +", "
line += ")"
line = line.replace(', )', ')')
new_lines.append(line)
return new_lines
def translate(lines):
lines2 = []
for line in lines:
line = line.replace('quadrupole', "Quadrupole")
line = line.replace('sbend', "Bend")
#line = line.replace('rbend', "RBend")
#line = line.replace('bend', "Bend")
line = line.replace('monitor', "Monitor")
line = line.replace('matrix', "Matrix")
line = line.replace('rfcavity', "RFcavity")
line = line.replace('sextupole', "Sextupole")
line = line.replace('marker', "Marker")
line = line.replace('instrument', "UnknownElement")
line = line.replace('rcollimator', "UnknownElement")
line = line.replace('ecollimator', "UnknownElement")
line = line.replace('vkicker', "UnknownElement")
line = line.replace('hkicker', "UnknownElement")
line = line.replace('sequence', "Sequence")
line = line.replace('return', "#return")
line = line.replace('->', ".")
line = line.replace('//', "#")
line = line.replace('centre', "'centre'")
line = line.replace('at =', "at=")
lines2.append(line)
#print line
return lines2
def c2py(lines):
lines2 = []
c_block = False
for line in lines:
        # remove spaces
#print line
#line = line.lstrip()
#if len(line) == 0:
# continue
#print line
#for i in range(8,2,-1):
# line = line.replace(' '*i, "\t")
if line[0] != "#" and "{" in line :
c_block = True
line = line.replace('{', ": # start operator")
lines2.append(line)
continue
if c_block:
#line = line.replace('\t', " "*4)
line = " " + line
if line[0] != "#" and "}" in line :
c_block = False
line = line.replace('}', "#end operator")
lines2.append(line)
continue
# make operator body of "for" or "if" li
#line = line.replace('{', ": # start operator")
#line = line.replace('}', "#end operator")
#line = line.replace('print, text=', "print ")
#line = line.replace('print, text =', "print ")
lines2.append(line)
return lines2
def collect_sequence(lines):
seq = []
first = 1
lines2 = []
for line in lines:
if line.find("at=")>0:
if line[0] == "#":
continue
parts = line.split("at=")
name = parts[0].replace(",", "")
name = name.split()[0]
#print name
pos = parts[1].replace("\n", "")
id = " '" + name + "' "
line = ",".join([name,id, pos])
ind = line.find("#")
if ind>0:
line = line[:ind]
#print "ind = ", ind == True
#print line
if first:
line = "lattice = [[" + line +"],"
first = 0
else:
line = "[" + line + "],"
#print line
seq.append(line)
line = line.replace("endSequence", "]")
lines2.append(line)
return lines2
def lattice_str_from_madx(filename_seq):
f = open(filename_seq,"r")
lines = line_transform(f)
mad_objs = find_objects(lines)
mad_objs = subs_objects(mad_objs)
mo = parse_obj(mad_objs)
new_lines = replace_objects(lines, mo)
lines = translate(new_lines)
lines = c2py(lines)
lines = collect_sequence(lines)
f.close()
return lines
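# Usage sketch (file names are hypothetical; both helpers are defined in this module):
#   lines = lattice_str_from_madx("ring.seq")      # translate a MAD-X sequence file
#   save_lattice_str(lines, "ring_lattice.py")     # write the generated OCELOT-style script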
def save_lattice_str(lines, filename):
f_new = open(filename, "w")
for line in lines: f_new.write(line+"\n")
f_new.close()
def madx_seq2ocelot_seq(list_elem_pos, tot_length, exclude_elems = []):
seq = []
azimuth = 0.
for i, term in enumerate(list_elem_pos):
if term[1] in exclude_elems:
continue
element = term[0]
#print element
element.id = term[1]
pos = term[2]
drift = Drift(l = pos - element.l/2. - azimuth, eid = "drift_" + str(i))
azimuth = pos + element.l/2.
seq.append(drift)
seq.append(element)
#print elem[0].l, elem[1], elem[2]
len_last_drift = tot_length - list_elem_pos[-1][-1] - list_elem_pos[-1][0].l/2.
drift = Drift(l = len_last_drift, eid = "drift_last")
seq.append(drift)
return seq
def madx2ocelot(file_seq, exclude_elems):
lines = lattice_str_from_madx(filename_seq=file_seq)
#print lines
file = "\n"
exec(file.join(lines))
seq = madx_seq2ocelot_seq(lattice, tot_length = ring.l, exclude_elems=exclude_elems)
return seq
if __name__ == "__main__":
#name_file = "quadsex_20wig.dat"
name_file = 'petra3.txt'
f = open(name_file,"r")
lines = line_transform(f)
mad_objs = find_objects(lines)
mad_objs = subs_objects(mad_objs)
mo = parse_obj(mad_objs)
new_lines = replace_objects(lines, mo)
lines = translate(new_lines)
lines = c2py(lines)
lines = collect_sequence(lines)
for line in lines:
print (line)
f.close()
part_name = name_file.split(".")
part_name[0] += ".py"
f_new = open(part_name[0], "w")
for line in lines:
f_new.write(line+"\n")
f_new.close()
| gpl-3.0 | -5,919,753,922,739,795,000 | 25.954545 | 88 | 0.49988 | false |
fantasycheng/udacity-deep-learning-project | language-translation/dlnd_language_translation.py | 1 | 34429 |
# coding: utf-8
# # Language Translation
# In this project, you’re going to take a peek into the realm of neural network machine translation. You’ll be training a sequence to sequence model on a dataset of English and French sentences that can translate new sentences from English to French.
# ## Get the Data
# Since translating the whole language of English to French will take lots of time to train, we have provided you with a small portion of the English corpus.
# In[1]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
import helper
import problem_unittests as tests
source_path = 'data/small_vocab_en'
target_path = 'data/small_vocab_fr'
source_text = helper.load_data(source_path)
target_text = helper.load_data(target_path)
# In[6]:
source_text[:1000]
# In[7]:
target_text[:1000]
# ## Explore the Data
# Play around with view_sentence_range to view different parts of the data.
# In[2]:
view_sentence_range = (0, 10)
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
import numpy as np
print('Dataset Stats')
print('Roughly the number of unique words: {}'.format(len({word: None for word in source_text.split()})))
sentences = source_text.split('\n')
word_counts = [len(sentence.split()) for sentence in sentences]
print('Number of sentences: {}'.format(len(sentences)))
print('Average number of words in a sentence: {}'.format(np.average(word_counts)))
print()
print('English sentences {} to {}:'.format(*view_sentence_range))
print('\n'.join(source_text.split('\n')[view_sentence_range[0]:view_sentence_range[1]]))
print()
print('French sentences {} to {}:'.format(*view_sentence_range))
print('\n'.join(target_text.split('\n')[view_sentence_range[0]:view_sentence_range[1]]))
# ## Implement Preprocessing Function
# ### Text to Word Ids
# As you did with other RNNs, you must turn the text into a number so the computer can understand it. In the function `text_to_ids()`, you'll turn `source_text` and `target_text` from words to ids. However, you need to add the `<EOS>` word id at the end of `target_text`. This will help the neural network predict when the sentence should end.
#
# You can get the `<EOS>` word id by doing:
# ```python
# target_vocab_to_int['<EOS>']
# ```
# You can get other word ids using `source_vocab_to_int` and `target_vocab_to_int`.
# In[3]:
def text_to_ids(source_text, target_text, source_vocab_to_int, target_vocab_to_int):
"""
Convert source and target text to proper word ids
:param source_text: String that contains all the source text.
:param target_text: String that contains all the target text.
:param source_vocab_to_int: Dictionary to go from the source words to an id
:param target_vocab_to_int: Dictionary to go from the target words to an id
:return: A tuple of lists (source_id_text, target_id_text)
"""
# TODO: Implement Function
    source_id_text = [[source_vocab_to_int[word] for word in sentence.split()] for sentence in source_text.split('\n')]
    target_id_text = [[target_vocab_to_int[word] for word in sentence.split()] + [target_vocab_to_int['<EOS>']] for sentence in target_text.split('\n')]
    return source_id_text, target_id_text
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_text_to_ids(text_to_ids)
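# In[ ]:

# Quick illustration with toy vocabularies (the ids below are made up, not the project's):
# each word maps to its id and every target sentence gets the <EOS> id appended.
toy_source_vocab = {'hello': 4, 'world': 5}
toy_target_vocab = {'<EOS>': 1, 'bonjour': 6, 'monde': 7}
print(text_to_ids('hello world', 'bonjour monde', toy_source_vocab, toy_target_vocab))
# ([[4, 5]], [[6, 7, 1]])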
# ### Preprocess all the data and save it
# Running the code cell below will preprocess all the data and save it to file.
# In[4]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
helper.preprocess_and_save_data(source_path, target_path, text_to_ids)
# # Check Point
# This is your first checkpoint. If you ever decide to come back to this notebook or have to restart the notebook, you can start from here. The preprocessed data has been saved to disk.
# In[2]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
import numpy as np
import helper
import problem_unittests as tests
(source_int_text, target_int_text), (source_vocab_to_int, target_vocab_to_int), _ = helper.load_preprocess()
# ### Check the Version of TensorFlow and Access to GPU
# This will check to make sure you have the correct version of TensorFlow and access to a GPU
# In[3]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
from distutils.version import LooseVersion
import warnings
import tensorflow as tf
from tensorflow.python.layers.core import Dense
# Check TensorFlow Version
assert LooseVersion(tf.__version__) >= LooseVersion('1.1'), 'Please use TensorFlow version 1.1 or newer'
print('TensorFlow Version: {}'.format(tf.__version__))
# Check for a GPU
if not tf.test.gpu_device_name():
warnings.warn('No GPU found. Please use a GPU to train your neural network.')
else:
print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
# ## Build the Neural Network
# You'll build the components necessary to build a Sequence-to-Sequence model by implementing the following functions below:
# - `model_inputs`
# - `process_decoder_input`
# - `encoding_layer`
# - `decoding_layer_train`
# - `decoding_layer_infer`
# - `decoding_layer`
# - `seq2seq_model`
#
# ### Input
# Implement the `model_inputs()` function to create TF Placeholders for the Neural Network. It should create the following placeholders:
#
# - Input text placeholder named "input" using the TF Placeholder name parameter with rank 2.
# - Targets placeholder with rank 2.
# - Learning rate placeholder with rank 0.
# - Keep probability placeholder named "keep_prob" using the TF Placeholder name parameter with rank 0.
# - Target sequence length placeholder named "target_sequence_length" with rank 1
# - Max target sequence length tensor named "max_target_len" getting its value from applying tf.reduce_max on the target_sequence_length placeholder. Rank 0.
# - Source sequence length placeholder named "source_sequence_length" with rank 1
#
# Return the placeholders in the following the tuple (input, targets, learning rate, keep probability, target sequence length, max target sequence length, source sequence length)
# In[4]:
def model_inputs():
"""
Create TF Placeholders for input, targets, learning rate, and lengths of source and target sequences.
:return: Tuple (input, targets, learning rate, keep probability, target sequence length,
max target sequence length, source sequence length)
"""
# TODO: Implement Function
inputs = tf.placeholder(tf.int32, [None, None], name='input')
targets = tf.placeholder(tf.int32, [None, None])
learning_rate = tf.placeholder(tf.float32)
keep_prob = tf.placeholder(tf.float32, name='keep_prob')
target_sequence_length = tf.placeholder(tf.int32, (None, ), name='target_sequence_length')
max_target_len = tf.reduce_max(target_sequence_length, name='max_target_len')
source_sequence_length = tf.placeholder(tf.int32, (None, ), name='source_sequence_length')
return inputs, targets, learning_rate, keep_prob, target_sequence_length, max_target_len, source_sequence_length
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_model_inputs(model_inputs)
# ### Process Decoder Input
# Implement `process_decoder_input` by removing the last word id from each batch in `target_data` and concatenating the GO ID to the beginning of each batch.
# In[5]:
def process_decoder_input(target_data, target_vocab_to_int, batch_size):
"""
Preprocess target data for encoding
    :param target_data: Target Placeholder
:param target_vocab_to_int: Dictionary to go from the target words to an id
:param batch_size: Batch Size
:return: Preprocessed target data
"""
# TODO: Implement Function
ending = tf.strided_slice(target_data, [0, 0], [batch_size, -1], [1, 1])
dec_input = tf.concat([tf.fill([batch_size, 1], target_vocab_to_int['<GO>']), ending], 1)
return dec_input
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_process_encoding_input(process_decoder_input)
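# In[ ]:

# Plain-Python illustration of the same transformation (toy ids, no TensorFlow):
# the last id of each target sequence is dropped and the <GO> id is prepended,
# which is exactly what the strided_slice + concat above do on tensors.
toy_go_id = 3
toy_targets = [[11, 12, 13], [14, 15, 16]]
print([[toy_go_id] + seq[:-1] for seq in toy_targets])
# [[3, 11, 12], [3, 14, 15]]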
# ### Encoding
# Implement `encoding_layer()` to create a Encoder RNN layer:
# * Embed the encoder input using [`tf.contrib.layers.embed_sequence`](https://www.tensorflow.org/api_docs/python/tf/contrib/layers/embed_sequence)
# * Construct a [stacked](https://github.com/tensorflow/tensorflow/blob/6947f65a374ebf29e74bb71e36fd82760056d82c/tensorflow/docs_src/tutorials/recurrent.md#stacking-multiple-lstms) [`tf.contrib.rnn.LSTMCell`](https://www.tensorflow.org/api_docs/python/tf/contrib/rnn/LSTMCell) wrapped in a [`tf.contrib.rnn.DropoutWrapper`](https://www.tensorflow.org/api_docs/python/tf/contrib/rnn/DropoutWrapper)
# * Pass cell and embedded input to [`tf.nn.dynamic_rnn()`](https://www.tensorflow.org/api_docs/python/tf/nn/dynamic_rnn)
# In[6]:
from imp import reload
reload(tests)
def encoding_layer(rnn_inputs, rnn_size, num_layers, keep_prob,
source_sequence_length, source_vocab_size,
encoding_embedding_size):
"""
Create encoding layer
:param rnn_inputs: Inputs for the RNN
:param rnn_size: RNN Size
:param num_layers: Number of layers
:param keep_prob: Dropout keep probability
:param source_sequence_length: a list of the lengths of each sequence in the batch
:param source_vocab_size: vocabulary size of source data
:param encoding_embedding_size: embedding size of source data
:return: tuple (RNN output, RNN state)
"""
# TODO: Implement Function
enc_input = tf.contrib.layers.embed_sequence(rnn_inputs, source_vocab_size, encoding_embedding_size)
def make_cell(rnn_size, keep_prob):
lstm = tf.contrib.rnn.LSTMCell(rnn_size, initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=2017))
return tf.contrib.rnn.DropoutWrapper(lstm, output_keep_prob=keep_prob)
enc_cell = tf.contrib.rnn.MultiRNNCell([make_cell(rnn_size, keep_prob) for _ in range(num_layers)])
enc_output, enc_state = tf.nn.dynamic_rnn(enc_cell, enc_input, source_sequence_length, dtype=tf.float32)
return enc_output, enc_state
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_encoding_layer(encoding_layer)
# ### Decoding - Training
# Create a training decoding layer:
# * Create a [`tf.contrib.seq2seq.TrainingHelper`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/TrainingHelper)
# * Create a [`tf.contrib.seq2seq.BasicDecoder`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/BasicDecoder)
# * Obtain the decoder outputs from [`tf.contrib.seq2seq.dynamic_decode`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/dynamic_decode)
# In[7]:
def decoding_layer_train(encoder_state, dec_cell, dec_embed_input,
target_sequence_length, max_summary_length,
output_layer, keep_prob):
"""
Create a decoding layer for training
:param encoder_state: Encoder State
:param dec_cell: Decoder RNN Cell
:param dec_embed_input: Decoder embedded input
:param target_sequence_length: The lengths of each sequence in the target batch
:param max_summary_length: The length of the longest sequence in the batch
:param output_layer: Function to apply the output layer
:param keep_prob: Dropout keep probability
:return: BasicDecoderOutput containing training logits and sample_id
"""
# TODO: Implement Function
training_helper = tf.contrib.seq2seq.TrainingHelper(dec_embed_input, target_sequence_length)
training_decoder = tf.contrib.seq2seq.BasicDecoder(dec_cell, training_helper, encoder_state, output_layer)
training_decoder_output = tf.contrib.seq2seq.dynamic_decode(training_decoder,
impute_finished = True,
maximum_iterations = max_summary_length)[0]
return training_decoder_output
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_decoding_layer_train(decoding_layer_train)
# ### Decoding - Inference
# Create inference decoder:
# * Create a [`tf.contrib.seq2seq.GreedyEmbeddingHelper`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/GreedyEmbeddingHelper)
# * Create a [`tf.contrib.seq2seq.BasicDecoder`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/BasicDecoder)
# * Obtain the decoder outputs from [`tf.contrib.seq2seq.dynamic_decode`](https://www.tensorflow.org/api_docs/python/tf/contrib/seq2seq/dynamic_decode)
# In[8]:
def decoding_layer_infer(encoder_state, dec_cell, dec_embeddings, start_of_sequence_id,
end_of_sequence_id, max_target_sequence_length,
vocab_size, output_layer, batch_size, keep_prob):
"""
Create a decoding layer for inference
:param encoder_state: Encoder state
:param dec_cell: Decoder RNN Cell
:param dec_embeddings: Decoder embeddings
:param start_of_sequence_id: GO ID
:param end_of_sequence_id: EOS Id
:param max_target_sequence_length: Maximum length of target sequences
:param vocab_size: Size of decoder/target vocabulary
    :param decoding_scope: TensorFlow Variable Scope for decoding
:param output_layer: Function to apply the output layer
:param batch_size: Batch size
:param keep_prob: Dropout keep probability
:return: BasicDecoderOutput containing inference logits and sample_id
"""
# TODO: Implement Function
start_tokens = tf.tile(tf.constant([start_of_sequence_id], dtype=tf.int32),
[batch_size], name='start_tokens')
inference_helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(dec_embeddings,
start_tokens,
end_of_sequence_id)
inference_decoder = tf.contrib.seq2seq.BasicDecoder(dec_cell, inference_helper,
encoder_state, output_layer)
inference_decoder_output = tf.contrib.seq2seq.dynamic_decode(inference_decoder,
impute_finished = True,
maximum_iterations=max_target_sequence_length)[0]
return inference_decoder_output
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_decoding_layer_infer(decoding_layer_infer)
# ### Build the Decoding Layer
# Implement `decoding_layer()` to create a Decoder RNN layer.
#
# * Embed the target sequences
# * Construct the decoder LSTM cell (just like you constructed the encoder cell above)
# * Create an output layer to map the outputs of the decoder to the elements of our vocabulary
# * Use your `decoding_layer_train(encoder_state, dec_cell, dec_embed_input, target_sequence_length, max_target_sequence_length, output_layer, keep_prob)` function to get the training logits.
# * Use your `decoding_layer_infer(encoder_state, dec_cell, dec_embeddings, start_of_sequence_id, end_of_sequence_id, max_target_sequence_length, vocab_size, output_layer, batch_size, keep_prob)` function to get the inference logits.
#
# Note: You'll need to use [tf.variable_scope](https://www.tensorflow.org/api_docs/python/tf/variable_scope) to share variables between training and inference.
# In[9]:
def decoding_layer(dec_input, encoder_state,
target_sequence_length, max_target_sequence_length,
rnn_size,
num_layers, target_vocab_to_int, target_vocab_size,
batch_size, keep_prob, decoding_embedding_size):
"""
Create decoding layer
:param dec_input: Decoder input
:param encoder_state: Encoder state
:param target_sequence_length: The lengths of each sequence in the target batch
:param max_target_sequence_length: Maximum length of target sequences
:param rnn_size: RNN Size
:param num_layers: Number of layers
:param target_vocab_to_int: Dictionary to go from the target words to an id
:param target_vocab_size: Size of target vocabulary
:param batch_size: The size of the batch
:param keep_prob: Dropout keep probability
:param decoding_embedding_size: Decoding embedding size
:return: Tuple of (Training BasicDecoderOutput, Inference BasicDecoderOutput)
"""
# TODO: Implement Function
decode_embedding = tf.Variable(tf.random_uniform([target_vocab_size, decoding_embedding_size]))
decode_embed_input = tf.nn.embedding_lookup(decode_embedding, dec_input)
def make_cell(rnn_size, keep_prob):
lstm = tf.contrib.rnn.LSTMCell(rnn_size, initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=2017))
return tf.contrib.rnn.DropoutWrapper(lstm, output_keep_prob=keep_prob)
decode_cell = tf.contrib.rnn.MultiRNNCell([make_cell(rnn_size, keep_prob) for _ in range(num_layers)])
output_layer = Dense(target_vocab_size,
kernel_initializer=tf.truncated_normal_initializer(mean=0.0, stddev=0.1))
with tf.variable_scope('decode'):
train_decoder_output = decoding_layer_train(encoder_state,
decode_cell,
decode_embed_input,
target_sequence_length,
max_target_sequence_length,
output_layer,
keep_prob)
with tf.variable_scope('decode', reuse=True):
start_of_sequence_id = target_vocab_to_int['<GO>']
end_of_sequence_id = target_vocab_to_int['<EOS>']
infer_decoder_output = decoding_layer_infer(encoder_state,
decode_cell,
decode_embedding,
start_of_sequence_id,
end_of_sequence_id,
max_target_sequence_length,
target_vocab_size,
output_layer,
batch_size,
keep_prob)
return train_decoder_output, infer_decoder_output
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_decoding_layer(decoding_layer)
# ### Build the Neural Network
# Apply the functions you implemented above to:
#
# - Encode the input using your `encoding_layer(rnn_inputs, rnn_size, num_layers, keep_prob, source_sequence_length, source_vocab_size, encoding_embedding_size)`.
# - Process target data using your `process_decoder_input(target_data, target_vocab_to_int, batch_size)` function.
# - Decode the encoded input using your `decoding_layer(dec_input, enc_state, target_sequence_length, max_target_sentence_length, rnn_size, num_layers, target_vocab_to_int, target_vocab_size, batch_size, keep_prob, dec_embedding_size)` function.
# In[10]:
def seq2seq_model(input_data, target_data, keep_prob, batch_size,
source_sequence_length, target_sequence_length,
max_target_sentence_length,
source_vocab_size, target_vocab_size,
enc_embedding_size, dec_embedding_size,
rnn_size, num_layers, target_vocab_to_int):
"""
Build the Sequence-to-Sequence part of the neural network
:param input_data: Input placeholder
:param target_data: Target placeholder
:param keep_prob: Dropout keep probability placeholder
:param batch_size: Batch Size
:param source_sequence_length: Sequence Lengths of source sequences in the batch
:param target_sequence_length: Sequence Lengths of target sequences in the batch
:param source_vocab_size: Source vocabulary size
:param target_vocab_size: Target vocabulary size
    :param enc_embedding_size: Encoder embedding size
    :param dec_embedding_size: Decoder embedding size
:param rnn_size: RNN Size
:param num_layers: Number of layers
:param target_vocab_to_int: Dictionary to go from the target words to an id
:return: Tuple of (Training BasicDecoderOutput, Inference BasicDecoderOutput)
"""
# TODO: Implement Function
_, enc_state = encoding_layer(input_data,
rnn_size,
num_layers,
keep_prob,
source_sequence_length,
source_vocab_size,
enc_embedding_size)
dec_input = process_decoder_input(target_data, target_vocab_to_int, batch_size)
train_decoder_output, infer_decoder_output = decoding_layer(dec_input,
enc_state,
target_sequence_length,
max_target_sentence_length,
rnn_size,
num_layers,
target_vocab_to_int,
target_vocab_size,
batch_size,
keep_prob,
dec_embedding_size)
return train_decoder_output, infer_decoder_output
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_seq2seq_model(seq2seq_model)
# ## Neural Network Training
# ### Hyperparameters
# Tune the following parameters:
#
# - Set `epochs` to the number of epochs.
# - Set `batch_size` to the batch size.
# - Set `rnn_size` to the size of the RNNs.
# - Set `num_layers` to the number of layers.
# - Set `encoding_embedding_size` to the size of the embedding for the encoder.
# - Set `decoding_embedding_size` to the size of the embedding for the decoder.
# - Set `learning_rate` to the learning rate.
# - Set `keep_probability` to the Dropout keep probability
# - Set `display_step` to state how many steps between each debug output statement
# In[11]:
# Number of Epochs
epochs = 10
# Batch Size
batch_size = 128
# RNN Size
rnn_size = 256
# Number of Layers
num_layers = 3
# Embedding Size
encoding_embedding_size = 256
decoding_embedding_size = 256
# Learning Rate
learning_rate = 0.001
# Dropout Keep Probability
keep_probability = 0.5
display_step = 1000
# ### Build the Graph
# Build the graph using the neural network you implemented.
# In[12]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
save_path = 'checkpoints/dev'
(source_int_text, target_int_text), (source_vocab_to_int, target_vocab_to_int), _ = helper.load_preprocess()
max_target_sentence_length = max([len(sentence) for sentence in source_int_text])
train_graph = tf.Graph()
with train_graph.as_default():
input_data, targets, lr, keep_prob, target_sequence_length, max_target_sequence_length, source_sequence_length = model_inputs()
#sequence_length = tf.placeholder_with_default(max_target_sentence_length, None, name='sequence_length')
input_shape = tf.shape(input_data)
train_logits, inference_logits = seq2seq_model(tf.reverse(input_data, [-1]),
targets,
keep_prob,
batch_size,
source_sequence_length,
target_sequence_length,
max_target_sequence_length,
len(source_vocab_to_int),
len(target_vocab_to_int),
encoding_embedding_size,
decoding_embedding_size,
rnn_size,
num_layers,
target_vocab_to_int)
training_logits = tf.identity(train_logits.rnn_output, name='logits')
inference_logits = tf.identity(inference_logits.sample_id, name='predictions')
masks = tf.sequence_mask(target_sequence_length, max_target_sequence_length, dtype=tf.float32, name='masks')
with tf.name_scope("optimization"):
# Loss function
cost = tf.contrib.seq2seq.sequence_loss(
training_logits,
targets,
masks)
# Optimizer
optimizer = tf.train.AdamOptimizer(lr)
# Gradient Clipping
gradients = optimizer.compute_gradients(cost)
capped_gradients = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in gradients if grad is not None]
train_op = optimizer.apply_gradients(capped_gradients)
# Batch and pad the source and target sequences
# In[13]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
def pad_sentence_batch(sentence_batch, pad_int):
"""Pad sentences with <PAD> so that each sentence of a batch has the same length"""
max_sentence = max([len(sentence) for sentence in sentence_batch])
return [sentence + [pad_int] * (max_sentence - len(sentence)) for sentence in sentence_batch]
def get_batches(sources, targets, batch_size, source_pad_int, target_pad_int):
"""Batch targets, sources, and the lengths of their sentences together"""
for batch_i in range(0, len(sources)//batch_size):
start_i = batch_i * batch_size
# Slice the right amount for the batch
sources_batch = sources[start_i:start_i + batch_size]
targets_batch = targets[start_i:start_i + batch_size]
# Pad
pad_sources_batch = np.array(pad_sentence_batch(sources_batch, source_pad_int))
pad_targets_batch = np.array(pad_sentence_batch(targets_batch, target_pad_int))
# Need the lengths for the _lengths parameters
pad_targets_lengths = []
for target in pad_targets_batch:
pad_targets_lengths.append(len(target))
pad_source_lengths = []
for source in pad_sources_batch:
pad_source_lengths.append(len(source))
yield pad_sources_batch, pad_targets_batch, pad_source_lengths, pad_targets_lengths
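# In[ ]:

# Small illustration (toy batch; the pad id is assumed to be 0 here):
# pad_sentence_batch pads every sentence to the length of the longest one in the batch.
print(pad_sentence_batch([[5, 6, 7], [8, 9], [10]], 0))
# [[5, 6, 7], [8, 9, 0], [10, 0, 0]]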
# ### Train
# Train the neural network on the preprocessed data. If you have a hard time getting a good loss, check the forums to see if anyone is having the same problem.
# In[14]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
def get_accuracy(target, logits):
"""
Calculate accuracy
"""
max_seq = max(target.shape[1], logits.shape[1])
if max_seq - target.shape[1]:
target = np.pad(
target,
[(0,0),(0,max_seq - target.shape[1])],
'constant')
if max_seq - logits.shape[1]:
logits = np.pad(
logits,
[(0,0),(0,max_seq - logits.shape[1])],
'constant')
return np.mean(np.equal(target, logits))
# Split data to training and validation sets
train_source = source_int_text[batch_size:]
train_target = target_int_text[batch_size:]
valid_source = source_int_text[:batch_size]
valid_target = target_int_text[:batch_size]
(valid_sources_batch, valid_targets_batch, valid_sources_lengths, valid_targets_lengths ) = next(get_batches(valid_source,
valid_target,
batch_size,
source_vocab_to_int['<PAD>'],
target_vocab_to_int['<PAD>']))
with tf.Session(graph=train_graph) as sess:
sess.run(tf.global_variables_initializer())
for epoch_i in range(epochs):
for batch_i, (source_batch, target_batch, sources_lengths, targets_lengths) in enumerate(
get_batches(train_source, train_target, batch_size,
source_vocab_to_int['<PAD>'],
target_vocab_to_int['<PAD>'])):
_, loss = sess.run(
[train_op, cost],
{input_data: source_batch,
targets: target_batch,
lr: learning_rate,
target_sequence_length: targets_lengths,
source_sequence_length: sources_lengths,
keep_prob: keep_probability})
if batch_i % display_step == 0 and batch_i > 0:
batch_train_logits = sess.run(
inference_logits,
{input_data: source_batch,
source_sequence_length: sources_lengths,
target_sequence_length: targets_lengths,
keep_prob: 1.0})
batch_valid_logits = sess.run(
inference_logits,
{input_data: valid_sources_batch,
source_sequence_length: valid_sources_lengths,
target_sequence_length: valid_targets_lengths,
keep_prob: 1.0})
train_acc = get_accuracy(target_batch, batch_train_logits)
valid_acc = get_accuracy(valid_targets_batch, batch_valid_logits)
print('Epoch {:>3} Batch {:>4}/{} - Train Accuracy: {:>6.4f}, Validation Accuracy: {:>6.4f}, Loss: {:>6.4f}'
.format(epoch_i, batch_i, len(source_int_text) // batch_size, train_acc, valid_acc, loss))
# Save Model
saver = tf.train.Saver()
saver.save(sess, save_path)
print('Model Trained and Saved')
# ### Save Parameters
# Save the `batch_size` and `save_path` parameters for inference.
# In[15]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
# Save parameters for checkpoint
helper.save_params(save_path)
# # Checkpoint
# In[16]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
import tensorflow as tf
import numpy as np
import helper
import problem_unittests as tests
_, (source_vocab_to_int, target_vocab_to_int), (source_int_to_vocab, target_int_to_vocab) = helper.load_preprocess()
load_path = helper.load_params()
# ## Sentence to Sequence
# To feed a sentence into the model for translation, you first need to preprocess it. Implement the function `sentence_to_seq()` to preprocess new sentences.
#
# - Convert the sentence to lowercase
# - Convert words into ids using `vocab_to_int`
# - Convert words not in the vocabulary, to the `<UNK>` word id.
# In[17]:
def sentence_to_seq(sentence, vocab_to_int):
"""
Convert a sentence to a sequence of ids
:param sentence: String
:param vocab_to_int: Dictionary to go from the words to an id
:return: List of word ids
"""
# TODO: Implement Function
sentence = sentence.lower()
word_ids = [vocab_to_int.get(word, vocab_to_int['<UNK>']) for word in sentence.split()]
return word_ids
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_sentence_to_seq(sentence_to_seq)
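# In[ ]:

# Illustration with a toy vocabulary (made up for this example): the sentence is lowercased
# and any word missing from the vocabulary falls back to the <UNK> id.
toy_vocab = {'<UNK>': 2, 'he': 10, 'saw': 11}
print(sentence_to_seq('He saw Paris', toy_vocab))
# [10, 11, 2]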
# ## Translate
# This will translate `translate_sentence` from English to French.
# In[21]:
translate_sentence = 'he saw a old yellow truck .'
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
translate_sentence = sentence_to_seq(translate_sentence, source_vocab_to_int)
loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
# Load saved model
loader = tf.train.import_meta_graph(load_path + '.meta')
loader.restore(sess, load_path)
input_data = loaded_graph.get_tensor_by_name('input:0')
logits = loaded_graph.get_tensor_by_name('predictions:0')
target_sequence_length = loaded_graph.get_tensor_by_name('target_sequence_length:0')
source_sequence_length = loaded_graph.get_tensor_by_name('source_sequence_length:0')
keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')
translate_logits = sess.run(logits, {input_data: [translate_sentence]*batch_size,
target_sequence_length: [len(translate_sentence)*2]*batch_size,
source_sequence_length: [len(translate_sentence)]*batch_size,
keep_prob: 1.0})[0]
print('Input')
print(' Word Ids: {}'.format([i for i in translate_sentence]))
print(' English Words: {}'.format([source_int_to_vocab[i] for i in translate_sentence]))
print('\nPrediction')
print(' Word Ids: {}'.format([i for i in translate_logits]))
print(' French Words: {}'.format(" ".join([target_int_to_vocab[i] for i in translate_logits])))
# ## Imperfect Translation
# You might notice that some sentences translate better than others. Since the dataset you're using only has a vocabulary of 227 English words out of the thousands you use in everyday speech, you're only going to see good results using those words. For this project, you don't need a perfect translation. However, if you want to create a better translation model, you'll need better data.
#
# You can train on the [WMT10 French-English corpus](http://www.statmt.org/wmt10/training-giga-fren.tar). This dataset has a larger vocabulary and is richer in the topics it discusses. However, it will take days to train on, so make sure you have a GPU and that the neural network is performing well on the dataset we provided. Just make sure you play with the WMT10 corpus after you've submitted this project.
# ## Submitting This Project
# When submitting this project, make sure to run all the cells before saving the notebook. Save the notebook file as "dlnd_language_translation.ipynb" and save it as a HTML file under "File" -> "Download as". Include the "helper.py" and "problem_unittests.py" files in your submission.
| mit | -3,339,668,714,111,853,000 | 41.135863 | 398 | 0.631954 | false |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/dla/__init__.py | 5 | 1504 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=unused-import
from azure.cli.core import AzCommandsLoader
from azure.cli.command_modules.dla._help import helps # pylint: disable=unused-import
class DataLakeAnalyticsCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType
from azure.cli.core.profiles import ResourceType
dla_custom = CliCommandType(operations_tmpl='azure.cli.command_modules.dla.custom#{}')
super(DataLakeAnalyticsCommandsLoader, self).__init__(cli_ctx=cli_ctx,
resource_type=ResourceType.MGMT_DATALAKE_ANALYTICS,
custom_command_type=dla_custom)
def load_command_table(self, args):
from azure.cli.command_modules.dla.commands import load_command_table
load_command_table(self, args)
return self.command_table
def load_arguments(self, command):
from azure.cli.command_modules.dla._params import load_arguments
load_arguments(self, command)
COMMAND_LOADER_CLS = DataLakeAnalyticsCommandsLoader
| mit | -5,837,073,634,880,245,000 | 46 | 113 | 0.59375 | false |
lucidlylogicole/scope | plugins/find_files/scope_plugin.py | 1 | 1192 | import os
from PyQt4 import QtGui, QtCore
class Plugin(object):
title = 'Search in Files'
location = 'app' # left, bottom, right, app
widget = None # The widget for the plugin (set at getWidget)
def __init__(self,parent=None):
self.parent = parent
def load(self):
self.btn = self.parent.addLeftBarButton(QtGui.QIcon('icon.png'),tooltip=self.title)
self.btn.clicked.connect(self.addFindFilesWidget)
# store widget with button (with addLeftBarButton. if widget doesn't exist, it calls the getwidget)
def loadWidget(self):
from . import find_files
curdir = os.path.abspath('.')
os.chdir(os.path.dirname(__file__))
self.widget = find_files.Find_Files(self.parent)
os.chdir(curdir)
return self.widget
def addFindFilesWidget(self):
if self.widget == None:
self.loadWidget()
self.parent.addMainWidget(self.widget,self.title,icon=self.btn.icon(),typ='app')
self.parent.Events.workspaceChanged.connect(self.widget.changeWorkspace)
self.toggle()
def toggle(self):
self.parent.changeTab(self.widget.id)
| gpl-3.0 | 7,168,575,003,864,687,000 | 35.121212 | 108 | 0.638423 | false |