Record fields: repo_name, path, copies, size, content, license
nmiroshnichenko/scripts
diskinfo/diskinfo.py
1
4642
#!/usr/bin/env python
import sys

_LINUX_PARTITIONS_FILE_NAME = '/proc/partitions'
_LINUX_PARTITION_SIZE_MULTIPLIER = 1024
_LINUX_DISK_TYPE_NUMBERS = (3, 8)


class DiskInfo(object):
    def get_disk_full_list(self):
        raise NotImplementedError("Should be called in subclasses")


class DiskInfoLinux(DiskInfo):
    def get_disk_full_list(self):
        return self._get_linux_disk_list()

    @staticmethod
    def _get_linux_disk_list():
        disk_list = []
        with open(_LINUX_PARTITIONS_FILE_NAME) as file:
            # omit header and empty line
            lines_total = file.readlines()[2:]
        hard_disk_number = 0
        partition_number = 0
        current_hard_disk = None
        for line in lines_total:
            # fields: major minor #blocks name
            fields = line.split()
            major = int(fields[0])
            if major not in _LINUX_DISK_TYPE_NUMBERS:
                continue
            size = int(fields[2]) * _LINUX_PARTITION_SIZE_MULTIPLIER
            is_partition = fields[3][-1].isdigit()
            if is_partition:
                partition_number += 1
                disk = Disk(partition_number, size, current_hard_disk)
            else:
                partition_number = 0
                hard_disk_number += 1
                disk = Disk(hard_disk_number, size, None)
                current_hard_disk = disk
            disk_list.append(disk)
        return disk_list


class DiskInfoWindows(DiskInfo):
    def get_disk_full_list(self):
        return self._get_windows_disk_list()

    @staticmethod
    def _get_windows_disk_list():
        disk_list = []
        try:
            import win32com.client
        except ImportError:
            print 'ERROR: you should install lib: pip install pypiwin32'
            sys.exit(66)
        strComputer = '.'
        objWMIService = win32com.client.Dispatch('WbemScripting.SWbemLocator')
        objSWbemServices = objWMIService.ConnectServer(strComputer, 'root\cimv2')
        colItems = objSWbemServices.ExecQuery('Select * from Win32_DiskDrive')
        hd_list = []
        for objItem in colItems:
            hd_list.append((objItem.DeviceID, objItem.Size))
        hd_list.sort()
        hard_disk_number = 0
        current_hard_disk = None
        for hd in hd_list:
            partition_number = 0
            hard_disk_number += 1
            disk = Disk(hard_disk_number, hd[1], None)
            current_hard_disk = disk
            disk_list.append(disk)
            colItems = objSWbemServices.ExecQuery(
                'Select * from Win32_DiskPartition where DiskIndex={}'.format(hard_disk_number - 1))
            for objItem in colItems:
                partition_number += 1
                disk = Disk(partition_number, objItem.Size, current_hard_disk)
                disk_list.append(disk)
        return disk_list


class Disk(object):
    def __init__(self, number, size, parent=None):
        self.number = int(number)
        self.size = int(size)
        '''size in bytes'''
        self.parent = parent
        '''None for hard disk or parent hard disk for partition'''

    def __repr__(self):
        return {self.number: self.size}.__str__()


def get_disk_full_list():
    platform = sys.platform
    if platform.lower().startswith('linux'):
        return DiskInfoLinux().get_disk_full_list()
    elif platform.lower().startswith('win'):
        return DiskInfoWindows().get_disk_full_list()
    else:
        print 'ERROR: unsupported platform: {}'.format(platform)
        sys.exit(65)


def print_disk_list(disk_list):
    print '\n'.join([str(e) for e in disk_list])


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Print disk info')
    parser.add_argument('hard_disk_number', type=int, nargs='?', help='hard disk number')
    args = parser.parse_args()
    hd_number = args.hard_disk_number
    if hd_number is not None and hd_number < 1:
        parser.error('invalid disk number: {}'.format(hd_number))
    disk_full_list = get_disk_full_list()
    hard_disk_list = [e for e in disk_full_list if e.parent is None]
    result_list = []
    if args.hard_disk_number is None:
        result_list.extend(hard_disk_list)
    else:
        if hd_number not in [e.number for e in hard_disk_list]:
            parser.error('no such disk: {}'.format(hd_number))
        result_list.extend(
            [e for e in disk_full_list if e.parent is not None and e.parent.number == hd_number])
    print_disk_list(result_list)


if __name__ == '__main__':
    main()
mit
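diskinfo.py can also be driven from Python rather than the command line. A minimal sketch, assuming the module is importable as `diskinfo` (the sizes in the comments are made-up illustrations):

    # python diskinfo.py      -> prints one {number: size} dict per hard disk
    # python diskinfo.py 1    -> prints the partitions of hard disk 1
    from diskinfo import get_disk_full_list

    disks = get_disk_full_list()
    hard_disks = [d for d in disks if d.parent is None]  # e.g. {1: 500107862016}
    partitions_of_1 = [d for d in disks
                       if d.parent is not None and d.parent.number == 1]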
nathanielvarona/airflow
tests/test_utils/perf/perf_kit/sqlalchemy.py
8
8062
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import time
import traceback
from typing import Callable

from sqlalchemy import event


def _pretty_format_sql(text: str):
    import pygments
    from pygments.formatters.terminal import TerminalFormatter
    from pygments.lexers.sql import SqlLexer

    text = pygments.highlight(code=text, formatter=TerminalFormatter(), lexer=SqlLexer()).rstrip()
    return text


class TraceQueries:
    """
    Tracking SQL queries in a code block.

    :param display_num: If True, displays the query number.
    :param display_time: If True, displays the query execution time.
    :param display_trace: If True, displays the simplified (one-line) stack trace
    :param display_sql: If True, displays the SQL statements
    :param display_parameters: If True, display SQL statement parameters
    :param print_fn: The function used to display the text. By default, ``builtins.print``
    """

    def __init__(
        self,
        *,
        display_num: bool = True,
        display_time: bool = True,
        display_trace: bool = True,
        display_sql: bool = False,
        display_parameters: bool = True,
        print_fn: Callable[[str], None] = print,
    ):
        self.display_num = display_num
        self.display_time = display_time
        self.display_trace = display_trace
        self.display_sql = display_sql
        self.display_parameters = display_parameters
        self.print_fn = print_fn
        self.query_count = 0

    def before_cursor_execute(
        self,
        conn,
        cursor,  # pylint: disable=unused-argument
        statement,  # pylint: disable=unused-argument
        parameters,  # pylint: disable=unused-argument
        context,  # pylint: disable=unused-argument
        executemany,
    ):  # pylint: disable=unused-argument
        """
        Executed before cursor.

        :param conn: connection
        :param cursor: cursor
        :param statement: statement
        :param parameters: parameters
        :param context: context
        :param executemany: whether many statements executed
        :return:
        """
        conn.info.setdefault("query_start_time", []).append(time.monotonic())
        self.query_count += 1

    def after_cursor_execute(
        self,
        conn,
        cursor,  # pylint: disable=unused-argument
        statement,
        parameters,
        context,  # pylint: disable=unused-argument
        executemany,
    ):  # pylint: disable=unused-argument
        """
        Executed after cursor.

        :param conn: connection
        :param cursor: cursor
        :param statement: statement
        :param parameters: parameters
        :param context: context
        :param executemany: whether many statements executed
        :return:
        """
        total = time.monotonic() - conn.info["query_start_time"].pop()
        file_names = [
            f"{f.filename}:{f.name}:{f.lineno}"
            for f in traceback.extract_stack()
            if "sqlalchemy" not in f.filename
        ]
        file_name = file_names[-1] if file_names else ""
        stack = [f for f in traceback.extract_stack() if "sqlalchemy" not in f.filename]
        stack_info = " > ".join([f"{f.filename.rpartition('/')[-1]}:{f.name}:{f.lineno}" for f in stack][-7:])
        conn.info.setdefault("query_start_time", []).append(time.monotonic())

        output_parts = []
        if self.display_num:
            output_parts.append(f"{self.query_count:>3}")

        if self.display_time:
            output_parts.append(f"{total:.5f}")

        if self.display_trace:
            output_parts.extend([f"{file_name}", f"{stack_info}"])

        if self.display_sql:
            sql_oneline = statement.replace("\n", " ")
            output_parts.append(f"{_pretty_format_sql(sql_oneline)}")

        if self.display_parameters:
            output_parts.append(f"{parameters}")

        self.print_fn(" | ".join(output_parts))

    def __enter__(self):
        import airflow.settings

        event.listen(airflow.settings.engine, "before_cursor_execute", self.before_cursor_execute)
        event.listen(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute)

    def __exit__(self, type_, value, traceback):  # noqa pylint: disable=redefined-outer-name
        import airflow.settings

        event.remove(airflow.settings.engine, "before_cursor_execute", self.before_cursor_execute)
        event.remove(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute)


trace_queries = TraceQueries  # pylint: disable=invalid-name


class CountQueriesResult:
    """
    Counter for number of queries.
    """

    def __init__(self):
        self.count = 0


class CountQueries:
    """
    Counts the number of queries sent to Airflow Database in a given context.

    Does not support multiple processes. When a new process is started in context, its queries will
    not be included.

    :param print_fn: The function used to display the text. By default, ``builtins.print``
    """

    def __init__(self, print_fn: Callable[[str], None] = print):
        self.result = CountQueriesResult()
        self.print_fn = print_fn

    def __enter__(self):
        import airflow.settings

        event.listen(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute)
        return self.result

    def __exit__(self, type_, value, traceback):  # noqa pylint: disable=redefined-outer-name
        import airflow.settings

        event.remove(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute)
        self.print_fn(f"Count SQL queries: {self.result.count}")

    def after_cursor_execute(
        self,
        conn,  # pylint: disable=unused-argument
        cursor,  # pylint: disable=unused-argument
        statement,  # pylint: disable=unused-argument
        parameters,  # pylint: disable=unused-argument
        context,  # pylint: disable=unused-argument
        executemany,
    ):  # pylint: disable=unused-argument
        """
        Executed after cursor.

        :param conn: connection
        :param cursor: cursor
        :param statement: statement
        :param parameters: parameters
        :param context: context
        :param executemany: whether many statements executed
        """
        self.result.count += 1


count_queries = CountQueries  # pylint: disable=invalid-name

if __name__ == "__main__":

    # Example:
    def case():
        """Example case."""
        import logging
        from unittest import mock

        from airflow.jobs.scheduler_job import DagFileProcessor

        with mock.patch.dict(
            "os.environ",
            {
                "PERF_DAGS_COUNT": "200",
                "PERF_TASKS_COUNT": "10",
                "PERF_START_AGO": "2d",
                "PERF_SCHEDULE_INTERVAL": "None",
                "PERF_SHAPE": "no_structure",
            },
        ):
            log = logging.getLogger(__name__)
            processor = DagFileProcessor(dag_ids=[], log=log)
            dag_file = os.path.join(os.path.dirname(__file__), os.path.pardir, "dags", "elastic_dag.py")
            processor.process_file(file_path=dag_file, callback_requests=[])

    with trace_queries(), count_queries():
        case()
apache-2.0
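Both helpers above are context managers (`trace_queries` and `count_queries` are just lowercase aliases for the classes), so they compose with any code that talks to the Airflow database. A hedged usage sketch, assuming an initialized Airflow environment; `DagBag` is only an illustrative stand-in for code that issues queries:

    from airflow.models import DagBag

    with trace_queries(display_sql=True), count_queries() as result:
        DagBag().get_dag("example_bash_operator")  # any DB-touching code
    print(f"queries counted: {result.count}")      # the CountQueriesResult from the block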
ShownX/incubator-mxnet
example/rcnn/rcnn/processing/nms.py
41
2329
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import numpy as np

from ..cython.cpu_nms import cpu_nms
try:
    from ..cython.gpu_nms import gpu_nms
except ImportError:
    gpu_nms = None


def py_nms_wrapper(thresh):
    def _nms(dets):
        return nms(dets, thresh)
    return _nms


def cpu_nms_wrapper(thresh):
    def _nms(dets):
        return cpu_nms(dets, thresh)
    return _nms


def gpu_nms_wrapper(thresh, device_id):
    def _nms(dets):
        return gpu_nms(dets, thresh, device_id)
    if gpu_nms is not None:
        return _nms
    else:
        return cpu_nms_wrapper(thresh)


def nms(dets, thresh):
    """
    greedily select boxes with high confidence and overlap with current maximum <= thresh
    rule out overlap >= thresh
    :param dets: [[x1, y1, x2, y2, score]]
    :param thresh: retain overlap < thresh
    :return: indexes to keep
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)

        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]

    return keep
apache-2.0
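A small worked example of the pure-Python `nms()` above, with made-up boxes: two heavily overlapping detections and one disjoint detection, so with `thresh=0.3` the lower-scoring member of the overlapping pair is suppressed:

    import numpy as np

    dets = np.array([
        [10, 10, 50, 50, 0.9],      # A: top score, always kept first
        [12, 12, 52, 52, 0.8],      # B: IoU with A ~0.83 > 0.3, suppressed
        [100, 100, 140, 140, 0.7],  # C: disjoint from A, kept
    ])
    keep = nms(dets, 0.3)           # -> [0, 2]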
nuxeh/keystone
keystone/trust/routers.py
28
2500
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""WSGI Routers for the Trust service."""

import functools

from keystone.common import json_home
from keystone.common import wsgi
from keystone.trust import controllers


_build_resource_relation = functools.partial(
    json_home.build_v3_extension_resource_relation,
    extension_name='OS-TRUST', extension_version='1.0')

TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
    'OS-TRUST', '1.0', 'trust_id')


class Routers(wsgi.RoutersBase):

    def append_v3_routers(self, mapper, routers):
        trust_controller = controllers.TrustV3()

        self._add_resource(
            mapper, trust_controller,
            path='/OS-TRUST/trusts',
            get_action='list_trusts',
            post_action='create_trust',
            rel=_build_resource_relation(resource_name='trusts'))
        self._add_resource(
            mapper, trust_controller,
            path='/OS-TRUST/trusts/{trust_id}',
            get_action='get_trust',
            delete_action='delete_trust',
            rel=_build_resource_relation(resource_name='trust'),
            path_vars={
                'trust_id': TRUST_ID_PARAMETER_RELATION,
            })
        self._add_resource(
            mapper, trust_controller,
            path='/OS-TRUST/trusts/{trust_id}/roles',
            get_action='list_roles_for_trust',
            rel=_build_resource_relation(resource_name='trust_roles'),
            path_vars={
                'trust_id': TRUST_ID_PARAMETER_RELATION,
            })
        self._add_resource(
            mapper, trust_controller,
            path='/OS-TRUST/trusts/{trust_id}/roles/{role_id}',
            get_head_action='get_role_for_trust',
            rel=_build_resource_relation(resource_name='trust_role'),
            path_vars={
                'trust_id': TRUST_ID_PARAMETER_RELATION,
                'role_id': json_home.Parameters.ROLE_ID,
            })
apache-2.0
pzajda/eloquence
scons-local-2.5.0/SCons/Platform/win32.py
3
14950
"""SCons.Platform.win32 Platform-specific initialization for Win32 systems. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Platform.Platform() selection method. """ # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Platform/win32.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog" import os import os.path import sys import tempfile from SCons.Platform.posix import exitvalmap from SCons.Platform import TempFileMunge import SCons.Util try: import msvcrt import win32api import win32con msvcrt.get_osfhandle win32api.SetHandleInformation win32con.HANDLE_FLAG_INHERIT except ImportError: parallel_msg = \ "you do not seem to have the pywin32 extensions installed;\n" + \ "\tparallel (-j) builds may not work reliably with open Python files." except AttributeError: parallel_msg = \ "your pywin32 extensions do not support file handle operations;\n" + \ "\tparallel (-j) builds may not work reliably with open Python files." else: parallel_msg = None _builtin_file = file _builtin_open = open class _scons_file(_builtin_file): def __init__(self, *args, **kw): _builtin_file.__init__(self, *args, **kw) win32api.SetHandleInformation(msvcrt.get_osfhandle(self.fileno()), win32con.HANDLE_FLAG_INHERIT, 0) def _scons_open(*args, **kw): fp = _builtin_open(*args, **kw) win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), win32con.HANDLE_FLAG_INHERIT, 0) return fp file = _scons_file open = _scons_open try: import threading spawn_lock = threading.Lock() # This locked version of spawnve works around a Windows # MSVCRT bug, because its spawnve is not thread-safe. # Without this, python can randomly crash while using -jN. # See the python bug at http://bugs.python.org/issue6476 # and SCons issue at # http://scons.tigris.org/issues/show_bug.cgi?id=2449 def spawnve(mode, file, args, env): spawn_lock.acquire() try: if mode == os.P_WAIT: ret = os.spawnve(os.P_NOWAIT, file, args, env) else: ret = os.spawnve(mode, file, args, env) finally: spawn_lock.release() if mode == os.P_WAIT: pid, status = os.waitpid(ret, 0) ret = status >> 8 return ret except ImportError: # Use the unsafe method of spawnve. # Please, don't try to optimize this try-except block # away by assuming that the threading module is always present. 
# In the test test/option-j.py we intentionally call SCons with # a fake threading.py that raises an import exception right away, # simulating a non-existent package. def spawnve(mode, file, args, env): return os.spawnve(mode, file, args, env) # The upshot of all this is that, if you are using Python 1.5.2, # you had better have cmd or command.com in your PATH when you run # scons. def piped_spawn(sh, escape, cmd, args, env, stdout, stderr): # There is no direct way to do that in python. What we do # here should work for most cases: # In case stdout (stderr) is not redirected to a file, # we redirect it into a temporary file tmpFileStdout # (tmpFileStderr) and copy the contents of this file # to stdout (stderr) given in the argument if not sh: sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") return 127 else: # one temporary file for stdout and stderr tmpFileStdout = os.path.normpath(tempfile.mktemp()) tmpFileStderr = os.path.normpath(tempfile.mktemp()) # check if output is redirected stdoutRedirected = 0 stderrRedirected = 0 for arg in args: # are there more possibilities to redirect stdout ? if (arg.find( ">", 0, 1 ) != -1 or arg.find( "1>", 0, 2 ) != -1): stdoutRedirected = 1 # are there more possibilities to redirect stderr ? if arg.find( "2>", 0, 2 ) != -1: stderrRedirected = 1 # redirect output of non-redirected streams to our tempfiles if stdoutRedirected == 0: args.append(">" + str(tmpFileStdout)) if stderrRedirected == 0: args.append("2>" + str(tmpFileStderr)) # actually do the spawn try: args = [sh, '/C', escape(' '.join(args)) ] ret = spawnve(os.P_WAIT, sh, args, env) except OSError, e: # catch any error try: ret = exitvalmap[e[0]] except KeyError: sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e[0], cmd, e[1])) if stderr is not None: stderr.write("scons: %s: %s\n" % (cmd, e[1])) # copy child output from tempfiles to our streams # and do clean up stuff if stdout is not None and stdoutRedirected == 0: try: stdout.write(open( tmpFileStdout, "r" ).read()) os.remove( tmpFileStdout ) except (IOError, OSError): pass if stderr is not None and stderrRedirected == 0: try: stderr.write(open( tmpFileStderr, "r" ).read()) os.remove( tmpFileStderr ) except (IOError, OSError): pass return ret def exec_spawn(l, env): try: result = spawnve(os.P_WAIT, l[0], l, env) except OSError, e: try: result = exitvalmap[e[0]] sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) except KeyError: result = 127 if len(l) > 2: if len(l[2]) < 1000: command = ' '.join(l[0:3]) else: command = l[0] else: command = l[0] sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e[0], command, e[1])) return result def spawn(sh, escape, cmd, args, env): if not sh: sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") return 127 return exec_spawn([sh, '/C', escape(' '.join(args))], env) # Windows does not allow special characters in file names anyway, so no # need for a complex escape function, we will just quote the arg, except # that "cmd /c" requires that if an argument ends with a backslash it # needs to be escaped so as not to interfere with closing double quote # that we add. 
def escape(x): if x[-1] == '\\': x = x + '\\' return '"' + x + '"' # Get the windows system directory name _system_root = None def get_system_root(): global _system_root if _system_root is not None: return _system_root # A resonable default if we can't read the registry val = os.environ.get('SystemRoot', "C:\\WINDOWS") if SCons.Util.can_read_reg: try: # Look for Windows NT system root k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows NT\\CurrentVersion') val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') except SCons.Util.RegError: try: # Okay, try the Windows 9x system root k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows\\CurrentVersion') val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') except KeyboardInterrupt: raise except: pass _system_root = val return val # Get the location of the program files directory def get_program_files_dir(): # Now see if we can look in the registry... val = '' if SCons.Util.can_read_reg: try: # Look for Windows Program Files directory k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows\\CurrentVersion') val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') except SCons.Util.RegError: val = '' pass if val == '': # A reasonable default if we can't read the registry # (Actually, it's pretty reasonable even if we can :-) val = os.path.join(os.path.dirname(get_system_root()),"Program Files") return val # Determine which windows CPU were running on. class ArchDefinition(object): """ A class for defining architecture-specific settings and logic. """ def __init__(self, arch, synonyms=[]): self.arch = arch self.synonyms = synonyms SupportedArchitectureList = [ ArchDefinition( 'x86', ['i386', 'i486', 'i586', 'i686'], ), ArchDefinition( 'x86_64', ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], ), ArchDefinition( 'ia64', ['IA64'], ), ] SupportedArchitectureMap = {} for a in SupportedArchitectureList: SupportedArchitectureMap[a.arch] = a for s in a.synonyms: SupportedArchitectureMap[s] = a def get_architecture(arch=None): """Returns the definition for the specified architecture string. If no string is specified, the system default is returned (as defined by the PROCESSOR_ARCHITEW6432 or PROCESSOR_ARCHITECTURE environment variables). """ if arch is None: arch = os.environ.get('PROCESSOR_ARCHITEW6432') if not arch: arch = os.environ.get('PROCESSOR_ARCHITECTURE') return SupportedArchitectureMap.get(arch, ArchDefinition('', [''])) def generate(env): # Attempt to find cmd.exe (for WinNT/2k/XP) or # command.com for Win9x cmd_interp = '' # First see if we can look in the registry... if SCons.Util.can_read_reg: try: # Look for Windows NT system root k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows NT\\CurrentVersion') val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') cmd_interp = os.path.join(val, 'System32\\cmd.exe') except SCons.Util.RegError: try: # Okay, try the Windows 9x system root k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows\\CurrentVersion') val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') cmd_interp = os.path.join(val, 'command.com') except KeyboardInterrupt: raise except: pass # For the special case of not having access to the registry, we # use a temporary path and pathext to attempt to find the command # interpreter. If we fail, we try to find the interpreter through # the env's PATH. 
The problem with that is that it might not # contain an ENV and a PATH. if not cmd_interp: systemroot = get_system_root() tmp_path = systemroot + os.pathsep + \ os.path.join(systemroot,'System32') tmp_pathext = '.com;.exe;.bat;.cmd' if 'PATHEXT' in os.environ: tmp_pathext = os.environ['PATHEXT'] cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext) if not cmd_interp: cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext) if not cmd_interp: cmd_interp = env.Detect('cmd') if not cmd_interp: cmd_interp = env.Detect('command') if 'ENV' not in env: env['ENV'] = {} # Import things from the external environment to the construction # environment's ENV. This is a potential slippery slope, because we # *don't* want to make builds dependent on the user's environment by # default. We're doing this for SystemRoot, though, because it's # needed for anything that uses sockets, and seldom changes, and # for SystemDrive because it's related. # # Weigh the impact carefully before adding other variables to this list. import_env = [ 'SystemDrive', 'SystemRoot', 'TEMP', 'TMP' ] for var in import_env: v = os.environ.get(var) if v: env['ENV'][var] = v if 'COMSPEC' not in env['ENV']: v = os.environ.get("COMSPEC") if v: env['ENV']['COMSPEC'] = v env.AppendENVPath('PATH', get_system_root() + '\System32') env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD' env['OBJPREFIX'] = '' env['OBJSUFFIX'] = '.obj' env['SHOBJPREFIX'] = '$OBJPREFIX' env['SHOBJSUFFIX'] = '$OBJSUFFIX' env['PROGPREFIX'] = '' env['PROGSUFFIX'] = '.exe' env['LIBPREFIX'] = '' env['LIBSUFFIX'] = '.lib' env['SHLIBPREFIX'] = '' env['SHLIBSUFFIX'] = '.dll' env['LIBPREFIXES'] = [ '$LIBPREFIX' ] env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] env['PSPAWN'] = piped_spawn env['SPAWN'] = spawn env['SHELL'] = cmd_interp env['TEMPFILE'] = TempFileMunge env['TEMPFILEPREFIX'] = '@' env['MAXLINELENGTH'] = 2048 env['ESCAPE'] = escape env['HOST_OS'] = 'win32' env['HOST_ARCH'] = get_architecture().arch # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
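One detail in win32.py worth pinning down is `escape()`: because the joined command line is later handed to `cmd /C` inside double quotes, a trailing backslash must be doubled or it would swallow the closing quote. The expected values below are derived by reading the function above, not from SCons documentation:

    assert escape('hello') == '"hello"'
    assert escape('C:\\build\\') == '"C:\\build\\\\"'  # trailing backslash doubled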
chapellu/pekee2.0
Python/test_joystick2.py
1
2391
import pygame
import sys
import time
import serial

ser = serial.Serial('/dev/ttyACM0', 115200)

pygame.init()
pygame.joystick.init()
print (pygame.joystick.get_count())
_joystick = pygame.joystick.Joystick(0)
_joystick.init()
print (_joystick.get_init())
print (_joystick.get_id())
print (_joystick.get_name())
print (_joystick.get_numaxes())
print (_joystick.get_numballs())
print (_joystick.get_numbuttons())
print (_joystick.get_numhats())
print (_joystick.get_axis(0))

axes = [0.0] * _joystick.get_numaxes()
buttons = [False] * _joystick.get_numbuttons()
value_xaxes_old = 0
value_yaxes_old = 0
old_message = "a0000"
message = old_message  # initialized so the send check below never sees an unbound name
seuil = 10  # "seuil" = threshold: minimum change before a new value is sent
arret = False  # "arret" = stop: True while an emergency stop is active

keep_alive = True
while keep_alive:
    event = pygame.event.wait()
    pygame.event.clear()
    if event.type == pygame.QUIT:
        keep_alive = False
    elif event.type == pygame.JOYAXISMOTION:
        # during an emergency stop, wait until both axes return to zero
        while arret:
            event_arret = pygame.event.wait()
            pygame.event.clear()
            e_arret = event_arret.dict
            if 'value' in e_arret.keys():
                axes[e_arret['axis']] = e_arret['value']
                if axes[0] == 0 and axes[1] == 0:
                    arret = False
        e = event.dict
        axes[e['axis']] = e['value']
        if abs(axes[1]) > 0.2:
            value_axes = int(axes[1] * (axes[3] - 1) * 50)
            if abs(value_axes - value_yaxes_old) > seuil:
                message = "a{:4}".format(value_axes)
                value_yaxes_old = value_axes
        elif abs(axes[0]) > 0.2:
            value_axes = int(axes[0] * (axes[3] - 1) * 50)
            if abs(value_axes - value_xaxes_old) > seuil:
                message = "r{:4}".format(value_axes)
                value_xaxes_old = value_axes
        else:
            message = "a0000"
    elif event.type in [pygame.JOYBUTTONUP, pygame.JOYBUTTONDOWN]:
        e = event.dict
        buttons[e['button']] ^= True
        print(e['button'])
        if buttons[7]:
            keep_alive = False
            buttons[7] = False
        if buttons[0]:
            arret = True
            print("STOP")
            message = "s0000"
            ser.write(str.encode(message))
            buttons[0] = False
    print("ENVOI")  # "ENVOI" = send
    if message != old_message and not arret:
        print(message)
        ser.write(str.encode(message))
        old_message = message
mit
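The script writes fixed-width five-byte commands over the serial port. A hedged sketch of the counterpart reader (the meanings "a" = advance, "r" = rotate, "s" = stop are inferred from how the messages are built above, not confirmed by the repo):

    import serial

    ser = serial.Serial('/dev/ttyACM0', 115200)
    while True:
        msg = ser.read(5)                      # e.g. b"a -50", b"r  20", b"s0000"
        kind, value = msg[:1].decode(), int(msg[1:])
        if kind == 's':
            pass  # emergency stop would go here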
mat12/mytest
lib/python/Components/NimManager.py
1
79035
from time import localtime, mktime
from datetime import datetime
import xml.etree.cElementTree
from os import path

from enigma import eDVBSatelliteEquipmentControl as secClass, \
    eDVBSatelliteLNBParameters as lnbParam, \
    eDVBSatelliteDiseqcParameters as diseqcParam, \
    eDVBSatelliteSwitchParameters as switchParam, \
    eDVBSatelliteRotorParameters as rotorParam, \
    eDVBResourceManager, eDVBDB, eEnv

from Tools.HardwareInfo import HardwareInfo
from Tools.BoundFunction import boundFunction
from Components.About import about

from config import config, ConfigSubsection, ConfigSelection, ConfigFloat, ConfigSatlist, ConfigYesNo, ConfigInteger, ConfigSubList, ConfigNothing, ConfigSubDict, ConfigOnOff, ConfigDateTime, ConfigText

maxFixedLnbPositions = 0

# LNB65 3601 All satellites 1 (USALS)
# LNB66 3602 All satellites 2 (USALS)
# LNB67 3603 All satellites 3 (USALS)
# LNB68 3604 All satellites 4 (USALS)
# LNB69 3605 Selecting satellites 1 (USALS)
# LNB70 3606 Selecting satellites 2 (USALS)
MAX_LNB_WILDCARDS = 6
MAX_ORBITPOSITION_WILDCARDS = 6

#magic numbers
ORBITPOSITION_LIMIT = 3600

def getConfigSatlist(orbpos, satlist):
    default_orbpos = None
    for x in satlist:
        if x[0] == orbpos:
            default_orbpos = orbpos
            break
    return ConfigSatlist(satlist, default_orbpos)

class SecConfigure:
    def getConfiguredSats(self):
        return self.configuredSatellites

    def addSatellite(self, sec, orbpos):
        sec.addSatellite(orbpos)
        self.configuredSatellites.add(orbpos)

    def addLNBSimple(self, sec, slotid, diseqcmode, toneburstmode = diseqcParam.NO,
            diseqcpos = diseqcParam.SENDNO, orbpos = 0, longitude = 0, latitude = 0,
            loDirection = 0, laDirection = 0, turningSpeed = rotorParam.FAST,
            useInputPower=True, inputPowerDelta=50, fastDiSEqC = False,
            setVoltageTone = True, diseqc13V = False, CircularLNB = False):
        if orbpos is None or orbpos == 3600 or orbpos == 3601:
            return
        #simple defaults
        if sec.addLNB():
            print "No space left on m_lnbs (max No. 144 LNBs exceeded)"
            return
        tunermask = 1 << slotid
        if self.equal.has_key(slotid):
            for slot in self.equal[slotid]:
                tunermask |= (1 << slot)
        if self.linked.has_key(slotid):
            for slot in self.linked[slotid]:
                tunermask |= (1 << slot)
        sec.setLNBSatCR(-1)
        sec.setLNBSatCRpositionnumber(1)
        sec.setLNBLOFL(CircularLNB and 10750000 or 9750000)
        sec.setLNBLOFH(CircularLNB and 10750000 or 10600000)
        sec.setLNBThreshold(CircularLNB and 10750000 or 11700000)
        sec.setLNBIncreasedVoltage(False)
        sec.setRepeats(0)
        sec.setFastDiSEqC(fastDiSEqC)
        sec.setSeqRepeat(False)
        sec.setCommandOrder(0)

        #user values
        sec.setDiSEqCMode(3 if diseqcmode == 4 else diseqcmode)
        sec.setToneburst(toneburstmode)
        sec.setCommittedCommand(diseqcpos)
        sec.setUncommittedCommand(0) # SENDNO

        if 0 <= diseqcmode < 3:
            self.addSatellite(sec, orbpos)
            if setVoltageTone:
                if diseqc13V:
                    sec.setVoltageMode(switchParam.HV_13)
                else:
                    sec.setVoltageMode(switchParam.HV)
                sec.setToneMode(switchParam.HILO)
            else:
                # noinspection PyProtectedMember
                sec.setVoltageMode(switchParam._14V)
                sec.setToneMode(switchParam.OFF)
        elif 3 <= diseqcmode < 5: # diseqc 1.2
            if self.satposdepends.has_key(slotid):
                for slot in self.satposdepends[slotid]:
                    tunermask |= (1 << slot)
            sec.setLatitude(latitude)
            sec.setLaDirection(laDirection)
            sec.setLongitude(longitude)
            sec.setLoDirection(loDirection)
            sec.setUseInputpower(useInputPower)
            sec.setInputpowerDelta(inputPowerDelta)
            sec.setRotorTurningSpeed(turningSpeed)
            user_satList = self.NimManager.satList
            if diseqcmode == 4:
                user_satList = []
                if orbpos and isinstance(orbpos, str):
                    for user_sat in self.NimManager.satList:
                        if str(user_sat[0]) in orbpos:
                            user_satList.append(user_sat)
            for x in user_satList:
                print "Add sat " + str(x[0])
                self.addSatellite(sec, int(x[0]))
                if diseqc13V:
                    sec.setVoltageMode(switchParam.HV_13)
                else:
                    sec.setVoltageMode(switchParam.HV)
                sec.setToneMode(switchParam.HILO)
                sec.setRotorPosNum(0) # USALS
        sec.setLNBSlotMask(tunermask)

    def setSatposDepends(self, sec, nim1, nim2):
        print "tuner", nim1, "depends on satpos of", nim2
        sec.setTunerDepends(nim1, nim2)

    def linkInternally(self, slotid):
        nim = self.NimManager.getNim(slotid)
        if nim.internallyConnectableTo is not None:
            nim.setInternalLink()

    def linkNIMs(self, sec, nim1, nim2):
        print "link tuner", nim1, "to tuner", nim2
        # for internally connect tuner A to B
        if '7356' not in about.getChipSetString() and nim2 == (nim1 - 1):
            self.linkInternally(nim1)
        elif '7356' in about.getChipSetString():
            self.linkInternally(nim1)
        sec.setTunerLinked(nim1, nim2)

    def getRoot(self, slotid, connto):
        visited = []
        while self.NimManager.getNimConfig(connto).configMode.value in ("satposdepends", "equal", "loopthrough"):
            connto = int(self.NimManager.getNimConfig(connto).connectedTo.value)
            if connto in visited: # prevent endless loop
                return slotid
            visited.append(connto)
        return connto

    def update(self):
        sec = secClass.getInstance()
        self.configuredSatellites = set()
        for slotid in self.NimManager.getNimListOfType("DVB-S"):
            if self.NimManager.nimInternallyConnectableTo(slotid) is not None:
                self.NimManager.nimRemoveInternalLink(slotid)
        sec.clear() ## this does unlink NIMs too !!
        print "sec config cleared"

        self.linked = { }
        self.satposdepends = { }
        self.equal = { }

        nim_slots = self.NimManager.nim_slots
        used_nim_slots = [ ]

        for slot in nim_slots:
            if slot.type is not None:
                used_nim_slots.append((slot.slot, slot.description,
                    slot.config.configMode.value != "nothing" and True or False,
                    slot.isCompatible("DVB-S2"),
                    slot.frontend_id is None and -1 or slot.frontend_id))
        eDVBResourceManager.getInstance().setFrontendSlotInformations(used_nim_slots)

        try:
            for slot in nim_slots:
                if slot.frontend_id is not None:
                    types = [type for type in ["DVB-C", "DVB-T", "DVB-T2", "DVB-S", "DVB-S2", "ATSC"] if eDVBResourceManager.getInstance().frontendIsCompatible(slot.frontend_id, type)]
                    if "DVB-T2" in types:
                        # DVB-T2 implies DVB-T support
                        types.remove("DVB-T")
                    if "DVB-S2" in types:
                        # DVB-S2 implies DVB-S support
                        types.remove("DVB-S")
                    if len(types) > 1:
                        slot.multi_type = {}
                        for type in types:
                            slot.multi_type[str(types.index(type))] = type
        except:
            pass

        for slot in nim_slots:
            x = slot.slot
            nim = slot.config
            if slot.isCompatible("DVB-S"):
                # save what nim we link to/are equal to/satposdepends to.
                # this is stored in the *value* (not index!) of the config list
                if nim.configMode.value == "equal":
                    connto = self.getRoot(x, int(nim.connectedTo.value))
                    if not self.equal.has_key(connto):
                        self.equal[connto] = []
                    self.equal[connto].append(x)
                elif nim.configMode.value == "loopthrough":
                    self.linkNIMs(sec, x, int(nim.connectedTo.value))
                    connto = self.getRoot(x, int(nim.connectedTo.value))
                    if not self.linked.has_key(connto):
                        self.linked[connto] = []
                    self.linked[connto].append(x)
                elif nim.configMode.value == "satposdepends":
                    self.setSatposDepends(sec, x, int(nim.connectedTo.value))
                    connto = self.getRoot(x, int(nim.connectedTo.value))
                    if not self.satposdepends.has_key(connto):
                        self.satposdepends[connto] = []
                    self.satposdepends[connto].append(x)

        for slot in nim_slots:
            x = slot.slot
            nim = slot.config
            hw = HardwareInfo()
            if slot.isCompatible("DVB-S"):
                print "slot: " + str(x) + " configmode: " + str(nim.configMode.value)
                if nim.configMode.value in ( "loopthrough", "satposdepends", "nothing" ):
                    pass
                else:
                    sec.setSlotNotLinked(x)
                    if nim.configMode.value == "equal":
                        pass
                    elif nim.configMode.value == "simple": #simple config
                        print "diseqcmode: ", nim.diseqcMode.value
                        if nim.diseqcMode.value == "single": #single
                            currentCircular = False
                            if nim.diseqcA.value in ("360", "560"):
                                currentCircular = nim.simpleDiSEqCSetCircularLNB.value
                            if nim.simpleSingleSendDiSEqC.value:
                                self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular)
                            else:
                                self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.NONE, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular)
                        elif nim.diseqcMode.value == "toneburst_a_b": #Toneburst A/B
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.A, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.B, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
                        elif nim.diseqcMode.value == "diseqc_a_b": #DiSEqC A/B
                            fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
                            setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                        elif nim.diseqcMode.value == "diseqc_a_b_c_d": #DiSEqC A/B/C/D
                            fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
                            setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcC.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                            self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcD.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
                        elif nim.diseqcMode.value in ("positioner", "positioner_select"): #Positioner
                            current_mode = 3
                            sat = 0
                            if nim.diseqcMode.value == "positioner_select":
                                current_mode = 4
                                sat = nim.userSatellitesList.value
                            if nim.latitudeOrientation.value == "north":
                                laValue = rotorParam.NORTH
                            else:
                                laValue = rotorParam.SOUTH
                            if nim.longitudeOrientation.value == "east":
                                loValue = rotorParam.EAST
                            else:
                                loValue = rotorParam.WEST
                            inputPowerDelta = nim.powerThreshold.value
                            useInputPower = False
                            turning_speed = 0
                            if nim.powerMeasurement.value:
                                useInputPower = True
                                turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
                                if turn_speed_dict.has_key(nim.turningSpeed.value):
                                    turning_speed = turn_speed_dict[nim.turningSpeed.value]
                                else:
                                    beg_time = localtime(nim.fastTurningBegin.value)
                                    end_time = localtime(nim.fastTurningEnd.value)
                                    turning_speed = ((beg_time.tm_hour+1) * 60 + beg_time.tm_min + 1) << 16
                                    turning_speed |= (end_time.tm_hour+1) * 60 + end_time.tm_min + 1
                            self.addLNBSimple(sec, slotid = x, diseqcmode = current_mode, orbpos = sat, longitude = nim.longitude.float, loDirection = loValue, latitude = nim.latitude.float, laDirection = laValue, turningSpeed = turning_speed, useInputPower = useInputPower, inputPowerDelta = inputPowerDelta, diseqc13V = nim.diseqc13V.value)
                    elif nim.configMode.value == "advanced": #advanced config
                        self.updateAdvanced(sec, x)
        print "sec config completed"

    def updateAdvanced(self, sec, slotid):
        try:
            if config.Nims[slotid].advanced.unicableconnected is not None:
                if config.Nims[slotid].advanced.unicableconnected.value:
                    config.Nims[slotid].advanced.unicableconnectedTo.save_forced = True
                    self.linkNIMs(sec, slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
                    connto = self.getRoot(slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
                    if not self.linked.has_key(connto):
                        self.linked[connto] = []
                    self.linked[connto].append(slotid)
                else:
                    config.Nims[slotid].advanced.unicableconnectedTo.save_forced = False
        except:
            pass

        lnbSat = {}
        for x in range(1, 71):
            lnbSat[x] = []

        #wildcard for all satellites ( for rotor )
        for x in range(3601, 3605):
            lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value)
            if lnb != 0:
                for x in self.NimManager.satList:
                    print "add", x[0], "to", lnb
                    lnbSat[lnb].append(x[0])

        #wildcard for user satellites ( for rotor )
        for x in range(3605, 3607):
            lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value)
            if lnb != 0:
                for user_sat in self.NimManager.satList:
                    if str(user_sat[0]) in config.Nims[slotid].advanced.sat[x].userSatellitesList.value:
                        print "add", user_sat[0], "to", lnb
                        lnbSat[lnb].append(user_sat[0])

        for x in self.NimManager.satList:
            lnb = int(config.Nims[slotid].advanced.sat[x[0]].lnb.value)
            if lnb != 0:
                print "add", x[0], "to", lnb
                lnbSat[lnb].append(x[0])

        for x in range(1, 71):
            if len(lnbSat[x]) > 0:
                currLnb = config.Nims[slotid].advanced.lnb[x]
                if sec.addLNB():
                    print "No space left on m_lnbs (max No. 144 LNBs exceeded)"
                    return

                posnum = 1 # default if LNB movable
                if x <= maxFixedLnbPositions:
                    posnum = x
                    sec.setLNBSatCRpositionnumber(x) # LNB has fixed Position
                else:
                    sec.setLNBSatCRpositionnumber(0) # or not (movable LNB)

                tunermask = 1 << slotid
                if self.equal.has_key(slotid):
                    for slot in self.equal[slotid]:
                        tunermask |= (1 << slot)
                if self.linked.has_key(slotid):
                    for slot in self.linked[slotid]:
                        tunermask |= (1 << slot)
                if currLnb.lof.value != "unicable":
                    sec.setLNBSatCR(-1)

                if currLnb.lof.value == "universal_lnb":
                    sec.setLNBLOFL(9750000)
                    sec.setLNBLOFH(10600000)
                    sec.setLNBThreshold(11700000)
                elif currLnb.lof.value == "unicable":
                    def setupUnicable(configManufacturer, ProductDict):
                        manufacturer_name = configManufacturer.value
                        manufacturer = ProductDict[manufacturer_name]
                        product_name = manufacturer.product.value
                        if product_name == "None" and manufacturer.product.saved_value != "None":
                            product_name = manufacturer.product.value = manufacturer.product.saved_value
                        manufacturer_scr = manufacturer.scr
                        manufacturer_positions_value = manufacturer.positions[product_name][0].value
                        position_idx = (posnum - 1) % manufacturer_positions_value
                        if product_name in manufacturer_scr:
                            diction = manufacturer.diction[product_name].value
                            if diction != "EN50607" or (posnum <= manufacturer_positions_value and x <= maxFixedLnbPositions): #for every allowed position
                                if diction == "EN50607":
                                    sec.setLNBSatCRformat(1) #JESS
                                else:
                                    sec.setLNBSatCRformat(0) #DiSEqC
                                sec.setLNBSatCR(manufacturer_scr[product_name].index)
                                sec.setLNBSatCRvco(manufacturer.vco[product_name][manufacturer_scr[product_name].index].value*1000)
                                sec.setLNBSatCRpositions(manufacturer_positions_value)
                                sec.setLNBLOFL(manufacturer.lofl[product_name][position_idx].value * 1000)
                                sec.setLNBLOFH(manufacturer.lofh[product_name][position_idx].value * 1000)
                                sec.setLNBThreshold(manufacturer.loft[product_name][position_idx].value * 1000)
                                configManufacturer.save_forced = True
                                manufacturer.product.save_forced = True
                                manufacturer.vco[product_name][manufacturer_scr[product_name].index].save_forced = True
                            else: #positionnumber out of range
                                print "positionnumber out of range"
                        else:
                            print "no product in list"

                    if currLnb.unicable.value == "unicable_user":
                        #TODO satpositions for satcruser
                        if currLnb.dictionuser.value == "EN50607":
                            sec.setLNBSatCRformat(1)
                            sec.setLNBSatCR(currLnb.satcruserEN50607.index)
                            sec.setLNBSatCRvco(currLnb.satcrvcouserEN50607[currLnb.satcruserEN50607.index].value*1000)
                        else:
                            sec.setLNBSatCRformat(0)
                            sec.setLNBSatCR(currLnb.satcruserEN50494.index)
                            sec.setLNBSatCRvco(currLnb.satcrvcouserEN50494[currLnb.satcruserEN50494.index].value*1000)
                        sec.setLNBLOFL(currLnb.lofl.value * 1000)
                        sec.setLNBLOFH(currLnb.lofh.value * 1000)
                        sec.setLNBThreshold(currLnb.threshold.value * 1000)
                        sec.setLNBSatCRpositions(1)
                    elif currLnb.unicable.value == "unicable_matrix":
                        self.reconstructUnicableDate(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix, currLnb)
                        setupUnicable(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix)
                    elif currLnb.unicable.value == "unicable_lnb":
                        self.reconstructUnicableDate(currLnb.unicableLnbManufacturer, currLnb.unicableLnb, currLnb)
                        setupUnicable(currLnb.unicableLnbManufacturer, currLnb.unicableLnb)
                elif currLnb.lof.value == "c_band":
                    sec.setLNBLOFL(5150000)
                    sec.setLNBLOFH(5150000)
                    sec.setLNBThreshold(5150000)
                elif currLnb.lof.value == "user_defined":
                    sec.setLNBLOFL(currLnb.lofl.value * 1000)
                    sec.setLNBLOFH(currLnb.lofh.value * 1000)
                    sec.setLNBThreshold(currLnb.threshold.value * 1000)
                elif currLnb.lof.value == "circular_lnb":
                    sec.setLNBLOFL(10750000)
                    sec.setLNBLOFH(10750000)
                    sec.setLNBThreshold(10750000)

                if currLnb.increased_voltage.value:
                    sec.setLNBIncreasedVoltage(True)
                else:
                    sec.setLNBIncreasedVoltage(False)

                dm = currLnb.diseqcMode.value
                if dm == "none":
                    sec.setDiSEqCMode(diseqcParam.NONE)
                elif dm == "1_0":
                    sec.setDiSEqCMode(diseqcParam.V1_0)
                elif dm == "1_1":
                    sec.setDiSEqCMode(diseqcParam.V1_1)
                elif dm == "1_2":
                    sec.setDiSEqCMode(diseqcParam.V1_2)

                    if self.satposdepends.has_key(slotid):
                        for slot in self.satposdepends[slotid]:
                            tunermask |= (1 << slot)

                if dm != "none":
                    if currLnb.toneburst.value == "none":
                        sec.setToneburst(diseqcParam.NO)
                    elif currLnb.toneburst.value == "A":
                        sec.setToneburst(diseqcParam.A)
                    elif currLnb.toneburst.value == "B":
                        sec.setToneburst(diseqcParam.B)

                    # Committed Diseqc Command
                    cdc = currLnb.commitedDiseqcCommand.value

                    c = { "none": diseqcParam.SENDNO,
                          "AA": diseqcParam.AA,
                          "AB": diseqcParam.AB,
                          "BA": diseqcParam.BA,
                          "BB": diseqcParam.BB }

                    if c.has_key(cdc):
                        sec.setCommittedCommand(c[cdc])
                    else:
                        sec.setCommittedCommand(long(cdc))

                    sec.setFastDiSEqC(currLnb.fastDiseqc.value)
                    sec.setSeqRepeat(currLnb.sequenceRepeat.value)

                    if currLnb.diseqcMode.value == "1_0":
                        currCO = currLnb.commandOrder1_0.value
                        sec.setRepeats(0)
                    else:
                        currCO = currLnb.commandOrder.value

                        udc = int(currLnb.uncommittedDiseqcCommand.value)
                        if udc > 0:
                            sec.setUncommittedCommand(0xF0|(udc-1))
                        else:
                            sec.setUncommittedCommand(0) # SENDNO

                        sec.setRepeats({"none": 0, "one": 1, "two": 2, "three": 3}[currLnb.diseqcRepeats.value])

                    setCommandOrder = False

                    # 0 "committed, toneburst",
                    # 1 "toneburst, committed",
                    # 2 "committed, uncommitted, toneburst",
                    # 3 "toneburst, committed, uncommitted",
                    # 4 "uncommitted, committed, toneburst"
                    # 5 "toneburst, uncommitted, commmitted"
                    order_map = {"ct": 0, "tc": 1, "cut": 2, "tcu": 3, "uct": 4, "tuc": 5}
                    sec.setCommandOrder(order_map[currCO])

                if dm == "1_2":
                    latitude = currLnb.latitude.float
                    sec.setLatitude(latitude)
                    longitude = currLnb.longitude.float
                    sec.setLongitude(longitude)
                    if currLnb.latitudeOrientation.value == "north":
                        sec.setLaDirection(rotorParam.NORTH)
                    else:
                        sec.setLaDirection(rotorParam.SOUTH)
                    if currLnb.longitudeOrientation.value == "east":
                        sec.setLoDirection(rotorParam.EAST)
                    else:
                        sec.setLoDirection(rotorParam.WEST)

                    if currLnb.powerMeasurement.value:
                        sec.setUseInputpower(True)
                        sec.setInputpowerDelta(currLnb.powerThreshold.value)
                        turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
                        if turn_speed_dict.has_key(currLnb.turningSpeed.value):
                            turning_speed = turn_speed_dict[currLnb.turningSpeed.value]
                        else:
                            beg_time = localtime(currLnb.fastTurningBegin.value)
                            end_time = localtime(currLnb.fastTurningEnd.value)
                            turning_speed = ((beg_time.tm_hour + 1) * 60 + beg_time.tm_min + 1) << 16
                            turning_speed |= (end_time.tm_hour + 1) * 60 + end_time.tm_min + 1
                        sec.setRotorTurningSpeed(turning_speed)
                    else:
                        sec.setUseInputpower(False)

                sec.setLNBSlotMask(tunermask)

                sec.setLNBPrio(int(currLnb.prio.value))

                # finally add the orbital positions
                for y in lnbSat[x]:
                    self.addSatellite(sec, y)
                    if x > maxFixedLnbPositions:
                        satpos = x > maxFixedLnbPositions and (3606-(70 - x)) or y
                    else:
                        satpos = y
                    currSat = config.Nims[slotid].advanced.sat[satpos]
                    if currSat.voltage.value == "polarization":
                        if config.Nims[slotid].diseqc13V.value:
                            sec.setVoltageMode(switchParam.HV_13)
                        else:
                            sec.setVoltageMode(switchParam.HV)
                    elif currSat.voltage.value == "13V":
                        # noinspection PyProtectedMember
                        sec.setVoltageMode(switchParam._14V)
                    elif currSat.voltage.value == "18V":
                        # noinspection PyProtectedMember
                        sec.setVoltageMode(switchParam._18V)

                    if currSat.tonemode.value == "band":
                        sec.setToneMode(switchParam.HILO)
                    elif currSat.tonemode.value == "on":
                        sec.setToneMode(switchParam.ON)
                    elif currSat.tonemode.value == "off":
                        sec.setToneMode(switchParam.OFF)

                    if not currSat.usals.value and x <= maxFixedLnbPositions:
                        sec.setRotorPosNum(currSat.rotorposition.value)
                    else:
                        sec.setRotorPosNum(0) #USALS

    def reconstructUnicableDate(self, configManufacturer, ProductDict, currLnb):
        val = currLnb.content.stored_values
        if currLnb.unicable.value == "unicable_lnb":
            ManufacturerName = val.get('unicableLnbManufacturer', 'none')
            SDict = val.get('unicableLnb', None)
        elif currLnb.unicable.value == "unicable_matrix":
            ManufacturerName = val.get('unicableMatrixManufacturer', 'none')
            SDict = val.get('unicableMatrix', None)
        else:
            return
        # print "[reconstructUnicableDate] SDict %s" % SDict
        if SDict is None:
            return

        print "ManufacturerName %s" % ManufacturerName

        PDict = SDict.get(ManufacturerName, None) #dict contained last stored device data
        if PDict is None:
            return

        PN = PDict.get('product', None) #product name
        if PN is None:
            return

        if ManufacturerName in ProductDict.keys(): # manufacture are listed, use its ConfigSubsection
            tmp = ProductDict[ManufacturerName]
            if PN in tmp.product.choices.choices:
                return
        else: #if manufacture not in list, then generate new ConfigSubsection
            print "[reconstructUnicableDate] Manufacturer %s not in unicable.xml" % ManufacturerName
            tmp = ConfigSubsection()
            tmp.scr = ConfigSubDict()
            tmp.vco = ConfigSubDict()
            tmp.lofl = ConfigSubDict()
            tmp.lofh = ConfigSubDict()
            tmp.loft = ConfigSubDict()
            tmp.diction = ConfigSubDict()
            tmp.product = ConfigSelection(choices = [], default = None)

        if PN not in tmp.product.choices.choices:
            print "[reconstructUnicableDate] Product %s not in unicable.xml" % PN
            scrlist = []
            SatCR = int(PDict.get('scr', {PN,1}).get(PN,1)) - 1
            vco = int(PDict.get('vco', {PN,0}).get(PN,0).get(str(SatCR),1))

            positionslist = [1, (9750, 10600, 11700)] ##adenin_todo
            positions = int(positionslist[0])
            tmp.positions = ConfigSubDict()
            tmp.positions[PN] = ConfigSubList()
            tmp.positions[PN].append(ConfigInteger(default=positions, limits = (positions, positions)))

            tmp.vco[PN] = ConfigSubList()

            for cnt in range(0, SatCR + 1):
                vcofreq = (cnt == SatCR) and vco or 0 # equivalent to vcofreq = (cnt == SatCR) ? 1432 : 0
                if vcofreq == 0:
                    scrlist.append(("%d" %(cnt+1), "SCR %d " %(cnt+1) +_("not used")))
                else:
                    scrlist.append(("%d" %(cnt+1), "SCR %d" %(cnt+1)))
                print "vcofreq %d" % vcofreq
                tmp.vco[PN].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq)))

            tmp.scr[PN] = ConfigSelection(choices = scrlist, default = scrlist[SatCR][0])

            tmp.lofl[PN] = ConfigSubList()
            tmp.lofh[PN] = ConfigSubList()
            tmp.loft[PN] = ConfigSubList()
            for cnt in range(1, positions+1):
                lofl = int(positionslist[cnt][0])
                lofh = int(positionslist[cnt][1])
                loft = int(positionslist[cnt][2])
                tmp.lofl[PN].append(ConfigInteger(default=lofl, limits = (lofl, lofl)))
                tmp.lofh[PN].append(ConfigInteger(default=lofh, limits = (lofh, lofh)))
                tmp.loft[PN].append(ConfigInteger(default=loft, limits = (loft, loft)))

            dictionlist = [("EN50494", "Unicable(EN50494)")] ##adenin_todo
            tmp.diction[PN] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0])

            tmp.product.choices.choices.append(PN)
            tmp.product.choices.default = PN

            tmp.scr[PN].save_forced = True
            tmp.scr.save_forced = True
            tmp.vco.save_forced = True
            tmp.product.save_forced = True

            ProductDict[ManufacturerName] = tmp

            if ManufacturerName not in configManufacturer.choices.choices: #check if name in choices list
                configManufacturer.choices.choices.append(ManufacturerName) #add name to choices list

    def __init__(self, nimmgr):
        self.NimManager = nimmgr
        self.configuredSatellites = set()
        self.update()

class NIM(object):
    def __init__(self, slot, type, description, has_outputs=True,
            internally_connectable=None, multi_type=None, frontend_id=None,
            i2c=None, is_empty=False, input_name = None):
        if not multi_type:
            multi_type = {}
        self.slot = slot

        if type not in ("DVB-S", "DVB-C", "DVB-T", "DVB-S2", "DVB-T2", "DVB-C2", "ATSC", None):
            print "warning: unknown NIM type %s, not using." % type
            type = None

        self.type = type
        self.description = description
        self.has_outputs = has_outputs
        self.internally_connectable = internally_connectable
        self.multi_type = multi_type
        self.i2c = i2c
        self.frontend_id = frontend_id
        self.__is_empty = is_empty
        self.input_name = input_name

        self.compatible = {
                None: (None,),
                "DVB-S": ("DVB-S", None),
                "DVB-C": ("DVB-C", None),
                "DVB-T": ("DVB-T", None),
                "DVB-S2": ("DVB-S", "DVB-S2", None),
                "DVB-C2": ("DVB-C", "DVB-C2", None),
                "DVB-T2": ("DVB-T", "DVB-T2", None),
                "ATSC": ("ATSC", None),
            }

    def isCompatible(self, what):
        if not self.isSupported():
            return False
        return what in self.compatible[self.getType()]

    def canBeCompatible(self, what):
        if not self.isSupported():
            return False
        if self.isCompatible(what):
            return True
        for type in self.multi_type.values():
            if what in self.compatible[type]:
                return True
        return False

    def getType(self):
        try:
            if self.isMultiType():
                return self.multi_type[self.config.multiType.value]
        except:
            pass
        return self.type

    def connectableTo(self):
        connectable = {
                "DVB-S": ("DVB-S", "DVB-S2"),
                "DVB-C": ("DVB-C", "DVB-C2"),
                "DVB-T": ("DVB-T", "DVB-T2"),
                "DVB-S2": ("DVB-S", "DVB-S2"),
                "DVB-C2": ("DVB-C", "DVB-C2"),
                "DVB-T2": ("DVB-T", "DVB-T2"),
                "ATSC": "ATSC",
            }
        return connectable[self.getType()]

    def getSlotInputName(self):
        name = self.input_name
        if name is None:
            name = chr(ord('A') + self.slot)
        return name

    slot_input_name = property(getSlotInputName)

    def getSlotName(self):
        # get a friendly description for a slot name.
        # we name them "Tuner A/B/C/...", because that's what's usually written on the back
        # of the device.
        # for DM7080HD "Tuner A1/A2/B/C/..."
        descr = _("Tuner ")
        return descr + self.getSlotInputName()

    slot_name = property(getSlotName)

    def getSlotID(self):
        return chr(ord('A') + self.slot)

    def getI2C(self):
        return self.i2c

    def hasOutputs(self):
        return self.has_outputs

    def internallyConnectableTo(self):
        return self.internally_connectable

    def setInternalLink(self):
        if self.internally_connectable is not None:
            print "setting internal link on frontend id", self.frontend_id
            f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w")
            f.write("internal")
            f.close()

    def removeInternalLink(self):
        if self.internally_connectable is not None:
            print "removing internal link on frontend id", self.frontend_id
            f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w")
            f.write("external")
            f.close()

    def isMultiType(self):
        return len(self.multi_type) > 0

    def isEmpty(self):
        return self.__is_empty

    # empty tuners are supported!
    def isSupported(self):
        return (self.frontend_id is not None) or self.__is_empty

    # returns dict {<slotid>: <type>}
    def getMultiTypeList(self):
        return self.multi_type

    slot_id = property(getSlotID)

    def getFriendlyType(self):
        return {
            "DVB-S": "DVB-S",
            "DVB-T": "DVB-T",
            "DVB-C": "DVB-C",
            "DVB-S2": "DVB-S2",
            "DVB-T2": "DVB-T2",
            "DVB-C2": "DVB-C2",
            "ATSC": "ATSC",
            None: _("empty")
            }[self.getType()]

    friendly_type = property(getFriendlyType)

    def getFriendlyFullDescription(self):
        nim_text = self.slot_name + ": "

        if self.empty:
            nim_text += _("(empty)")
        elif not self.isSupported():
            nim_text += self.description + " (" + _("not supported") + ")"
        else:
            nim_text += self.description + " (" + self.friendly_type + ")"

        return nim_text

    friendly_full_description = property(getFriendlyFullDescription)
    config_mode = property(lambda self: config.Nims[self.slot].configMode.value)
    config = property(lambda self: config.Nims[self.slot])
    empty = property(lambda self: self.getType() is None)

class NimManager:
    def getConfiguredSats(self):
        return self.sec.getConfiguredSats()

    def getTransponders(self, pos):
        if self.transponders.has_key(pos):
            return self.transponders[pos]
        else:
            return []

    def getTranspondersCable(self, nim):
        nimConfig = config.Nims[nim]
        if nimConfig.configMode.value != "nothing" and nimConfig.cable.scan_type.value == "provider":
            return self.transponderscable[self.cablesList[nimConfig.cable.scan_provider.index][0]]
        return [ ]

    def getTranspondersTerrestrial(self, region):
        return self.transpondersterrestrial[region]

    def getCableDescription(self, nim):
        return self.cablesList[config.Nims[nim].scan_provider.index][0]

    def getCableFlags(self, nim):
        return self.cablesList[config.Nims[nim].scan_provider.index][1]

    def getTerrestrialDescription(self, nim):
        return self.terrestrialsList[config.Nims[nim].terrestrial.index][0]

    def getTerrestrialFlags(self, nim):
        return self.terrestrialsList[config.Nims[nim].terrestrial.index][1]

    def getSatDescription(self, pos):
        return self.satellites[pos]

    def sortFunc(self, x):
        orbpos = x[0]
        if orbpos > 1800:
            return orbpos - 3600
        else:
            return orbpos + 1800

    def readTransponders(self):
        self.satellites = { }
        self.transponders = { }
        self.transponderscable = { }
        self.transpondersterrestrial = { }
        self.transpondersatsc = { }
        db = eDVBDB.getInstance()

        if self.hasNimType("DVB-S"):
            print "Reading satellites.xml"
            db.readSatellites(self.satList, self.satellites, self.transponders)
            self.satList.sort() # sort by orbpos

        if self.hasNimType("DVB-C") or self.hasNimType("DVB-T") or self.hasNimType("DVB-T2"):
            print "Reading cables.xml"
            db.readCables(self.cablesList, self.transponderscable)
            print "Reading terrestrial.xml"
            db.readTerrestrials(self.terrestrialsList, self.transpondersterrestrial)

    def enumerateNIMs(self):
        # enum available NIMs. This is currently very dreambox-centric and uses the /proc/bus/nim_sockets interface.
        # the result will be stored into nim_slots.
        # the content of /proc/bus/nim_sockets looks like:
        # NIM Socket 0:
        #          Type: DVB-S
        #          Name: BCM4501 DVB-S2 NIM (internal)
        # NIM Socket 1:
        #          Type: DVB-S
        #          Name: BCM4501 DVB-S2 NIM (internal)
        # NIM Socket 2:
        #          Type: DVB-T
        #          Name: Philips TU1216
        # NIM Socket 3:
        #          Type: DVB-S
        #          Name: Alps BSBE1 702A
        #
        # Type will be either "DVB-S", "DVB-S2", "DVB-T", "DVB-C" or None.

        # nim_slots is an array which has exactly one entry for each slot, even for empty ones.
        self.nim_slots = [ ]

        try:
            nimfile = open("/proc/bus/nim_sockets")
        except IOError:
            return

        current_slot = None

        entries = {}
        for line in nimfile:
            if not line:
                break
            line = line.strip()
            if line.startswith("NIM Socket"):
                parts = line.split(" ")
                current_slot = int(parts[2][:-1])
                entries[current_slot] = {}
            elif line.startswith("Type:"):
                entries[current_slot]["type"] = str(line[6:])
                entries[current_slot]["isempty"] = False
            elif line.strip().startswith("Input_Name:"):
                entries[current_slot]["input_name"] = str(line.strip()[12:])
            elif line.startswith("Name:"):
                entries[current_slot]["name"] = str(line[6:])
                entries[current_slot]["isempty"] = False
            elif line.startswith("Has_Outputs:"):
                input = str(line[len("Has_Outputs:") + 1:])
                entries[current_slot]["has_outputs"] = (input == "yes")
            elif line.startswith("Internally_Connectable:"):
                input = int(line[len("Internally_Connectable:") + 1:])
                entries[current_slot]["internally_connectable"] = input
            elif line.startswith("Frontend_Device:"):
                input = int(line[len("Frontend_Device:") + 1:])
                entries[current_slot]["frontend_device"] = input
            elif line.startswith("Mode"):
                # Mode 0: DVB-C
                # Mode 1: DVB-T
                # "Mode 1: DVB-T" -> ["Mode 1", "DVB-T"]
                split = line.split(":")
                split[1] = split[1].replace(' ','')
                split2 = split[0].split(" ")
                modes = entries[current_slot].get("multi_type", {})
                modes[split2[1]] = split[1]
                entries[current_slot]["multi_type"] = modes
            elif line.startswith("I2C_Device:"):
                input = int(line[len("I2C_Device:") + 1:])
                entries[current_slot]["i2c"] = input
            elif line.startswith("empty"):
                entries[current_slot]["type"] = None
                entries[current_slot]["name"] = _("N/A")
                entries[current_slot]["isempty"] = True
        nimfile.close()

        for id, entry in entries.items():
            if not (entry.has_key("name") and entry.has_key("type")):
                entry["name"] = _("N/A")
                entry["type"] = None
            if not (entry.has_key("i2c")):
                entry["i2c"] = None
            if not (entry.has_key("has_outputs")):
                entry["has_outputs"] = True
            if entry.has_key("frontend_device"): # check if internally connectable
                if path.exists("/proc/stb/frontend/%d/rf_switch" % entry["frontend_device"]):
                    entry["internally_connectable"] = entry["frontend_device"] - 1
                else:
                    entry["internally_connectable"] = None
            else:
                entry["frontend_device"] = entry["internally_connectable"] = None
            if not (entry.has_key("multi_type")):
                if entry["name"] == "DVB-T2/C USB-Stick": # workaround dvbsky hybrid usb stick
                    entry["multi_type"] = {'0': 'DVB-T'}
                    entry["multi_type"] = {'1': 'DVB-C'}
                else:
                    entry["multi_type"] = {}
            if not (entry.has_key("input_name")):
                entry["input_name"] = chr(ord('A') + id)
            self.nim_slots.append(NIM(slot = id, description = entry["name"],
                type = entry["type"], has_outputs = entry["has_outputs"],
                internally_connectable = entry["internally_connectable"],
                multi_type = entry["multi_type"], frontend_id = entry["frontend_device"],
                i2c = entry["i2c"], is_empty = entry["isempty"],
                input_name = entry.get("input_name", None)))

    def hasNimType(self, chktype):
        for slot in self.nim_slots:
            if slot.isCompatible(chktype):
                return True
            for type in slot.getMultiTypeList().values():
                if chktype == type:
                    return True
        return False

    def getNimType(self, slotid):
        return self.nim_slots[slotid].type

    def getNimDescription(self, slotid):
        return self.nim_slots[slotid].friendly_full_description

    def getNimName(self, slotid):
        return self.nim_slots[slotid].description

    def getNimSlotInputName(self, slotid):
        # returns just "A", "B", ...
        return self.nim_slots[slotid].slot_input_name

    def getNim(self, slotid):
        return self.nim_slots[slotid]

    def getI2CDevice(self, slotid):
        return self.nim_slots[slotid].getI2C()

    def getNimListOfType(self, type, exception = -1):
        # returns a list of indexes for NIMs compatible to the given type, except for 'exception'
        list = []
        for x in self.nim_slots:
            if x.isCompatible(type) and x.slot != exception:
                list.append(x.slot)
        return list

    def __init__(self):
        sec = secClass.getInstance()
        global maxFixedLnbPositions
        maxFixedLnbPositions = sec.getMaxFixedLnbPositions()
        self.satList = [ ]
        self.cablesList = []
        self.terrestrialsList = []
        self.atscList = []
        self.enumerateNIMs()
        self.readTransponders()
        InitNimManager(self) #init config stuff

    # get a list with the friendly full description
    def nimList(self):
        list = [ ]
        for slot in self.nim_slots:
            list.append(slot.friendly_full_description)
        return list

    def getSlotCount(self):
        return len(self.nim_slots)

    def hasOutputs(self, slotid):
        return self.nim_slots[slotid].hasOutputs()

    def nimInternallyConnectableTo(self, slotid):
        return self.nim_slots[slotid].internallyConnectableTo()

    def nimRemoveInternalLink(self, slotid):
        self.nim_slots[slotid].removeInternalLink()

    def canConnectTo(self, slotid):
        slots = []
        if self.nim_slots[slotid].internallyConnectableTo() is not None:
            slots.append(self.nim_slots[slotid].internallyConnectableTo())
        for type in self.nim_slots[slotid].connectableTo():
            for slot in self.getNimListOfType(type, exception = slotid):
                if self.hasOutputs(slot):
                    slots.append(slot)
        # remove nims that have a connectedTo reference on
        for testnim in slots[:]:
            for nim in self.getNimListOfType("DVB-S", slotid):
                nimConfig = self.getNimConfig(nim)
                if nimConfig.content.items.has_key("configMode") and nimConfig.configMode.value == "loopthrough" and int(nimConfig.connectedTo.value) == testnim:
                    slots.remove(testnim)
                    break
        slots.sort()
        return slots

    def canEqualTo(self, slotid):
        type = self.getNimType(slotid)
        type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
        nimList = self.getNimListOfType(type, slotid)
        for nim in nimList[:]:
            mode = self.getNimConfig(nim)
            if mode.configMode.value == "loopthrough" or mode.configMode.value == "satposdepends":
                nimList.remove(nim)
        return nimList

    def canDependOn(self, slotid):
        type = self.getNimType(slotid)
        type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
        nimList = self.getNimListOfType(type, slotid)
        positionerList = []
        for nim in nimList[:]:
            mode = self.getNimConfig(nim)
            nimHaveRotor = mode.configMode.value == "simple" and mode.diseqcMode.value in ("positioner", "positioner_select")
            if not nimHaveRotor and mode.configMode.value == "advanced":
                for x in range(3601, 3607):
                    lnb = int(mode.advanced.sat[x].lnb.value)
                    if lnb != 0:
                        nimHaveRotor = True
                        break
                if not nimHaveRotor:
                    for sat in mode.advanced.sat.values():
                        lnb_num = int(sat.lnb.value)
                        diseqcmode = lnb_num and mode.advanced.lnb[lnb_num].diseqcMode.value or ""
                        if diseqcmode == "1_2":
                            nimHaveRotor = True
                            break
if nimHaveRotor: alreadyConnected = False for testnim in nimList: testmode = self.getNimConfig(testnim) if testmode.configMode.value == "satposdepends" and int(testmode.connectedTo.value) == int(nim): alreadyConnected = True break if not alreadyConnected: positionerList.append(nim) return positionerList def getNimConfig(self, slotid): return config.Nims[slotid] def getSatName(self, pos): for sat in self.satList: if sat[0] == pos: return sat[1] return _("N/A") def getSatList(self): return self.satList # returns True if something is configured to be connected to this nim # if slotid == -1, returns if something is connected to ANY nim def somethingConnected(self, slotid = -1): if slotid == -1: connected = False for id in range(self.getSlotCount()): if self.somethingConnected(id): connected = True return connected else: nim = config.Nims[slotid] configMode = nim.configMode.value if self.nim_slots[slotid].isCompatible("DVB-S") or self.nim_slots[slotid].isCompatible("DVB-T") or self.nim_slots[slotid].isCompatible("DVB-C"): return not (configMode == "nothing") def getSatListForNim(self, slotid): list = [] if self.nim_slots[slotid].isCompatible("DVB-S"): nim = config.Nims[slotid] #print "slotid:", slotid #print "self.satellites:", self.satList[config.Nims[slotid].diseqcA.index] #print "diseqcA:", config.Nims[slotid].diseqcA.value configMode = nim.configMode.value if configMode == "equal": slotid = int(nim.connectedTo.value) nim = config.Nims[slotid] configMode = nim.configMode.value elif configMode == "loopthrough": slotid = self.sec.getRoot(slotid, int(nim.connectedTo.value)) nim = config.Nims[slotid] configMode = nim.configMode.value if configMode == "simple": dm = nim.diseqcMode.value if dm in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): if nim.diseqcA.orbital_position < 3600: list.append(self.satList[nim.diseqcA.index - 2]) if dm in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): if nim.diseqcB.orbital_position < 3600: list.append(self.satList[nim.diseqcB.index - 2]) if dm == "diseqc_a_b_c_d": if nim.diseqcC.orbital_position < 3600: list.append(self.satList[nim.diseqcC.index - 2]) if nim.diseqcD.orbital_position < 3600: list.append(self.satList[nim.diseqcD.index - 2]) if dm == "positioner": for x in self.satList: list.append(x) if dm == "positioner_select": for x in self.satList: if str(x[0]) in nim.userSatellitesList.value: list.append(x) elif configMode == "advanced": for x in range(3601, 3605): if int(nim.advanced.sat[x].lnb.value) != 0: for x in self.satList: list.append(x) if not list: for x in self.satList: if int(nim.advanced.sat[x[0]].lnb.value) != 0: list.append(x) for x in range(3605, 3607): if int(nim.advanced.sat[x].lnb.value) != 0: for user_sat in self.satList: if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list: list.append(user_sat) return list def getRotorSatListForNim(self, slotid): list = [] if self.nim_slots[slotid].isCompatible("DVB-S"): nim = config.Nims[slotid] configMode = nim.configMode.value if configMode == "simple": if nim.diseqcMode.value == "positioner": for x in self.satList: list.append(x) elif nim.diseqcMode.value == "positioner_select": for x in self.satList: if str(x[0]) in nim.userSatellitesList.value: list.append(x) elif configMode == "advanced": for x in range(3601, 3605): if int(nim.advanced.sat[x].lnb.value) != 0: for x in self.satList: list.append(x) if not list: for x in self.satList: lnbnum = int(nim.advanced.sat[x[0]].lnb.value) if lnbnum != 0: lnb = nim.advanced.lnb[lnbnum] if 
lnb.diseqcMode.value == "1_2": list.append(x) for x in range(3605, 3607): if int(nim.advanced.sat[x].lnb.value) != 0: for user_sat in self.satList: if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list: list.append(user_sat) return list def InitSecParams(): config.sec = ConfigSubsection() x = ConfigInteger(default=25, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_CONT_TONE_DISABLE_BEFORE_DISEQC, configElement.value)) config.sec.delay_after_continuous_tone_disable_before_diseqc = x x = ConfigInteger(default=10, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_CONT_TONE_CHANGE, configElement.value)) config.sec.delay_after_final_continuous_tone_change = x x = ConfigInteger(default=10, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_VOLTAGE_CHANGE, configElement.value)) config.sec.delay_after_final_voltage_change = x x = ConfigInteger(default=120, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_DISEQC_REPEATS, configElement.value)) config.sec.delay_between_diseqc_repeats = x x = ConfigInteger(default=100, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_LAST_DISEQC_CMD, configElement.value)) config.sec.delay_after_last_diseqc_command = x x = ConfigInteger(default=50, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_TONEBURST, configElement.value)) config.sec.delay_after_toneburst = x x = ConfigInteger(default=75, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_SWITCH_CMDS, configElement.value)) config.sec.delay_after_change_voltage_before_switch_command = x x = ConfigInteger(default=200, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_SWITCH_CMDS, configElement.value)) config.sec.delay_after_enable_voltage_before_switch_command = x x = ConfigInteger(default=700, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_SWITCH_AND_MOTOR_CMD, configElement.value)) config.sec.delay_between_switch_and_motor_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MEASURE_IDLE_INPUTPOWER, configElement.value)) config.sec.delay_after_voltage_change_before_measure_idle_inputpower = x x = ConfigInteger(default=900, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_MOTOR_CMD, configElement.value)) config.sec.delay_after_enable_voltage_before_motor_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_MOTOR_STOP_CMD, configElement.value)) config.sec.delay_after_motor_stop_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MOTOR_CMD, configElement.value)) config.sec.delay_after_voltage_change_before_motor_command = x x = ConfigInteger(default=70, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BEFORE_SEQUENCE_REPEAT, configElement.value)) config.sec.delay_before_sequence_repeat = x x = 
ConfigInteger(default=360, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_RUNNING_TIMEOUT, configElement.value)) config.sec.motor_running_timeout = x x = ConfigInteger(default=1, limits = (0, 5)) x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_COMMAND_RETRIES, configElement.value)) config.sec.motor_command_retries = x x = ConfigInteger(default=50, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_RESET_CMD, configElement.value)) config.sec.delay_after_diseqc_reset_cmd = x x = ConfigInteger(default=150, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_PERIPHERIAL_POWERON_CMD, configElement.value)) config.sec.delay_after_diseqc_peripherial_poweron_cmd = x # TODO add support for satpos depending nims to advanced nim configuration # so a second/third/fourth cable from a motorized lnb can used behind a # diseqc 1.0 / diseqc 1.1 / toneburst switch # the C(++) part should can handle this # the configElement should be only visible when diseqc 1.2 is disabled jess_alias = ("JESS","UNICABLE2","SCD2","EN50607","EN 50607") lscr = ("scr1","scr2","scr3","scr4","scr5","scr6","scr7","scr8","scr9","scr10", "scr11","scr12","scr13","scr14","scr15","scr16","scr17","scr18","scr19","scr20", "scr21","scr22","scr23","scr24","scr25","scr26","scr27","scr28","scr29","scr30", "scr31","scr32") def InitNimManager(nimmgr): hw = HardwareInfo() addNimConfig = False try: config.Nims except: addNimConfig = True if addNimConfig: InitSecParams() config.Nims = ConfigSubList() for x in range(len(nimmgr.nim_slots)): config.Nims.append(ConfigSubsection()) lnb_choices = { "universal_lnb": _("Universal LNB"), "unicable": _("Unicable / JESS"), "c_band": _("C-Band"), "circular_lnb": _("Circular LNB"), "user_defined": _("User defined")} lnb_choices_default = "universal_lnb" unicablelnbproducts = {} unicablematrixproducts = {} file = open(eEnv.resolve("${datadir}/enigma2/unicable.xml"), 'r') doc = xml.etree.cElementTree.parse(file) file.close() root = doc.getroot() entry = root.find("lnb") for manufacturer in entry.getchildren(): m={} m_update = m.update for product in manufacturer.getchildren(): p={} #new dict empty for new product p_update = p.update scr=[] scr_append = scr.append scr_pop = scr.pop for i in range(len(lscr)): scr_append(product.get(lscr[i],"0")) for i in range(len(lscr)): if scr[len(lscr)-i-1] == "0": scr_pop() else: break; p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product diction = product.get("format","EN50494").upper() if diction in jess_alias: diction = "EN50607" else: diction = "EN50494" p_update({"diction":tuple([diction])}) #add diction to dict product positions=[] positions_append = positions.append positions_append(int(product.get("positions",1))) for cnt in range(positions[0]): lof=[] lof_append = lof.append lof_append(int(product.get("lofl",9750))) lof_append(int(product.get("lofh",10600))) lof_append(int(product.get("threshold",11700))) positions_append(tuple(lof)) p_update({"positions":tuple(positions)}) #add positons to dict product m_update({product.get("name"):p}) #add dict product to dict manufacturer unicablelnbproducts.update({manufacturer.get("name"):m}) entry = root.find("matrix") for manufacturer in entry.getchildren(): m={} m_update = m.update for product in manufacturer.getchildren(): p={} #new dict empty for new product p_update = p.update scr=[] scr_append = scr.append scr_pop = scr.pop for i in 
range(len(lscr)): scr_append(product.get(lscr[i],"0")) for i in range(len(lscr)): if scr[len(lscr)-i-1] == "0": scr_pop() else: break; p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product diction = product.get("format","EN50494").upper() if diction in jess_alias: diction = "EN50607" else: diction = "EN50494" p_update({"diction":tuple([diction])}) #add diction to dict product positions=[] positions_append = positions.append positions_append(int(product.get("positions",1))) for cnt in range(positions[0]): lof=[] lof_append = lof.append lof_append(int(product.get("lofl",9750))) lof_append(int(product.get("lofh",10600))) lof_append(int(product.get("threshold",11700))) positions_append(tuple(lof)) p_update({"positions":tuple(positions)}) #add positons to dict product m_update({product.get("name"):p}) #add dict product to dict manufacturer unicablematrixproducts.update({manufacturer.get("name"):m}) #add dict manufacturer to dict unicablematrixproducts UnicableLnbManufacturers = unicablelnbproducts.keys() UnicableLnbManufacturers.sort() UnicableMatrixManufacturers = unicablematrixproducts.keys() UnicableMatrixManufacturers.sort() unicable_choices = { "unicable_lnb": _("Unicable LNB"), "unicable_matrix": _("Unicable Matrix"), "unicable_user": "Unicable "+_("User defined")} unicable_choices_default = "unicable_lnb" advanced_lnb_satcr_user_choicesEN50494 = [("1", "SatCR 1"), ("2", "SatCR 2"), ("3", "SatCR 3"), ("4", "SatCR 4"), ("5", "SatCR 5"), ("6", "SatCR 6"), ("7", "SatCR 7"), ("8", "SatCR 8")] advanced_lnb_satcr_user_choicesEN50607 = [("1", "SatCR 1"), ("2", "SatCR 2"), ("3", "SatCR 3"), ("4", "SatCR 4"), ("5", "SatCR 5"), ("6", "SatCR 6"), ("7", "SatCR 7"), ("8", "SatCR 8"), ("9", "SatCR 9"), ("10", "SatCR 10"), ("11", "SatCR 11"), ("12", "SatCR 12"), ("13", "SatCR 13"), ("14", "SatCR 14"), ("15", "SatCR 15"), ("16", "SatCR 16"), ("17", "SatCR 17"), ("18", "SatCR 18"), ("19", "SatCR 19"), ("20", "SatCR 20"), ("21", "SatCR 21"), ("22", "SatCR 22"), ("23", "SatCR 23"), ("24", "SatCR 24"), ("25", "SatCR 25"), ("26", "SatCR 26"), ("27", "SatCR 27"), ("28", "SatCR 28"), ("29", "SatCR 29"), ("30", "SatCR 30"), ("31", "SatCR 31"), ("32", "SatCR 32")] advanced_lnb_diction_user_choices = [("EN50494", "Unicable(EN50494)"), ("EN50607", "JESS(EN50607)")] prio_list = [ ("-1", _("Auto")) ] for prio in range(65)+range(14000,14065)+range(19000,19065): description = "" if prio == 0: description = _(" (disabled)") elif 0 < prio < 65: description = _(" (lower than any auto)") elif 13999 < prio < 14066: description = _(" (higher than rotor any auto)") elif 18999 < prio < 19066: description = _(" (higher than any auto)") prio_list.append((str(prio), str(prio) + description)) advanced_lnb_csw_choices = [("none", _("None")), ("AA", _("Port A")), ("AB", _("Port B")), ("BA", _("Port C")), ("BB", _("Port D"))] advanced_lnb_ucsw_choices = [("0", _("None"))] + [(str(y), "Input " + str(y)) for y in range(1, 17)] diseqc_mode_choices = [ ("single", _("Single")), ("toneburst_a_b", _("Toneburst A/B")), ("diseqc_a_b", "DiSEqC A/B"), ("diseqc_a_b_c_d", "DiSEqC A/B/C/D"), ("positioner", _("Positioner")), ("positioner_select", _("Positioner (selecting satellites)"))] positioner_mode_choices = [("usals", _("USALS")), ("manual", _("manual"))] diseqc_satlist_choices = [(3600, _('automatic'), 1), (3601, _('nothing connected'), 1)] + nimmgr.satList longitude_orientation_choices = [("east", _("East")), ("west", _("West"))] latitude_orientation_choices = [("north", _("North")), ("south", _("South"))] 
turning_speed_choices = [("fast", _("Fast")), ("slow", _("Slow")), ("fast epoch", _("Fast epoch"))] advanced_satlist_choices = nimmgr.satList + [ (3601, _('All satellites 1 (USALS)'), 1), (3602, _('All satellites 2 (USALS)'), 1), (3603, _('All satellites 3 (USALS)'), 1), (3604, _('All satellites 4 (USALS)'), 1), (3605, _('Selecting satellites 1 (USALS)'), 1), (3606, _('Selecting satellites 2 (USALS)'), 1)] advanced_lnb_choices = [("0", _("not configured"))] + [(str(y), "LNB " + str(y)) for y in range(1, (maxFixedLnbPositions+1))] advanced_voltage_choices = [("polarization", _("Polarization")), ("13V", _("13 V")), ("18V", _("18 V"))] advanced_tonemode_choices = [("band", _("Band")), ("on", _("On")), ("off", _("Off"))] advanced_lnb_toneburst_choices = [("none", _("None")), ("A", _("A")), ("B", _("B"))] advanced_lnb_allsat_diseqcmode_choices = [("1_2", _("1.2"))] advanced_lnb_diseqcmode_choices = [("none", _("None")), ("1_0", _("1.0")), ("1_1", _("1.1")), ("1_2", _("1.2"))] advanced_lnb_commandOrder1_0_choices = [("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0")] advanced_lnb_commandOrder_choices = [ ("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0"), ("cut", "DiSEqC 1.0, DiSEqC 1.1, toneburst"), ("tcu", "toneburst, DiSEqC 1.0, DiSEqC 1.1"), ("uct", "DiSEqC 1.1, DiSEqC 1.0, toneburst"), ("tuc", "toneburst, DiSEqC 1.1, DiSEqC 1.0")] advanced_lnb_diseqc_repeat_choices = [("none", _("None")), ("one", _("One")), ("two", _("Two")), ("three", _("Three"))] advanced_lnb_fast_turning_btime = mktime(datetime(1970, 1, 1, 7, 0).timetuple()) advanced_lnb_fast_turning_etime = mktime(datetime(1970, 1, 1, 19, 0).timetuple()) def configLOFChanged(configElement): if configElement.value == "unicable": x = configElement.slot_id lnb = configElement.lnb_id nim = config.Nims[x] lnbs = nim.advanced.lnb section = lnbs[lnb] if isinstance(section.unicable, ConfigNothing): if lnb == 1 or lnb > maxFixedLnbPositions: section.unicable = ConfigSelection(unicable_choices, unicable_choices_default) # elif lnb == 2: else: section.unicable = ConfigSelection(choices = {"unicable_matrix": _("Unicable Matrix"),"unicable_user": "Unicable "+_("User defined")}, default = "unicable_matrix") # section.unicable = ConfigSelection(choices = {"unicable_user": _("User defined")}, default = "unicable_user") if 1==1: def fillUnicableConf(sectionDict, unicableproducts, vco_null_check): for manufacturer in unicableproducts: products = unicableproducts[manufacturer].keys() products.sort() products_valide = [] products_valide_append = products_valide.append tmp = ConfigSubsection() tmp.scr = ConfigSubDict() tmp.vco = ConfigSubDict() tmp.lofl = ConfigSubDict() tmp.lofh = ConfigSubDict() tmp.loft = ConfigSubDict() tmp.positions = ConfigSubDict() tmp.diction = ConfigSubDict() for article in products: positionslist = unicableproducts[manufacturer][article].get("positions") positions = int(positionslist[0]) dictionlist = [unicableproducts[manufacturer][article].get("diction")] if lnb <= positions or dictionlist[0][0] !="EN50607": tmp.positions[article] = ConfigSubList() tmp.positions[article].append(ConfigInteger(default=positions, limits = (positions, positions))) tmp.diction[article] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0]) scrlist = [] scrlist_append = scrlist.append vcolist=unicableproducts[manufacturer][article].get("frequencies") tmp.vco[article] = ConfigSubList() for cnt in range(1,len(vcolist)+1): vcofreq = int(vcolist[cnt-1]) if vcofreq == 0 and vco_null_check: scrlist_append(("%d" 
%cnt,"SCR %d " %cnt +_("not used"))) else: scrlist_append(("%d" %cnt,"SCR %d" %cnt)) tmp.vco[article].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq))) tmp.scr[article] = ConfigSelection(choices = scrlist, default = scrlist[0][0]) tmp.lofl[article] = ConfigSubList() tmp.lofh[article] = ConfigSubList() tmp.loft[article] = ConfigSubList() tmp_lofl_article_append = tmp.lofl[article].append tmp_lofh_article_append = tmp.lofh[article].append tmp_loft_article_append = tmp.loft[article].append for cnt in range(1,positions+1): lofl = int(positionslist[cnt][0]) lofh = int(positionslist[cnt][1]) loft = int(positionslist[cnt][2]) tmp_lofl_article_append(ConfigInteger(default=lofl, limits = (lofl, lofl))) tmp_lofh_article_append(ConfigInteger(default=lofh, limits = (lofh, lofh))) tmp_loft_article_append(ConfigInteger(default=loft, limits = (loft, loft))) products_valide_append(article) if len(products_valide)==0: products_valide_append("None") tmp.product = ConfigSelection(choices = products_valide, default = products_valide[0]) sectionDict[manufacturer] = tmp print "MATRIX" section.unicableMatrix = ConfigSubDict() section.unicableMatrixManufacturer = ConfigSelection(UnicableMatrixManufacturers, UnicableMatrixManufacturers[0]) fillUnicableConf(section.unicableMatrix, unicablematrixproducts, True) print "LNB" section.unicableLnb = ConfigSubDict() section.unicableLnbManufacturer = ConfigSelection(UnicableLnbManufacturers, UnicableLnbManufacturers[0]) fillUnicableConf(section.unicableLnb, unicablelnbproducts, False) #TODO satpositions for satcruser section.dictionuser = ConfigSelection(advanced_lnb_diction_user_choices, default="EN50494") section.satcruserEN50494 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50494, default="1") section.satcruserEN50607 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50607, default="1") tmp = ConfigSubList() tmp.append(ConfigInteger(default=1284, limits = (950, 2150))) tmp.append(ConfigInteger(default=1400, limits = (950, 2150))) tmp.append(ConfigInteger(default=1516, limits = (950, 2150))) tmp.append(ConfigInteger(default=1632, limits = (950, 2150))) tmp.append(ConfigInteger(default=1748, limits = (950, 2150))) tmp.append(ConfigInteger(default=1864, limits = (950, 2150))) tmp.append(ConfigInteger(default=1980, limits = (950, 2150))) tmp.append(ConfigInteger(default=2096, limits = (950, 2150))) section.satcrvcouserEN50494 = tmp tmp.append(ConfigInteger(default=1284, limits = (950, 2150))) tmp.append(ConfigInteger(default=1400, limits = (950, 2150))) tmp.append(ConfigInteger(default=1516, limits = (950, 2150))) tmp.append(ConfigInteger(default=1632, limits = (950, 2150))) tmp.append(ConfigInteger(default=1748, limits = (950, 2150))) tmp.append(ConfigInteger(default=1864, limits = (950, 2150))) tmp.append(ConfigInteger(default=1980, limits = (950, 2150))) tmp.append(ConfigInteger(default=2096, limits = (950, 2150))) tmp.append(ConfigInteger(default=1284, limits = (950, 2150))) tmp.append(ConfigInteger(default=1400, limits = (950, 2150))) tmp.append(ConfigInteger(default=1516, limits = (950, 2150))) tmp.append(ConfigInteger(default=1632, limits = (950, 2150))) tmp.append(ConfigInteger(default=1748, limits = (950, 2150))) tmp.append(ConfigInteger(default=1864, limits = (950, 2150))) tmp.append(ConfigInteger(default=1980, limits = (950, 2150))) tmp.append(ConfigInteger(default=2096, limits = (950, 2150))) tmp.append(ConfigInteger(default=1284, limits = (950, 2150))) tmp.append(ConfigInteger(default=1400, limits = (950, 2150))) 
tmp.append(ConfigInteger(default=1516, limits = (950, 2150))) tmp.append(ConfigInteger(default=1632, limits = (950, 2150))) tmp.append(ConfigInteger(default=1748, limits = (950, 2150))) tmp.append(ConfigInteger(default=1864, limits = (950, 2150))) tmp.append(ConfigInteger(default=1980, limits = (950, 2150))) tmp.append(ConfigInteger(default=2096, limits = (950, 2150))) section.satcrvcouserEN50607 = tmp nim.advanced.unicableconnected = ConfigYesNo(default=False) nim.advanced.unicableconnectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x]) def configDiSEqCModeChanged(configElement): section = configElement.section if configElement.value == "1_2" and isinstance(section.longitude, ConfigNothing): section.longitude = ConfigFloat(default = [5,100], limits = [(0,359),(0,999)]) section.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east") section.latitude = ConfigFloat(default = [50,767], limits = [(0,359),(0,999)]) section.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north") section.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)]) section.rotorPositions = ConfigInteger(default = 99, limits = [1,999]) section.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)]) section.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)]) section.powerMeasurement = ConfigYesNo(default=True) section.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm7025" and 50 or 15, limits=(0, 100)) section.turningSpeed = ConfigSelection(turning_speed_choices, "fast") section.fastTurningBegin = ConfigDateTime(default=advanced_lnb_fast_turning_btime, formatstring = _("%H:%M"), increment = 600) section.fastTurningEnd = ConfigDateTime(default=advanced_lnb_fast_turning_etime, formatstring = _("%H:%M"), increment = 600) def configLNBChanged(configElement): x = configElement.slot_id nim = config.Nims[x] if isinstance(configElement.value, tuple): lnb = int(configElement.value[0]) else: lnb = int(configElement.value) lnbs = nim.advanced.lnb if lnb and lnb not in lnbs: section = lnbs[lnb] = ConfigSubsection() section.lofl = ConfigInteger(default=9750, limits = (0, 99999)) section.lofh = ConfigInteger(default=10600, limits = (0, 99999)) section.threshold = ConfigInteger(default=11700, limits = (0, 99999)) section.increased_voltage = ConfigYesNo(False) section.toneburst = ConfigSelection(advanced_lnb_toneburst_choices, "none") section.longitude = ConfigNothing() if lnb > maxFixedLnbPositions: tmp = ConfigSelection(advanced_lnb_allsat_diseqcmode_choices, "1_2") tmp.section = section configDiSEqCModeChanged(tmp) else: tmp = ConfigSelection(advanced_lnb_diseqcmode_choices, "none") tmp.section = section tmp.addNotifier(configDiSEqCModeChanged) section.diseqcMode = tmp section.commitedDiseqcCommand = ConfigSelection(advanced_lnb_csw_choices) section.fastDiseqc = ConfigYesNo(False) section.sequenceRepeat = ConfigYesNo(False) section.commandOrder1_0 = ConfigSelection(advanced_lnb_commandOrder1_0_choices, "ct") section.commandOrder = ConfigSelection(advanced_lnb_commandOrder_choices, "ct") section.uncommittedDiseqcCommand = ConfigSelection(advanced_lnb_ucsw_choices) section.diseqcRepeats = ConfigSelection(advanced_lnb_diseqc_repeat_choices, "none") section.prio = ConfigSelection(prio_list, "-1") section.unicable = ConfigNothing() tmp = ConfigSelection(lnb_choices, lnb_choices_default) tmp.slot_id = x tmp.lnb_id = lnb tmp.addNotifier(configLOFChanged, 
initial_call = False) section.lof = tmp def configModeChanged(configMode): slot_id = configMode.slot_id nim = config.Nims[slot_id] if configMode.value == "advanced" and isinstance(nim.advanced, ConfigNothing): # advanced config: nim.advanced = ConfigSubsection() nim.advanced.sat = ConfigSubDict() nim.advanced.sats = getConfigSatlist(192, advanced_satlist_choices) nim.advanced.lnb = ConfigSubDict() nim.advanced.lnb[0] = ConfigNothing() for x in nimmgr.satList: tmp = ConfigSubsection() tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization") tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band") tmp.usals = ConfigYesNo(True) tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255)) lnb = ConfigSelection(advanced_lnb_choices, "0") lnb.slot_id = slot_id lnb.addNotifier(configLNBChanged, initial_call = False) tmp.lnb = lnb nim.advanced.sat[x[0]] = tmp for x in range(3601, 3607): tmp = ConfigSubsection() tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization") tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band") tmp.usals = ConfigYesNo(default=True) tmp.userSatellitesList = ConfigText('[]') tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255)) lnbnum = maxFixedLnbPositions + x - 3600 lnb = ConfigSelection([("0", _("not configured")), (str(lnbnum), "LNB %d"%(lnbnum))], "0") lnb.slot_id = slot_id lnb.addNotifier(configLNBChanged, initial_call = False) tmp.lnb = lnb nim.advanced.sat[x] = tmp def scpcSearchRangeChanged(configElement): fe_id = configElement.fe_id slot_id = configElement.slot_id name = nimmgr.nim_slots[slot_id].description if path.exists("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id): f = open("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id, "w") f.write(configElement.value) f.close() def toneAmplitudeChanged(configElement): fe_id = configElement.fe_id slot_id = configElement.slot_id if path.exists("/proc/stb/frontend/%d/tone_amplitude" % fe_id): f = open("/proc/stb/frontend/%d/tone_amplitude" % fe_id, "w") f.write(configElement.value) f.close() def connectedToChanged(slot_id, nimmgr, configElement): configMode = nimmgr.getNimConfig(slot_id).configMode if configMode.value == 'loopthrough': internally_connectable = nimmgr.nimInternallyConnectableTo(slot_id) dest_slot = configElement.value if internally_connectable is not None and int(internally_connectable) == int(dest_slot): configMode.choices.updateItemDescription(configMode.index, _("internally loopthrough to")) else: configMode.choices.updateItemDescription(configMode.index, _("externally loopthrough to")) def createSatConfig(nim, x, empty_slots): try: nim.toneAmplitude except: nim.toneAmplitude = ConfigSelection([("11", "340mV"), ("10", "360mV"), ("9", "600mV"), ("8", "700mV"), ("7", "800mV"), ("6", "900mV"), ("5", "1100mV")], "7") nim.toneAmplitude.fe_id = x - empty_slots nim.toneAmplitude.slot_id = x nim.toneAmplitude.addNotifier(toneAmplitudeChanged) nim.scpcSearchRange = ConfigSelection([("0", _("no")), ("1", _("yes"))], "0") nim.scpcSearchRange.fe_id = x - empty_slots nim.scpcSearchRange.slot_id = x nim.scpcSearchRange.addNotifier(scpcSearchRangeChanged) nim.diseqc13V = ConfigYesNo(False) nim.diseqcMode = ConfigSelection(diseqc_mode_choices, "single") nim.connectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x]) nim.simpleSingleSendDiSEqC = ConfigYesNo(False) nim.simpleDiSEqCSetVoltageTone = ConfigYesNo(True) nim.simpleDiSEqCOnlyOnSatChange = 
ConfigYesNo(False) nim.simpleDiSEqCSetCircularLNB = ConfigYesNo(True) nim.diseqcA = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcB = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcC = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcD = ConfigSatlist(list = diseqc_satlist_choices) nim.positionerMode = ConfigSelection(positioner_mode_choices, "usals") nim.userSatellitesList = ConfigText('[]') nim.pressOKtoList = ConfigNothing() nim.longitude = ConfigFloat(default=[5,100], limits=[(0,359),(0,999)]) nim.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east") nim.latitude = ConfigFloat(default=[50,767], limits=[(0,359),(0,999)]) nim.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north") nim.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)]) nim.rotorPositions = ConfigInteger(default = 99, limits = [1,999]) nim.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)]) nim.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)]) nim.powerMeasurement = ConfigYesNo(False) nim.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm8000" and 15 or 50, limits=(0, 100)) nim.turningSpeed = ConfigSelection(turning_speed_choices, "fast") btime = datetime(1970, 1, 1, 7, 0) nim.fastTurningBegin = ConfigDateTime(default = mktime(btime.timetuple()), formatstring = _("%H:%M"), increment = 900) etime = datetime(1970, 1, 1, 19, 0) nim.fastTurningEnd = ConfigDateTime(default = mktime(etime.timetuple()), formatstring = _("%H:%M"), increment = 900) def createCableConfig(nim, x): try: nim.cable except: list = [ ] n = 0 for x in nimmgr.cablesList: list.append((str(n), x[0])) n += 1 nim.cable = ConfigSubsection() nim.cable.scan_networkid = ConfigInteger(default = 0, limits = (0, 99999)) possible_scan_types = [("bands", _("Frequency bands")), ("steps", _("Frequency steps"))] if n: possible_scan_types.append(("provider", _("Provider"))) nim.cable.scan_provider = ConfigSelection(default = "0", choices = list) nim.cable.scan_type = ConfigSelection(default = "provider", choices = possible_scan_types) nim.cable.scan_band_EU_VHF_I = ConfigYesNo(default = True) nim.cable.scan_band_EU_MID = ConfigYesNo(default = True) nim.cable.scan_band_EU_VHF_III = ConfigYesNo(default = True) nim.cable.scan_band_EU_UHF_IV = ConfigYesNo(default = True) nim.cable.scan_band_EU_UHF_V = ConfigYesNo(default = True) nim.cable.scan_band_EU_SUPER = ConfigYesNo(default = True) nim.cable.scan_band_EU_HYPER = ConfigYesNo(default = True) nim.cable.scan_band_US_LOW = ConfigYesNo(default = False) nim.cable.scan_band_US_MID = ConfigYesNo(default = False) nim.cable.scan_band_US_HIGH = ConfigYesNo(default = False) nim.cable.scan_band_US_SUPER = ConfigYesNo(default = False) nim.cable.scan_band_US_HYPER = ConfigYesNo(default = False) nim.cable.scan_frequency_steps = ConfigInteger(default = 1000, limits = (1000, 10000)) nim.cable.scan_mod_qam16 = ConfigYesNo(default = False) nim.cable.scan_mod_qam32 = ConfigYesNo(default = False) nim.cable.scan_mod_qam64 = ConfigYesNo(default = True) nim.cable.scan_mod_qam128 = ConfigYesNo(default = False) nim.cable.scan_mod_qam256 = ConfigYesNo(default = True) nim.cable.scan_sr_6900 = ConfigYesNo(default = True) nim.cable.scan_sr_6875 = ConfigYesNo(default = True) nim.cable.scan_sr_ext1 = ConfigInteger(default = 0, limits = (0, 7230)) nim.cable.scan_sr_ext2 = ConfigInteger(default = 0, limits = (0, 7230)) def createTerrestrialConfig(nim, x): try: nim.terrestrial except: list = [] n = 0 for x in 
nimmgr.terrestrialsList: list.append((str(n), x[0])) n += 1 nim.terrestrial = ConfigSelection(choices = list) nim.terrestrial_5V = ConfigOnOff() empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] if slot.isCompatible("DVB-S"): createSatConfig(nim, x, empty_slots) config_mode_choices = [("nothing", _("nothing connected")), ("simple", _("simple")), ("advanced", _("advanced"))] if len(nimmgr.getNimListOfType(slot.type, exception = x)) > 0: config_mode_choices.append(("equal", _("equal to"))) config_mode_choices.append(("satposdepends", _("second cable of motorized LNB"))) if len(nimmgr.canConnectTo(x)) > 0: config_mode_choices.append(("loopthrough", _("loopthrough to"))) nim.advanced = ConfigNothing() tmp = ConfigSelection(config_mode_choices, "simple") tmp.slot_id = x tmp.addNotifier(configModeChanged, initial_call = False) nim.configMode = tmp nim.configMode.connectedToChanged = boundFunction(connectedToChanged, x, nimmgr) nim.connectedTo.addNotifier(boundFunction(connectedToChanged, x, nimmgr), initial_call = False) elif slot.isCompatible("DVB-C"): nim.configMode = ConfigSelection( choices = { "enabled": _("enabled"), "nothing": _("nothing connected"), }, default = "enabled") createCableConfig(nim, x) elif slot.isCompatible("DVB-T"): nim.configMode = ConfigSelection( choices = { "enabled": _("enabled"), "nothing": _("nothing connected"), }, default = "enabled") createTerrestrialConfig(nim, x) else: empty_slots += 1 nim.configMode = ConfigSelection(choices = { "nothing": _("disabled") }, default="nothing") if slot.type is not None: print "pls add support for this frontend type!", slot.type nimmgr.sec = SecConfigure(nimmgr) def tunerTypeChanged(nimmgr, configElement): fe_id = configElement.fe_id eDVBResourceManager.getInstance().setFrontendType(nimmgr.nim_slots[fe_id].frontend_id, nimmgr.nim_slots[fe_id].getType()) if path.exists("/proc/stb/frontend/%d/mode" % fe_id): cur_type = int(open("/proc/stb/frontend/%d/mode" % fe_id, "r").read()) if cur_type != int(configElement.value): print "tunerTypeChanged feid %d from %d to mode %d" % (fe_id, cur_type, int(configElement.value)) try: oldvalue = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "r").readline() f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w") f.write("0") f.close() except: print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available" frontend = eDVBResourceManager.getInstance().allocateRawChannel(fe_id).getFrontend() frontend.closeFrontend() f = open("/proc/stb/frontend/%d/mode" % fe_id, "w") f.write(configElement.value) f.close() frontend.reopenFrontend() try: f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w") f.write(oldvalue) f.close() except: print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available" nimmgr.enumerateNIMs() else: print "tuner type is already already %d" %cur_type empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] addMultiType = False try: nim.multiType except: if slot.description.find("Sundtek SkyTV Ultimate III") > -1: print"[NimManager] Sundtek SkyTV Ultimate III detected, multiType = False" addMultiType = False else: addMultiType = True if slot.isMultiType() and addMultiType: typeList = [] for id in slot.getMultiTypeList().keys(): type = slot.getMultiTypeList()[id] typeList.append((id, type)) nim.multiType = ConfigSelection(typeList, "0") nim.multiType.fe_id = x - empty_slots nim.multiType.addNotifier(boundFunction(tunerTypeChanged, nimmgr)) print"[NimManager] 
slotname = %s, slotdescription = %s, multitype = %s" % (slot.input_name, slot.description,(slot.isMultiType() and addMultiType)) empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] empty = True if slot.canBeCompatible("DVB-S"): createSatConfig(nim, x, empty_slots) empty = False if slot.canBeCompatible("DVB-C"): createCableConfig(nim, x) empty = False if slot.canBeCompatible("DVB-T"): createTerrestrialConfig(nim, x) empty = False if empty: empty_slots += 1 nimmanager = NimManager()
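# The enumerateNIMs() comment above documents the /proc/bus/nim_sockets
# layout. A stripped-down sketch of that parsing idea (the sample input and
# the reduced field set are illustrative; the real code above also handles
# Mode lines, I2C_Device, Has_Outputs, Input_Name, etc.):
def parse_nim_sockets(text):
	"""Return {slot: {'type': ..., 'name': ...}} from nim_sockets text."""
	entries = {}
	current = None
	for line in text.splitlines():
		line = line.strip()
		if line.startswith("NIM Socket"):
			current = int(line.split(" ")[2][:-1])  # "NIM Socket 0:" -> 0
			entries[current] = {}
		elif line.startswith("Type:"):
			entries[current]["type"] = line[6:]
		elif line.startswith("Name:"):
			entries[current]["name"] = line[6:]
	return entries

# parse_nim_sockets("NIM Socket 0:\n\tType: DVB-S\n\tName: BCM4501 DVB-S2 NIM (internal)\n")
# -> {0: {'type': 'DVB-S', 'name': 'BCM4501 DVB-S2 NIM (internal)'}}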
gpl-2.0
codingisacopingstrategy/aa.core
aacore/settings.py
1
1923
# This file is part of Active Archives.
# Copyright 2006-2011 the Active Archives contributors (see AUTHORS)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Also add information on how to contact you by electronic and paper mail.


from django.conf import settings
import os.path


EXIFTOOL = getattr(settings, 'AA_EXIFTOOL', 'exiftool')
FFMPEG = getattr(settings, 'AA_FFMPEG', 'ffmpeg')
IDENTIFY = getattr(settings, 'AA_IDENTIFY', 'identify')
CONVERT = getattr(settings, 'AA_CONVERT', 'convert')

USER_AGENT = getattr(settings, 'AA_USER_AGENT',
                     "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1) Gecko/20090624 Firefox/3.5")

DEFAULT_REL_NAMESPACE = getattr(settings, 'AA_DEFAULT_REL_NAMESPACE', "aa")

RDF_STORAGE_NAME = getattr(settings, 'AA_RDF_STORAGE_NAME', "aa")
# FIXME: Change this setting to an absolute path as it throws a redland error
# on production
RDF_STORAGE_DIR = getattr(settings, 'AA_RDF_STORAGE_DIR', ".")

# List of models that are indexed by the RDF Store
INDEXED_MODELS = getattr(settings, 'AA_INDEXED_MODELS', ("aacore.models.Resource",))

RESOURCE_DELEGATES = getattr(settings, 'AA_RESOURCE_DELEGATES', ())

CACHE_DIR = getattr(settings, 'AA_CACHE_DIR', os.path.join(settings.MEDIA_ROOT, "cache"))
CACHE_URL = getattr(settings, 'AA_CACHE_URL', os.path.join(settings.MEDIA_URL, "cache"))
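# Usage note: every value above is read with getattr(settings, 'AA_<NAME>',
# default), so the embedding Django project overrides a setting simply by
# defining it in its own settings.py. Illustrative overrides (the concrete
# values and the extra model are assumptions, not shipped defaults):
#
#     AA_EXIFTOOL = '/usr/local/bin/exiftool'
#     AA_RDF_STORAGE_DIR = '/var/lib/aa/rdf'     # absolute path; see FIXME above
#     AA_INDEXED_MODELS = (
#         "aacore.models.Resource",
#         "myapp.models.Annotation",             # hypothetical extra model
#     )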
agpl-3.0
shepdl/stream-daemon
twitter_local/stream.py
1
3651
try:
    import urllib.request as urllib_request
    import urllib.error as urllib_error
    import io
except ImportError:
    import urllib2 as urllib_request
    import urllib2 as urllib_error

import simplejson as json
from ssl import SSLError
import socket

from .api import TwitterCall, wrap_response

import sys


class TwitterJSONIter(object):

    def __init__(self, handle, uri, arg_data, block=True):
        self.decoder = json.JSONDecoder()
        self.handle = handle
        self.buf = b""
        self.block = block

    def __iter__(self):
        sock = self.handle.fp._sock.fp._sock
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        if not self.block:
            sock.setblocking(False)
        while True:
            utf8_buf = self.buf.decode('utf8').lstrip()
            pos = utf8_buf.find("}\r\n{") + 1
            if pos:
                to_yield = utf8_buf[0:pos]
                self.buf = utf8_buf[pos:].encode('utf8')
                if to_yield != "":
                    yield wrap_response(json.loads(to_yield), self.handle.headers)
                continue
            else:
                if self.block:
                    pass
                else:
                    yield None
#            except urllib_error.HTTPError as e:
#                raise TwitterHTTPError(e, uri, self.format, arg_data)
            # this is a non-blocking read (ie, it will return if any data is
            # available)
            try:
                self.buf += sock.recv(1024)
            except SSLError as e:
                if (not self.block) and (e.errno == 2):
                    # Apparently this means there was nothing in the socket buf
                    pass
                else:
                    raise


def handle_stream_response(req, uri, arg_data, block):
    handle = urllib_request.urlopen(req)
    return iter(TwitterJSONIter(handle, uri, arg_data, block))


class TwitterStreamCall(TwitterCall):
    def _handle_response(self, req, uri, arg_data, _timeout=None):
        return handle_stream_response(req, uri, arg_data, block=True)


class TwitterStreamCallNonBlocking(TwitterCall):
    def _handle_response(self, req, uri, arg_data, _timeout=None):
        return handle_stream_response(req, uri, arg_data, block=False)


class TwitterStream(TwitterStreamCall):
    """
    The TwitterStream object is an interface to the Twitter Stream API
    (stream.twitter.com). This can be used pretty much the same as the
    Twitter class except the result of calling a method will be an
    iterator that yields objects decoded from the stream. For
    example::

        twitter_stream = TwitterStream(auth=UserPassAuth('joe', 'joespassword'))
        iterator = twitter_stream.statuses.sample()

        for tweet in iterator:
            ...do something with this tweet...

    The iterator will yield tweets forever and ever (until the stream
    breaks at which point it raises a TwitterHTTPError.)

    The `block` parameter controls if the stream is blocking. Default
    is blocking (True). When set to False, the iterator will
    occasionally yield None when there is no available message.
    """
    def __init__(
            self, domain="stream.twitter.com", secure=True, auth=None,
            api_version='1', block=True):
        uriparts = ()
        uriparts += (str(api_version),)

        if block:
            call_cls = TwitterStreamCall
        else:
            call_cls = TwitterStreamCallNonBlocking

        TwitterStreamCall.__init__(
            self, auth=auth, format="json", domain=domain,
            callable_cls=call_cls,
            secure=secure, uriparts=uriparts)
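# A self-contained sketch of the framing rule TwitterJSONIter relies on:
# the streaming API delimits JSON objects with "\r\n", so scanning the
# buffered text for "}\r\n{" and cutting just past the closing brace yields
# one complete object per pass. The sample bytes are illustrative only.
if __name__ == "__main__":
    buf = b'{"a": 1}\r\n{"b": 2}\r\n{"c": 3}'
    text = buf.decode('utf8').lstrip()
    decoded = []
    while True:
        pos = text.find("}\r\n{") + 1
        if not pos:
            break                      # no complete frame left in the buffer
        decoded.append(json.loads(text[:pos]))
        text = text[pos:]              # keep the unconsumed tail
    print(decoded)  # [{'a': 1}, {'b': 2}] -- {"c": 3} waits for more data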
mit
mccarrmb/moztrap
scripts/update/update.py
3
1527
""" Deployment for moztrap Requires commander (https://github.com/oremj/commander) which is installed on the systems that need it. """ import os import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) from commander.deploy import task, hostgroups import commander_settings as settings @task def update_code(ctx, tag): with ctx.lcd(settings.SRC_DIR): ctx.local("git fetch") ctx.local("git pull origin %s" % tag) ctx.local("git submodule sync") ctx.local("git submodule update --init --recursive") ctx.local("find . -type f -name '.gitignore' -or -name '*.pyc' -delete") ctx.local("git rev-parse HEAD > media/revision.txt") @task def update_assets(ctx): with ctx.lcd(settings.SRC_DIR): ctx.local("LANG=en_US.UTF-8 python2.6 vendor-manage.py collectstatic --noinput") ctx.local("LANG=en_US.UTF-8 python2.6 vendor-manage.py compress") @task def database(ctx): with ctx.lcd(settings.SRC_DIR): ctx.local("python2.6 vendor-manage.py syncdb --migrate") @task def checkin_changes(ctx): ctx.local(settings.DEPLOY_SCRIPT) @hostgroups(settings.WEB_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY}) def deploy_app(ctx): ctx.remote(settings.REMOTE_UPDATE_SCRIPT) ctx.remote("/bin/touch %s" % settings.REMOTE_WSGI) @task def pre_update(ctx, ref=settings.UPDATE_REF): update_code(ref) @task def update(ctx): update_assets() database() @task def deploy(ctx): checkin_changes() deploy_app()
bsd-2-clause
supamii/QttpServer
lib/gmock/scripts/generator/cpp/tokenize.py
679
9703
#!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tokenize C++ source code."""

__author__ = '[email protected] (Neal Norwitz)'


try:
    # Python 3.x
    import builtins
except ImportError:
    # Python 2.x
    import __builtin__ as builtins


import sys

from cpp import utils


if not hasattr(builtins, 'set'):
    # Nominal support for Python 2.3.
    from sets import Set as set


# Add $ as a valid identifier char since so much code uses it.
_letters = 'abcdefghijklmnopqrstuvwxyz'
VALID_IDENTIFIER_CHARS = set(_letters + _letters.upper() + '_0123456789$')
HEX_DIGITS = set('0123456789abcdefABCDEF')
INT_OR_FLOAT_DIGITS = set('01234567890eE-+')


# C++0x string prefixes.
_STR_PREFIXES = set(('R', 'u8', 'u8R', 'u', 'uR', 'U', 'UR', 'L', 'LR'))


# Token types.
UNKNOWN = 'UNKNOWN'
SYNTAX = 'SYNTAX'
CONSTANT = 'CONSTANT'
NAME = 'NAME'
PREPROCESSOR = 'PREPROCESSOR'

# Where the token originated from.  This can be used for backtracking.
# It is always set to WHENCE_STREAM in this code.
WHENCE_STREAM, WHENCE_QUEUE = range(2)


class Token(object):
    """Data container to represent a C++ token.

    Tokens can be identifiers, syntax char(s), constants, or
    pre-processor directives.

    start contains the index of the first char of the token in the source
    end contains the index of the last char of the token in the source
    """

    def __init__(self, token_type, name, start, end):
        self.token_type = token_type
        self.name = name
        self.start = start
        self.end = end
        self.whence = WHENCE_STREAM

    def __str__(self):
        if not utils.DEBUG:
            return 'Token(%r)' % self.name
        return 'Token(%r, %s, %s)' % (self.name, self.start, self.end)

    __repr__ = __str__


def _GetString(source, start, i):
    i = source.find('"', i+1)
    while source[i-1] == '\\':
        # Count the trailing backslashes.
        backslash_count = 1
        j = i - 2
        while source[j] == '\\':
            backslash_count += 1
            j -= 1
        # When trailing backslashes are even, they escape each other.
        if (backslash_count % 2) == 0:
            break
        i = source.find('"', i+1)
    return i + 1


def _GetChar(source, start, i):
    # NOTE(nnorwitz): may not be quite correct, should be good enough.
    i = source.find("'", i+1)
    while source[i-1] == '\\':
        # Need to special case '\\'.
        if (i - 2) > start and source[i-2] == '\\':
            break
        i = source.find("'", i+1)
    # Try to handle unterminated single quotes (in a #if 0 block).
    if i < 0:
        i = start
    return i + 1


def GetTokens(source):
    """Returns a sequence of Tokens.

    Args:
      source: string of C++ source code.

    Yields:
      Token that represents the next token in the source.
    """
    # Cache various valid character sets for speed.
    valid_identifier_chars = VALID_IDENTIFIER_CHARS
    hex_digits = HEX_DIGITS
    int_or_float_digits = INT_OR_FLOAT_DIGITS
    int_or_float_digits2 = int_or_float_digits | set('.')

    # Only ignore errors while in a #if 0 block.
    ignore_errors = False
    count_ifs = 0

    i = 0
    end = len(source)
    while i < end:
        # Skip whitespace.
        while i < end and source[i].isspace():
            i += 1
        if i >= end:
            return

        token_type = UNKNOWN
        start = i
        c = source[i]
        if c.isalpha() or c == '_':              # Find a string token.
            token_type = NAME
            while source[i] in valid_identifier_chars:
                i += 1
            # String and character constants can look like a name if
            # they are something like L"".
            if (source[i] == "'" and (i - start) == 1 and
                source[start:i] in 'uUL'):
                # u, U, and L are valid C++0x character prefixes.
                token_type = CONSTANT
                i = _GetChar(source, start, i)
            elif source[i] == "'" and source[start:i] in _STR_PREFIXES:
                token_type = CONSTANT
                i = _GetString(source, start, i)
        elif c == '/' and source[i+1] == '/':    # Find // comments.
            i = source.find('\n', i)
            if i == -1:  # Handle EOF.
                i = end
            continue
        elif c == '/' and source[i+1] == '*':    # Find /* comments. */
            i = source.find('*/', i) + 2
            continue
        elif c in ':+-<>&|*=':                   # : or :: (plus other chars).
            token_type = SYNTAX
            i += 1
            new_ch = source[i]
            if new_ch == c:
                i += 1
            elif c == '-' and new_ch == '>':
                i += 1
            elif new_ch == '=':
                i += 1
        elif c in '()[]{}~!?^%;/.,':             # Handle single char tokens.
            token_type = SYNTAX
            i += 1
            if c == '.' and source[i].isdigit():
                token_type = CONSTANT
                i += 1
                while source[i] in int_or_float_digits:
                    i += 1
                # Handle float suffixes.
                for suffix in ('l', 'f'):
                    if suffix == source[i:i+1].lower():
                        i += 1
                        break
        elif c.isdigit():                        # Find integer.
            token_type = CONSTANT
            if c == '0' and source[i+1] in 'xX':
                # Handle hex digits.
                i += 2
                while source[i] in hex_digits:
                    i += 1
            else:
                while source[i] in int_or_float_digits2:
                    i += 1
            # Handle integer (and float) suffixes.
            for suffix in ('ull', 'll', 'ul', 'l', 'f', 'u'):
                size = len(suffix)
                if suffix == source[i:i+size].lower():
                    i += size
                    break
        elif c == '"':                           # Find string.
            token_type = CONSTANT
            i = _GetString(source, start, i)
        elif c == "'":                           # Find char.
            token_type = CONSTANT
            i = _GetChar(source, start, i)
        elif c == '#':                           # Find pre-processor command.
            token_type = PREPROCESSOR
            got_if = source[i:i+3] == '#if' and source[i+3:i+4].isspace()
            if got_if:
                count_ifs += 1
            elif source[i:i+6] == '#endif':
                count_ifs -= 1
                if count_ifs == 0:
                    ignore_errors = False

            # TODO(nnorwitz): handle preprocessor statements (\ continuations).
            while 1:
                i1 = source.find('\n', i)
                i2 = source.find('//', i)
                i3 = source.find('/*', i)
                i4 = source.find('"', i)
                # NOTE(nnorwitz): doesn't handle comments in #define macros.
                # Get the first important symbol (newline, comment, EOF/end).
                i = min([x for x in (i1, i2, i3, i4, end) if x != -1])

                # Handle #include "dir//foo.h" properly.
                if source[i] == '"':
                    i = source.find('"', i+1) + 1
                    assert i > 0
                    continue
                # Keep going if end of the line and the line ends with \.
                if not (i == i1 and source[i-1] == '\\'):
                    if got_if:
                        condition = source[start+4:i].lstrip()
                        if (condition.startswith('0') or
                            condition.startswith('(0)')):
                            ignore_errors = True
                    break
                i += 1
        elif c == '\\':                          # Handle \ in code.
            # This is different from the pre-processor \ handling.
            i += 1
            continue
        elif ignore_errors:
            # The tokenizer seems to be in pretty good shape.  This
            # raise is conditionally disabled so that bogus code
            # in an #if 0 block can be handled.  Since we will ignore
            # it anyways, this is probably fine.  So disable the
            # exception and return the bogus char.
            i += 1
        else:
            sys.stderr.write('Got invalid token in %s @ %d token:%s: %r\n' %
                             ('?', i, c, source[i-10:i+10]))
            raise RuntimeError('unexpected token')

        if i <= 0:
            print('Invalid index, exiting now.')
            return
        yield Token(token_type, source[start:i], start, i)


if __name__ == '__main__':
    def main(argv):
        """Driver mostly for testing purposes."""
        for filename in argv[1:]:
            source = utils.ReadFile(filename)
            if source is None:
                continue

            for token in GetTokens(source):
                print('%-12s: %s' % (token.token_type, token.name))
                # print('\r%6.2f%%' % (100.0 * index / token.end),)
            sys.stdout.write('\n')

    main(sys.argv)
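# A minimal usage sketch for GetTokens (hypothetical helper, mirroring the
# main() driver above; the C++ snippet and the listed token kinds are
# illustrative):
def _example():  # not invoked on import
    src = 'int x = 0x1f; // trailing comment\n'
    for token in GetTokens(src):
        print('%-12s: %s' % (token.token_type, token.name))
    # Emits: NAME('int'), NAME('x'), SYNTAX('='), CONSTANT('0x1f'),
    # SYNTAX(';'); the // comment is skipped entirely.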
mit
qyx210an/kernel
tools/perf/scripts/python/failed-syscalls-by-pid.py
11180
2058
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import *

usage = "perf script -s failed-syscalls-by-pid.py [comm|pid]\n";

for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]

syscalls = autodict()

def trace_begin():
    print "Press control+C to stop and show the summary"

def trace_end():
    print_error_totals()

def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid):
        return

    if ret < 0:
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            syscalls[common_comm][common_pid][id][ret] = 1

def print_error_totals():
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",

    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print "  syscall: %-16s\n" % syscall_name(id),
                ret_keys = syscalls[comm][pid][id].keys()
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(),
                                       key = lambda(k, v): (v, k), reverse = True):
                    print "    err = %-20s %10d\n" % (strerror(ret), val),
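# A typical session might look like the following (the record step is assumed
# from perf's matching *-record helper scripts; adjust duration and scope):
#
#   perf record -e raw_syscalls:sys_exit -a sleep 10
#   perf script -s failed-syscalls-by-pid.py
#
# or limit the report to a single command or pid:
#
#   perf script -s failed-syscalls-by-pid.py firefox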
gpl-2.0
Kast0rTr0y/ansible
lib/ansible/modules/cloud/misc/ovirt.py
17
17885
#!/usr/bin/python # (c) 2013, Vincent Van der Kussen <vincent at vanderkussen.org> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: ovirt author: "Vincent Van der Kussen (@vincentvdk)" short_description: oVirt/RHEV platform management description: - allows you to create new instances, either from scratch or an image, in addition to deleting or stopping instances on the oVirt/RHEV platform version_added: "1.4" options: user: description: - the user to authenticate with default: null required: true aliases: [] url: description: - the url of the oVirt instance default: null required: true aliases: [] instance_name: description: - the name of the instance to use default: null required: true aliases: [ vmname ] password: description: - password of the user to authenticate with default: null required: true aliases: [] image: description: - template to use for the instance default: null required: false aliases: [] resource_type: description: - whether you want to deploy an image or create an instance from scratch. default: null required: false aliases: [] choices: [ 'new', 'template' ] zone: description: - deploy the image to this oVirt cluster default: null required: false aliases: [] instance_disksize: description: - size of the instance's disk in GB default: null required: false aliases: [ vm_disksize] instance_cpus: description: - the instance's number of cpu's default: 1 required: false aliases: [ vmcpus ] instance_nic: description: - name of the network interface in oVirt/RHEV default: null required: false aliases: [ vmnic ] instance_network: description: - the logical network the machine should belong to default: rhevm required: false aliases: [ vmnetwork ] instance_mem: description: - the instance's amount of memory in MB default: null required: false aliases: [ vmmem ] instance_type: description: - define if the instance is a server or desktop default: server required: false aliases: [ vmtype ] choices: [ 'server', 'desktop' ] disk_alloc: description: - define if disk is thin or preallocated default: thin required: false aliases: [] choices: [ 'thin', 'preallocated' ] disk_int: description: - interface type of the disk default: virtio required: false aliases: [] choices: [ 'virtio', 'ide' ] instance_os: description: - type of Operating System default: null required: false aliases: [ vmos ] instance_cores: description: - define the instance's number of cores default: 1 required: false aliases: [ vmcores ] sdomain: description: - the Storage Domain where you want to create the instance's disk on. 
default: null required: false aliases: [] region: description: - the oVirt/RHEV datacenter where you want to deploy to default: null required: false aliases: [] instance_dns: description: - define the instance's Primary DNS server required: false aliases: [ dns ] version_added: "2.1" instance_domain: description: - define the instance's Domain required: false aliases: [ domain ] version_added: "2.1" instance_hostname: description: - define the instance's Hostname required: false aliases: [ hostname ] version_added: "2.1" instance_ip: description: - define the instance's IP required: false aliases: [ ip ] version_added: "2.1" instance_netmask: description: - define the instance's Netmask required: false aliases: [ netmask ] version_added: "2.1" instance_rootpw: description: - define the instance's Root password required: false aliases: [ rootpw ] version_added: "2.1" instance_key: description: - define the instance's Authorized key required: false aliases: [ key ] version_added: "2.1" state: description: - create, terminate or remove instances default: 'present' required: false aliases: [] choices: ['present', 'absent', 'shutdown', 'started', 'restarted'] requirements: - "python >= 2.6" - "ovirt-engine-sdk-python" ''' EXAMPLES = ''' # Basic example provisioning from image. ovirt: user: admin@internal url: https://ovirt.example.com instance_name: ansiblevm04 password: secret image: centos_64 zone: cluster01 resource_type: template" # Full example to create new instance from scratch ovirt: instance_name: testansible resource_type: new instance_type: server user: admin@internal password: secret url: https://ovirt.example.com instance_disksize: 10 zone: cluster01 region: datacenter1 instance_cpus: 1 instance_nic: nic1 instance_network: rhevm instance_mem: 1000 disk_alloc: thin sdomain: FIBER01 instance_cores: 1 instance_os: rhel_6x64 disk_int: virtio" # stopping an instance ovirt: instance_name: testansible state: stopped user: admin@internal password: secret url: https://ovirt.example.com # starting an instance ovirt: instance_name: testansible state: started user: admin@internal password: secret url: https://ovirt.example.com # starting an instance with cloud init information ovirt: instance_name: testansible state: started user: admin@internal password: secret url: https://ovirt.example.com hostname: testansible domain: ansible.local ip: 192.0.2.100 netmask: 255.255.255.0 gateway: 192.0.2.1 rootpw: bigsecret ''' try: from ovirtsdk.api import API from ovirtsdk.xml import params HAS_OVIRTSDK = True except ImportError: HAS_OVIRTSDK = False # ------------------------------------------------------------------- # # create connection with API # def conn(url, user, password): api = API(url=url, username=user, password=password, insecure=True) try: value = api.test() except: raise Exception("error connecting to the oVirt API") return api # ------------------------------------------------------------------- # # Create VM from scratch def create_vm(conn, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int): if vmdisk_alloc == 'thin': # define VM params vmparams = params.VM(name=vmname,cluster=conn.clusters.get(name=zone),os=params.OperatingSystem(type_=vmos),template=conn.templates.get(name="Blank"),memory=1024 * 1024 * int(vmmem),cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))), type_=vmtype) # define disk params vmdisk= params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=True, 
interface=vmdisk_int, type_="System", format='cow', storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)])) # define network parameters network_net = params.Network(name=vmnetwork) nic_net1 = params.NIC(name='nic1', network=network_net, interface='virtio') elif vmdisk_alloc == 'preallocated': # define VM params vmparams = params.VM(name=vmname,cluster=conn.clusters.get(name=zone),os=params.OperatingSystem(type_=vmos),template=conn.templates.get(name="Blank"),memory=1024 * 1024 * int(vmmem),cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))) ,type_=vmtype) # define disk params vmdisk= params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=False, interface=vmdisk_int, type_="System", format='raw', storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)])) # define network parameters network_net = params.Network(name=vmnetwork) nic_net1 = params.NIC(name=vmnic, network=network_net, interface='virtio') try: conn.vms.add(vmparams) except: raise Exception("Error creating VM with specified parameters") vm = conn.vms.get(name=vmname) try: vm.disks.add(vmdisk) except: raise Exception("Error attaching disk") try: vm.nics.add(nic_net1) except: raise Exception("Error adding nic") # create an instance from a template def create_vm_template(conn, vmname, image, zone): vmparams = params.VM(name=vmname, cluster=conn.clusters.get(name=zone), template=conn.templates.get(name=image),disks=params.Disks(clone=True)) try: conn.vms.add(vmparams) except: raise Exception('error adding template %s' % image) # start instance def vm_start(conn, vmname, hostname=None, ip=None, netmask=None, gateway=None, domain=None, dns=None, rootpw=None, key=None): vm = conn.vms.get(name=vmname) use_cloud_init = False nics = None nic = None if hostname or ip or netmask or gateway or domain or dns or rootpw or key: use_cloud_init = True if ip and netmask and gateway: ipinfo = params.IP(address=ip, netmask=netmask, gateway=gateway) nic = params.GuestNicConfiguration(name='eth0', boot_protocol='STATIC', ip=ipinfo, on_boot=True) nics = params.Nics() nics = params.GuestNicsConfiguration(nic_configuration=[nic]) initialization=params.Initialization(regenerate_ssh_keys=True, host_name=hostname, domain=domain, user_name='root', root_password=rootpw, nic_configurations=nics, dns_servers=dns, authorized_ssh_keys=key) action = params.Action(use_cloud_init=use_cloud_init, vm=params.VM(initialization=initialization)) vm.start(action=action) # Stop instance def vm_stop(conn, vmname): vm = conn.vms.get(name=vmname) vm.stop() # restart instance def vm_restart(conn, vmname): state = vm_status(conn, vmname) vm = conn.vms.get(name=vmname) vm.stop() while conn.vms.get(vmname).get_status().get_state() != 'down': time.sleep(5) vm.start() # remove an instance def vm_remove(conn, vmname): vm = conn.vms.get(name=vmname) vm.delete() # ------------------------------------------------------------------- # # VM statuses # # Get the VMs status def vm_status(conn, vmname): status = conn.vms.get(name=vmname).status.state return status # Get VM object and return it's name if object exists def get_vm(conn, vmname): vm = conn.vms.get(name=vmname) if vm is None: name = "empty" else: name = vm.get_name() return name # ------------------------------------------------------------------- # # Hypervisor operations # # not available yet # ------------------------------------------------------------------- # # Main def main(): module = AnsibleModule( 
argument_spec = dict( state = dict(default='present', choices=['present', 'absent', 'shutdown', 'started', 'restart']), #name = dict(required=True), user = dict(required=True), url = dict(required=True), instance_name = dict(required=True, aliases=['vmname']), password = dict(required=True, no_log=True), image = dict(), resource_type = dict(choices=['new', 'template']), zone = dict(), instance_disksize = dict(aliases=['vm_disksize']), instance_cpus = dict(default=1, aliases=['vmcpus']), instance_nic = dict(aliases=['vmnic']), instance_network = dict(default='rhevm', aliases=['vmnetwork']), instance_mem = dict(aliases=['vmmem']), instance_type = dict(default='server', aliases=['vmtype'], choices=['server', 'desktop']), disk_alloc = dict(default='thin', choices=['thin', 'preallocated']), disk_int = dict(default='virtio', choices=['virtio', 'ide']), instance_os = dict(aliases=['vmos']), instance_cores = dict(default=1, aliases=['vmcores']), instance_hostname = dict(aliases=['hostname']), instance_ip = dict(aliases=['ip']), instance_netmask = dict(aliases=['netmask']), instance_gateway = dict(aliases=['gateway']), instance_domain = dict(aliases=['domain']), instance_dns = dict(aliases=['dns']), instance_rootpw = dict(aliases=['rootpw']), instance_key = dict(aliases=['key']), sdomain = dict(), region = dict(), ) ) if not HAS_OVIRTSDK: module.fail_json(msg='ovirtsdk required for this module') state = module.params['state'] user = module.params['user'] url = module.params['url'] vmname = module.params['instance_name'] password = module.params['password'] image = module.params['image'] # name of the image to deploy resource_type = module.params['resource_type'] # template or from scratch zone = module.params['zone'] # oVirt cluster vmdisk_size = module.params['instance_disksize'] # disksize vmcpus = module.params['instance_cpus'] # number of cpu vmnic = module.params['instance_nic'] # network interface vmnetwork = module.params['instance_network'] # logical network vmmem = module.params['instance_mem'] # mem size vmdisk_alloc = module.params['disk_alloc'] # thin, preallocated vmdisk_int = module.params['disk_int'] # disk interface virtio or ide vmos = module.params['instance_os'] # Operating System vmtype = module.params['instance_type'] # server or desktop vmcores = module.params['instance_cores'] # number of cores sdomain = module.params['sdomain'] # storage domain to store disk on region = module.params['region'] # oVirt Datacenter hostname = module.params['instance_hostname'] ip = module.params['instance_ip'] netmask = module.params['instance_netmask'] gateway = module.params['instance_gateway'] domain = module.params['instance_domain'] dns = module.params['instance_dns'] rootpw = module.params['instance_rootpw'] key = module.params['instance_key'] #initialize connection try: c = conn(url+"/api", user, password) except Exception as e: module.fail_json(msg='%s' % e) if state == 'present': if get_vm(c, vmname) == "empty": if resource_type == 'template': try: create_vm_template(c, vmname, image, zone) except Exception as e: module.fail_json(msg='%s' % e) module.exit_json(changed=True, msg="deployed VM %s from template %s" % (vmname,image)) elif resource_type == 'new': # FIXME: refactor, use keyword args. 
try: create_vm(c, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int) except Exception as e: module.fail_json(msg='%s' % e) module.exit_json(changed=True, msg="deployed VM %s from scratch" % vmname) else: module.exit_json(changed=False, msg="You did not specify a resource type") else: module.exit_json(changed=False, msg="VM %s already exists" % vmname) if state == 'started': if vm_status(c, vmname) == 'up': module.exit_json(changed=False, msg="VM %s is already running" % vmname) else: #vm_start(c, vmname) vm_start(c, vmname, hostname, ip, netmask, gateway, domain, dns, rootpw, key) module.exit_json(changed=True, msg="VM %s started" % vmname) if state == 'shutdown': if vm_status(c, vmname) == 'down': module.exit_json(changed=False, msg="VM %s is already shutdown" % vmname) else: vm_stop(c, vmname) module.exit_json(changed=True, msg="VM %s is shutting down" % vmname) if state == 'restart': if vm_status(c, vmname) == 'up': vm_restart(c, vmname) module.exit_json(changed=True, msg="VM %s is restarted" % vmname) else: module.exit_json(changed=False, msg="VM %s is not running" % vmname) if state == 'absent': if get_vm(c, vmname) == "empty": module.exit_json(changed=False, msg="VM %s does not exist" % vmname) else: vm_remove(c, vmname) module.exit_json(changed=True, msg="VM %s removed" % vmname) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
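# --- Example (a sketch, not part of the original module) ---
# Two observations on the code above: DOCUMENTATION lists a 'restarted'
# state choice while argument_spec and the handler use 'restart', and
# the "stopping an instance" example uses state: stopped, which is not
# an accepted choice. Also, vm_restart() busy-waits with time.sleep()
# although the module never imports time explicitly; it appears to reach
# the namespace only through the wildcard import from
# ansible.module_utils.basic at the bottom. A polling helper with the
# dependency made explicit; wait_for_state, poll and timeout are
# hypothetical names:
import time

def wait_for_state(conn, vmname, state='down', poll=5, timeout=300):
    # Poll the oVirt API (same calls vm_restart() uses) until the VM
    # reaches `state` or the timeout expires.
    deadline = time.time() + timeout
    while conn.vms.get(vmname).get_status().get_state() != state:
        if time.time() > deadline:
            raise Exception('timed out waiting for %s to reach %s'
                            % (vmname, state))
        time.sleep(poll)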
gpl-3.0
ehalls/at100-kernel
tools/perf/scripts/python/sched-migration.py
11215
11670
#!/usr/bin/python # # Cpu task migration overview toy # # Copyright (C) 2010 Frederic Weisbecker <[email protected]> # # perf script event handlers have been generated by perf script -g python # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. import os import sys from collections import defaultdict from UserList import UserList sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from SchedGui import * threads = { 0 : "idle"} def thread_name(pid): return "%s:%d" % (threads[pid], pid) class RunqueueEventUnknown: @staticmethod def color(): return None def __repr__(self): return "unknown" class RunqueueEventSleep: @staticmethod def color(): return (0, 0, 0xff) def __init__(self, sleeper): self.sleeper = sleeper def __repr__(self): return "%s gone to sleep" % thread_name(self.sleeper) class RunqueueEventWakeup: @staticmethod def color(): return (0xff, 0xff, 0) def __init__(self, wakee): self.wakee = wakee def __repr__(self): return "%s woke up" % thread_name(self.wakee) class RunqueueEventFork: @staticmethod def color(): return (0, 0xff, 0) def __init__(self, child): self.child = child def __repr__(self): return "new forked task %s" % thread_name(self.child) class RunqueueMigrateIn: @staticmethod def color(): return (0, 0xf0, 0xff) def __init__(self, new): self.new = new def __repr__(self): return "task migrated in %s" % thread_name(self.new) class RunqueueMigrateOut: @staticmethod def color(): return (0xff, 0, 0xff) def __init__(self, old): self.old = old def __repr__(self): return "task migrated out %s" % thread_name(self.old) class RunqueueSnapshot: def __init__(self, tasks = [0], event = RunqueueEventUnknown()): self.tasks = tuple(tasks) self.event = event def sched_switch(self, prev, prev_state, next): event = RunqueueEventUnknown() if taskState(prev_state) == "R" and next in self.tasks \ and prev in self.tasks: return self if taskState(prev_state) != "R": event = RunqueueEventSleep(prev) next_tasks = list(self.tasks[:]) if prev in self.tasks: if taskState(prev_state) != "R": next_tasks.remove(prev) elif taskState(prev_state) == "R": next_tasks.append(prev) if next not in next_tasks: next_tasks.append(next) return RunqueueSnapshot(next_tasks, event) def migrate_out(self, old): if old not in self.tasks: return self next_tasks = [task for task in self.tasks if task != old] return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old)) def __migrate_in(self, new, event): if new in self.tasks: self.event = event return self next_tasks = self.tasks[:] + tuple([new]) return RunqueueSnapshot(next_tasks, event) def migrate_in(self, new): return self.__migrate_in(new, RunqueueMigrateIn(new)) def wake_up(self, new): return self.__migrate_in(new, RunqueueEventWakeup(new)) def wake_up_new(self, new): return self.__migrate_in(new, RunqueueEventFork(new)) def load(self): """ Provide the number of tasks on the runqueue. 
Don't count idle""" return len(self.tasks) - 1 def __repr__(self): ret = self.tasks.__repr__() ret += self.origin_tostring() return ret class TimeSlice: def __init__(self, start, prev): self.start = start self.prev = prev self.end = start # cpus that triggered the event self.event_cpus = [] if prev is not None: self.total_load = prev.total_load self.rqs = prev.rqs.copy() else: self.rqs = defaultdict(RunqueueSnapshot) self.total_load = 0 def __update_total_load(self, old_rq, new_rq): diff = new_rq.load() - old_rq.load() self.total_load += diff def sched_switch(self, ts_list, prev, prev_state, next, cpu): old_rq = self.prev.rqs[cpu] new_rq = old_rq.sched_switch(prev, prev_state, next) if old_rq is new_rq: return self.rqs[cpu] = new_rq self.__update_total_load(old_rq, new_rq) ts_list.append(self) self.event_cpus = [cpu] def migrate(self, ts_list, new, old_cpu, new_cpu): if old_cpu == new_cpu: return old_rq = self.prev.rqs[old_cpu] out_rq = old_rq.migrate_out(new) self.rqs[old_cpu] = out_rq self.__update_total_load(old_rq, out_rq) new_rq = self.prev.rqs[new_cpu] in_rq = new_rq.migrate_in(new) self.rqs[new_cpu] = in_rq self.__update_total_load(new_rq, in_rq) ts_list.append(self) if old_rq is not out_rq: self.event_cpus.append(old_cpu) self.event_cpus.append(new_cpu) def wake_up(self, ts_list, pid, cpu, fork): old_rq = self.prev.rqs[cpu] if fork: new_rq = old_rq.wake_up_new(pid) else: new_rq = old_rq.wake_up(pid) if new_rq is old_rq: return self.rqs[cpu] = new_rq self.__update_total_load(old_rq, new_rq) ts_list.append(self) self.event_cpus = [cpu] def next(self, t): self.end = t return TimeSlice(t, self) class TimeSliceList(UserList): def __init__(self, arg = []): self.data = arg def get_time_slice(self, ts): if len(self.data) == 0: slice = TimeSlice(ts, TimeSlice(-1, None)) else: slice = self.data[-1].next(ts) return slice def find_time_slice(self, ts): start = 0 end = len(self.data) found = -1 searching = True while searching: if start == end or start == end - 1: searching = False i = (end + start) / 2 if self.data[i].start <= ts and self.data[i].end >= ts: found = i end = i continue if self.data[i].end < ts: start = i elif self.data[i].start > ts: end = i return found def set_root_win(self, win): self.root_win = win def mouse_down(self, cpu, t): idx = self.find_time_slice(t) if idx == -1: return ts = self[idx] rq = ts.rqs[cpu] raw = "CPU: %d\n" % cpu raw += "Last event : %s\n" % rq.event.__repr__() raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000) raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6)) raw += "Load = %d\n" % rq.load() for t in rq.tasks: raw += "%s \n" % thread_name(t) self.root_win.update_summary(raw) def update_rectangle_cpu(self, slice, cpu): rq = slice.rqs[cpu] if slice.total_load != 0: load_rate = rq.load() / float(slice.total_load) else: load_rate = 0 red_power = int(0xff - (0xff * load_rate)) color = (0xff, red_power, red_power) top_color = None if cpu in slice.event_cpus: top_color = rq.event.color() self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end) def fill_zone(self, start, end): i = self.find_time_slice(start) if i == -1: return for i in xrange(i, len(self.data)): timeslice = self.data[i] if timeslice.start > end: return for cpu in timeslice.rqs: self.update_rectangle_cpu(timeslice, cpu) def interval(self): if len(self.data) == 0: return (0, 0) return (self.data[0].start, self.data[-1].end) def nr_rectangles(self): last_ts = self.data[-1] max_cpu = 0 for cpu in last_ts.rqs: if cpu > 
max_cpu: max_cpu = cpu return max_cpu class SchedEventProxy: def __init__(self): self.current_tsk = defaultdict(lambda : -1) self.timeslices = TimeSliceList() def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio): """ Ensure the task we sched out this cpu is really the one we logged. Otherwise we may have missed traces """ on_cpu_task = self.current_tsk[headers.cpu] if on_cpu_task != -1 and on_cpu_task != prev_pid: print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \ (headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid) threads[prev_pid] = prev_comm threads[next_pid] = next_comm self.current_tsk[headers.cpu] = next_pid ts = self.timeslices.get_time_slice(headers.ts()) ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu) def migrate(self, headers, pid, prio, orig_cpu, dest_cpu): ts = self.timeslices.get_time_slice(headers.ts()) ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu) def wake_up(self, headers, comm, pid, success, target_cpu, fork): if success == 0: return ts = self.timeslices.get_time_slice(headers.ts()) ts.wake_up(self.timeslices, pid, target_cpu, fork) def trace_begin(): global parser parser = SchedEventProxy() def trace_end(): app = wx.App(False) timeslices = parser.timeslices frame = RootFrame(timeslices, "Migration") app.MainLoop() def sched__sched_stat_runtime(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, runtime, vruntime): pass def sched__sched_stat_iowait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_stat_sleep(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_stat_wait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_process_fork(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, parent_comm, parent_pid, child_comm, child_pid): pass def sched__sched_process_wait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_process_exit(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_process_free(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_migrate_task(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, orig_cpu, dest_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.migrate(headers, pid, prio, orig_cpu, dest_cpu) def sched__sched_switch(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio) def sched__sched_wakeup_new(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, success, target_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.wake_up(headers, comm, pid, success, target_cpu, 1) def sched__sched_wakeup(event_name, 
context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, success, target_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.wake_up(headers, comm, pid, success, target_cpu, 0) def sched__sched_wait_task(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_kthread_stop_ret(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, ret): pass def sched__sched_kthread_stop(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid): pass def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm): pass
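# --- Example (a sketch, not part of the original script) ---
# TimeSliceList.find_time_slice() above hand-rolls a binary search over
# slice boundaries (note the Python 2 integer division in
# "(end + start) / 2"). An equivalent lookup using the standard bisect
# module, assuming slices are sorted by start and non-overlapping, as
# TimeSliceList maintains; find_time_slice_bisect is a hypothetical name:
import bisect

def find_time_slice_bisect(slices, ts):
    # Return the index of the slice containing timestamp ts, or -1.
    starts = [s.start for s in slices]
    i = bisect.bisect_right(starts, ts) - 1
    if i >= 0 and slices[i].start <= ts <= slices[i].end:
        return i
    return -1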
gpl-2.0
MattCrystal/shiny-octo-happiness
scripts/gcc-wrapper.py
1276
3382
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of The Linux Foundation nor
#       the names of its contributors may be used to endorse or promote
#       products derived from this software without specific prior written
#       permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.

import errno
import re
import os
import sys
import subprocess

# Note that gcc uses unicode, which may depend on the locale.  TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.

allowed_warnings = set([
    "return_address.c:62",
])

# Capture the name of the object file so we can remove it if a
# forbidden warning is found.
ofile = None

warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')

def interpret_warning(line):
    """Decode the message from gcc.  The messages we care about have a
    filename, and a warning."""
    line = line.rstrip('\n')
    m = warning_re.match(line)
    if m and m.group(2) not in allowed_warnings:
        print "error, forbidden warning:", m.group(2)

        # If there is a warning, remove any object if it exists.
        if ofile:
            try:
                os.remove(ofile)
            except OSError:
                pass
        sys.exit(1)

def run_gcc():
    args = sys.argv[1:]
    # Look for -o
    try:
        i = args.index('-o')
        global ofile
        ofile = args[i+1]
    except (ValueError, IndexError):
        pass

    compiler = sys.argv[0]

    try:
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        for line in proc.stderr:
            print line,
            interpret_warning(line)

        result = proc.wait()
    except OSError as e:
        result = e.errno
        if result == errno.ENOENT:
            print args[0] + ':', e.strerror
            print 'Is your PATH set correctly?'
        else:
            print ' '.join(args), str(e)

    return result

if __name__ == '__main__':
    status = run_gcc()
    sys.exit(status)
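# --- Example (a sketch, not part of the original wrapper) ---
# Shows what warning_re above actually matches; the sample gcc output
# line and the _demo_warning_re name are hypothetical.
def _demo_warning_re():
    line = 'kernel/sched/core.c:123:45: warning: unused variable "x"'
    m = warning_re.match(line)
    # group(2) is the "file:line" key compared against allowed_warnings.
    print m.group(2)    # -> core.c:123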
gpl-2.0
zero-rp/miniblink49
v8_7_5/tools/testrunner/local/statusfile.py
5
11573
# Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # for py2/py3 compatibility from __future__ import print_function import os import re from variants import ALL_VARIANTS from utils import Freeze # Possible outcomes FAIL = "FAIL" PASS = "PASS" TIMEOUT = "TIMEOUT" CRASH = "CRASH" # Outcomes only for status file, need special handling FAIL_OK = "FAIL_OK" FAIL_SLOPPY = "FAIL_SLOPPY" # Modifiers SKIP = "SKIP" SLOW = "SLOW" NO_VARIANTS = "NO_VARIANTS" FAIL_PHASE_ONLY = "FAIL_PHASE_ONLY" ALWAYS = "ALWAYS" KEYWORDS = {} for key in [SKIP, FAIL, PASS, CRASH, SLOW, FAIL_OK, NO_VARIANTS, FAIL_SLOPPY, ALWAYS, FAIL_PHASE_ONLY]: KEYWORDS[key] = key # Support arches, modes to be written as keywords instead of strings. VARIABLES = {ALWAYS: True} for var in ["debug", "release", "big", "little", "android", "android_arm", "android_arm64", "android_ia32", "android_x64", "arm", "arm64", "ia32", "mips", "mipsel", "mips64", "mips64el", "x64", "ppc", "ppc64", "s390", "s390x", "macos", "windows", "linux", "aix", "r1", "r2", "r3", "r5", "r6"]: VARIABLES[var] = var # Allow using variants as keywords. for var in ALL_VARIANTS: VARIABLES[var] = var class StatusFile(object): def __init__(self, path, variables): """ _rules: {variant: {test name: [rule]}} _prefix_rules: {variant: {test name prefix: [rule]}} """ with open(path) as f: self._rules, self._prefix_rules = ReadStatusFile(f.read(), variables) def get_outcomes(self, testname, variant=None): """Merges variant dependent and independent rules.""" outcomes = frozenset() for key in set([variant or '', '']): rules = self._rules.get(key, {}) prefix_rules = self._prefix_rules.get(key, {}) if testname in rules: outcomes |= rules[testname] for prefix in prefix_rules: if testname.startswith(prefix): outcomes |= prefix_rules[prefix] return outcomes def warn_unused_rules(self, tests, check_variant_rules=False): """Finds and prints unused rules in status file. Rule X is unused when it doesn't apply to any tests, which can also mean that all matching tests were skipped by another rule before evaluating X. 
Args: tests: list of pairs (testname, variant) check_variant_rules: if set variant dependent rules are checked """ if check_variant_rules: variants = list(ALL_VARIANTS) else: variants = [''] used_rules = set() for testname, variant in tests: variant = variant or '' if testname in self._rules.get(variant, {}): used_rules.add((testname, variant)) if SKIP in self._rules[variant][testname]: continue for prefix in self._prefix_rules.get(variant, {}): if testname.startswith(prefix): used_rules.add((prefix, variant)) if SKIP in self._prefix_rules[variant][prefix]: break for variant in variants: for rule, value in ( list(self._rules.get(variant, {}).iteritems()) + list(self._prefix_rules.get(variant, {}).iteritems())): if (rule, variant) not in used_rules: if variant == '': variant_desc = 'variant independent' else: variant_desc = 'variant: %s' % variant print('Unused rule: %s -> %s (%s)' % (rule, value, variant_desc)) def _JoinsPassAndFail(outcomes1, outcomes2): """Indicates if we join PASS and FAIL from two different outcome sets and the first doesn't already contain both. """ return ( PASS in outcomes1 and not (FAIL in outcomes1 or FAIL_OK in outcomes1) and (FAIL in outcomes2 or FAIL_OK in outcomes2) ) VARIANT_EXPRESSION = object() def _EvalExpression(exp, variables): """Evaluates expression and returns its result. In case of NameError caused by undefined "variant" identifier returns VARIANT_EXPRESSION marker. """ try: return eval(exp, variables) except NameError as e: identifier = re.match("name '(.*)' is not defined", e.message).group(1) assert identifier == "variant", "Unknown identifier: %s" % identifier return VARIANT_EXPRESSION def _EvalVariantExpression( condition, section, variables, variant, rules, prefix_rules): variables_with_variant = dict(variables) variables_with_variant["variant"] = variant result = _EvalExpression(condition, variables_with_variant) assert result != VARIANT_EXPRESSION if result is True: _ReadSection( section, variables_with_variant, rules[variant], prefix_rules[variant], ) else: assert result is False, "Make sure expressions evaluate to boolean values" def _ParseOutcomeList(rule, outcomes, variables, target_dict): """Outcome list format: [condition, outcome, outcome, ...]""" result = set([]) if type(outcomes) == str: outcomes = [outcomes] for item in outcomes: if type(item) == str: result.add(item) elif type(item) == list: condition = item[0] exp = _EvalExpression(condition, variables) assert exp != VARIANT_EXPRESSION, ( "Nested variant expressions are not supported") if exp is False: continue # Ensure nobody uses an identifier by mistake, like "default", # which would evaluate to true here otherwise. assert exp is True, "Make sure expressions evaluate to boolean values" for outcome in item[1:]: assert type(outcome) == str result.add(outcome) else: assert False if len(result) == 0: return if rule in target_dict: # A FAIL without PASS in one rule has always precedence over a single # PASS (without FAIL) in another. Otherwise the default PASS expectation # in a rule with a modifier (e.g. PASS, SLOW) would be joined to a FAIL # from another rule (which intended to mark a test as FAIL and not as # PASS and FAIL). 
if _JoinsPassAndFail(target_dict[rule], result): target_dict[rule] -= set([PASS]) if _JoinsPassAndFail(result, target_dict[rule]): result -= set([PASS]) target_dict[rule] |= result else: target_dict[rule] = result def ReadContent(content): return eval(content, KEYWORDS) def ReadStatusFile(content, variables): """Status file format Status file := [section] section = [CONDITION, section_rules] section_rules := {path: outcomes} outcomes := outcome | [outcome, ...] outcome := SINGLE_OUTCOME | [CONDITION, SINGLE_OUTCOME, SINGLE_OUTCOME, ...] """ # Empty defaults for rules and prefix_rules. Variant-independent # rules are mapped by "", others by the variant name. rules = {variant: {} for variant in ALL_VARIANTS} rules[""] = {} prefix_rules = {variant: {} for variant in ALL_VARIANTS} prefix_rules[""] = {} variables.update(VARIABLES) for conditional_section in ReadContent(content): assert type(conditional_section) == list assert len(conditional_section) == 2 condition, section = conditional_section exp = _EvalExpression(condition, variables) # The expression is variant-independent and evaluates to False. if exp is False: continue # The expression is variant-independent and evaluates to True. if exp is True: _ReadSection( section, variables, rules[''], prefix_rules[''], ) continue # The expression is variant-dependent (contains "variant" keyword) if exp == VARIANT_EXPRESSION: # If the expression contains one or more "variant" keywords, we evaluate # it for all possible variants and create rules for those that apply. for variant in ALL_VARIANTS: _EvalVariantExpression( condition, section, variables, variant, rules, prefix_rules) continue assert False, "Make sure expressions evaluate to boolean values" return Freeze(rules), Freeze(prefix_rules) def _ReadSection(section, variables, rules, prefix_rules): assert type(section) == dict for rule, outcome_list in section.iteritems(): assert type(rule) == str if rule[-1] == '*': _ParseOutcomeList(rule[:-1], outcome_list, variables, prefix_rules) else: _ParseOutcomeList(rule, outcome_list, variables, rules) JS_TEST_PATHS = { 'debugger': [[]], 'inspector': [[]], 'intl': [[]], 'message': [[]], 'mjsunit': [[]], 'mozilla': [['data']], 'test262': [['data', 'test'], ['local-tests', 'test']], 'webkit': [[]], } def PresubmitCheck(path): with open(path) as f: contents = ReadContent(f.read()) basename = os.path.basename(os.path.dirname(path)) root_prefix = basename + "/" status = {"success": True} def _assert(check, message): # Like "assert", but doesn't throw. if not check: print("%s: Error: %s" % (path, message)) status["success"] = False try: for section in contents: _assert(type(section) == list, "Section must be a list") _assert(len(section) == 2, "Section list must have exactly 2 entries") section = section[1] _assert(type(section) == dict, "Second entry of section must be a dictionary") for rule in section: _assert(type(rule) == str, "Rule key must be a string") _assert(not rule.startswith(root_prefix), "Suite name prefix must not be used in rule keys") _assert(not rule.endswith('.js'), ".js extension must not be used in rule keys.") _assert('*' not in rule or (rule.count('*') == 1 and rule[-1] == '*'), "Only the last character of a rule key can be a wildcard") if basename in JS_TEST_PATHS and '*' not in rule: _assert(any(os.path.exists(os.path.join(os.path.dirname(path), *(paths + [rule + ".js"]))) for paths in JS_TEST_PATHS[basename]), "missing file for %s test %s" % (basename, rule)) return status["success"] except Exception as e: print(e) return False
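# --- Example (a sketch, not part of the original file) ---
# Illustrates the VARIANT_EXPRESSION control flow above, assuming it
# runs inside this module; the arch value and _demo_eval_expression
# name are hypothetical.
def _demo_eval_expression():
    variables = dict(VARIABLES)
    variables['arch'] = 'x64'

    # Variant-independent condition: evaluates straight to a boolean,
    # since 'x64' is a self-mapping keyword in VARIABLES.
    assert _EvalExpression('arch == x64', variables) is True

    # Variant-dependent condition: the NameError on the undefined
    # 'variant' name is translated into the VARIANT_EXPRESSION marker,
    # so ReadStatusFile re-evaluates it once per variant in ALL_VARIANTS.
    assert _EvalExpression('variant == default', variables) is VARIANT_EXPRESSION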
apache-2.0
florian-dacosta/OpenUpgrade
addons/l10n_fr_hr_payroll/report/__init__.py
424
1091
#-*- coding:utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import fiche_paye

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
devendermishrajio/nova
nova/api/openstack/compute/legacy_v2/contrib/extended_ips.py
79
3098
# Copyright 2013 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""The Extended Ips API extension."""

import itertools

from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute

authorize = extensions.soft_extension_authorizer('compute', 'extended_ips')


class ExtendedIpsController(wsgi.Controller):
    def __init__(self, *args, **kwargs):
        super(ExtendedIpsController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()

    def _extend_server(self, context, server, instance):
        key = "%s:type" % Extended_ips.alias
        networks = common.get_networks_for_instance(context, instance)
        for label, network in networks.items():
            # NOTE(vish): ips are hidden in some states via the
            #             hide_server_addresses extension.
            if label in server['addresses']:
                all_ips = itertools.chain(network["ips"],
                                          network["floating_ips"])
                for i, ip in enumerate(all_ips):
                    server['addresses'][label][i][key] = ip['type']

    @wsgi.extends
    def show(self, req, resp_obj, id):
        context = req.environ['nova.context']
        if authorize(context):
            server = resp_obj.obj['server']
            db_instance = req.get_db_instance(server['id'])
            # server['id'] is guaranteed to be in the cache due to
            # the core API adding it in its 'show' method.
            self._extend_server(context, server, db_instance)

    @wsgi.extends
    def detail(self, req, resp_obj):
        context = req.environ['nova.context']
        if authorize(context):
            servers = list(resp_obj.obj['servers'])
            for server in servers:
                db_instance = req.get_db_instance(server['id'])
                # server['id'] is guaranteed to be in the cache due to
                # the core API adding it in its 'detail' method.
                self._extend_server(context, server, db_instance)


class Extended_ips(extensions.ExtensionDescriptor):
    """Adds type parameter to the ip list."""

    name = "ExtendedIps"
    alias = "OS-EXT-IPS"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "extended_ips/api/v1.1")
    updated = "2013-01-06T00:00:00Z"

    def get_controller_extensions(self):
        controller = ExtendedIpsController()
        extension = extensions.ControllerExtension(self, 'servers', controller)
        return [extension]
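# --- Example (a sketch, not part of the original extension) ---
# The _extend_server() loop above flattens fixed and floating IPs into
# one indexable sequence; the enumeration trick in isolation, with a
# hypothetical network dict in the shape common.get_networks_for_instance
# returns:
def _demo_ip_flattening():
    import itertools
    network = {
        "ips": [{"address": "10.0.0.5", "type": "fixed"}],
        "floating_ips": [{"address": "203.0.113.7", "type": "floating"}],
    }
    all_ips = itertools.chain(network["ips"], network["floating_ips"])
    for i, ip in enumerate(all_ips):
        # i lines up with server['addresses'][label][i] in _extend_server().
        print('%d: %s (%s)' % (i, ip["address"], ip["type"]))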
apache-2.0
KaiSzuttor/espresso
testsuite/python/rotational_inertia.py
2
6230
# Copyright (C) 2010-2019 The ESPResSo project # # This file is part of ESPResSo. # # ESPResSo is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import unittest as ut import unittest_decorators as utx import numpy as np import espressomd import tests_common @utx.skipIfMissingFeatures(["MASS", "ROTATIONAL_INERTIA"]) class RotationalInertia(ut.TestCase): longMessage = True # Handle for espresso system system = espressomd.System(box_l=[1.0, 1.0, 1.0]) system.cell_system.skin = 0 # Particle's angular momentum: initial and ongoing L_0_lab = np.zeros((3)) L_lab = np.zeros((3)) # Angular momentum def L_body(self, part): return self.system.part[part].omega_body[:] * \ self.system.part[part].rinertia[:] # Set the angular momentum def set_L_0(self, part): L_0_body = self.L_body(part) self.L_0_lab = tests_common.convert_vec_body_to_space( self.system, part, L_0_body) def set_L(self, part): L_body = self.L_body(part) self.L_lab = tests_common.convert_vec_body_to_space( self.system, part, L_body) def test_stability(self): self.system.part.clear() self.system.part.add( pos=np.array([0.0, 0.0, 0.0]), id=0, rotation=(1, 1, 1)) # Inertial motion around the stable and unstable axes tol = 4E-3 # Anisotropic inertial moment. Stable axes correspond to J[1] and J[2]. # The unstable axis corresponds to J[0]. These values relation is J[1] # < J[0] < J[2]. J = np.array([5, 0.5, 18.5]) self.system.part[0].rinertia = J[:] # Validation of J[1] stability # ---------------------------- self.system.time_step = 0.0006 # Stable omega component should be larger than other components. stable_omega = 57.65 self.system.part[0].omega_body = np.array([0.15, stable_omega, -0.043]) self.set_L_0(0) for i in range(100): self.set_L(0) for k in range(3): self.assertAlmostEqual( self.L_lab[k], self.L_0_lab[k], delta=tol, msg='Inertial motion around stable axis J1: Deviation in ' 'angular momentum is too large. Step {0}, coordinate ' '{1}, expected {2}, got {3}'.format( i, k, self.L_0_lab[k], self.L_lab[k])) self.assertAlmostEqual( self.system.part[0].omega_body[1], stable_omega, delta=tol, msg='Inertial motion around stable axis J1: Deviation in omega ' 'is too large. Step {0}, coordinate 1, expected {1}, got {2}' .format(i, stable_omega, self.system.part[0].omega_body[1])) self.system.integrator.run(10) # Validation of J[2] stability # ---------------------------- self.system.time_step = 0.01 # Stable omega component should be larger than other components. stable_omega = 3.2 self.system.part[0].omega_body = np.array( [0.011, -0.043, stable_omega]) self.set_L_0(0) for i in range(100): self.set_L(0) for k in range(3): self.assertAlmostEqual( self.L_lab[k], self.L_0_lab[k], delta=tol, msg='Inertial motion around stable axis J2: Deviation in ' 'angular momentum is too large. 
Step {0}, coordinate ' '{1}, expected {2}, got {3}'.format( i, k, self.L_0_lab[k], self.L_lab[k])) self.assertAlmostEqual( self.system.part[0].omega_body[2], stable_omega, delta=tol, msg='Inertial motion around stable axis J2: Deviation in omega ' 'is too large. Step {0}, coordinate 2, expected {1}, got {2}' .format(i, stable_omega, self.system.part[0].omega_body[2])) self.system.integrator.run(10) # Validation of J[0] # ------------------ self.system.time_step = 0.001 # Unstable omega component should be larger than other components. unstable_omega = 5.76 self.system.part[0].omega_body = np.array( [unstable_omega, -0.043, 0.15]) self.set_L_0(0) for i in range(100): self.set_L(0) for k in range(3): self.assertAlmostEqual( self.L_lab[k], self.L_0_lab[k], delta=tol, msg='Inertial motion around stable axis J0: Deviation in ' 'angular momentum is too large. Step {0}, coordinate ' '{1}, expected {2}, got {3}'.format( i, k, self.L_0_lab[k], self.L_lab[k])) self.system.integrator.run(10) def energy(self, p): return 0.5 * np.dot(p.rinertia, p.omega_body**2) def momentum(self, p): return np.linalg.norm(p.rinertia * p.omega_body) def test_energy_and_momentum_conservation(self): system = self.system system.part.clear() system.thermostat.turn_off() p = system.part.add(pos=(0, 0, 0), rinertia=(1.1, 1.3, 1.5), rotation=(1, 1, 1), omega_body=(2, 1, 4)) E0 = self.energy(p) m0 = self.momentum(p) system.time_step = 0.001 for _ in range(1000): system.integrator.run(100) self.assertAlmostEqual(self.energy(p), E0, places=3) self.assertAlmostEqual(self.momentum(p), m0, places=3) if __name__ == '__main__': ut.main()
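# --- Example (a sketch, not part of the original test) ---
# The conservation checks rest on the rigid-body relations
#   E = 1/2 * sum_i I_i * omega_i**2   and   |L| = |I * omega|
# (body frame, diagonal inertia tensor; |L| is frame-independent, so the
# body-frame norm matches the conserved lab-frame angular momentum).
# A standalone numpy check with the same numbers the test uses:
import numpy as np

rinertia = np.array([1.1, 1.3, 1.5])
omega_body = np.array([2.0, 1.0, 4.0])

energy = 0.5 * np.dot(rinertia, omega_body**2)    # 0.5 * (4.4 + 1.3 + 24.0)
momentum = np.linalg.norm(rinertia * omega_body)  # |(2.2, 1.3, 6.0)|

print(energy)    # 14.85
print(momentum)  # ~6.52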
gpl-3.0
zirou30/MITMf
core/sslstrip/DnsCache.py
5
1699
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging

mitmf_logger = logging.getLogger('mitmf')


class DnsCache:
    '''
    The DnsCache maintains a cache of DNS lookups, mirroring the browser experience.
    '''

    _instance = None

    def __init__(self):
        self.customAddress = None
        self.cache = {}

    @staticmethod
    def getInstance():
        if DnsCache._instance is None:
            DnsCache._instance = DnsCache()
        return DnsCache._instance

    def cacheResolution(self, host, address):
        self.cache[host] = address

    def getCachedAddress(self, host):
        if host in self.cache:
            return self.cache[host]
        return None

    def setCustomRes(self, host, ip_address=None):
        if ip_address is not None:
            self.cache[host] = ip_address
            mitmf_logger.debug("[DNSCache] DNS entry set: %s -> %s" % (host, ip_address))
        else:
            if self.customAddress is not None:
                self.cache[host] = self.customAddress

    def setCustomAddress(self, ip_address):
        self.customAddress = ip_address
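# --- Example (a sketch, not part of the original class) ---
# Usage of the singleton above; hostnames, addresses, and the
# _demo_dns_cache name are hypothetical.
def _demo_dns_cache():
    cache = DnsCache.getInstance()

    cache.cacheResolution('example.com', '93.184.216.34')
    print cache.getCachedAddress('example.com')     # -> 93.184.216.34

    # Once a custom address is set, setCustomRes() without an explicit
    # ip_address maps any host to it.
    cache.setCustomAddress('10.0.0.1')
    cache.setCustomRes('victim.example')
    print cache.getCachedAddress('victim.example')  # -> 10.0.0.1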
gpl-3.0
seecr/meresco-html
meresco/html/login/basichtmlloginform.py
1
17676
## begin license ## # # "Meresco Html" is a template engine based on generators, and a sequel to Slowfoot. # It is also known as "DynamicHtml" or "Seecr Html". # # Copyright (C) 2012 Meertens Instituut (KNAW) http://meertens.knaw.nl # Copyright (C) 2012-2018, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl # Copyright (C) 2014 Stichting Bibliotheek.nl (BNL) http://www.bibliotheek.nl # Copyright (C) 2015, 2020-2021 Stichting Kennisnet https://www.kennisnet.nl # Copyright (C) 2020-2021 Data Archiving and Network Services https://dans.knaw.nl # Copyright (C) 2020-2021 SURF https://www.surf.nl # Copyright (C) 2020-2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl # # This file is part of "Meresco Html" # # "Meresco Html" is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # "Meresco Html" is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with "Meresco Html"; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # ## end license ## from meresco.components.http.utils import redirectHttp, CRLF, insertHeader, findCookies, okJson from xml.sax.saxutils import quoteattr, escape as xmlEscape from os.path import join from .securezone import ORIGINAL_PATH from simplejson import dumps, loads from meresco.html import PostActions from .labels import getLabel from urllib.parse import urlencode from meresco.html.utils import parse_qs from time import time from ._constants import UNAUTHORIZED TWO_WEEKS = 2*7*24*3600 class BasicHtmlLoginForm(PostActions): def __init__(self, action, loginPath, home="/", name=None, lang='en', rememberMeCookie=False): PostActions.__init__(self, name=name) self._action = action self._loginPath = loginPath self._home = home self.registerAction('changepassword', self.handleChangePassword) self.registerAction('remove', self.handleRemove) self.registerAction('newUser', self.handleNewUser) self.defaultAction(self.handleLogin) self._lang = lang self._rememberMeCookie = rememberMeCookie def handleLogin(self, session=None, Body=None, **kwargs): accept = kwargs.get("Headers", {}).get("Accept", '') jsonResponse = 'application/json' in accept strBody = str(Body, encoding='utf-8') bodyArgs = {d['name']:[d['value']] for d in loads(strBody)} if jsonResponse else parse_qs(strBody, keep_blank_values=True) username = bodyArgs.get('username', [None])[0] password = bodyArgs.get('password', [None])[0] rememberMe = bodyArgs.get('rememberMe', [None])[0] != None if self.call.validateUser(username=username, password=password): user = self.loginAsUser(username) session[USER] = user url = session.pop(ORIGINAL_PATH, self._home) response = redirectHttp if rememberMe and self._rememberMeCookie: cookieValues = self.call.createCookie(user) status, headers = response.split(CRLF, 1) response = CRLF.join([status, cookieValues['header'], headers]) if jsonResponse: yield bytes(okJson, encoding="utf-8") yield dumps(dict(success=True)) return yield bytes(response % url, encoding='utf-8') else: session['BasicHtmlLoginForm.formValues'] = { 'username': username, 'errorMessage': getLabel(self._lang, 
'loginForm', 'invalid') } if jsonResponse: yield bytes(okJson, encoding="utf-8") yield dumps(dict(success=False, message=getLabel(self._lang, 'loginForm', 'invalid'))) return yield bytes(redirectHttp % self._loginPath, encoding='utf-8') def getUser(self, username): return self._checkAndCreateUser(username) loginAsUser = getUser def loginForm(self, session, path, lang=None, **kwargs): lang = lang or self._lang formValues = session.get('BasicHtmlLoginForm.formValues', {}) if session else {} yield """<div id="login-form">\n""" if 'errorMessage' in formValues: yield ' <p class="error">%s</p>\n' % xmlEscape(formValues['errorMessage']) values = dict( username=quoteattr(formValues.get('username', '')), action=quoteattr(self._action), formUrl=quoteattr(path), lblUsername=getLabel(lang, 'loginForm', 'username'), lblPassword=getLabel(lang, 'loginForm', 'password'), lblLogin=getLabel(lang, 'loginForm', 'login'), lblRememberMe=getLabel(lang, 'loginForm', 'rememberMe') ) yield """ <form method="POST" name="login" action=%(action)s> <input type="hidden" name="formUrl" value=%(formUrl)s/> <dl> <dt>%(lblUsername)s</dt> <dd><input type="text" name="username" value=%(username)s/></dd> <dt>%(lblPassword)s</dt> <dd><input type="password" name="password"/></dd>""" % values if self._rememberMeCookie: yield """ <dt>&nbsp;</dt><dd class="rememberMe"><input type="checkbox" name="rememberMe" id="rememberMe" /><label for="rememberMe">%(lblRememberMe)s</label></dd>""" % values yield """ <dd class="submit"><input type="submit" id="submitLogin" value="%(lblLogin)s"/></dd> </dl> </form> <script type="text/javascript"> document.getElementById("submitLogin").focus() </script> </div>""" % values session.pop('BasicHtmlLoginForm.formValues', None) def newUserForm(self, session, path, lang=None, extraFields="", **kwargs): lang = lang or self._lang formValues = session.get('BasicHtmlLoginForm.newUserFormValues', {}) if session else {} yield """<div id="login-new-user-form">\n""" if not USER in session: yield '<p class="error">Please login to add new users.</p>\n</div>' return if 'errorMessage' in formValues: yield ' <p class="error">%s</p>\n' % xmlEscape(formValues['errorMessage']) if 'successMessage' in formValues: yield ' <p class="success">%s</p>\n' % xmlEscape(formValues['successMessage']) values = dict( username=quoteattr(formValues.get('username', '')), action=quoteattr(join(self._action, 'newUser')), formUrl=quoteattr(path), returnUrl=quoteattr(kwargs.get('returnUrl', path)), lblUsername=getLabel(lang, 'newuserForm', 'username'), lblPassword=getLabel(lang, 'newuserForm', 'password'), lblPasswordRepeat=getLabel(lang, 'newuserForm', 'password-repeat'), lblCreate=getLabel(lang, 'newuserForm', 'create'), extraFields=extraFields ) yield """ <form method="POST" name="newUser" action=%(action)s> <input type="hidden" name="formUrl" value=%(formUrl)s/> <input type="hidden" name="returnUrl" value=%(returnUrl)s/> <dl> <dt>%(lblUsername)s</dt> <dd><input type="text" name="username" value=%(username)s/></dd> <dt>%(lblPassword)s</dt> <dd><input type="password" name="password"/></dd> <dt>%(lblPasswordRepeat)s</dt> <dd><input type="password" name="retypedPassword"/></dd> %(extraFields)s <dd class="submit"><input type="submit" value="%(lblCreate)s"/></dd> </dl> </form> </div>""" % values session.pop('BasicHtmlLoginForm.newUserFormValues', None) def handleNewUser(self, session, Body, **kwargs): handlingUser = session.get(USER) if handlingUser is None or not handlingUser.canEdit(): yield UNAUTHORIZED return bodyArgs = parse_qs(str(Body, 
encoding='utf-8'), keep_blank_values=True) if Body else {} username = bodyArgs.get('username', [None])[0] password = bodyArgs.get('password', [None])[0] retypedPassword = bodyArgs.get('retypedPassword', [None])[0] formUrl = bodyArgs.get('formUrl', [self._home])[0] returnUrl = bodyArgs.get('returnUrl', [formUrl])[0] targetUrl = formUrl if password != retypedPassword: session['BasicHtmlLoginForm.newUserFormValues']={'username': username, 'errorMessage': getLabel(self._lang, "newuserForm", 'dontMatch')} else: try: self.do.addUser(username=username, password=password) self.do.handleNewUser(username=username, Body=Body) session['BasicHtmlLoginForm.newUserFormValues']={'successMessage': '%s "%s"' % (getLabel(self._lang, 'newuserForm', 'added'), username)} targetUrl = returnUrl except ValueError as e: session['BasicHtmlLoginForm.newUserFormValues']={'username': username, 'errorMessage': str(e)} yield redirectHttp % targetUrl.format(username=username) def handleChangePassword(self, session, Body, **kwargs): bodyArgs = parse_qs(str(Body, encoding='utf-8'), keep_blank_values=True) if Body else {} username = bodyArgs.get('username', [None])[0] oldPassword = bodyArgs.get('oldPassword', [None])[0] newPassword = bodyArgs.get('newPassword', [None])[0] retypedPassword = bodyArgs.get('retypedPassword', [None])[0] formUrl = bodyArgs.get('formUrl', [self._home])[0] returnUrl = bodyArgs.get('returnUrl', [formUrl])[0] targetUrl = formUrl handlingUser = session.get(USER) if not handlingUser: session['BasicHtmlLoginForm.formValues']={ 'username': username, 'errorMessage': getLabel(self._lang, 'changepasswordForm', 'loginRequired')} yield redirectHttp % targetUrl return if newPassword != retypedPassword: session['BasicHtmlLoginForm.formValues']={ 'username': username, 'errorMessage': getLabel(self._lang, 'changepasswordForm', 'dontMatch')} else: if (not oldPassword and handlingUser.canEdit(username) and handlingUser.name != username) or self.call.validateUser(username=username, password=oldPassword): try: self.call.setPassword(username, newPassword) targetUrl = returnUrl except ValueError: session['BasicHtmlLoginForm.formValues']={ 'username': username, 'errorMessage': getLabel(self._lang, 'changepasswordForm', 'passwordInvalid')} else: session['BasicHtmlLoginForm.formValues']={ 'username': username, 'errorMessage': getLabel(self._lang, 'changepasswordForm', 'usernamePasswordDontMatch')} yield redirectHttp % targetUrl.format(username=username) def changePasswordForm(self, session, path, arguments, user=None, lang=None, onlyNewPassword=False, **kwargs): lang = lang or self._lang formValues = session.get('BasicHtmlLoginForm.formValues', {}) if session else {} yield """<div id="login-change-password-form">\n""" if not USER in session: yield '<p class="error">Please login to change password.</p>\n</div>' return if 'errorMessage' in formValues: yield ' <p class="error">%s</p>\n' % xmlEscape(formValues['errorMessage']) formUrl = path if arguments: formUrl += "?" 
+ urlencode(sorted(arguments.items()), doseq=True) username = session[USER].name if user is None else (user if isinstance(user, str) else user.name) values = dict( action=quoteattr(join(self._action, 'changepassword')), formUrl=quoteattr(formUrl), returnUrl=quoteattr(kwargs.get('returnUrl', path)), username=quoteattr(username), lblOldPassword=getLabel(lang, "changepasswordForm", "old-password"), lblNewPassword=getLabel(lang, "changepasswordForm", "new-password"), lblNewPasswordRepeat=getLabel(lang, "changepasswordForm", "new-password-repeat"), lblChange=getLabel(lang, "changepasswordForm", "change"), ) yield """<form method="POST" name="changePassword" action=%(action)s> <input type="hidden" name="formUrl" value=%(formUrl)s/> <input type="hidden" name="returnUrl" value=%(returnUrl)s/> <input type="hidden" name="username" value=%(username)s/> <dl> """ % values if not onlyNewPassword: yield """<dt>%(lblOldPassword)s</dt> <dd><input type="password" name="oldPassword"/></dd>""" % values yield """ <dt>%(lblNewPassword)s</dt> <dd><input type="password" name="newPassword"/></dd> <dt>%(lblNewPasswordRepeat)s</dt> <dd><input type="password" name="retypedPassword"/></dd> <dd class="submit"><input type="submit" value="%(lblChange)s"/></dd> </dl> </form> </div>""" % values session.pop('BasicHtmlLoginForm.formValues', None) def userList(self, session, path, userLink=None, **kwargs): yield """<div id="login-user-list">\n""" if not USER in session: yield '<p class="error">Please login to show user list.</p>\n</div>' return sessionUser = session[USER] if sessionUser.canEdit(): yield """<script type="text/javascript"> function deleteUser(username) { if (confirm("Are you sure?")) { document.removeUser.username.value = username; document.removeUser.submit(); } } </script>""" yield """<form name="removeUser" method="POST" action=%s> <input type="hidden" name="formUrl" value=%s/> <input type="hidden" name="username"/>""" % ( quoteattr(join(self._action, 'remove')), quoteattr(path), ) yield '</form>\n' yield '<ul>\n' for user in sorted(self._listUsers(), key=lambda u:u.title()): yield '<li>' if userLink: yield '<a href="%s?user=%s">%s</a>' % (userLink, xmlEscape(user.name), xmlEscape(user.title())) else: yield xmlEscape(user.title()) if sessionUser.name != user.name and ( sessionUser.canEdit(user.name) ): yield """ <a href="javascript:deleteUser('%s');">delete</a>""" % user.name yield '</li>\n' yield '</ul>\n' yield '</div>\n' def _sessionUserMayDeleteAUser(self, sessionUser, user): return user is not None and \ sessionUser is not None and \ sessionUser.name != user.name and \ sessionUser.canEdit(user.name) def handleRemove(self, session, Body, **kwargs): bodyArgs = parse_qs(str(Body, encoding='utf-8'), keep_blank_values=True) if Body else {} formUrl = bodyArgs.get('formUrl', [self._home])[0] sessionUser = session.get(USER) user = self._checkAndCreateUser(bodyArgs.get('username', [None])[0]) if not self._sessionUserMayDeleteAUser(sessionUser, user): yield UNAUTHORIZED return self.do.removeUser(user.name) self.do.removeCookies(filter=lambda anObject: anObject.name == user.name if isinstance(anObject, self.User) else False) self.do.removeCookies(filter=lambda anObject: anObject.get(USER).name == user.name if isinstance(anObject, dict) else False) yield redirectHttp % formUrl def logout(self, session, Headers, **ignored): session.pop(USER, None) redirectUrl = self._home response = redirectHttp % redirectUrl if not self._rememberMeCookie: yield response return cookieName = self.call.cookieName() for cookie in 
findCookies(Headers=Headers, name=cookieName): self.call.removeCookie(cookie) yield insertHeader([response], 'Set-Cookie: {}=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/'.format(cookieName)) def _listUsers(self): return [self._createUser(username) for username in self.call.listUsernames()] def _checkAndCreateUser(self, username): if not self.call.hasUser(username): return None return self._createUser(username) def _createUser(self, username): user = self.User(username) user.isValid = lambda: self.call.hasUser(username) self.do.enrichUser(user) return user def _now(self): return time() class User(object): def __init__(inner, name): inner.name = name def title(inner): return inner.name def isAdmin(inner): return inner.name == 'admin' def canEdit(inner, username=None): username = username.name if hasattr(username, 'name') else username return inner.isAdmin() or inner.name == username USER = 'user'
gpl-2.0
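For orientation, a standalone sketch of the two request-body formats handleLogin() above accepts: a form-encoded body, or (when the Accept header contains application/json) a JSON list of name/value dicts. The stdlib json and urllib.parse modules stand in here for the simplejson and meresco.html.utils helpers used in the module; all field values are illustrative.

import json
from urllib.parse import parse_qs

form_body = b"username=alice&password=secret&rememberMe=on"
json_body = b'[{"name": "username", "value": "alice"}, {"name": "password", "value": "secret"}]'

def parse_login_body(body, accept=""):
    # Mirrors handleLogin: JSON bodies are lists of {"name":..., "value":...}
    # dicts mapped to single-element lists; form bodies go through parse_qs
    # with keep_blank_values=True so empty fields are preserved.
    text = body.decode("utf-8")
    if "application/json" in accept:
        return {d["name"]: [d["value"]] for d in json.loads(text)}
    return parse_qs(text, keep_blank_values=True)

print(parse_login_body(form_body))
print(parse_login_body(json_body, accept="application/json"))

Either way the handler can then read bodyArgs.get('username', [None])[0] and friends uniformly.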
vrieni/orange
Orange/OrangeCanvas/gui/toolgrid.py
6
14520
""" A widget containing a grid of clickable actions/buttons. """ from collections import namedtuple, deque from PyQt4.QtGui import ( QFrame, QAction, QToolButton, QGridLayout, QFontMetrics, QSizePolicy, QStyleOptionToolButton, QStylePainter, QStyle ) from PyQt4.QtCore import Qt, QObject, QSize, QVariant, QEvent, QSignalMapper from PyQt4.QtCore import pyqtSignal as Signal from . import utils _ToolGridSlot = namedtuple( "_ToolGridSlot", ["button", "action", "row", "column" ] ) class _ToolGridButton(QToolButton): def __init__(self, *args, **kwargs): QToolButton.__init__(self, *args, **kwargs) self.__text = "" def actionEvent(self, event): QToolButton.actionEvent(self, event) if event.type() == QEvent.ActionChanged or \ event.type() == QEvent.ActionAdded: self.__textLayout() def resizeEvent(self, event): QToolButton.resizeEvent(self, event) self.__textLayout() def __textLayout(self): fm = QFontMetrics(self.font()) text = unicode(self.defaultAction().iconText()) words = deque(text.split()) lines = [] curr_line = "" curr_line_word_count = 0 option = QStyleOptionToolButton() option.initFrom(self) margin = self.style().pixelMetric(QStyle.PM_ButtonMargin, option, self) width = self.width() - 2 * margin while words: w = words.popleft() if curr_line_word_count: line_extended = " ".join([curr_line, w]) else: line_extended = w line_w = fm.boundingRect(line_extended).width() if line_w >= width: if curr_line_word_count == 0 or len(lines) == 1: # A single word that is too long must be elided. # Also if the text overflows 2 lines # Warning: hardcoded max lines curr_line = fm.elidedText(line_extended, Qt.ElideRight, width) curr_line = unicode(curr_line) else: # Put the word back words.appendleft(w) lines.append(curr_line) curr_line = "" curr_line_word_count = 0 if len(lines) == 2: break else: curr_line = line_extended curr_line_word_count += 1 if curr_line: lines.append(curr_line) text = "\n".join(lines) self.__text = text def paintEvent(self, event): p = QStylePainter(self) opt = QStyleOptionToolButton() self.initStyleOption(opt) if self.__text: # Replace the text opt.text = self.__text p.drawComplexControl(QStyle.CC_ToolButton, opt) p.end() class ToolGrid(QFrame): """ A widget containing a grid of actions/buttons. Actions can be added using standard :func:`QWidget.addAction(QAction)` and :func:`QWidget.insertAction(int, QAction)` methods. Parameters ---------- parent : :class:`QWidget` Parent widget. columns : int Number of columns in the grid layout. buttonSize : :class:`QSize`, optional Size of tool buttons in the grid. iconSize : :class:`QSize`, optional Size of icons in the buttons. toolButtonStyle : :class:`Qt.ToolButtonStyle` Tool button style. 
""" actionTriggered = Signal(QAction) actionHovered = Signal(QAction) def __init__(self, parent=None, columns=4, buttonSize=None, iconSize=None, toolButtonStyle=Qt.ToolButtonTextUnderIcon): QFrame.__init__(self, parent) if buttonSize is not None: buttonSize = QSize(buttonSize) if iconSize is not None: iconSize = QSize(iconSize) self.__columns = columns self.__buttonSize = buttonSize or QSize(50, 50) self.__iconSize = iconSize or QSize(26, 26) self.__toolButtonStyle = toolButtonStyle self.__gridSlots = [] self.__buttonListener = ToolButtonEventListener(self) self.__buttonListener.buttonRightClicked.connect( self.__onButtonRightClick) self.__buttonListener.buttonEnter.connect( self.__onButtonEnter) self.__mapper = QSignalMapper() self.__mapper.mapped[QObject].connect(self.__onClicked) self.__setupUi() def __setupUi(self): layout = QGridLayout() layout.setContentsMargins(0, 0, 0, 0) layout.setSpacing(0) layout.setSizeConstraint(QGridLayout.SetFixedSize) self.setLayout(layout) self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.MinimumExpanding) def setButtonSize(self, size): """ Set the button size. """ if self.__buttonSize != size: self.__buttonSize = size for slot in self.__gridSlots: slot.button.setFixedSize(size) def buttonSize(self): """ Return the button size. """ return QSize(self.__buttonSize) def setIconSize(self, size): """ Set the button icon size. """ if self.__iconSize != size: self.__iconSize = size for slot in self.__gridSlots: slot.button.setIconSize(size) def iconSize(self): """ Return the icon size """ return QSize(self.__iconSize) def setToolButtonStyle(self, style): """ Set the tool button style. """ if self.__toolButtonStyle != style: self.__toolButtonStyle = style for slot in self.__gridSlots: slot.button.setToolButtonStyle(style) def toolButtonStyle(self): """ Return the tool button style. """ return self.__toolButtonStyle def setColumnCount(self, columns): """ Set the number of button/action columns. """ if self.__columns != columns: self.__columns = columns self.__relayout() def columns(self): """ Return the number of columns in the grid. """ return self.__columns def clear(self): """ Clear all actions/buttons. """ for slot in reversed(list(self.__gridSlots)): self.removeAction(slot.action) self.__gridSlots = [] def insertAction(self, before, action): """ Insert a new action at the position currently occupied by `before` (can also be an index). Parameters ---------- before : :class:`QAction` or int Position where the `action` should be inserted. action : :class:`QAction` Action to insert """ if isinstance(before, int): actions = list(self.actions()) if len(actions) == 0 or before >= len(actions): # Insert as the first action or the last action. return self.addAction(action) before = actions[before] return QFrame.insertAction(self, before, action) def setActions(self, actions): """ Clear the grid and add `actions`. """ self.clear() for action in actions: self.addAction(action) def buttonForAction(self, action): """ Return the :class:`QToolButton` instance button for `action`. """ actions = [slot.action for slot in self.__gridSlots] index = actions.index(action) return self.__gridSlots[index].button def createButtonForAction(self, action): """ Create and return a :class:`QToolButton` for action. 
""" button = _ToolGridButton(self) button.setDefaultAction(action) if self.__buttonSize.isValid(): button.setFixedSize(self.__buttonSize) if self.__iconSize.isValid(): button.setIconSize(self.__iconSize) button.setToolButtonStyle(self.__toolButtonStyle) button.setProperty("tool-grid-button", QVariant(True)) return button def count(self): """ Return the number of buttons/actions in the grid. """ return len(self.__gridSlots) def actionEvent(self, event): QFrame.actionEvent(self, event) if event.type() == QEvent.ActionAdded: # Note: the action is already in the self.actions() list. actions = list(self.actions()) index = actions.index(event.action()) self.__insertActionButton(index, event.action()) elif event.type() == QEvent.ActionRemoved: self.__removeActionButton(event.action()) def __insertActionButton(self, index, action): """Create a button for the action and add it to the layout at index. """ self.__shiftGrid(index, 1) button = self.createButtonForAction(action) row = index / self.__columns column = index % self.__columns self.layout().addWidget( button, row, column, Qt.AlignLeft | Qt.AlignTop ) self.__gridSlots.insert( index, _ToolGridSlot(button, action, row, column) ) self.__mapper.setMapping(button, action) button.clicked.connect(self.__mapper.map) button.installEventFilter(self.__buttonListener) button.installEventFilter(self) def __removeActionButton(self, action): """Remove the button for the action from the layout and delete it. """ actions = [slot.action for slot in self.__gridSlots] index = actions.index(action) slot = self.__gridSlots.pop(index) slot.button.removeEventFilter(self.__buttonListener) slot.button.removeEventFilter(self) self.__mapper.removeMappings(slot.button) self.layout().removeWidget(slot.button) self.__shiftGrid(index + 1, -1) slot.button.deleteLater() def __shiftGrid(self, start, count=1): """Shift all buttons starting at index `start` by `count` cells. """ button_count = self.layout().count() direction = 1 if count >= 0 else -1 if direction == 1: start, end = button_count - 1, start - 1 else: start, end = start, button_count for index in range(start, end, -direction): item = self.layout().itemAtPosition(index / self.__columns, index % self.__columns) if item: button = item.widget() new_index = index + count self.layout().addWidget(button, new_index / self.__columns, new_index % self.__columns, Qt.AlignLeft | Qt.AlignTop) def __relayout(self): """Relayout the buttons. """ for i in reversed(range(self.layout().count())): self.layout().takeAt(i) self.__gridSlots = [_ToolGridSlot(slot.button, slot.action, i / self.__columns, i % self.__columns) for i, slot in enumerate(self.__gridSlots)] for slot in self.__gridSlots: self.layout().addWidget(slot.button, slot.row, slot.column, Qt.AlignLeft | Qt.AlignTop) def __indexOf(self, button): """Return the index of button widget. 
""" buttons = [slot.button for slot in self.__gridSlots] return buttons.index(button) def __onButtonRightClick(self, button): pass def __onButtonEnter(self, button): action = button.defaultAction() self.actionHovered.emit(action) def __onClicked(self, action): self.actionTriggered.emit(action) def paintEvent(self, event): return utils.StyledWidget_paintEvent(self, event) def eventFilter(self, obj, event): etype = event.type() if etype == QEvent.KeyPress and obj.hasFocus(): key = event.key() if key in [Qt.Key_Up, Qt.Key_Down, Qt.Key_Left, Qt.Key_Right]: if self.__focusMove(obj, key): event.accept() return True return QFrame.eventFilter(self, obj, event) def __focusMove(self, focus, key): assert(focus is self.focusWidget()) try: index = self.__indexOf(focus) except IndexError: return False if key == Qt.Key_Down: index += self.__columns elif key == Qt.Key_Up: index -= self.__columns elif key == Qt.Key_Left: index -= 1 elif key == Qt.Key_Right: index += 1 if index >= 0 and index < self.count(): button = self.__gridSlots[index].button button.setFocus(Qt.TabFocusReason) return True else: return False class ToolButtonEventListener(QObject): """ An event listener(filter) for :class:`QToolButtons`. """ buttonLeftClicked = Signal(QToolButton) buttonRightClicked = Signal(QToolButton) buttonEnter = Signal(QToolButton) buttonLeave = Signal(QToolButton) def __init__(self, parent=None): QObject.__init__(self, parent) self.button_down = None self.button = None self.button_down_pos = None def eventFilter(self, obj, event): if not isinstance(obj, QToolButton): return False if event.type() == QEvent.MouseButtonPress: self.button = obj self.button_down = event.button() self.button_down_pos = event.pos() elif event.type() == QEvent.MouseButtonRelease: if self.button.underMouse(): if event.button() == Qt.RightButton: self.buttonRightClicked.emit(self.button) elif event.button() == Qt.LeftButton: self.buttonLeftClicked.emit(self.button) elif event.type() == QEvent.Enter: self.buttonEnter.emit(obj) elif event.type() == QEvent.Leave: self.buttonLeave.emit(obj) return False
gpl-3.0
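A minimal usage sketch for the ToolGrid widget above, assuming PyQt4 is installed and the module is importable under this record's path; the action names are illustrative. As the class docstring says, actions are added with the standard QWidget.addAction API.

import sys
from PyQt4.QtGui import QApplication, QAction
from Orange.OrangeCanvas.gui.toolgrid import ToolGrid  # path per this record

app = QApplication(sys.argv)
grid = ToolGrid(columns=3)
# Each added QAction gets its own _ToolGridButton in the grid layout.
for name in ["Open", "Save", "Discretize"]:
    grid.addAction(QAction(name, grid))
# actionTriggered is emitted with the QAction whose button was clicked.
grid.actionTriggered.connect(lambda action: sys.stdout.write(action.text() + "\n"))
grid.show()
sys.exit(app.exec_())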
flok99/constatus
examples/lcdproc-overlay.py
1
1079
#! /usr/bin/python3

# this code uses the 'lcdproc-python3' library:
# https://github.com/jinglemansweep/lcdproc
# for more details about lcdproc, see:
# http://lcdproc.org/

from lcdproc.server import Server
import time
import urllib.request

def main():
    # 'localhost' must be replaced by the network address on which the
    # constatus server runs
    lcd = Server('localhost', debug=True)
    lcd.start_session()

    # you can have multiple screens. constatus will switch between them
    # every 4 seconds
    screen1 = lcd.add_screen('Screen1')

    # a screen can have multiple widgets
    widget1 = screen1.add_string_widget('strwidget1', text='Bitcoin rate:', x=1, y=1)
    widget2 = screen1.add_string_widget('strwidget2', text='', x=1, y=2)

    while True:
        # retrieve bitcoin rate (don't depend on this: this service is often behind
        # for weeks)
        h = urllib.request.urlopen('https://vps001.vanheusden.com/btc/latest.txt')
        widget2.set_text(h.read().decode('utf-8'))
        time.sleep(30)

if __name__ == '__main__':
    main()
agpl-3.0
ivmech/iviny-scope
lib/xlsxwriter/test/table/test_table07.py
1
2031
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013, John McNamara, [email protected] # import unittest from ...compatibility import StringIO from ..helperfunctions import _xml_to_list from ...table import Table from ...worksheet import Worksheet from ...workbook import WorksheetMeta from ...sharedstrings import SharedStringTable class TestAssembleTable(unittest.TestCase): """ Test assembling a complete Table file. """ def test_assemble_xml_file(self): """Test writing a table""" self.maxDiff = None worksheet = Worksheet() worksheet.worksheet_meta = WorksheetMeta() worksheet.str_table = SharedStringTable() # Set the table properties. worksheet.add_table('C3:F14', {'total_row': 1}) worksheet._prepare_tables(1) fh = StringIO() table = Table() table._set_filehandle(fh) table._set_properties(worksheet.tables[0]) table._assemble_xml_file() exp = _xml_to_list(""" <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <table xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" id="1" name="Table1" displayName="Table1" ref="C3:F14" totalsRowCount="1"> <autoFilter ref="C3:F13"/> <tableColumns count="4"> <tableColumn id="1" name="Column1"/> <tableColumn id="2" name="Column2"/> <tableColumn id="3" name="Column3"/> <tableColumn id="4" name="Column4"/> </tableColumns> <tableStyleInfo name="TableStyleMedium9" showFirstColumn="0" showLastColumn="0" showRowStripes="1" showColumnStripes="0"/> </table> """) got = _xml_to_list(fh.getvalue()) self.assertEqual(got, exp) if __name__ == '__main__': unittest.main()
gpl-3.0
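For context, the user-facing counterpart of what this test exercises, via XlsxWriter's public Workbook API (assumes the xlsxwriter package is installed; the filename is illustrative).

import xlsxwriter

workbook = xlsxwriter.Workbook('table07.xlsx')
worksheet = workbook.add_worksheet()
# Same range and option the test passes to add_table: a totals row is added
# below the data, which is why the expected autoFilter ref stops at C3:F13.
worksheet.add_table('C3:F14', {'total_row': 1})
workbook.close()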
kcompher/topik
topik/intermediaries/raw_data.py
1
9075
""" This file is concerned with providing a simple interface for data stored in Elasticsearch. The class(es) defined here are fed into the preprocessing step. """ import logging import time from abc import ABCMeta, abstractmethod from six import with_metaclass from elasticsearch import Elasticsearch, helpers def _get_hash_identifier(input_data, id_field): return hash(input_data[id_field]) class CorpusInterface(with_metaclass(ABCMeta)): @abstractmethod def __iter__(self): """This is expected to iterate over your data, returning tuples of (doc_id, <selected field>)""" raise NotImplementedError @abstractmethod def __len__(self): raise NotImplementedError @abstractmethod def get_generator_without_id(self, field=None): """Returns a generator that yields field content without doc_id associate""" raise NotImplementedError @abstractmethod def append_to_record(self, record_id, field_name, field_value): """Used to store preprocessed output alongside input data. Field name is destination. Value is processed value.""" raise NotImplementedError class ElasticSearchCorpus(CorpusInterface): def __init__(self, host, index, content_field, port=9200, username=None, password=None, doc_type=None, query=None, iterable=None): super(ElasticSearchCorpus, self).__init__() self.host = host self.port = port self.username = username self.password = password self.instance = Elasticsearch(hosts=[{"host": host, "port": port, "http_auth": "{}:{}".format(username, password)} ]) self.index = index self.content_field = content_field self.doc_type = doc_type self.query = query if iterable: self.import_from_iterable(iterable, content_field) def __iter__(self): results = helpers.scan(self.instance, index=self.index, query=self.query, doc_type=self.doc_type) for result in results: yield result["_id"], result['_source'][self.content_field] def __len__(self): return self.instance.count(index=self.index, doc_type=self.doc_type)["count"] def get_generator_without_id(self, field=None): if not field: field = self.content_field results = helpers.scan(self.instance, index=self.index, query=self.query, doc_type=self.doc_type) for result in results: yield result["_source"][field] def append_to_record(self, record_id, field_name, field_value): self.instance.update(index=self.index, id=record_id, doc_type="continuum", body={"doc": {field_name: field_value}}) def get_field(self, field=None): """Get a different field to iterate over, keeping all other connection details.""" if not field: field = self.content_field return ElasticSearchCorpus(self.host, self.index, field, self.port, self.username, self.password, self.doc_type, self.query) def import_from_iterable(self, iterable, id_field="text", batch_size=500): """Load data into Elasticsearch from iterable. iterable: generally a list of dicts, but possibly a list of strings This is your data. Your dictionary structure defines the schema of the elasticsearch index. id_field: string identifier of field to hash for content ID. For list of dicts, a valid key value in the dictionary is required. For list of strings, a dictionary with one key, "text" is created and used. 
""" batch = [] for item in iterable: if isinstance(item, basestring): item = {id_field: item} id = _get_hash_identifier(item, id_field) batch.append({"_id": id, "_source": item, "_type": "continuum"}) if len(batch) >= batch_size: helpers.bulk(client=self.instance, actions=batch, index=self.index) batch = [] if batch: helpers.bulk(client=self.instance, actions=batch, index=self.index) # TODO: generalize for datetimes # TODO: validate input data to ensure that it has valid year data def get_data_by_year(self, start_year, end_year, year_field="year"): """Queries elasticsearch for all documents within the specified year range and returns a generator of the results""" index = self.index if self.instance.indices.get_field_mapping(field=year_field, index=index, doc_type="continuum") != 'date': index = self.index+"_{}_date".format(year_field) if not self.instance.indices.exists(index) or self.instance.indices.get_field_mapping(field=year_field, index=index, doc_type="continuum") != 'date': mapping = self.instance.indices.get_mapping(index=self.index, doc_type="continuum") mapping[self.index]["mappings"]["continuum"]["properties"][year_field] = {"type": "date"} self.instance.indices.put_alias(index=self.index, name=index, body=mapping) while self.instance.count(index=self.index) != self.instance.count(index=index): logging.info("Waiting for date indexed data to be indexed...") time.sleep(1) results = helpers.scan(self.instance, index=index, scroll='5m', query={"query": {"range": {year_field: {"gte": start_year, "lte": end_year}}}}) for result in results: yield result["_id"], result['_source'][self.content_field] class DictionaryCorpus(CorpusInterface): def __init__(self, content_field, iterable=None, generate_id=True): super(DictionaryCorpus, self).__init__() self.content_field = content_field self._documents = [] self.idx = 0 if iterable: self.import_from_iterable(iterable, content_field, generate_id) def __iter__(self): for doc in self._documents: yield doc["_id"], doc["_source"][self.content_field] def __len__(self): return len(self._documents) def append_to_record(self, record_id, field_name, field_value): for doc in self._documents: if doc["_id"] == record_id: doc["_source"][field_name] = field_value return raise ValueError("No record with id '{}' was found.".format(record_id)) def get_field(self, field=None): """Get a different field to iterate over, keeping all other details.""" if not field: field = self.content_field return DictionaryCorpus(content_field=field, iterable=self._documents, generate_id=False) def get_generator_without_id(self, field=None): if not field: field = self.content_field for doc in self._documents: yield doc["_source"][field] def import_from_iterable(self, iterable, content_field, generate_id=True): """ iterable: generally a list of dicts, but possibly a list of strings This is your data. Your dictionary structure defines the schema of the elasticsearch index. 
""" if generate_id: self._documents = [{"_id": hash(doc[content_field]), "_source": doc} for doc in iterable] else: self._documents = [item for item in iterable] def get_number_of_items_stored(self): return len(self._documents) # TODO: generalize for datetimes # TODO: validate input data to ensure that it has valid year data def get_data_by_year(self, start_year, end_year, year_field="year"): for result in self._documents: if start_year <= int(result["_source"][year_field]) <= end_year: yield result["_id"], result["_source"][self.content_field] # Collection of output formats: people put files, folders, etc in, and they can choose from these to be the output # These consume the iterable collection of dictionaries produced by the various iter_ functions. output_formats = {"elasticsearch": ElasticSearchCorpus, "dictionary": DictionaryCorpus, }
bsd-3-clause
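A minimal sketch of the CorpusInterface contract using the in-memory DictionaryCorpus above; the import path follows this record's file path and the documents are illustrative.

from topik.intermediaries.raw_data import DictionaryCorpus

docs = [{"text": "first document", "year": "2001"},
        {"text": "second document", "year": "2005"}]
corpus = DictionaryCorpus(content_field="text", iterable=docs)

print(len(corpus))  # 2
# Iteration yields (doc_id, content_field) pairs; append_to_record stores
# preprocessed output alongside the input data, as the interface requires.
for doc_id, text in corpus:
    corpus.append_to_record(doc_id, "n_words", len(text.split()))
# Year filtering compares int(year_field) against the inclusive range.
for doc_id, text in corpus.get_data_by_year(2000, 2003, year_field="year"):
    print(doc_id, text)  # only the 2001 document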
romain-dartigues/ansible
lib/ansible/modules/cloud/vmware/vsphere_copy.py
35
6503
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2015, Dag Wieers (@dagwieers) <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: vsphere_copy short_description: Copy a file to a vCenter datastore description: - Upload files to a vCenter datastore version_added: 2.0 author: - Dag Wieers (@dagwieers) options: host: description: - The vCenter server on which the datastore is available. required: true aliases: ['hostname'] login: description: - The login name to authenticate on the vCenter server. required: true aliases: ['username'] password: description: - The password to authenticate on the vCenter server. required: true src: description: - The file to push to vCenter required: true datacenter: description: - The datacenter on the vCenter server that holds the datastore. required: true datastore: description: - The datastore on the vCenter server to push files to. required: true path: description: - The file to push to the datastore on the vCenter server. required: true validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be set to C(no) when no other option exists. default: 'yes' type: bool timeout: description: - The timeout in seconds for the upload to the datastore. default: 10 type: int version_added: "2.8" notes: - "This module ought to be run from a system that can access vCenter directly and has the file to transfer. It can be the normal remote target or you can change it either by using C(transport: local) or using C(delegate_to)." - Tested on vSphere 5.5 ''' EXAMPLES = ''' - vsphere_copy: host: '{{ vhost }}' login: '{{ vuser }}' password: '{{ vpass }}' src: /some/local/file datacenter: DC1 Someplace datastore: datastore1 path: some/remote/file delegate_to: localhost - vsphere_copy: host: '{{ vhost }}' login: '{{ vuser }}' password: '{{ vpass }}' src: /other/local/file datacenter: DC2 Someplace datastore: datastore2 path: other/remote/file delegate_to: other_system ''' import atexit import errno import mmap import socket import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six.moves.urllib.parse import urlencode, quote from ansible.module_utils._text import to_native from ansible.module_utils.urls import open_url def vmware_path(datastore, datacenter, path): ''' Constructs a URL path that VSphere accepts reliably ''' path = "/folder/%s" % quote(path.lstrip("/")) # Due to a software bug in vSphere, it fails to handle ampersand in datacenter names # The solution is to do what vSphere does (when browsing) and double-encode ampersands, maybe others ? 
datacenter = datacenter.replace('&', '%26') if not path.startswith("/"): path = "/" + path params = dict(dsName=datastore) if datacenter: params["dcPath"] = datacenter params = urlencode(params) return "%s?%s" % (path, params) def main(): module = AnsibleModule( argument_spec=dict( host=dict(required=True, aliases=['hostname']), login=dict(required=True, aliases=['username']), password=dict(required=True, no_log=True), src=dict(required=True, aliases=['name']), datacenter=dict(required=True), datastore=dict(required=True), dest=dict(required=True, aliases=['path']), validate_certs=dict(default=True, type='bool'), timeout=dict(default=10, type='int') ), # Implementing check-mode using HEAD is impossible, since size/date is not 100% reliable supports_check_mode=False, ) host = module.params.get('host') login = module.params.get('login') password = module.params.get('password') src = module.params.get('src') datacenter = module.params.get('datacenter') datastore = module.params.get('datastore') dest = module.params.get('dest') validate_certs = module.params.get('validate_certs') timeout = module.params.get('timeout') fd = open(src, "rb") atexit.register(fd.close) data = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ) atexit.register(data.close) remote_path = vmware_path(datastore, datacenter, dest) url = 'https://%s%s' % (host, remote_path) headers = { "Content-Type": "application/octet-stream", "Content-Length": str(len(data)), } try: r = open_url(url, data=data, headers=headers, method='PUT', timeout=timeout, url_username=login, url_password=password, validate_certs=validate_certs, force_basic_auth=True) except socket.error as e: if isinstance(e.args, tuple) and e[0] == errno.ECONNRESET: # VSphere resets connection if the file is in use and cannot be replaced module.fail_json(msg='Failed to upload, image probably in use', status=None, errno=e[0], reason=to_native(e), url=url) else: module.fail_json(msg=str(e), status=None, errno=e[0], reason=str(e), url=url, exception=traceback.format_exc()) except Exception as e: error_code = -1 try: if isinstance(e[0], int): error_code = e[0] except KeyError: pass module.fail_json(msg=to_native(e), status=None, errno=error_code, reason=to_native(e), url=url, exception=traceback.format_exc()) status = r.getcode() if 200 <= status < 300: module.exit_json(changed=True, status=status, reason=r.msg, url=url) else: length = r.headers.get('content-length', None) if r.headers.get('transfer-encoding', '').lower() == 'chunked': chunked = 1 else: chunked = 0 module.fail_json(msg='Failed to upload', errno=None, status=status, reason=r.msg, length=length, headers=dict(r.headers), chunked=chunked, url=url) if __name__ == '__main__': main()
gpl-3.0
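A Python 3 sketch of the URL construction vmware_path() above performs, showing the deliberate double-encoding of ampersands in datacenter names; the input values are illustrative.

from urllib.parse import urlencode, quote

def vmware_path(datastore, datacenter, path):
    # Quote the file path under /folder/, as the module does.
    path = "/folder/%s" % quote(path.lstrip("/"))
    params = dict(dsName=datastore)
    if datacenter:
        # vSphere mishandles raw ampersands, so '&' is pre-encoded to '%26'
        # before urlencode() escapes it a second time.
        params["dcPath"] = datacenter.replace('&', '%26')
    return "%s?%s" % (path, urlencode(params))

print(vmware_path("datastore1", "DC1 & Lab", "some/remote/file"))
# -> /folder/some/remote/file?dsName=datastore1&dcPath=DC1+%2526+Lab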
jtyr/ansible-modules-extras
windows/win_environment.py
90
2870
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Jon Hawkesworth (@jhawkesworth) <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # this is a windows documentation stub. actual code lives in the .ps1 # file of the same name DOCUMENTATION = ''' --- module: win_environment version_added: "2.0" short_description: Modifies environment variables on windows hosts. description: - Uses .net Environment to set or remove environment variables and can set at User, Machine or Process level. - User level environment variables will be set, but not available until the user has logged off and on again. options: state: description: - present to ensure environment variable is set, or absent to ensure it is removed required: false default: present choices: - present - absent name: description: - The name of the environment variable required: true default: no default value: description: - The value to store in the environment variable. Can be omitted for state=absent required: false default: no default level: description: - The level at which to set the environment variable. - Use 'machine' to set for all users. - Use 'user' to set for the current user that ansible is connected as. - Use 'process' to set for the current process. Probably not that useful. required: true default: no default choices: - machine - process - user author: "Jon Hawkesworth (@jhawkesworth)" notes: - This module does not broadcast change events. This means that the minority of windows applications which can have their environment changed without restarting will not be notified and therefore will need restarting to pick up new environment settings. User level environment variables will require the user to log out and in again before they become available. ''' EXAMPLES = ''' # Set an environment variable for all users win_environment: state: present name: TestVariable value: "Test value" level: machine # Remove an environment variable for the current users win_environment: state: absent name: TestVariable level: user '''
gpl-3.0
40123210/-2015cd_40123210
static/Brython3.1.1-20150328-091302/Lib/errno.py
624
4096
""" This module makes available standard errno system symbols. The value of each symbol is the corresponding integer value, e.g., on most systems, errno.ENOENT equals the integer 2. The dictionary errno.errorcode maps numeric codes to symbol names, e.g., errno.errorcode[2] could be the string 'ENOENT'. Symbols that are not relevant to the underlying system are not defined. To map error codes to error messages, use the function os.strerror(), e.g. os.strerror(2) could return 'No such file or directory'. """ errorcode= {1: 'EPERM', 2: 'ENOENT', 3: 'ESRCH', 4: 'EINTR', 5: 'EIO', 6: 'ENXIO', 7: 'E2BIG', 8: 'ENOEXEC', 9: 'EBADF', 10: 'ECHILD', 11: 'EAGAIN', 12: 'ENOMEM', 13: 'EACCES', 14: 'EFAULT', 15: 'ENOTBLK', 16: 'EBUSY', 17: 'EEXIST', 18: 'EXDEV', 19: 'ENODEV', 20: 'ENOTDIR', 21: 'EISDIR', 22: 'EINVAL', 23: 'ENFILE', 24: 'EMFILE', 25: 'ENOTTY', 26: 'ETXTBSY', 27: 'EFBIG', 28: 'ENOSPC', 29: 'ESPIPE', 30: 'EROFS', 31: 'EMLINK', 32: 'EPIPE', 33: 'EDOM', 34: 'ERANGE', 35: 'EDEADLOCK', 36: 'ENAMETOOLONG', 37: 'ENOLCK', 38: 'ENOSYS', 39: 'ENOTEMPTY', 40: 'ELOOP', 42: 'ENOMSG', 43: 'EIDRM', 44: 'ECHRNG', 45: 'EL2NSYNC', 46: 'EL3HLT', 47: 'EL3RST', 48: 'ELNRNG', 49: 'EUNATCH', 50: 'ENOCSI', 51: 'EL2HLT', 52: 'EBADE', 53: 'EBADR', 54: 'EXFULL', 55: 'ENOANO', 56: 'EBADRQC', 57: 'EBADSLT', 59: 'EBFONT', 60: 'ENOSTR', 61: 'ENODATA', 62: 'ETIME', 63: 'ENOSR', 64: 'ENONET', 65: 'ENOPKG', 66: 'EREMOTE', 67: 'ENOLINK', 68: 'EADV', 69: 'ESRMNT', 70: 'ECOMM', 71: 'EPROTO', 72: 'EMULTIHOP', 73: 'EDOTDOT', 74: 'EBADMSG', 75: 'EOVERFLOW', 76: 'ENOTUNIQ', 77: 'EBADFD', 78: 'EREMCHG', 79: 'ELIBACC', 80: 'ELIBBAD', 81: 'ELIBSCN', 82: 'ELIBMAX', 83: 'ELIBEXEC', 84: 'EILSEQ', 85: 'ERESTART', 86: 'ESTRPIPE', 87: 'EUSERS', 88: 'ENOTSOCK', 89: 'EDESTADDRREQ', 90: 'EMSGSIZE', 91: 'EPROTOTYPE', 92: 'ENOPROTOOPT', 93: 'EPROTONOSUPPORT', 94: 'ESOCKTNOSUPPORT', 95: 'ENOTSUP', 96: 'EPFNOSUPPORT', 97: 'EAFNOSUPPORT', 98: 'EADDRINUSE', 99: 'EADDRNOTAVAIL', 100: 'ENETDOWN', 101: 'ENETUNREACH', 102: 'ENETRESET', 103: 'ECONNABORTED', 104: 'ECONNRESET', 105: 'ENOBUFS', 106: 'EISCONN', 107: 'ENOTCONN', 108: 'ESHUTDOWN', 109: 'ETOOMANYREFS', 110: 'ETIMEDOUT', 111: 'ECONNREFUSED', 112: 'EHOSTDOWN', 113: 'EHOSTUNREACH', 114: 'EALREADY', 115: 'EINPROGRESS', 116: 'ESTALE', 117: 'EUCLEAN', 118: 'ENOTNAM', 119: 'ENAVAIL', 120: 'EISNAM', 121: 'EREMOTEIO', 122: 'EDQUOT', 123: 'ENOMEDIUM', 124: 'EMEDIUMTYPE', 125: 'ECANCELED', 126: 'ENOKEY', 127: 'EKEYEXPIRED', 128: 'EKEYREVOKED', 129: 'EKEYREJECTED', 130: 'EOWNERDEAD', 131: 'ENOTRECOVERABLE', 132: 'ERFKILL'} EPERM=1 ENOENT=2 ESRCH=3 EINTR=4 EIO=5 ENXIO=6 E2BIG=7 ENOEXEC=8 EBADF=9 ECHILD=10 EAGAIN=11 ENOMEM=12 EACCES=13 EFAULT=14 ENOTBLK=15 EBUSY=16 EEXIST=17 EXDEV=18 ENODEV=19 ENOTDIR=20 EISDIR=21 EINVAL=22 ENFILE=23 EMFILE=24 ENOTTY=25 ETXTBSY=26 EFBIG=27 ENOSPC=28 ESPIPE=29 EROFS=30 EMLINK=31 EPIPE=32 EDOM=33 ERANGE=34 EDEADLOCK=35 ENAMETOOLONG=36 ENOLCK=37 ENOSYS=38 ENOTEMPTY=39 ELOOP=40 ENOMSG=42 EIDRM=43 ECHRNG=44 EL2NSYNC=45 EL3HLT=46 EL3RST=47 ELNRNG=48 EUNATCH=49 ENOCSI=50 EL2HLT=51 EBADE=52 EBADR=53 EXFULL=54 ENOANO=55 EBADRQC=56 EBADSLT=57 EBFONT=59 ENOSTR=60 ENODATA=61 ETIME=62 ENOSR=63 ENONET=64 ENOPKG=65 EREMOTE=66 ENOLINK=67 EADV=68 ESRMNT=69 ECOMM=70 EPROTO=71 EMULTIHOP=72 EDOTDOT=73 EBADMSG=74 EOVERFLOW=75 ENOTUNIQ=76 EBADFD=77 EREMCHG=78 ELIBACC=79 ELIBBAD=80 ELIBSCN=81 ELIBMAX=82 ELIBEXEC=83 EILSEQ=84 ERESTART=85 ESTRPIPE=86 EUSERS=87 ENOTSOCK=88 EDESTADDRREQ=89 EMSGSIZE=90 EPROTOTYPE=91 ENOPROTOOPT=92 EPROTONOSUPPORT=93 ESOCKTNOSUPPORT=94 ENOTSUP=95 EPFNOSUPPORT=96 
EAFNOSUPPORT=97 EADDRINUSE=98 EADDRNOTAVAIL=99 ENETDOWN=100 ENETUNREACH=101 ENETRESET=102 ECONNABORTED=103 ECONNRESET=104 ENOBUFS=105 EISCONN=106 ENOTCONN=107 ESHUTDOWN=108 ETOOMANYREFS=109 ETIMEDOUT=110 ECONNREFUSED=111 EHOSTDOWN=112 EHOSTUNREACH=113 EALREADY=114 EINPROGRESS=115 ESTALE=116 EUCLEAN=117 ENOTNAM=118 ENAVAIL=119 EISNAM=120 EREMOTEIO=121 EDQUOT=122 ENOMEDIUM=123 EMEDIUMTYPE=124 ECANCELED=125 ENOKEY=126 EKEYEXPIRED=127 EKEYREVOKED=128 EKEYREJECTED=129 EOWNERDEAD=130 ENOTRECOVERABLE=131 ERFKILL=132
gpl-3.0
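The lookups described in the module docstring above, runnable against the standard library's own errno module; the message text is platform-dependent.

import errno
import os

print(errno.ENOENT)                    # 2 on most systems
print(errno.errorcode[errno.ENOENT])   # 'ENOENT'
print(os.strerror(errno.ENOENT))       # e.g. 'No such file or directory'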
lbdreyer/iris
lib/iris/tests/runner/_runner.py
2
5308
# Copyright Iris contributors # # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ Provides testing capabilities for installed copies of Iris. """ # Because this file is imported by setup.py, there may be additional runtime # imports later in the file. import multiprocessing import os import sys # NOTE: Do not inherit from object as distutils does not like it. class TestRunner: """Run the Iris tests under nose and multiprocessor for performance""" description = ( "Run tests under nose and multiprocessor for performance. " "Default behaviour is to run all non-gallery tests. " "Specifying one or more test flags will run *only* those " "tests." ) user_options = [ ( "no-data", "n", "Override the paths to the data repositories so it " "appears to the tests that it does not exist.", ), ("stop", "x", "Stop running tests after the first error or failure."), ("system-tests", "s", "Run the limited subset of system tests."), ("gallery-tests", "e", "Run the gallery code tests."), ("default-tests", "d", "Run the default tests."), ( "coding-tests", "c", "Run the coding standards tests. (These are a " "subset of the default tests.)", ), ( "num-processors=", "p", "The number of processors used for running " "the tests.", ), ("create-missing", "m", "Create missing test result files."), ] boolean_options = [ "no-data", "system-tests", "stop", "gallery-tests", "default-tests", "coding-tests", "create-missing", ] def initialize_options(self): self.no_data = False self.stop = False self.system_tests = False self.gallery_tests = False self.default_tests = False self.coding_tests = False self.num_processors = None self.create_missing = False def finalize_options(self): # These enviroment variables will be propagated to all the # processes that nose.run creates. if self.no_data: print("Running tests in no-data mode...") import iris.config iris.config.TEST_DATA_DIR = None if self.create_missing: os.environ["IRIS_TEST_CREATE_MISSING"] = "true" tests = [] if self.system_tests: tests.append("system") if self.default_tests: tests.append("default") if self.coding_tests: tests.append("coding") if self.gallery_tests: tests.append("gallery") if not tests: tests.append("default") print("Running test suite(s): {}".format(", ".join(tests))) if self.stop: print("Stopping tests after the first error or failure") if self.num_processors is None: # Choose a magic number that works reasonably well for the default # number of processes. self.num_processors = (multiprocessing.cpu_count() + 1) // 4 + 1 else: self.num_processors = int(self.num_processors) def run(self): import nose if hasattr(self, "distribution") and self.distribution.tests_require: self.distribution.fetch_build_eggs(self.distribution.tests_require) tests = [] if self.system_tests: tests.append("iris.tests.system_test") if self.default_tests: tests.append("iris.tests") if self.coding_tests: tests.append("iris.tests.test_coding_standards") if self.gallery_tests: import iris.config default_doc_path = os.path.join(sys.path[0], "docs", "iris") doc_path = iris.config.get_option( "Resources", "doc_dir", default=default_doc_path ) gallery_path = os.path.join(doc_path, "gallery_tests") if os.path.exists(gallery_path): tests.append(gallery_path) else: print( "WARNING: Gallery path %s does not exist." 
% (gallery_path) ) if not tests: tests.append("iris.tests") regexp_pat = r"--match=^([Tt]est(?![Mm]ixin)|[Ss]ystem)" n_processors = max(self.num_processors, 1) args = [ "", None, "--processes=%s" % n_processors, "--verbosity=2", regexp_pat, "--process-timeout=180", ] if self.stop: args.append("--stop") result = True for test in tests: args[1] = test print() print( "Running test discovery on %s with %s processors." % (test, n_processors) ) # run the tests at module level i.e. my_module.tests # - test must start with test/Test and must not contain the # word Mixin. result &= nose.run(argv=args) if result is False: exit(1)
lgpl-3.0
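For reference, a sketch of the nose invocation that TestRunner.run() above assembles, assuming the nose package and its multiprocess plugin are installed; the module name and process count are illustrative.

import nose

argv = [
    "",                      # dummy program name slot
    "iris.tests",            # the test module to discover, as args[1] above
    "--processes=2",
    "--verbosity=2",
    r"--match=^([Tt]est(?![Mm]ixin)|[Ss]ystem)",
    "--process-timeout=180",
]
nose.run(argv=argv)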
yvess/cmsplugin-filer
cmsplugin_filer_file/migrations/0001_initial.py
13
1221
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings import filer.fields.file class Migration(migrations.Migration): dependencies = [ ('cms', '0003_auto_20140926_2347'), ('filer', '0001_initial'), ] operations = [ migrations.CreateModel( name='FilerFile', fields=[ ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')), ('title', models.CharField(max_length=255, null=True, verbose_name='title', blank=True)), ('target_blank', models.BooleanField(default=False, verbose_name='Open link in new window')), ('style', models.CharField(default=settings.CMSPLUGIN_FILER_FILE_DEFAULT_STYLE, choices=settings.CMSPLUGIN_FILER_FILE_STYLE_CHOICES, verbose_name='Style', blank=True, max_length=255)), ('file', filer.fields.file.FilerFileField(verbose_name='file', to='filer.File')), ], options={ 'abstract': False, }, bases=('cms.cmsplugin',), ), ]
bsd-3-clause
AlanD88/website
web2py/gluon/contrib/login_methods/oneall_account.py
8
4595
#!/usr/bin/env python # coding: utf8 """ Oneall Authentication for web2py Developed by Nathan Freeze (Copyright © 2013) Email <[email protected]> This file contains code to allow using onall.com authentication services with web2py """ import os import base64 from gluon import * from gluon.storage import Storage from gluon.contrib.simplejson import JSONDecodeError from gluon.tools import fetch import gluon.contrib.simplejson as json class OneallAccount(object): """ from gluon.contrib.login_methods.oneall_account import OneallAccount auth.settings.actions_disabled=['register','change_password', 'request_reset_password'] auth.settings.login_form = OneallAccount(request, public_key="...", private_key="...", domain="...", url = "http://localhost:8000/%s/default/user/login" % request.application) """ def __init__(self, request, public_key="", private_key="", domain="", url=None, providers=None, on_login_failure=None): self.request = request self.public_key = public_key self.private_key = private_key self.url = url self.domain = domain self.profile = None self.on_login_failure = on_login_failure self.providers = providers or ["facebook", "google", "yahoo", "openid"] self.mappings = Storage() def defaultmapping(profile): name = profile.get('name',{}) dname = name.get('formatted',profile.get('displayName')) email=profile.get('emails', [{}])[0].get('value') reg_id=profile.get('identity_token','') username=profile.get('preferredUsername',email) first_name=name.get('givenName', dname.split(' ')[0]) last_name=profile.get('familyName', dname.split(' ')[1] if(dname.count(' ') > 0) else None) return dict(registration_id=reg_id,username=username,email=email, first_name=first_name,last_name=last_name) self.mappings.default = defaultmapping def get_user(self): request = self.request user = None if request.vars.connection_token: auth_url = "https://%s.api.oneall.com/connections/%s.json" % \ (self.domain, request.vars.connection_token) auth_pw = "%s:%s" % (self.public_key,self.private_key) auth_pw = base64.b64encode(auth_pw) headers = dict(Authorization="Basic %s" % auth_pw) try: auth_info_json = fetch(auth_url,headers=headers) auth_info = json.loads(auth_info_json) data = auth_info['response']['result']['data'] if data['plugin']['key'] == 'social_login': if data['plugin']['data']['status'] == 'success': userdata = data['user'] self.profile = userdata['identity'] source = self.profile['source']['key'] mapping = self.mappings.get(source,self.mappings['default']) user = mapping(self.profile) except (JSONDecodeError, KeyError): pass if user is None and self.on_login_failure: redirect(self.on_login_failure) return user def login_form(self): scheme = self.request.env.wsgi_url_scheme oneall_url = scheme + "://%s.api.oneall.com/socialize/library.js" % self.domain oneall_lib = SCRIPT(_src=oneall_url,_type='text/javascript') container = DIV(_id="oa_social_login_container") widget = SCRIPT('oneall.api.plugins.social_login.build("oa_social_login_container",', '{providers : %s,' % self.providers, 'callback_uri: "%s"});' % self.url, _type="text/javascript") form = DIV(oneall_lib,container,widget) return form def use_oneall(auth, filename='private/oneall.key', **kwargs): path = os.path.join(current.request.folder, filename) if os.path.exists(path): request = current.request domain, public_key, private_key = open(path, 'r').read().strip().split(':') url = URL('default', 'user', args='login', scheme=True) auth.settings.actions_disabled =\ ['register', 'change_password', 'request_reset_password'] auth.settings.login_form = 
OneallAccount(request, public_key=public_key, private_key=private_key, domain=domain, url=url, **kwargs)
mit
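For reference, use_oneall() above expects private/oneall.key (relative to the application folder) to hold a single 'domain:public_key:private_key' line; a sketch of the parsing with placeholder values.

# Placeholder credentials; the real file lives at <app>/private/oneall.key.
key_line = "example-site:pk-00000000:sk-00000000"
domain, public_key, private_key = key_line.strip().split(':')
print(domain, public_key, private_key)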
gangadharkadam/saloon_erp_install
erpnext/selling/report/customer_credit_balance/customer_credit_balance.py
96
1688
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from erpnext.selling.doctype.customer.customer import get_customer_outstanding, get_credit_limit

def execute(filters=None):
    if not filters: filters = {}

    # Check if customer id is according to naming series or customer name
    customer_naming_type = frappe.db.get_value("Selling Settings", None, "cust_master_name")
    columns = get_columns(customer_naming_type)

    data = []
    customer_list = get_details(filters)

    for d in customer_list:
        row = []

        outstanding_amt = get_customer_outstanding(d.name, filters.get("company"))
        credit_limit = get_credit_limit(d.name, filters.get("company"))
        bal = flt(credit_limit) - flt(outstanding_amt)

        if customer_naming_type == "Naming Series":
            row = [d.name, d.customer_name, credit_limit, outstanding_amt, bal]
        else:
            row = [d.name, credit_limit, outstanding_amt, bal]

        if credit_limit:
            data.append(row)

    return columns, data

def get_columns(customer_naming_type):
    columns = [
        _("Customer") + ":Link/Customer:120",
        _("Credit Limit") + ":Currency:120",
        _("Outstanding Amt") + ":Currency:100",
        _("Credit Balance") + ":Currency:120"
    ]

    if customer_naming_type == "Naming Series":
        columns.insert(1, _("Customer Name") + ":Data:120")

    return columns

def get_details(filters):
    conditions = ""

    if filters.get("customer"):
        conditions += " where name = %(customer)s"

    return frappe.db.sql("""select name, customer_name from `tabCustomer` %s""" % conditions, filters, as_dict=1)
agpl-3.0
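The core computation from execute() above, with illustrative numbers: the report keeps a row only when a credit limit is set, and the balance is the limit minus the customer's outstanding amount.

credit_limit = 50000.0
outstanding_amt = 12500.0
bal = credit_limit - outstanding_amt
row = ["CUST-0001", credit_limit, outstanding_amt, bal]
print(row)  # ['CUST-0001', 50000.0, 12500.0, 37500.0]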
mezz64/home-assistant
homeassistant/components/tellduslive/binary_sensor.py
14
1068
"""Support for binary sensors using Tellstick Net.""" from homeassistant.components import binary_sensor, tellduslive from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.helpers.dispatcher import async_dispatcher_connect from .entry import TelldusLiveEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up tellduslive sensors dynamically.""" async def async_discover_binary_sensor(device_id): """Discover and add a discovered sensor.""" client = hass.data[tellduslive.DOMAIN] async_add_entities([TelldusLiveSensor(client, device_id)]) async_dispatcher_connect( hass, tellduslive.TELLDUS_DISCOVERY_NEW.format( binary_sensor.DOMAIN, tellduslive.DOMAIN ), async_discover_binary_sensor, ) class TelldusLiveSensor(TelldusLiveEntity, BinarySensorEntity): """Representation of a Tellstick sensor.""" @property def is_on(self): """Return true if switch is on.""" return self.device.is_on
apache-2.0
edcast-inc/edx-platform-edcast
lms/djangoapps/lms_xblock/migrations/0001_initial.py
110
4883
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'XBlockAsidesConfig' db.create_table('lms_xblock_xblockasidesconfig', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('change_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('changed_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.PROTECT)), ('enabled', self.gf('django.db.models.fields.BooleanField')(default=False)), ('disabled_blocks', self.gf('django.db.models.fields.TextField')(default='about course_info static_tab')), )) db.send_create_signal('lms_xblock', ['XBlockAsidesConfig']) def backwards(self, orm): # Deleting model 'XBlockAsidesConfig' db.delete_table('lms_xblock_xblockasidesconfig') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': 
'100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'lms_xblock.xblockasidesconfig': { 'Meta': {'object_name': 'XBlockAsidesConfig'}, 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'disabled_blocks': ('django.db.models.fields.TextField', [], {'default': "'about course_info static_tab'"}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) } } complete_apps = ['lms_xblock']
agpl-3.0
publicRoman/spark
examples/src/main/python/ml/logistic_regression_summary_example.py
71
2442
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import print_function # $example on$ from pyspark.ml.classification import LogisticRegression # $example off$ from pyspark.sql import SparkSession """ An example demonstrating Logistic Regression Summary. Run with: bin/spark-submit examples/src/main/python/ml/logistic_regression_summary_example.py """ if __name__ == "__main__": spark = SparkSession \ .builder \ .appName("LogisticRegressionSummary") \ .getOrCreate() # Load training data training = spark.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt") lr = LogisticRegression(maxIter=10, regParam=0.3, elasticNetParam=0.8) # Fit the model lrModel = lr.fit(training) # $example on$ # Extract the summary from the returned LogisticRegressionModel instance trained # in the earlier example trainingSummary = lrModel.summary # Obtain the objective per iteration objectiveHistory = trainingSummary.objectiveHistory print("objectiveHistory:") for objective in objectiveHistory: print(objective) # Obtain the receiver-operating characteristic as a dataframe and areaUnderROC. trainingSummary.roc.show() print("areaUnderROC: " + str(trainingSummary.areaUnderROC)) # Set the model threshold to maximize F-Measure fMeasure = trainingSummary.fMeasureByThreshold maxFMeasure = fMeasure.groupBy().max('F-Measure').select('max(F-Measure)').head() bestThreshold = fMeasure.where(fMeasure['F-Measure'] == maxFMeasure['max(F-Measure)']) \ .select('threshold').head()['threshold'] lr.setThreshold(bestThreshold) # $example off$ spark.stop()
apache-2.0
bdero/edx-platform
common/djangoapps/student/migrations/0002_text_to_varchar_and_indexes.py
188
9581
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Changing field 'UserProfile.name' db.alter_column('auth_userprofile', 'name', self.gf('django.db.models.fields.CharField')(max_length=255)) # Adding index on 'UserProfile', fields ['name'] db.create_index('auth_userprofile', ['name']) # Changing field 'UserProfile.language' db.alter_column('auth_userprofile', 'language', self.gf('django.db.models.fields.CharField')(max_length=255)) # Adding index on 'UserProfile', fields ['language'] db.create_index('auth_userprofile', ['language']) # Changing field 'UserProfile.courseware' db.alter_column('auth_userprofile', 'courseware', self.gf('django.db.models.fields.CharField')(max_length=255)) # Changing field 'UserProfile.meta' db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.CharField')(max_length=255)) # Changing field 'UserProfile.location' db.alter_column('auth_userprofile', 'location', self.gf('django.db.models.fields.CharField')(max_length=255)) # Adding index on 'UserProfile', fields ['location'] db.create_index('auth_userprofile', ['location']) def backwards(self, orm): # Removing index on 'UserProfile', fields ['location'] db.delete_index('auth_userprofile', ['location']) # Removing index on 'UserProfile', fields ['language'] db.delete_index('auth_userprofile', ['language']) # Removing index on 'UserProfile', fields ['name'] db.delete_index('auth_userprofile', ['name']) # Changing field 'UserProfile.name' db.alter_column('auth_userprofile', 'name', self.gf('django.db.models.fields.TextField')()) # Changing field 'UserProfile.language' db.alter_column('auth_userprofile', 'language', self.gf('django.db.models.fields.TextField')()) # Changing field 'UserProfile.courseware' db.alter_column('auth_userprofile', 'courseware', self.gf('django.db.models.fields.TextField')()) # Changing field 'UserProfile.meta' db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.TextField')()) # Changing field 'UserProfile.location' db.alter_column('auth_userprofile', 'location', self.gf('django.db.models.fields.TextField')()) models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}), 'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'country': 
('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}), 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}), 'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}), 'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': 
('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'student.registration': { 'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"}, 'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) }, 'student.userprofile': { 'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"}, 'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}), 'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}), 'meta': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['student']
agpl-3.0
friebsch/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers
Chapter2_MorePyMC/separation_plot.py
86
1494
# separation plot
# Author: Cameron Davidson-Pilon,2013
# see http://mdwardlab.com/sites/default/files/GreenhillWardSacks.pdf

import matplotlib.pyplot as plt
import numpy as np


def separation_plot( p, y, **kwargs ):
    """
    This function creates a separation plot for logistic and probit classification.
    See http://mdwardlab.com/sites/default/files/GreenhillWardSacks.pdf

    p: The proportions/probabilities, can be a nxM matrix which represents M models.
    y: the 0-1 response variables.

    """
    assert p.shape[0] == y.shape[0], "p.shape[0] != y.shape[0]"
    n = p.shape[0]

    try:
        M = p.shape[1]
    except:
        p = p.reshape( n, 1 )
        M = p.shape[1]

    #colors = np.array( ["#fdf2db", "#e44a32"] )
    colors_bmh = np.array( ["#eeeeee", "#348ABD"] )

    fig = plt.figure( )#figsize = (8, 1.3*M) )

    for i in range(M):
        ax = fig.add_subplot(M, 1, i+1)
        ix = np.argsort( p[:,i] )
        #plot the different bars
        bars = ax.bar( np.arange(n), np.ones(n), width=1.,
                       color = colors_bmh[ y[ix].astype(int) ],
                       edgecolor = 'none')
        ax.plot( np.arange(n+1), np.append(p[ix,i], p[ix,i][-1]), "k",
                 linewidth = 1., drawstyle="steps-post" )
        #create expected value bar.
        ax.vlines( [(1-p[ix,i]).sum()], [0], [1] )
        #ax.grid(False)
        #ax.axis('off')
        plt.xlim( 0, n)

    plt.tight_layout()

    return
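
# Editorial addendum, not part of the original file: a hedged usage sketch
# feeding synthetic predicted probabilities and 0/1 outcomes to
# separation_plot() above. Variable names and sample sizes are illustrative.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    probs = rng.uniform(size=200)                    # model's predicted P(y=1)
    outcomes = (rng.uniform(size=200) < probs) * 1   # 0/1 responses correlated with probs
    separation_plot(probs, outcomes)
    plt.show()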
mit
kaltsimon/youtube-dl
youtube_dl/extractor/cbsnews.py
96
3204
# encoding: utf-8
from __future__ import unicode_literals

import re
import json

from .common import InfoExtractor


class CBSNewsIE(InfoExtractor):
    IE_DESC = 'CBS News'
    _VALID_URL = r'http://(?:www\.)?cbsnews\.com/(?:[^/]+/)+(?P<id>[\da-z_-]+)'

    _TESTS = [
        {
            'url': 'http://www.cbsnews.com/news/tesla-and-spacex-elon-musks-industrial-empire/',
            'info_dict': {
                'id': 'tesla-and-spacex-elon-musks-industrial-empire',
                'ext': 'flv',
                'title': 'Tesla and SpaceX: Elon Musk\'s industrial empire',
                'thumbnail': 'http://beta.img.cbsnews.com/i/2014/03/30/60147937-2f53-4565-ad64-1bdd6eb64679/60-0330-pelley-640x360.jpg',
                'duration': 791,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        },
        {
            'url': 'http://www.cbsnews.com/videos/fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack/',
            'info_dict': {
                'id': 'fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack',
                'ext': 'flv',
                'title': 'Fort Hood shooting: Army downplays mental illness as cause of attack',
                'thumbnail': 're:^https?://.*\.jpg$',
                'duration': 205,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        },
    ]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        webpage = self._download_webpage(url, video_id)

        video_info = json.loads(self._html_search_regex(
            r'(?:<ul class="media-list items" id="media-related-items"><li data-video-info|<div id="cbsNewsVideoPlayer" data-video-player-options)=\'({.+?})\'',
            webpage, 'video JSON info'))

        item = video_info['item'] if 'item' in video_info else video_info
        title = item.get('articleTitle') or item.get('hed')
        duration = item.get('duration')
        thumbnail = item.get('mediaImage') or item.get('thumbnail')

        formats = []
        for format_id in ['RtmpMobileLow', 'RtmpMobileHigh', 'Hls', 'RtmpDesktop']:
            uri = item.get('media' + format_id + 'URI')
            if not uri:
                continue
            fmt = {
                'url': uri,
                'format_id': format_id,
            }
            if uri.startswith('rtmp'):
                fmt.update({
                    'app': 'ondemand?auth=cbs',
                    'play_path': 'mp4:' + uri.split('<break>')[-1],
                    'player_url': 'http://www.cbsnews.com/[[IMPORT]]/vidtech.cbsinteractive.com/player/3_3_0/CBSI_PLAYER_HD.swf',
                    'page_url': 'http://www.cbsnews.com',
                    'ext': 'flv',
                })
            elif uri.endswith('.m3u8'):
                fmt['ext'] = 'mp4'
            formats.append(fmt)

        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'duration': duration,
            'formats': formats,
        }
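
# Editorial addendum, not part of the extractor: a hedged sketch of driving
# it through youtube-dl's public YoutubeDL API. The URL is the first test URL
# above and 'skip_download' mirrors the test params (the formats are RTMP
# streams); the page layout may have changed since this extractor was written.
from __future__ import unicode_literals
import youtube_dl

if __name__ == '__main__':
    ydl = youtube_dl.YoutubeDL({'skip_download': True})
    info = ydl.extract_info(
        'http://www.cbsnews.com/news/tesla-and-spacex-elon-musks-industrial-empire/',
        download=False)
    print(info['id'], info['title'], info.get('duration'))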
unlicense
Micronaet/micronaet-migration
__UNPORTED__/log_and_mail/__init__.py
1
1076
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#    d$
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import log_and_mail

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
quietcoolwu/python-playground
pipeg/gravitate/Board.py
4
10467
#!/usr/bin/env python3 # Copyright © 2012-13 Qtrac Ltd. All rights reserved. # This program or module is free software: you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. It is provided for # educational purposes and is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. import collections import heapq import math import random import tkinter as tk import tkinter.messagebox as messagebox from Globals import * # Need to allow for them to be darkened/lightened for 3D shadow. COLORS = [ "#7F0000", # Red "#007F00", # Green "#00007F", # Blue "#007F7F", # Cyan "#7F007F", # Magenta "#7F7F00", # Yellow "#A0A0A4", # Gray "#A52A2A", # Brown ] DEF_COLUMNS = 9 MIN_COLUMNS = 5 MAX_COLUMNS = 30 DEF_ROWS = 9 MIN_ROWS = 5 MAX_ROWS = 30 DEF_MAX_COLORS = 4 MIN_MAX_COLORS = 2 MAX_MAX_COLORS = len(COLORS) class Board(tk.Canvas): def __init__(self, master, set_status_text, scoreText, columns=DEF_COLUMNS, rows=DEF_ROWS, maxColors=DEF_MAX_COLORS, delay=500, size=40, outline="#DFDFDF"): self.columns = columns self.rows = rows self.maxColors = maxColors self.delay = delay self.outline = outline self.size = size self.set_status_text = set_status_text self.scoreText = scoreText self.score = 0 self.highScore = 0 super().__init__(master, width=self.columns * self.size, height=self.rows * self.size) self.pack(fill=tk.BOTH, expand=True) self.bind("<ButtonRelease>", self._click) self.new_game() def new_game(self, event=None): self.score = 0 random.shuffle(COLORS) colors = COLORS[:self.maxColors] self.tiles = [] for x in range(self.columns): self.tiles.append([]) for y in range(self.rows): self.tiles[x].append(random.choice(colors)) self._draw() self.update_score() def _draw(self, *args): self.delete("all") self.config(width=self.columns * self.size, height=self.rows * self.size) for x in range(self.columns): x0 = x * self.size x1 = x0 + self.size for y in range(self.rows): y0 = y * self.size y1 = y0 + self.size self._draw_square(self.size, x0, y0, x1, y1, self.tiles[x][y], self.outline) self.update() # |\__t__/| # |l| m |r| # |/-----\| # ----b---- # def _draw_square(self, size, x0, y0, x1, y1, color, outline): if color is None: light, color, dark = (outline,) * 3 else: light, color, dark = self._three_colors(color) offset = 4 self.create_polygon( # top x0, y0, x0 + offset, y0 + offset, x1 - offset, y0 + offset, x1, y0, fill=light, outline=light) self.create_polygon( # left x0, y0, x0, y1, x0 + offset, y1 - offset, x0 + offset, y0 + offset, fill=light, outline=light) self.create_polygon( # right x1 - offset, y0 + offset, x1, y0, x1, y1, x1 - offset, y1 - offset, fill=dark, outline=dark) self.create_polygon( # bottom x0, y1, x0 + offset, y1 - offset, x1 - offset, y1 - offset, x1, y1, fill=dark, outline=dark) self.create_rectangle( # middle x0 + offset, y0 + offset, x1 - offset, y1 - offset, fill=color, outline=color) def _three_colors(self, color): r, g, b = self.winfo_rgb(color) color = "#{:04X}{:04X}{:04X}".format(r, g, b) dark = "#{:04X}{:04X}{:04X}".format(max(0, int(r * 0.5)), max(0, int(g * 0.5)), max(0, int(b * 0.5))) light = "#{:04X}{:04X}{:04X}".format(min(0xFFFF, int(r * 1.5)), min(0xFFFF, int(g * 1.5)), min(0xFFFF, int(b * 1.5))) return light, color, dark def _click(self, event): x = 
event.x // self.size y = event.y // self.size color = self.tiles[x][y] if color is None or not self._is_legal(x, y, color): return self._dim_adjoining(x, y, color) def _is_legal(self, x, y, color): """A legal click is on a colored tile that is adjacent to another tile of the same color.""" if x > 0 and self.tiles[x - 1][y] == color: return True if x + 1 < self.columns and self.tiles[x + 1][y] == color: return True if y > 0 and self.tiles[x][y - 1] == color: return True if y + 1 < self.rows and self.tiles[x][y + 1] == color: return True return False def _dim_adjoining(self, x, y, color): adjoining = set() self._populate_adjoining(x, y, color, adjoining) self.score += len(adjoining) ** (self.maxColors - 2) for x, y in adjoining: self.tiles[x][y] = "#F0F0F0" self._draw() self.after(self.delay, lambda: self._delete_adjoining(adjoining)) def _populate_adjoining(self, x, y, color, adjoining): if not ((0 <= x < self.columns) and (0 <= y < self.rows)): return # Fallen off an edge if (x, y) in adjoining or self.tiles[x][y] != color: return # Color doesn't match or already done adjoining.add((x, y)) self._populate_adjoining(x - 1, y, color, adjoining) self._populate_adjoining(x + 1, y, color, adjoining) self._populate_adjoining(x, y - 1, color, adjoining) self._populate_adjoining(x, y + 1, color, adjoining) def _delete_adjoining(self, adjoining): for x, y in adjoining: self.tiles[x][y] = None self._draw() self.after(self.delay, self._close_up) def _close_up(self): self._move() self._draw() self._check_game_over() def _move(self): moved = True while moved: moved = False for x in range(self.columns): for y in range(self.rows): if self.tiles[x][y] is not None: if self._move_if_possible(x, y): moved = True break def _move_if_possible(self, x, y): empty_neighbours = self._empty_neighbours(x, y) if empty_neighbours: move, nx, ny = self._nearest_to_middle(x, y, empty_neighbours) if move: self.tiles[nx][ny] = self.tiles[x][y] self.tiles[x][y] = None return True return False def _empty_neighbours(self, x, y): neighbours = set() for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)): if (0 <= nx < self.columns and 0 <= ny < self.rows and self.tiles[nx][ny] is None): neighbours.add((nx, ny)) return neighbours def _nearest_to_middle(self, x, y, empty_neighbours): color = self.tiles[x][y] midX = self.columns // 2 midY = self.rows // 2 Δold = math.hypot(midX - x, midY - y) heap = [] for nx, ny in empty_neighbours: if self._is_square(nx, ny): Δnew = math.hypot(midX - nx, midY - ny) if self._is_legal(nx, ny, color): Δnew -= 0.1 # Make same colors slightly attractive heapq.heappush(heap, (Δnew, nx, ny)) Δnew, nx, ny = heap[0] return (True, nx, ny) if Δold > Δnew else (False, x, y) def _is_square(self, x, y): if x > 0 and self.tiles[x - 1][y] is not None: return True if x + 1 < self.columns and self.tiles[x + 1][y] is not None: return True if y > 0 and self.tiles[x][y - 1] is not None: return True if y + 1 < self.rows and self.tiles[x][y + 1] is not None: return True return False def _check_game_over(self): userWon, canMove = self._check_tiles() title = message = None if userWon: title, message = self._user_won() elif not canMove: title = "Game Over" message = "Game over with a score of {:,}.".format( self.score) if title is not None: messagebox.showinfo("{} — {}".format(title, APPNAME), message, parent=self) self.new_game() else: self.update_score() def _check_tiles(self): countForColor = collections.defaultdict(int) userWon = True canMove = False for x in range(self.columns): for y in range(self.rows): 
color = self.tiles[x][y] if color is not None: countForColor[color] += 1 userWon = False if self._is_legal(x, y, color): # We _can_ move canMove = True if 1 in countForColor.values(): canMove = False return userWon, canMove def _user_won(self): title = "Winner!" message = "You won with a score of {:,}.".format(self.score) if self.score > self.highScore: self.highScore = self.score message += "\nThat's a new high score!" return title, message def update_score(self): self.scoreText.set("{:,} ({:,})".format(self.score, self.highScore)) if __name__ == "__main__": import sys if sys.stdout.isatty(): application = tk.Tk() application.title("Board") scoreText = tk.StringVar() board = Board(application, print, scoreText) application.mainloop() else: print("Loaded OK")
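
# Editorial addendum, not from the original file: a standalone, hedged sketch
# of the flood-fill idea behind Board._populate_adjoining above. Starting from
# one tile, it collects every 4-connected tile of the same color; the grid and
# its size are illustrative only.
def adjoining(tiles, x, y, color, found=None):
    found = set() if found is None else found
    if not (0 <= x < len(tiles) and 0 <= y < len(tiles[0])):
        return found                      # fell off an edge
    if (x, y) in found or tiles[x][y] != color:
        return found                      # wrong color, or already visited
    found.add((x, y))
    for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
        adjoining(tiles, nx, ny, color, found)
    return found

if __name__ == "__main__":
    grid = [["R", "R", "G"],
            ["R", "G", "G"],
            ["B", "G", "G"]]
    print(sorted(adjoining(grid, 0, 0, "R")))   # -> [(0, 0), (0, 1), (1, 0)]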
mit
johannfaouzi/pyts
pyts/metrics/tests/test_boss.py
1
1130
"""Testing for BOSS metric.""" # Author: Johann Faouzi <[email protected]> # License: BSD-3-Clause import numpy as np import pytest import re from math import sqrt from pyts.metrics import boss x = np.arange(1, 6) y = np.arange(1, 6)[::-1] z = [0, 0, 0, 10, 0] @pytest.mark.parametrize( 'x, y, err_msg', [(x.reshape(1, -1), y, "'x' must a one-dimensional array."), (x, y.reshape(1, -1), "'y' must a one-dimensional array."), (x[:2], y, "'x' and 'y' must have the same shape.")] ) def test_parameter_check(x, y, err_msg): """Test parameter validation.""" with pytest.raises(ValueError, match=re.escape(err_msg)): boss(x, y) @pytest.mark.parametrize( 'x, y, arr_desired', [(x, y, sqrt(np.sum((x - y) ** 2))), (y, x, sqrt(np.sum((x - y) ** 2))), (x, z, sqrt(np.sum((x - z) ** 2))), (z, x, 6), (y, z, sqrt(np.sum((y - z) ** 2))), (z, y, 8)] ) def test_actual_results(x, y, arr_desired): """Test that the actual results are the expected ones.""" arr_actual = boss(x, y) np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0.)
bsd-3-clause
Nikoala/CouchPotatoServer
libs/guessit/slogging.py
94
3388
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Smewt - A smart collection manager
# Copyright (c) 2011 Nicolas Wack <[email protected]>
#
# Smewt is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Smewt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

from __future__ import unicode_literals
import logging
import sys
import os, os.path

GREEN_FONT = "\x1B[0;32m"
YELLOW_FONT = "\x1B[0;33m"
BLUE_FONT = "\x1B[0;34m"
RED_FONT = "\x1B[0;31m"
RESET_FONT = "\x1B[0m"


def setupLogging(colored=True, with_time=False, with_thread=False, filename=None, with_lineno=False):
    """Set up a nice colored logger as the main application logger."""

    class SimpleFormatter(logging.Formatter):
        def __init__(self, with_time, with_thread):
            self.fmt = (('%(asctime)s ' if with_time else '') +
                        '%(levelname)-8s ' +
                        '[%(name)s:%(funcName)s' +
                        (':%(lineno)s' if with_lineno else '') + ']' +
                        ('[%(threadName)s]' if with_thread else '') +
                        ' -- %(message)s')
            logging.Formatter.__init__(self, self.fmt)

    class ColoredFormatter(logging.Formatter):
        def __init__(self, with_time, with_thread):
            self.fmt = (('%(asctime)s ' if with_time else '') +
                        '-CC-%(levelname)-8s ' +
                        BLUE_FONT + '[%(name)s:%(funcName)s' +
                        (':%(lineno)s' if with_lineno else '') + ']' +
                        RESET_FONT + ('[%(threadName)s]' if with_thread else '') +
                        ' -- %(message)s')
            logging.Formatter.__init__(self, self.fmt)

        def format(self, record):
            modpath = record.name.split('.')
            record.mname = modpath[0]
            record.mmodule = '.'.join(modpath[1:])

            result = logging.Formatter.format(self, record)

            if record.levelno == logging.DEBUG:
                color = BLUE_FONT
            elif record.levelno == logging.INFO:
                color = GREEN_FONT
            elif record.levelno == logging.WARNING:
                color = YELLOW_FONT
            else:
                color = RED_FONT

            result = result.replace('-CC-', color)
            return result

    if filename is not None:
        # make sure we can write to our log file
        logdir = os.path.dirname(filename)
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        ch = logging.FileHandler(filename, mode='w')
        ch.setFormatter(SimpleFormatter(with_time, with_thread))
    else:
        ch = logging.StreamHandler()
        if colored and sys.platform != 'win32':
            ch.setFormatter(ColoredFormatter(with_time, with_thread))
        else:
            ch.setFormatter(SimpleFormatter(with_time, with_thread))

    logging.getLogger().addHandler(ch)
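
# Editorial addendum, not part of the module: a hedged usage sketch. It
# installs the handler defined above on the root logger and emits one record
# per level; the logger name is illustrative.
import logging

if __name__ == '__main__':
    setupLogging(colored=True, with_time=True)
    logging.getLogger().setLevel(logging.DEBUG)
    log = logging.getLogger('guessit.demo')
    log.debug('blue on terminals that honour ANSI escapes')
    log.info('green')
    log.warning('yellow')
    log.error('red')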
gpl-3.0
mikewiebe-ansible/ansible
lib/ansible/modules/cloud/google/gcp_compute_reservation_info.py
13
10575
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_compute_reservation_info description: - Gather info for GCP Reservation short_description: Gather info for GCP Reservation version_added: '2.10' author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: filters: description: - A list of filter value pairs. Available filters are listed here U(https://cloud.google.com/sdk/gcloud/reference/topic/filters). - Each additional filter in the list will act be added as an AND condition (filter1 and filter2) . type: list zone: description: - The zone where the reservation is made. required: true type: str project: description: - The Google Cloud Platform project to use. type: str auth_kind: description: - The type of credential used. type: str required: true choices: - application - machineaccount - serviceaccount service_account_contents: description: - The contents of a Service Account JSON file, either in a dictionary or as a JSON string that represents it. type: jsonarg service_account_file: description: - The path of a Service Account JSON file if serviceaccount is selected as type. type: path service_account_email: description: - An optional service account email address if machineaccount is selected and the user does not wish to use the default email. type: str scopes: description: - Array of scopes to be used type: list env_type: description: - Specifies which Ansible environment you're running this module within. - This should not be set unless you know what you're doing. - This only alters the User Agent string for any API requests. type: str notes: - for authentication, you can set service_account_file using the C(gcp_service_account_file) env variable. - for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS) env variable. - For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL) env variable. - For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable. - For authentication, you can set scopes using the C(GCP_SCOPES) env variable. - Environment variables values will only be used if the playbook values are not set. - The I(service_account_email) and I(service_account_file) options are mutually exclusive. 
''' EXAMPLES = ''' - name: get info on a reservation gcp_compute_reservation_info: zone: us-central1-a filters: - name = test_object project: test_project auth_kind: serviceaccount service_account_file: "/tmp/auth.pem" ''' RETURN = ''' resources: description: List of resources returned: always type: complex contains: creationTimestamp: description: - Creation timestamp in RFC3339 text format. returned: success type: str description: description: - An optional description of this resource. returned: success type: str id: description: - The unique identifier for the resource. returned: success type: int name: description: - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. returned: success type: str commitment: description: - Full or partial URL to a parent commitment. This field displays for reservations that are tied to a commitment. returned: success type: str specificReservationRequired: description: - When set to true, only VMs that target this reservation by name can consume this reservation. Otherwise, it can be consumed by VMs with affinity for any reservation. Defaults to false. returned: success type: bool status: description: - The status of the reservation. returned: success type: str specificReservation: description: - Reservation for instances with specific machine shapes. returned: success type: complex contains: count: description: - The number of resources that are allocated. returned: success type: int inUseCount: description: - How many instances are in use. returned: success type: int instanceProperties: description: - The instance properties for the reservation. returned: success type: complex contains: machineType: description: - The name of the machine type to reserve. returned: success type: str minCpuPlatform: description: - The minimum CPU platform for the reservation. For example, `"Intel Skylake"`. See U(https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform#availablezones) for information on available CPU platforms. returned: success type: str guestAccelerators: description: - Guest accelerator type and count. returned: success type: complex contains: acceleratorType: description: - 'The full or partial URL of the accelerator type to attach to this instance. For example: `projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100` If you are creating an instance template, specify only the accelerator name.' returned: success type: str acceleratorCount: description: - The number of the guest accelerator cards exposed to this instance. returned: success type: int localSsds: description: - The amount of local ssd to reserve with each instance. This reserves disks of type `local-ssd`. returned: success type: complex contains: interface: description: - The disk interface to use for attaching this disk, one of `SCSI` or `NVME`. The default is `SCSI`. returned: success type: str diskSizeGb: description: - The size of the disk in base-2 GB. returned: success type: int zone: description: - The zone where the reservation is made. 
returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest import json ################################################################################ # Main ################################################################################ def main(): module = GcpModule(argument_spec=dict(filters=dict(type='list', elements='str'), zone=dict(required=True, type='str'))) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/compute'] return_value = {'resources': fetch_list(module, collection(module), query_options(module.params['filters']))} module.exit_json(**return_value) def collection(module): return "https://www.googleapis.com/compute/v1/projects/{project}/zones/{zone}/reservations".format(**module.params) def fetch_list(module, link, query): auth = GcpSession(module, 'compute') return auth.list(link, return_if_object, array_name='items', params={'filter': query}) def query_options(filters): if not filters: return '' if len(filters) == 1: return filters[0] else: queries = [] for f in filters: # For multiple queries, all queries should have () if f[0] != '(' and f[-1] != ')': queries.append("(%s)" % ''.join(f)) else: queries.append(f) return ' '.join(queries) def return_if_object(module, response): # If not found, return nothing. if response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst: module.fail_json(msg="Invalid JSON response with error: %s" % inst) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result if __name__ == "__main__": main()
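
# Editorial addendum, not part of the generated module: a hedged,
# self-contained restatement of query_options() above so its composition rule
# can be run without the Ansible imports. One filter passes through unchanged;
# several are each parenthesised and joined with spaces (GCP filter syntax
# treats juxtaposed clauses as AND).
def query_options_sketch(filters):
    if not filters:
        return ''
    if len(filters) == 1:
        return filters[0]
    queries = []
    for f in filters:
        # For multiple queries, all queries should have ()
        if f[0] != '(' and f[-1] != ')':
            queries.append("(%s)" % f)
        else:
            queries.append(f)
    return ' '.join(queries)

if __name__ == '__main__':
    print(query_options_sketch(['name = test_object']))
    # -> name = test_object
    print(query_options_sketch(['name = test_object', 'status = READY']))
    # -> (name = test_object) (status = READY)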
gpl-3.0
mikewiebe-ansible/ansible
test/units/modules/network/netvisor/test_pn_vrouter_pim_config.py
23
2463
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from units.compat.mock import patch
from ansible.modules.network.netvisor import pn_vrouter_pim_config
from units.modules.utils import set_module_args
from .nvos_module import TestNvosModule


class TestVrouterPimConfigModule(TestNvosModule):

    module = pn_vrouter_pim_config

    def setUp(self):
        self.mock_run_nvos_commands = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.run_cli')
        self.run_nvos_commands = self.mock_run_nvos_commands.start()

        self.mock_run_check_cli = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.check_cli')
        self.run_check_cli = self.mock_run_check_cli.start()

    def tearDown(self):
        self.mock_run_nvos_commands.stop()
        self.mock_run_check_cli.stop()

    def run_cli_patch(self, module, cli, state_map):
        if state_map['update'] == 'vrouter-pim-config-modify':
            results = dict(
                changed=True,
                cli_cmd=cli
            )
            module.exit_json(**results)

    def load_fixtures(self, commands=None, state=None, transport='cli'):
        self.run_nvos_commands.side_effect = self.run_cli_patch
        if state == 'update':
            self.run_check_cli.return_value = True

    def test_vrouter_pim_config_t1(self):
        set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '10',
                         'pn_querier_timeout': '30', 'pn_vrouter_name': 'foo-vrouter',
                         'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
        expected_cmd += 'querier-timeout 30 query-interval 10'
        self.assertEqual(result['cli_cmd'], expected_cmd)

    def test_vrouter_pim_config_t2(self):
        set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '30',
                         'pn_hello_interval': '120', 'pn_vrouter_name': 'foo-vrouter',
                         'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
        expected_cmd += 'hello-interval 120 query-interval 30'
        self.assertEqual(result['cli_cmd'], expected_cmd)
gpl-3.0
shrkey/ardupilot
Tools/LogAnalyzer/tests/TestDualGyroDrift.py
273
5396
from LogAnalyzer import Test,TestResult
import DataflashLog

# import scipy
# import pylab #### TEMP!!! only for dev
# from scipy import signal


class TestDualGyroDrift(Test):
    '''test for gyro drift between dual IMU data'''

    def __init__(self):
        Test.__init__(self)
        self.name = "Gyro Drift"
        self.enable = False

    def run(self, logdata, verbose):
        self.result = TestResult()
        self.result.status = TestResult.StatusType.GOOD

        # if "IMU" not in logdata.channels or "IMU2" not in logdata.channels:
        #     self.result.status = TestResult.StatusType.NA
        #     return

        # imuX  = logdata.channels["IMU"]["GyrX"].listData
        # imu2X = logdata.channels["IMU2"]["GyrX"].listData

        # # NOTE: weird thing about Holger's log is that the counts of IMU+IMU2 are different
        # print "length 1: %.2f, length 2: %.2f" % (len(imuX),len(imu2X))
        # #assert(len(imuX) == len(imu2X))

        # # divide the curve into segments and get the average of each segment
        # # we will get the diff between those averages, rather than a per-sample diff as the IMU+IMU2 arrays are often not the same length
        # diffThresholdWARN = 0.03
        # diffThresholdFAIL = 0.05
        # nSamples = 10
        # imu1XAverages, imu1YAverages, imu1ZAverages, imu2XAverages, imu2YAverages, imu2ZAverages = ([],[],[],[],[],[])
        # imuXDiffAverages, imuYDiffAverages, imuZDiffAverages = ([],[],[])
        # maxDiffX, maxDiffY, maxDiffZ = (0,0,0)
        # sliceLength1 = len(logdata.channels["IMU"]["GyrX"].dictData.values())  / nSamples
        # sliceLength2 = len(logdata.channels["IMU2"]["GyrX"].dictData.values()) / nSamples
        # for i in range(0,nSamples):
        #     imu1XAverages.append(numpy.mean(logdata.channels["IMU"]["GyrX"].dictData.values()[i*sliceLength1:i*sliceLength1+sliceLength1]))
        #     imu1YAverages.append(numpy.mean(logdata.channels["IMU"]["GyrY"].dictData.values()[i*sliceLength1:i*sliceLength1+sliceLength1]))
        #     imu1ZAverages.append(numpy.mean(logdata.channels["IMU"]["GyrZ"].dictData.values()[i*sliceLength1:i*sliceLength1+sliceLength1]))
        #     imu2XAverages.append(numpy.mean(logdata.channels["IMU2"]["GyrX"].dictData.values()[i*sliceLength2:i*sliceLength2+sliceLength2]))
        #     imu2YAverages.append(numpy.mean(logdata.channels["IMU2"]["GyrY"].dictData.values()[i*sliceLength2:i*sliceLength2+sliceLength2]))
        #     imu2ZAverages.append(numpy.mean(logdata.channels["IMU2"]["GyrZ"].dictData.values()[i*sliceLength2:i*sliceLength2+sliceLength2]))
        #     imuXDiffAverages.append(imu2XAverages[-1]-imu1XAverages[-1])
        #     imuYDiffAverages.append(imu2YAverages[-1]-imu1YAverages[-1])
        #     imuZDiffAverages.append(imu2ZAverages[-1]-imu1ZAverages[-1])
        #     if abs(imuXDiffAverages[-1]) > maxDiffX:
        #         maxDiffX = imuXDiffAverages[-1]
        #     if abs(imuYDiffAverages[-1]) > maxDiffY:
        #         maxDiffY = imuYDiffAverages[-1]
        #     if abs(imuZDiffAverages[-1]) > maxDiffZ:
        #         maxDiffZ = imuZDiffAverages[-1]

        # if max(maxDiffX,maxDiffY,maxDiffZ) > diffThresholdFAIL:
        #     self.result.status = TestResult.StatusType.FAIL
        #     self.result.statusMessage = "IMU/IMU2 gyro averages differ by more than %s radians" % diffThresholdFAIL
        # elif max(maxDiffX,maxDiffY,maxDiffZ) > diffThresholdWARN:
        #     self.result.status = TestResult.StatusType.WARN
        #     self.result.statusMessage = "IMU/IMU2 gyro averages differ by more than %s radians" % diffThresholdWARN

        # # pylab.plot(zip(*imuX)[0], zip(*imuX)[1], 'g')
        # # pylab.plot(zip(*imu2X)[0], zip(*imu2X)[1], 'r')
        # #pylab.plot(range(0,(nSamples*sliceLength1),sliceLength1), imu1ZAverages, 'b')

        # print "Gyro averages1X: " + `imu1XAverages`
        # print "Gyro averages1Y: " + `imu1YAverages`
        # print "Gyro averages1Z: " + `imu1ZAverages` + "\n"
        # print "Gyro averages2X: " + `imu2XAverages`
        # print "Gyro averages2Y: " + `imu2YAverages`
        # print "Gyro averages2Z: " + `imu2ZAverages` + "\n"
        # print "Gyro averages diff X: " + `imuXDiffAverages`
        # print "Gyro averages diff Y: " + `imuYDiffAverages`
        # print "Gyro averages diff Z: " + `imuZDiffAverages`

        # # lowpass filter using numpy
        # # cutoff = 100
        # # fs = 10000.0
        # # b,a = scipy.signal.filter_design.butter(5,cutoff/(fs/2))
        # # imuXFiltered  = scipy.signal.filtfilt(b,a,zip(*imuX)[1])
        # # imu2XFiltered = scipy.signal.filtfilt(b,a,zip(*imu2X)[1])
        # #pylab.plot(imuXFiltered, 'r')

        # # TMP: DISPLAY BEFORE+AFTER plots
        # pylab.show()

        # # print "imuX average before lowpass filter: %.8f" % logdata.channels["IMU"]["GyrX"].avg()
        # # print "imuX average after lowpass filter: %.8f" % numpy.mean(imuXFiltered)
        # # print "imu2X average before lowpass filter: %.8f" % logdata.channels["IMU2"]["GyrX"].avg()
        # # print "imu2X average after lowpass filter: %.8f" % numpy.mean(imu2XFiltered)

        # avg1X = logdata.channels["IMU"]["GyrX"].avg()
        # avg1Y = logdata.channels["IMU"]["GyrY"].avg()
        # avg1Z = logdata.channels["IMU"]["GyrZ"].avg()
        # avg2X = logdata.channels["IMU2"]["GyrX"].avg()
        # avg2Y = logdata.channels["IMU2"]["GyrY"].avg()
        # avg2Z = logdata.channels["IMU2"]["GyrZ"].avg()
        # avgRatioX = (max(avg1X,avg2X) - min(avg1X,avg2X)) / #abs(max(avg1X,avg2X) / min(avg1X,avg2X))
        # avgRatioY = abs(max(avg1Y,avg2Y) / min(avg1Y,avg2Y))
        # avgRatioZ = abs(max(avg1Z,avg2Z) / min(avg1Z,avg2Z))
        # self.result.statusMessage = "IMU gyro avg: %.4f,%.4f,%.4f\nIMU2 gyro avg: %.4f,%.4f,%.4f\nAvg ratio: %.4f,%.4f,%.4f" % (avg1X,avg1Y,avg1Z, avg2X,avg2Y,avg2Z, avgRatioX,avgRatioY,avgRatioZ)
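
# Editorial addendum: the test body above is entirely commented-out
# development code, so here is a hedged, self-contained numpy sketch (not
# ArduPilot code) of the segment-averaging idea it describes -- split two gyro
# traces into N slices, average each slice, and flag the worst per-slice
# disagreement against the WARN/FAIL thresholds named above.
import numpy as np

def max_segment_drift(gyro1, gyro2, n_samples=10):
    gyro1, gyro2 = np.asarray(gyro1, float), np.asarray(gyro2, float)
    # Per-slice means tolerate traces of different lengths, as noted above.
    means1 = [s.mean() for s in np.array_split(gyro1, n_samples)]
    means2 = [s.mean() for s in np.array_split(gyro2, n_samples)]
    return max(abs(a - b) for a, b in zip(means1, means2))

if __name__ == "__main__":
    t = np.linspace(0.0, 10.0, 1000)
    imu1 = 0.01 * np.sin(t)
    imu2 = 0.01 * np.sin(t) + 0.04          # constant bias between the IMUs
    drift = max_segment_drift(imu1, imu2)
    verdict = "FAIL" if drift > 0.05 else "WARN" if drift > 0.03 else "GOOD"
    print("max segment drift %.3f rad/s -> %s" % (drift, verdict))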
gpl-3.0
siddhartharay007/buck
third-party/py/twitter-commons/src/python/twitter/common/python/translator.py
18
6084
from __future__ import absolute_import

from abc import abstractmethod
import os
import shutil

from .common import chmod_plus_w, safe_rmtree, safe_mkdir, safe_mkdtemp
from .compatibility import AbstractClass
from .installer import WheelInstaller
from .interpreter import PythonInterpreter
from .package import (
    EggPackage,
    Package,
    SourcePackage,
    WheelPackage,
)
from .platforms import Platform
from .tracer import TRACER
from .util import DistributionHelper


class TranslatorBase(AbstractClass):
  """
    Translate a link into a distribution.
  """

  @abstractmethod
  def translate(self, link):
    pass


class ChainedTranslator(TranslatorBase):
  """
    Glue a sequence of Translators together in priority order.
    The first Translator to resolve a requirement wins.
  """

  def __init__(self, *translators):
    self._translators = list(filter(None, translators))
    for tx in self._translators:
      if not isinstance(tx, TranslatorBase):
        raise ValueError('Expected a sequence of translators, got %s instead.' % type(tx))

  def translate(self, package):
    for tx in self._translators:
      dist = tx.translate(package)
      if dist:
        return dist


class SourceTranslator(TranslatorBase):
  @classmethod
  def run_2to3(cls, path):
    from lib2to3.refactor import get_fixers_from_package, RefactoringTool
    rt = RefactoringTool(get_fixers_from_package('lib2to3.fixes'))
    with TRACER.timed('Translating %s' % path):
      for root, dirs, files in os.walk(path):
        for fn in files:
          full_fn = os.path.join(root, fn)
          if full_fn.endswith('.py'):
            with TRACER.timed('%s' % fn, V=3):
              try:
                chmod_plus_w(full_fn)
                rt.refactor_file(full_fn, write=True)
              except IOError as e:
                TRACER.log('Failed to translate %s: %s' % (fn, e))

  def __init__(self,
               install_cache=None,
               interpreter=PythonInterpreter.get(),
               platform=Platform.current(),
               use_2to3=False,
               conn_timeout=None,
               installer_impl=WheelInstaller):
    self._interpreter = interpreter
    self._installer_impl = installer_impl
    self._use_2to3 = use_2to3
    self._install_cache = install_cache or safe_mkdtemp()
    safe_mkdir(self._install_cache)
    self._conn_timeout = conn_timeout
    self._platform = platform

  def translate(self, package):
    """From a SourcePackage, translate to a binary distribution."""
    if not isinstance(package, SourcePackage):
      return None
    unpack_path, installer = None, None
    version = self._interpreter.version
    try:
      unpack_path = package.fetch(conn_timeout=self._conn_timeout)
    except package.UnreadableLink as e:
      TRACER.log('Failed to fetch %s: %s' % (package, e))
      return None
    try:
      if self._use_2to3 and version >= (3,):
        with TRACER.timed('Translating 2->3 %s' % package.name):
          self.run_2to3(unpack_path)
      installer = self._installer_impl(
          unpack_path,
          interpreter=self._interpreter,
          strict=(package.name not in ('distribute', 'setuptools')))
      with TRACER.timed('Packaging %s' % package.name):
        try:
          dist_path = installer.bdist()
        except self._installer_impl.InstallFailure:
          return None
        target_path = os.path.join(self._install_cache, os.path.basename(dist_path))
        # TODO: Make this atomic.
        shutil.move(dist_path, target_path)
        target_package = Package.from_href(target_path)
        if not target_package:
          return None
        if not target_package.compatible(self._interpreter.identity, platform=self._platform):
          return None
        return DistributionHelper.distribution_from_path(target_path)
    finally:
      if installer:
        installer.cleanup()
      if unpack_path:
        safe_rmtree(unpack_path)


class BinaryTranslator(TranslatorBase):
  def __init__(self,
               package_type,
               install_cache=None,
               interpreter=PythonInterpreter.get(),
               platform=Platform.current(),
               conn_timeout=None):
    self._package_type = package_type
    self._install_cache = install_cache or safe_mkdtemp()
    self._platform = platform
    self._identity = interpreter.identity
    self._conn_timeout = conn_timeout

  def translate(self, package):
    """From a binary package, translate to a local binary distribution."""
    if not isinstance(package, self._package_type):
      return None
    if not package.compatible(identity=self._identity, platform=self._platform):
      return None
    try:
      bdist = package.fetch(location=self._install_cache, conn_timeout=self._conn_timeout)
    except package.UnreadableLink as e:
      TRACER.log('Failed to fetch %s: %s' % (package, e))
      return None
    return DistributionHelper.distribution_from_path(bdist)


class EggTranslator(BinaryTranslator):
  def __init__(self, **kw):
    super(EggTranslator, self).__init__(EggPackage, **kw)


class WheelTranslator(BinaryTranslator):
  def __init__(self, **kw):
    super(WheelTranslator, self).__init__(WheelPackage, **kw)


class Translator(object):
  @staticmethod
  def default(install_cache=None,
              platform=Platform.current(),
              interpreter=None,
              conn_timeout=None):
    # TODO(user) Consider interpreter=None to indicate "universal" packages
    # since the .whl format can support this.
    # Also consider platform=None to require platform-inspecific packages.
    interpreter = interpreter or PythonInterpreter.get()
    shared_options = dict(
        install_cache=install_cache,
        interpreter=interpreter,
        conn_timeout=conn_timeout)
    whl_translator = WheelTranslator(platform=platform, **shared_options)
    egg_translator = EggTranslator(platform=platform, **shared_options)
    source_translator = SourceTranslator(**shared_options)
    return ChainedTranslator(whl_translator, egg_translator, source_translator)
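
# Editorial addendum, not part of the module (and the relative imports above
# mean it only runs from inside the package): a hedged usage sketch. It builds
# the default wheel -> egg -> source chain defined above and resolves a
# package link into a local distribution; the cache path and filename are
# illustrative.
def _demo():
  translator = Translator.default(install_cache='/tmp/translator-cache')
  package = Package.from_href('requests-2.3.0-py2.py3-none-any.whl')
  return translator.translate(package)  # a Distribution, or None on failure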
apache-2.0
foreni-packages/golismero
thirdparty_libs/django/core/serializers/pyyaml.py
110
2353
""" YAML serializer. Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__. """ import decimal import yaml from io import StringIO from django.db import models from django.core.serializers.base import DeserializationError from django.core.serializers.python import Serializer as PythonSerializer from django.core.serializers.python import Deserializer as PythonDeserializer from django.utils import six class DjangoSafeDumper(yaml.SafeDumper): def represent_decimal(self, data): return self.represent_scalar('tag:yaml.org,2002:str', str(data)) DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal) class Serializer(PythonSerializer): """ Convert a queryset to YAML. """ internal_use_only = False def handle_field(self, obj, field): # A nasty special case: base YAML doesn't support serialization of time # types (as opposed to dates or datetimes, which it does support). Since # we want to use the "safe" serializer for better interoperability, we # need to do something with those pesky times. Converting 'em to strings # isn't perfect, but it's better than a "!!python/time" type which would # halt deserialization under any other language. if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None: self._current[field.name] = str(getattr(obj, field.name)) else: super(Serializer, self).handle_field(obj, field) def end_serialization(self): yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options) def getvalue(self): # Grand-parent super return super(PythonSerializer, self).getvalue() def Deserializer(stream_or_string, **options): """ Deserialize a stream or string of YAML data. """ if isinstance(stream_or_string, bytes): stream_or_string = stream_or_string.decode('utf-8') if isinstance(stream_or_string, six.string_types): stream = StringIO(stream_or_string) else: stream = stream_or_string try: for obj in PythonDeserializer(yaml.safe_load(stream), **options): yield obj except GeneratorExit: raise except Exception as e: # Map to deserializer error raise DeserializationError(e)
gpl-2.0
eteq/ginga
ginga/gtkw/ImageViewCanvasGtk.py
4
1290
#
# ImageViewCanvasGtk.py -- A FITS image widget with canvas drawing in Gtk
#
# Eric Jeschke ([email protected])
#
# Copyright (c) Eric R. Jeschke.  All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from ginga.gtkw import ImageViewGtk
from ginga.canvas.mixins import DrawingMixin, CanvasMixin, CompoundMixin
from ginga.util.toolbox import ModeIndicator


class ImageViewCanvasError(ImageViewGtk.ImageViewGtkError):
    pass


class ImageViewCanvas(ImageViewGtk.ImageViewZoom,
                      DrawingMixin, CanvasMixin, CompoundMixin):

    def __init__(self, logger=None, rgbmap=None, settings=None,
                 bindmap=None, bindings=None):
        ImageViewGtk.ImageViewZoom.__init__(self, logger=logger,
                                            rgbmap=rgbmap,
                                            settings=settings,
                                            bindmap=bindmap,
                                            bindings=bindings)
        CompoundMixin.__init__(self)
        CanvasMixin.__init__(self)
        DrawingMixin.__init__(self)

        # we are both a viewer and a canvas
        self.set_canvas(self, private_canvas=self)

        self._mi = ModeIndicator(self)

#END
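
# Editorial addendum, not from the file: a hedged sketch of embedding the
# canvas-enabled viewer in a bare PyGTK window. Widget and method names follow
# ginga's Gtk examples, but treat this as an illustration rather than a
# tested program.
def _demo():
    import logging
    import gtk
    logger = logging.getLogger('example')
    viewer = ImageViewCanvas(logger=logger)
    viewer.enable_autozoom('on')
    window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    window.add(viewer.get_widget())
    window.show_all()
    gtk.main()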
bsd-3-clause
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/pygments/lexers/_tsql_builtins.py
31
15484
# -*- coding: utf-8 -*- """ pygments.lexers._tsql_builtins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ These are manually translated lists from https://msdn.microsoft.com. :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ # See https://msdn.microsoft.com/en-us/library/ms174986.aspx. OPERATORS = ( '!<', '!=', '!>', '<', '<=', '<>', '=', '>', '>=', '+', '+=', '-', '-=', '*', '*=', '/', '/=', '%', '%=', '&', '&=', '|', '|=', '^', '^=', '~', '::', ) OPERATOR_WORDS = ( 'all', 'and', 'any', 'between', 'except', 'exists', 'in', 'intersect', 'like', 'not', 'or', 'some', 'union', ) _KEYWORDS_SERVER = ( 'add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization', 'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade', 'case', 'catch', 'check', 'checkpoint', 'close', 'clustered', 'coalesce', 'collate', 'column', 'commit', 'compute', 'constraint', 'contains', 'containstable', 'continue', 'convert', 'create', 'cross', 'current', 'current_date', 'current_time', 'current_timestamp', 'current_user', 'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default', 'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double', 'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec', 'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor', 'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full', 'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity', 'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert', 'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like', 'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not', 'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource', 'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer', 'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print', 'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext', 'reconfigure', 'references', 'replication', 'restore', 'restrict', 'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount', 'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select', 'semantickeyphrasetable', 'semanticsimilaritydetailstable', 'semanticsimilaritytable', 'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics', 'system_user', 'table', 'tablesample', 'textsize', 'then', 'throw', 'to', 'top', 'tran', 'transaction', 'trigger', 'truncate', 'try', 'try_convert', 'tsequal', 'union', 'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values', 'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with', 'within', 'writetext', ) _KEYWORDS_FUTURE = ( 'absolute', 'action', 'admin', 'after', 'aggregate', 'alias', 'allocate', 'are', 'array', 'asensitive', 'assertion', 'asymmetric', 'at', 'atomic', 'before', 'binary', 'bit', 'blob', 'boolean', 'both', 'breadth', 'call', 'called', 'cardinality', 'cascaded', 'cast', 'catalog', 'char', 'character', 'class', 'clob', 'collation', 'collect', 'completion', 'condition', 'connect', 'connection', 'constraints', 'constructor', 'corr', 'corresponding', 'covar_pop', 'covar_samp', 'cube', 'cume_dist', 'current_catalog', 'current_default_transform_group', 'current_path', 'current_role', 'current_schema', 'current_transform_group_for_type', 'cycle', 'data', 'date', 'day', 'dec', 'decimal', 'deferrable', 'deferred', 'depth', 'deref', 'describe', 'descriptor', 'destroy', 'destructor', 'deterministic', 'diagnostics', 'dictionary', 'disconnect', 'domain', 'dynamic', 'each', 'element', 
'end-exec', 'equals', 'every', 'exception', 'false', 'filter', 'first', 'float', 'found', 'free', 'fulltexttable', 'fusion', 'general', 'get', 'global', 'go', 'grouping', 'hold', 'host', 'hour', 'ignore', 'immediate', 'indicator', 'initialize', 'initially', 'inout', 'input', 'int', 'integer', 'intersection', 'interval', 'isolation', 'iterate', 'language', 'large', 'last', 'lateral', 'leading', 'less', 'level', 'like_regex', 'limit', 'ln', 'local', 'localtime', 'localtimestamp', 'locator', 'map', 'match', 'member', 'method', 'minute', 'mod', 'modifies', 'modify', 'module', 'month', 'multiset', 'names', 'natural', 'nchar', 'nclob', 'new', 'next', 'no', 'none', 'normalize', 'numeric', 'object', 'occurrences_regex', 'old', 'only', 'operation', 'ordinality', 'out', 'output', 'overlay', 'pad', 'parameter', 'parameters', 'partial', 'partition', 'path', 'percent_rank', 'percentile_cont', 'percentile_disc', 'position_regex', 'postfix', 'prefix', 'preorder', 'prepare', 'preserve', 'prior', 'privileges', 'range', 'reads', 'real', 'recursive', 'ref', 'referencing', 'regr_avgx', 'regr_avgy', 'regr_count', 'regr_intercept', 'regr_r2', 'regr_slope', 'regr_sxx', 'regr_sxy', 'regr_syy', 'relative', 'release', 'result', 'returns', 'role', 'rollup', 'routine', 'row', 'rows', 'savepoint', 'scope', 'scroll', 'search', 'second', 'section', 'sensitive', 'sequence', 'session', 'sets', 'similar', 'size', 'smallint', 'space', 'specific', 'specifictype', 'sql', 'sqlexception', 'sqlstate', 'sqlwarning', 'start', 'state', 'statement', 'static', 'stddev_pop', 'stddev_samp', 'structure', 'submultiset', 'substring_regex', 'symmetric', 'system', 'temporary', 'terminate', 'than', 'time', 'timestamp', 'timezone_hour', 'timezone_minute', 'trailing', 'translate_regex', 'translation', 'treat', 'true', 'uescape', 'under', 'unknown', 'unnest', 'usage', 'using', 'value', 'var_pop', 'var_samp', 'varchar', 'variable', 'whenever', 'width_bucket', 'window', 'within', 'without', 'work', 'write', 'xmlagg', 'xmlattributes', 'xmlbinary', 'xmlcast', 'xmlcomment', 'xmlconcat', 'xmldocument', 'xmlelement', 'xmlexists', 'xmlforest', 'xmliterate', 'xmlnamespaces', 'xmlparse', 'xmlpi', 'xmlquery', 'xmlserialize', 'xmltable', 'xmltext', 'xmlvalidate', 'year', 'zone', ) _KEYWORDS_ODBC = ( 'absolute', 'action', 'ada', 'add', 'all', 'allocate', 'alter', 'and', 'any', 'are', 'as', 'asc', 'assertion', 'at', 'authorization', 'avg', 'begin', 'between', 'bit', 'bit_length', 'both', 'by', 'cascade', 'cascaded', 'case', 'cast', 'catalog', 'char', 'char_length', 'character', 'character_length', 'check', 'close', 'coalesce', 'collate', 'collation', 'column', 'commit', 'connect', 'connection', 'constraint', 'constraints', 'continue', 'convert', 'corresponding', 'count', 'create', 'cross', 'current', 'current_date', 'current_time', 'current_timestamp', 'current_user', 'cursor', 'date', 'day', 'deallocate', 'dec', 'decimal', 'declare', 'default', 'deferrable', 'deferred', 'delete', 'desc', 'describe', 'descriptor', 'diagnostics', 'disconnect', 'distinct', 'domain', 'double', 'drop', 'else', 'end', 'end-exec', 'escape', 'except', 'exception', 'exec', 'execute', 'exists', 'external', 'extract', 'false', 'fetch', 'first', 'float', 'for', 'foreign', 'fortran', 'found', 'from', 'full', 'get', 'global', 'go', 'goto', 'grant', 'group', 'having', 'hour', 'identity', 'immediate', 'in', 'include', 'index', 'indicator', 'initially', 'inner', 'input', 'insensitive', 'insert', 'int', 'integer', 'intersect', 'interval', 'into', 'is', 'isolation', 'join', 'key', 'language', 
'last', 'leading', 'left', 'level', 'like', 'local', 'lower', 'match', 'max', 'min', 'minute', 'module', 'month', 'names', 'national', 'natural', 'nchar', 'next', 'no', 'none', 'not', 'null', 'nullif', 'numeric', 'octet_length', 'of', 'on', 'only', 'open', 'option', 'or', 'order', 'outer', 'output', 'overlaps', 'pad', 'partial', 'pascal', 'position', 'precision', 'prepare', 'preserve', 'primary', 'prior', 'privileges', 'procedure', 'public', 'read', 'real', 'references', 'relative', 'restrict', 'revoke', 'right', 'rollback', 'rows', 'schema', 'scroll', 'second', 'section', 'select', 'session', 'session_user', 'set', 'size', 'smallint', 'some', 'space', 'sql', 'sqlca', 'sqlcode', 'sqlerror', 'sqlstate', 'sqlwarning', 'substring', 'sum', 'system_user', 'table', 'temporary', 'then', 'time', 'timestamp', 'timezone_hour', 'timezone_minute', 'to', 'trailing', 'transaction', 'translate', 'translation', 'trim', 'true', 'union', 'unique', 'unknown', 'update', 'upper', 'usage', 'user', 'using', 'value', 'values', 'varchar', 'varying', 'view', 'when', 'whenever', 'where', 'with', 'work', 'write', 'year', 'zone', ) # See https://msdn.microsoft.com/en-us/library/ms189822.aspx. KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER)) # See https://msdn.microsoft.com/en-us/library/ms187752.aspx. TYPES = ( 'bigint', 'binary', 'bit', 'char', 'cursor', 'date', 'datetime', 'datetime2', 'datetimeoffset', 'decimal', 'float', 'hierarchyid', 'image', 'int', 'money', 'nchar', 'ntext', 'numeric', 'nvarchar', 'real', 'smalldatetime', 'smallint', 'smallmoney', 'sql_variant', 'table', 'text', 'time', 'timestamp', 'tinyint', 'uniqueidentifier', 'varbinary', 'varchar', 'xml', ) # See https://msdn.microsoft.com/en-us/library/ms174318.aspx. FUNCTIONS = ( '$partition', 'abs', 'acos', 'app_name', 'applock_mode', 'applock_test', 'ascii', 'asin', 'assemblyproperty', 'atan', 'atn2', 'avg', 'binary_checksum', 'cast', 'ceiling', 'certencoded', 'certprivatekey', 'char', 'charindex', 'checksum', 'checksum_agg', 'choose', 'col_length', 'col_name', 'columnproperty', 'compress', 'concat', 'connectionproperty', 'context_info', 'convert', 'cos', 'cot', 'count', 'count_big', 'current_request_id', 'current_timestamp', 'current_transaction_id', 'current_user', 'cursor_status', 'database_principal_id', 'databasepropertyex', 'dateadd', 'datediff', 'datediff_big', 'datefromparts', 'datename', 'datepart', 'datetime2fromparts', 'datetimefromparts', 'datetimeoffsetfromparts', 'day', 'db_id', 'db_name', 'decompress', 'degrees', 'dense_rank', 'difference', 'eomonth', 'error_line', 'error_message', 'error_number', 'error_procedure', 'error_severity', 'error_state', 'exp', 'file_id', 'file_idex', 'file_name', 'filegroup_id', 'filegroup_name', 'filegroupproperty', 'fileproperty', 'floor', 'format', 'formatmessage', 'fulltextcatalogproperty', 'fulltextserviceproperty', 'get_filestream_transaction_context', 'getansinull', 'getdate', 'getutcdate', 'grouping', 'grouping_id', 'has_perms_by_name', 'host_id', 'host_name', 'iif', 'index_col', 'indexkey_property', 'indexproperty', 'is_member', 'is_rolemember', 'is_srvrolemember', 'isdate', 'isjson', 'isnull', 'isnumeric', 'json_modify', 'json_query', 'json_value', 'left', 'len', 'log', 'log10', 'lower', 'ltrim', 'max', 'min', 'min_active_rowversion', 'month', 'nchar', 'newid', 'newsequentialid', 'ntile', 'object_definition', 'object_id', 'object_name', 'object_schema_name', 'objectproperty', 'objectpropertyex', 'opendatasource', 'openjson', 'openquery', 'openrowset', 'openxml', 
'original_db_name', 'original_login', 'parse', 'parsename', 'patindex', 'permissions', 'pi', 'power', 'pwdcompare', 'pwdencrypt', 'quotename', 'radians', 'rand', 'rank', 'replace', 'replicate', 'reverse', 'right', 'round', 'row_number', 'rowcount_big', 'rtrim', 'schema_id', 'schema_name', 'scope_identity', 'serverproperty', 'session_context', 'session_user', 'sign', 'sin', 'smalldatetimefromparts', 'soundex', 'sp_helplanguage', 'space', 'sqrt', 'square', 'stats_date', 'stdev', 'stdevp', 'str', 'string_escape', 'string_split', 'stuff', 'substring', 'sum', 'suser_id', 'suser_name', 'suser_sid', 'suser_sname', 'switchoffset', 'sysdatetime', 'sysdatetimeoffset', 'system_user', 'sysutcdatetime', 'tan', 'textptr', 'textvalid', 'timefromparts', 'todatetimeoffset', 'try_cast', 'try_convert', 'try_parse', 'type_id', 'type_name', 'typeproperty', 'unicode', 'upper', 'user_id', 'user_name', 'var', 'varp', 'xact_state', 'year', )
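These tuples are plain data; a highlighter or linter shipping them typically probes membership to classify tokens. The sketch below is illustrative only and not part of the original module (classify_token is a made-up helper); it checks the narrower TYPES and FUNCTIONS collections before the broad KEYWORDS list, since words such as 'char' and 'int' appear in more than one tuple.

def classify_token(token):
    """Illustrative helper: map a T-SQL word onto a coarse token class."""
    word = token.lower()
    if word in TYPES:        # narrower collections first: 'char', 'int', ...
        return 'type'        # also appear in KEYWORDS
    if word in FUNCTIONS:
        return 'function'
    if word in KEYWORDS:
        return 'keyword'
    return 'identifier'

# classify_token('VARCHAR') -> 'type'; classify_token('SELECT') -> 'keyword'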
gpl-3.0
stevenbrichards/boto
boto/cognito/__init__.py
473
1123
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
mit
oy-vey/algorithms-and-data-structures
6-GenomeAssemblyProgrammingChallenge/Week3/optimal_kmer_size.py
1
2363
# python3


class Edge:
    def __init__(self, u, v, value):
        self.u = u
        self.v = v
        self.value = value


class Graph:
    def __init__(self):
        self.edges = []
        self.incoming_edges = []
        self.graph = dict()
        self.incoming_graph = dict()

    def add_edge(self, from_, to, value):
        edge = Edge(from_, to, value)
        if self.graph.get(from_) is not None:
            self.graph[from_].append(len(self.edges))
        else:
            self.graph[from_] = [len(self.edges)]
        if self.graph.get(to) is None:
            self.graph[to] = []
        self.edges.append(edge)
        if self.incoming_graph.get(to) is not None:
            self.incoming_graph[to].append(len(self.incoming_edges))
        else:
            self.incoming_graph[to] = [len(self.incoming_edges)]
        if self.incoming_graph.get(from_) is None:
            self.incoming_graph[from_] = []
        self.incoming_edges.append(edge)

    def size(self):
        return len(self.graph)

    def get_ids(self, from_):
        return self.graph[from_]

    def get_incoming_ids(self, to):
        return self.incoming_graph[to]

    def get_edge(self, id):
        return self.edges[id]

    def get_incoming_edge(self, id):
        return self.incoming_edges[id]


def read_data():
    n = 400
    reads = []
    for i in range(n):
        reads.append(input())
    return reads


def get_graph(reads):
    edge_count = len(reads)
    graph = Graph()
    for read in reads:
        u, v, value = read[:-1], read[1:], read
        graph.add_edge(u, v, value)
    return graph, edge_count, graph.size()


def check_if_balanced(graph):
    for k in graph.graph.keys():
        out_ids = graph.get_ids(k)
        in_ids = graph.get_incoming_ids(k)
        if len(out_ids) != len(in_ids):
            return False
    return True


def generate_new_reads(reads, k):
    new_reads = []
    for read in reads:
        for s in range(len(read) - (k - 1)):
            new_read = read[s:][:k]
            new_reads.append(new_read)
    return list(set(new_reads))


reads = read_data()
graph, edge_count, vertex_count = get_graph(reads)
k = len(reads[0])
while not check_if_balanced(graph):
    k -= 1
    reads = generate_new_reads(reads, k)
    graph, edge_count, vertex_count = get_graph(reads)
print(k)
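A small illustrative check of the balance test above (not part of the original submission; demo_reads is made up): for these three reads every de Bruijn vertex has equal in- and out-degree, so check_if_balanced succeeds and no smaller k would be tried.

demo_reads = ['AAB', 'ABA', 'BAA']      # edges AA->AB, AB->BA, BA->AA
demo_graph, _, _ = get_graph(demo_reads)
assert check_if_balanced(demo_graph)    # the cycle is already balanced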
mit
GiladE/birde
venv/lib/python2.7/site-packages/psycopg2/tests/test_cancel.py
62
3705
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# test_cancel.py - unit test for query cancellation
#
# Copyright (C) 2010-2011 Jan Urbański <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
# License for more details.

import threading

import psycopg2
import psycopg2.extensions
from psycopg2 import extras

from testconfig import dsn
from testutils import unittest, ConnectingTestCase, skip_before_postgres


class CancelTests(ConnectingTestCase):

    def setUp(self):
        ConnectingTestCase.setUp(self)

        cur = self.conn.cursor()
        cur.execute('''
            CREATE TEMPORARY TABLE table1 (
              id int PRIMARY KEY
            )''')
        self.conn.commit()

    def test_empty_cancel(self):
        self.conn.cancel()

    @skip_before_postgres(8, 2)
    def test_cancel(self):
        errors = []

        def neverending(conn):
            cur = conn.cursor()
            try:
                self.assertRaises(psycopg2.extensions.QueryCanceledError,
                                  cur.execute, "select pg_sleep(60)")
                # make sure the connection still works
                conn.rollback()
                cur.execute("select 1")
                self.assertEqual(cur.fetchall(), [(1, )])
            except Exception, e:
                errors.append(e)
                raise

        def canceller(conn):
            cur = conn.cursor()
            try:
                conn.cancel()
            except Exception, e:
                errors.append(e)
                raise

        thread1 = threading.Thread(target=neverending, args=(self.conn, ))
        # wait a bit to make sure that the other thread is already in
        # pg_sleep -- ugly and racy, but the chances are ridiculously low
        thread2 = threading.Timer(0.3, canceller, args=(self.conn, ))
        thread1.start()
        thread2.start()

        thread1.join()
        thread2.join()

        self.assertEqual(errors, [])

    @skip_before_postgres(8, 2)
    def test_async_cancel(self):
        async_conn = psycopg2.connect(dsn, async=True)
        self.assertRaises(psycopg2.OperationalError, async_conn.cancel)
        extras.wait_select(async_conn)
        cur = async_conn.cursor()
        cur.execute("select pg_sleep(10000)")
        self.assertTrue(async_conn.isexecuting())
        async_conn.cancel()
        self.assertRaises(psycopg2.extensions.QueryCanceledError,
                          extras.wait_select, async_conn)
        cur.execute("select 1")
        extras.wait_select(async_conn)
        self.assertEqual(cur.fetchall(), [(1, )])

    def test_async_connection_cancel(self):
        async_conn = psycopg2.connect(dsn, async=True)
        async_conn.close()
        self.assertTrue(async_conn.closed)


def test_suite():
    return unittest.TestLoader().loadTestsFromName(__name__)

if __name__ == "__main__":
    unittest.main()
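Outside the test harness, the pattern test_cancel exercises boils down to a few lines. A minimal sketch, assuming a reachable database (the DSN below is hypothetical): cancel() is issued from a second thread while the first blocks in pg_sleep, which surfaces as QueryCanceledError in execute().

import threading
import psycopg2
import psycopg2.extensions

conn = psycopg2.connect('dbname=test')        # hypothetical DSN
threading.Timer(0.3, conn.cancel).start()     # cancel from another thread
cur = conn.cursor()
try:
    cur.execute("select pg_sleep(60)")        # blocks until cancelled
except psycopg2.extensions.QueryCanceledError:
    conn.rollback()                           # connection remains usable
    cur.execute("select 1")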
mit
romankagan/DDBWorkbench
python/lib/Lib/site-packages/django/template/loaders/filesystem.py
229
2358
""" Wrapper for loading templates from the filesystem. """ from django.conf import settings from django.template.base import TemplateDoesNotExist from django.template.loader import BaseLoader from django.utils._os import safe_join class Loader(BaseLoader): is_usable = True def get_template_sources(self, template_name, template_dirs=None): """ Returns the absolute paths to "template_name", when appended to each directory in "template_dirs". Any paths that don't lie inside one of the template dirs are excluded from the result set, for security reasons. """ if not template_dirs: template_dirs = settings.TEMPLATE_DIRS for template_dir in template_dirs: try: yield safe_join(template_dir, template_name) except UnicodeDecodeError: # The template dir name was a bytestring that wasn't valid UTF-8. raise except ValueError: # The joined path was located outside of this particular # template_dir (it might be inside another one, so this isn't # fatal). pass def load_template_source(self, template_name, template_dirs=None): tried = [] for filepath in self.get_template_sources(template_name, template_dirs): try: file = open(filepath) try: return (file.read().decode(settings.FILE_CHARSET), filepath) finally: file.close() except IOError: tried.append(filepath) if tried: error_msg = "Tried %s" % tried else: error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory." raise TemplateDoesNotExist(error_msg) load_template_source.is_usable = True _loader = Loader() def load_template_source(template_name, template_dirs=None): # For backwards compatibility import warnings warnings.warn( "'django.template.loaders.filesystem.load_template_source' is deprecated; use 'django.template.loaders.filesystem.Loader' instead.", DeprecationWarning ) return _loader.load_template_source(template_name, template_dirs) load_template_source.is_usable = True
apache-2.0
Justin-Yuan/Image2Music-Generator
library/jython2.5.3/Lib/test/test_os_jy.py
5
1551
"""Misc os module tests Made for Jython. """ import os import unittest from test import test_support class OSTestCase(unittest.TestCase): def setUp(self): open(test_support.TESTFN, 'w').close() def tearDown(self): if os.path.exists(test_support.TESTFN): os.remove(test_support.TESTFN) def test_issue1727(self): os.stat(*(test_support.TESTFN,)) def test_issue1755(self): os.remove(test_support.TESTFN) self.assertRaises(OSError, os.utime, test_support.TESTFN, None) def test_issue1824(self): os.remove(test_support.TESTFN) self.assertRaises(OSError, os.link, test_support.TESTFN, test_support.TESTFN) def test_issue1825(self): os.remove(test_support.TESTFN) testfnu = unicode(test_support.TESTFN) try: os.open(testfnu, os.O_RDONLY) except OSError, e: self.assertTrue(isinstance(e.filename, unicode)) self.assertEqual(e.filename, testfnu) else: self.assertTrue(False) # XXX: currently fail #for fn in os.chdir, os.listdir, os.rmdir: for fn in (os.rmdir,): try: fn(testfnu) except OSError, e: self.assertTrue(isinstance(e.filename, unicode)) self.assertEqual(e.filename, testfnu) else: self.assertTrue(False) def test_main(): test_support.run_unittest(OSTestCase) if __name__ == '__main__': test_main()
gpl-2.0
webOS-ports/qtwebkit
Tools/Scripts/webkitpy/common/system/executive_mock.py
117
7106
# Copyright (C) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import logging import os import StringIO from webkitpy.common.system.executive import ScriptError _log = logging.getLogger(__name__) class MockProcess(object): def __init__(self, stdout='MOCK STDOUT\n', stderr=''): self.pid = 42 self.stdout = StringIO.StringIO(stdout) self.stderr = StringIO.StringIO(stderr) self.stdin = StringIO.StringIO() self.returncode = 0 def wait(self): return # FIXME: This should be unified with MockExecutive2 class MockExecutive(object): PIPE = "MOCK PIPE" STDOUT = "MOCK STDOUT" @staticmethod def ignore_error(error): pass def __init__(self, should_log=False, should_throw=False, should_throw_when_run=None): self._should_log = should_log self._should_throw = should_throw self._should_throw_when_run = should_throw_when_run or set() # FIXME: Once executive wraps os.getpid() we can just use a static pid for "this" process. 
self._running_pids = {'test-webkitpy': os.getpid()} self._proc = None self.calls = [] self.pid_to_system_pid = {} def check_running_pid(self, pid): return pid in self._running_pids.values() def running_pids(self, process_name_filter): running_pids = [] for process_name, process_pid in self._running_pids.iteritems(): if process_name_filter(process_name): running_pids.append(process_pid) _log.info("MOCK running_pids: %s" % running_pids) return running_pids def run_and_throw_if_fail(self, args, quiet=False, cwd=None, env=None): if self._should_log: env_string = "" if env: env_string = ", env=%s" % env _log.info("MOCK run_and_throw_if_fail: %s, cwd=%s%s" % (args, cwd, env_string)) if self._should_throw_when_run.intersection(args): raise ScriptError("Exception for %s" % args, output="MOCK command output") return "MOCK output of child process" def command_for_printing(self, args): string_args = map(unicode, args) return " ".join(string_args) def run_command(self, args, cwd=None, input=None, error_handler=None, return_exit_code=False, return_stderr=True, decode_output=False, env=None): self.calls.append(args) assert(isinstance(args, list) or isinstance(args, tuple)) if self._should_log: env_string = "" if env: env_string = ", env=%s" % env input_string = "" if input: input_string = ", input=%s" % input _log.info("MOCK run_command: %s, cwd=%s%s%s" % (args, cwd, env_string, input_string)) output = "MOCK output of child process" if self._should_throw_when_run.intersection(args): raise ScriptError("Exception for %s" % args, output="MOCK command output") if self._should_throw: raise ScriptError("MOCK ScriptError", output=output) return output def cpu_count(self): return 2 def kill_all(self, process_name): pass def kill_process(self, pid): pass def popen(self, args, cwd=None, env=None, **kwargs): self.calls.append(args) if self._should_log: cwd_string = "" if cwd: cwd_string = ", cwd=%s" % cwd env_string = "" if env: env_string = ", env=%s" % env _log.info("MOCK popen: %s%s%s" % (args, cwd_string, env_string)) if not self._proc: self._proc = MockProcess() return self._proc def run_in_parallel(self, commands): num_previous_calls = len(self.calls) command_outputs = [] for cmd_line, cwd in commands: command_outputs.append([0, self.run_command(cmd_line, cwd=cwd), '']) new_calls = self.calls[num_previous_calls:] self.calls = self.calls[:num_previous_calls] self.calls.append(new_calls) return command_outputs class MockExecutive2(MockExecutive): """MockExecutive2 is like MockExecutive except it doesn't log anything.""" def __init__(self, output='', exit_code=0, exception=None, run_command_fn=None, stderr=''): self._output = output self._stderr = stderr self._exit_code = exit_code self._exception = exception self._run_command_fn = run_command_fn self.calls = [] def run_command(self, args, cwd=None, input=None, error_handler=None, return_exit_code=False, return_stderr=True, decode_output=False, env=None): self.calls.append(args) assert(isinstance(args, list) or isinstance(args, tuple)) if self._exception: raise self._exception # pylint: disable=E0702 if self._run_command_fn: return self._run_command_fn(args) if return_exit_code: return self._exit_code if self._exit_code and error_handler: script_error = ScriptError(script_args=args, exit_code=self._exit_code, output=self._output) error_handler(script_error) if return_stderr: return self._output + self._stderr return self._output
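Typical usage in webkitpy tests, sketched below (the command and output strings are invented): an instance is handed to code under test in place of a real Executive, so shell invocations are recorded in .calls rather than executed.

from webkitpy.common.system.executive_mock import MockExecutive2

executive = MockExecutive2(output='MOCK svn output')
print executive.run_command(['svn', 'info'])   # -> MOCK svn output
print executive.calls                          # -> [['svn', 'info']]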
gpl-2.0
RaoUmer/django
django/contrib/localflavor/si/si_postalcodes.py
110
13147
# *-* coding: utf-8 *-* from __future__ import unicode_literals SI_POSTALCODES = [ (1000, 'Ljubljana'), (1215, 'Medvode'), (1216, 'Smlednik'), (1217, 'Vodice'), (1218, 'Komenda'), (1219, 'Laze v Tuhinju'), (1221, 'Motnik'), (1222, 'Trojane'), (1223, 'Blagovica'), (1225, 'Lukovica'), (1230, 'Dom\u017eale'), (1233, 'Dob'), (1234, 'Menge\u0161'), (1235, 'Radomlje'), (1236, 'Trzin'), (1241, 'Kamnik'), (1242, 'Stahovica'), (1251, 'Morav\u010de'), (1252, 'Va\u010de'), (1262, 'Dol pri Ljubljani'), (1270, 'Litija'), (1272, 'Pol\u0161nik'), (1273, 'Dole pri Litiji'), (1274, 'Gabrovka'), (1275, '\u0160martno pri Litiji'), (1276, 'Primskovo'), (1281, 'Kresnice'), (1282, 'Sava'), (1290, 'Grosuplje'), (1291, '\u0160kofljica'), (1292, 'Ig'), (1293, '\u0160marje - Sap'), (1294, 'Vi\u0161nja Gora'), (1295, 'Ivan\u010dna Gorica'), (1296, '\u0160entvid pri Sti\u010dni'), (1301, 'Krka'), (1303, 'Zagradec'), (1310, 'Ribnica'), (1311, 'Turjak'), (1312, 'Videm - Dobrepolje'), (1313, 'Struge'), (1314, 'Rob'), (1315, 'Velike La\u0161\u010de'), (1316, 'Ortnek'), (1317, 'Sodra\u017eica'), (1318, 'Lo\u0161ki Potok'), (1319, 'Draga'), (1330, 'Ko\u010devje'), (1331, 'Dolenja vas'), (1332, 'Stara Cerkev'), (1336, 'Kostel'), (1337, 'Osilnica'), (1338, 'Ko\u010devska Reka'), (1351, 'Brezovica pri Ljubljani'), (1352, 'Preserje'), (1353, 'Borovnica'), (1354, 'Horjul'), (1355, 'Polhov Gradec'), (1356, 'Dobrova'), (1357, 'Notranje Gorice'), (1358, 'Log pri Brezovici'), (1360, 'Vrhnika'), (1370, 'Logatec'), (1372, 'Hotedr\u0161ica'), (1373, 'Rovte'), (1380, 'Cerknica'), (1381, 'Rakek'), (1382, 'Begunje pri Cerknici'), (1384, 'Grahovo'), (1385, 'Nova vas'), (1386, 'Stari trg pri Lo\u017eu'), (1410, 'Zagorje ob Savi'), (1411, 'Izlake'), (1412, 'Kisovec'), (1413, '\u010cem\u0161enik'), (1414, 'Podkum'), (1420, 'Trbovlje'), (1423, 'Dobovec'), (1430, 'Hrastnik'), (1431, 'Dol pri Hrastniku'), (1432, 'Zidani Most'), (1433, 'Rade\u010de'), (1434, 'Loka pri Zidanem Mostu'), (2000, 'Maribor'), (2201, 'Zgornja Kungota'), (2204, 'Miklav\u017e na Dravskem polju'), (2205, 'Star\u0161e'), (2206, 'Marjeta na Dravskem polju'), (2208, 'Pohorje'), (2211, 'Pesnica pri Mariboru'), (2212, '\u0160entilj v Slovenskih goricah'), (2213, 'Zgornja Velka'), (2214, 'Sladki vrh'), (2215, 'Cer\u0161ak'), (2221, 'Jarenina'), (2222, 'Jakobski Dol'), (2223, 'Jurovski Dol'), (2229, 'Male\u010dnik'), (2230, 'Lenart v Slovenskih goricah'), (2231, 'Pernica'), (2232, 'Voli\u010dina'), (2233, 'Sveta Ana v Slovenskih goricah'), (2234, 'Benedikt'), (2235, 'Sveta Trojica v Slovenskih goricah'), (2236, 'Cerkvenjak'), (2241, 'Spodnji Duplek'), (2242, 'Zgornja Korena'), (2250, 'Ptuj'), (2252, 'Dornava'), (2253, 'Destrnik'), (2254, 'Trnovska vas'), (2255, 'Vitomarci'), (2256, 'Jur\u0161inci'), (2257, 'Polen\u0161ak'), (2258, 'Sveti Toma\u017e'), (2259, 'Ivanjkovci'), (2270, 'Ormo\u017e'), (2272, 'Gori\u0161nica'), (2273, 'Podgorci'), (2274, 'Velika Nedelja'), (2275, 'Miklav\u017e pri Ormo\u017eu'), (2276, 'Kog'), (2277, 'Sredi\u0161\u010de ob Dravi'), (2281, 'Markovci'), (2282, 'Cirkulane'), (2283, 'Zavr\u010d'), (2284, 'Videm pri Ptuju'), (2285, 'Zgornji Leskovec'), (2286, 'Podlehnik'), (2287, '\u017detale'), (2288, 'Hajdina'), (2289, 'Stoperce'), (2310, 'Slovenska Bistrica'), (2311, 'Ho\u010de'), (2312, 'Orehova vas'), (2313, 'Fram'), (2314, 'Zgornja Polskava'), (2315, '\u0160martno na Pohorju'), (2316, 'Zgornja Lo\u017enica'), (2317, 'Oplotnica'), (2318, 'Laporje'), (2319, 'Polj\u010dane'), (2321, 'Makole'), (2322, 'Maj\u0161perk'), (2323, 'Ptujska Gora'), (2324, 
'Lovrenc na Dravskem polju'), (2325, 'Kidri\u010devo'), (2326, 'Cirkovce'), (2327, 'Ra\u010de'), (2331, 'Pragersko'), (2341, 'Limbu\u0161'), (2342, 'Ru\u0161e'), (2343, 'Fala'), (2344, 'Lovrenc na Pohorju'), (2345, 'Bistrica ob Dravi'), (2351, 'Kamnica'), (2352, 'Selnica ob Dravi'), (2353, 'Sv. Duh na Ostrem Vrhu'), (2354, 'Bresternica'), (2360, 'Radlje ob Dravi'), (2361, 'O\u017ebalt'), (2362, 'Kapla'), (2363, 'Podvelka'), (2364, 'Ribnica na Pohorju'), (2365, 'Vuhred'), (2366, 'Muta'), (2367, 'Vuzenica'), (2370, 'Dravograd'), (2371, 'Trbonje'), (2372, 'Libeli\u010de'), (2373, '\u0160entjan\u017e pri Dravogradu'), (2380, 'Slovenj Gradec'), (2381, 'Podgorje pri Slovenj Gradcu'), (2382, 'Mislinja'), (2383, '\u0160martno pri Slovenj Gradcu'), (2390, 'Ravne na Koro\u0161kem'), (2391, 'Prevalje'), (2392, 'Me\u017eica'), (2393, '\u010crna na Koro\u0161kem'), (2394, 'Kotlje'), (3000, 'Celje'), (3201, '\u0160martno v Ro\u017eni dolini'), (3202, 'Ljube\u010dna'), (3203, 'Nova Cerkev'), (3204, 'Dobrna'), (3205, 'Vitanje'), (3206, 'Stranice'), (3210, 'Slovenske Konjice'), (3211, '\u0160kofja vas'), (3212, 'Vojnik'), (3213, 'Frankolovo'), (3214, 'Zre\u010de'), (3215, 'Lo\u010de'), (3220, '\u0160tore'), (3221, 'Teharje'), (3222, 'Dramlje'), (3223, 'Loka pri \u017dusmu'), (3224, 'Dobje pri Planini'), (3225, 'Planina pri Sevnici'), (3230, '\u0160entjur'), (3231, 'Grobelno'), (3232, 'Ponikva'), (3233, 'Kalobje'), (3240, '\u0160marje pri Jel\u0161ah'), (3241, 'Podplat'), (3250, 'Roga\u0161ka Slatina'), (3252, 'Rogatec'), (3253, 'Pristava pri Mestinju'), (3254, 'Pod\u010detrtek'), (3255, 'Bu\u010de'), (3256, 'Bistrica ob Sotli'), (3257, 'Podsreda'), (3260, 'Kozje'), (3261, 'Lesi\u010dno'), (3262, 'Prevorje'), (3263, 'Gorica pri Slivnici'), (3264, 'Sveti \u0160tefan'), (3270, 'La\u0161ko'), (3271, '\u0160entrupert'), (3272, 'Rimske Toplice'), (3273, 'Jurklo\u0161ter'), (3301, 'Petrov\u010de'), (3302, 'Gri\u017ee'), (3303, 'Gomilsko'), (3304, 'Tabor'), (3305, 'Vransko'), (3310, '\u017dalec'), (3311, '\u0160empeter v Savinjski dolini'), (3312, 'Prebold'), (3313, 'Polzela'), (3314, 'Braslov\u010de'), (3320, 'Velenje - dostava'), (3322, 'Velenje - po\u0161tni predali'), (3325, '\u0160o\u0161tanj'), (3326, 'Topol\u0161ica'), (3327, '\u0160martno ob Paki'), (3330, 'Mozirje'), (3331, 'Nazarje'), (3332, 'Re\u010dica ob Savinji'), (3333, 'Ljubno ob Savinji'), (3334, 'Lu\u010de'), (3335, 'Sol\u010dava'), (3341, '\u0160martno ob Dreti'), (3342, 'Gornji Grad'), (4000, 'Kranj'), (4201, 'Zgornja Besnica'), (4202, 'Naklo'), (4203, 'Duplje'), (4204, 'Golnik'), (4205, 'Preddvor'), (4206, 'Zgornje Jezersko'), (4207, 'Cerklje na Gorenjskem'), (4208, '\u0160en\u010dur'), (4209, '\u017dabnica'), (4210, 'Brnik - aerodrom'), (4211, 'Mav\u010di\u010de'), (4212, 'Visoko'), (4220, '\u0160kofja Loka'), (4223, 'Poljane nad \u0160kofjo Loko'), (4224, 'Gorenja vas'), (4225, 'Sovodenj'), (4226, '\u017diri'), (4227, 'Selca'), (4228, '\u017delezniki'), (4229, 'Sorica'), (4240, 'Radovljica'), (4243, 'Brezje'), (4244, 'Podnart'), (4245, 'Kropa'), (4246, 'Kamna Gorica'), (4247, 'Zgornje Gorje'), (4248, 'Lesce'), (4260, 'Bled'), (4263, 'Bohinjska Bela'), (4264, 'Bohinjska Bistrica'), (4265, 'Bohinjsko jezero'), (4267, 'Srednja vas v Bohinju'), (4270, 'Jesenice'), (4273, 'Blejska Dobrava'), (4274, '\u017dirovnica'), (4275, 'Begunje na Gorenjskem'), (4276, 'Hru\u0161ica'), (4280, 'Kranjska Gora'), (4281, 'Mojstrana'), (4282, 'Gozd Martuljek'), (4283, 'Rate\u010de - Planica'), (4290, 'Tr\u017ei\u010d'), (4294, 'Kri\u017ee'), (5000, 'Nova Gorica'), 
(5210, 'Deskle'), (5211, 'Kojsko'), (5212, 'Dobrovo v Brdih'), (5213, 'Kanal'), (5214, 'Kal nad Kanalom'), (5215, 'Ro\u010dinj'), (5216, 'Most na So\u010di'), (5220, 'Tolmin'), (5222, 'Kobarid'), (5223, 'Breginj'), (5224, 'Srpenica'), (5230, 'Bovec'), (5231, 'Log pod Mangartom'), (5232, 'So\u010da'), (5242, 'Grahovo ob Ba\u010di'), (5243, 'Podbrdo'), (5250, 'Solkan'), (5251, 'Grgar'), (5252, 'Trnovo pri Gorici'), (5253, '\u010cepovan'), (5261, '\u0160empas'), (5262, '\u010crni\u010de'), (5263, 'Dobravlje'), (5270, 'Ajdov\u0161\u010dina'), (5271, 'Vipava'), (5272, 'Podnanos'), (5273, 'Col'), (5274, '\u010crni Vrh nad Idrijo'), (5275, 'Godovi\u010d'), (5280, 'Idrija'), (5281, 'Spodnja Idrija'), (5282, 'Cerkno'), (5283, 'Slap ob Idrijci'), (5290, '\u0160empeter pri Gorici'), (5291, 'Miren'), (5292, 'Ren\u010de'), (5293, 'Vol\u010dja Draga'), (5294, 'Dornberk'), (5295, 'Branik'), (5296, 'Kostanjevica na Krasu'), (5297, 'Prva\u010dina'), (6000, 'Koper'), (6210, 'Se\u017eana'), (6215, 'Diva\u010da'), (6216, 'Podgorje'), (6217, 'Vremski Britof'), (6219, 'Lokev'), (6221, 'Dutovlje'), (6222, '\u0160tanjel'), (6223, 'Komen'), (6224, 'Seno\u017ee\u010de'), (6225, 'Hru\u0161evje'), (6230, 'Postojna'), (6232, 'Planina'), (6240, 'Kozina'), (6242, 'Materija'), (6243, 'Obrov'), (6244, 'Podgrad'), (6250, 'Ilirska Bistrica'), (6251, 'Ilirska Bistrica - Trnovo'), (6253, 'Kne\u017eak'), (6254, 'Jel\u0161ane'), (6255, 'Prem'), (6256, 'Ko\u0161ana'), (6257, 'Pivka'), (6258, 'Prestranek'), (6271, 'Dekani'), (6272, 'Gra\u010di\u0161\u010de'), (6273, 'Marezige'), (6274, '\u0160marje'), (6275, '\u010crni Kal'), (6276, 'Pobegi'), (6280, 'Ankaran - Ancarano'), (6281, '\u0160kofije'), (6310, 'Izola - Isola'), (6320, 'Portoro\u017e - Portorose'), (6330, 'Piran - Pirano'), (6333, 'Se\u010dovlje - Sicciole'), (8000, 'Novo mesto'), (8210, 'Trebnje'), (8211, 'Dobrni\u010d'), (8212, 'Velika Loka'), (8213, 'Veliki Gaber'), (8216, 'Mirna Pe\u010d'), (8220, '\u0160marje\u0161ke Toplice'), (8222, 'Oto\u010dec'), (8230, 'Mokronog'), (8231, 'Trebelno'), (8232, '\u0160entrupert'), (8233, 'Mirna'), (8250, 'Bre\u017eice'), (8251, '\u010cate\u017e ob Savi'), (8253, 'Arti\u010de'), (8254, 'Globoko'), (8255, 'Pi\u0161ece'), (8256, 'Sromlje'), (8257, 'Dobova'), (8258, 'Kapele'), (8259, 'Bizeljsko'), (8261, 'Jesenice na Dolenjskem'), (8262, 'Kr\u0161ka vas'), (8263, 'Cerklje ob Krki'), (8270, 'Kr\u0161ko'), (8272, 'Zdole'), (8273, 'Leskovec pri Kr\u0161kem'), (8274, 'Raka'), (8275, '\u0160kocjan'), (8276, 'Bu\u010dka'), (8280, 'Brestanica'), (8281, 'Senovo'), (8282, 'Koprivnica'), (8283, 'Blanca'), (8290, 'Sevnica'), (8292, 'Zabukovje'), (8293, 'Studenec'), (8294, 'Bo\u0161tanj'), (8295, 'Tr\u017ei\u0161\u010de'), (8296, 'Krmelj'), (8297, '\u0160entjan\u017e'), (8310, '\u0160entjernej'), (8311, 'Kostanjevica na Krki'), (8312, 'Podbo\u010dje'), (8321, 'Brusnice'), (8322, 'Stopi\u010de'), (8323, 'Ur\u0161na sela'), (8330, 'Metlika'), (8331, 'Suhor'), (8332, 'Gradac'), (8333, 'Semi\u010d'), (8340, '\u010crnomelj'), (8341, 'Adle\u0161i\u010di'), (8342, 'Stari trg ob Kolpi'), (8343, 'Dragatu\u0161'), (8344, 'Vinica pri \u010crnomlju'), (8350, 'Dolenjske Toplice'), (8351, 'Stra\u017ea'), (8360, '\u017du\u017eemberk'), (8361, 'Dvor'), (8362, 'Hinje'), (9000, 'Murska Sobota'), (9201, 'Puconci'), (9202, 'Ma\u010dkovci'), (9203, 'Petrovci'), (9204, '\u0160alovci'), (9205, 'Hodo\u0161 - Hodos'), (9206, 'Kri\u017eevci'), (9207, 'Prosenjakovci - Partosfalva'), (9208, 'Fokovci'), (9220, 'Lendava - Lendva'), (9221, 'Martjanci'), (9222, 'Bogojina'), 
(9223, 'Dobrovnik - Dobronak'), (9224, 'Turni\u0161\u010de'), (9225, 'Velika Polana'), (9226, 'Moravske Toplice'), (9227, 'Kobilje'), (9231, 'Beltinci'), (9232, '\u010cren\u0161ovci'), (9233, 'Odranci'), (9240, 'Ljutomer'), (9241, 'Ver\u017eej'), (9242, 'Kri\u017eevci pri Ljutomeru'), (9243, 'Mala Nedelja'), (9244, 'Sveti Jurij ob \u0160\u010davnici'), (9245, 'Spodnji Ivanjci'), (9250, 'Gornja Radgona'), (9251, 'Ti\u0161ina'), (9252, 'Radenci'), (9253, 'Apa\u010de'), (9261, 'Cankova'), (9262, 'Roga\u0161ovci'), (9263, 'Kuzma'), (9264, 'Grad'), (9265, 'Bodonci'), ] SI_POSTALCODES_CHOICES = sorted(SI_POSTALCODES, key=lambda k: k[1])
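SI_POSTALCODES_CHOICES is shaped for a Django Select widget: (code, place-name) pairs sorted by place name, with the localflavor form code as the intended consumer. A minimal sketch of wiring it into a form (AddressForm is hypothetical):

from django import forms
from django.contrib.localflavor.si.si_postalcodes import SI_POSTALCODES_CHOICES

class AddressForm(forms.Form):
    postal_code = forms.ChoiceField(choices=SI_POSTALCODES_CHOICES)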
bsd-3-clause
victorbriz/rethinkdb
external/v8_3.30.33.16/tools/run-tests.py
33
22539
#!/usr/bin/env python # # Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from collections import OrderedDict import itertools import multiprocessing import optparse import os from os.path import join import platform import random import shlex import subprocess import sys import time from testrunner.local import execution from testrunner.local import progress from testrunner.local import testsuite from testrunner.local import utils from testrunner.local import verbose from testrunner.network import network_execution from testrunner.objects import context ARCH_GUESS = utils.DefaultArch() DEFAULT_TESTS = [ "mjsunit", "unittests", "cctest", "message", "preparser", ] # Map of test name synonyms to lists of test suites. Should be ordered by # expected runtimes (suites with slow test cases first). These groups are # invoked in seperate steps on the bots. TEST_MAP = { "default": [ "mjsunit", "cctest", "message", "preparser", ], "optimize_for_size": [ "mjsunit", "cctest", "webkit", ], "unittests": [ "unittests", ], } TIMEOUT_DEFAULT = 60 TIMEOUT_SCALEFACTOR = {"debug" : 4, "release" : 1 } # Use this to run several variants of the tests. 
VARIANT_FLAGS = { "default": [], "stress": ["--stress-opt", "--always-opt"], "turbofan": ["--turbo-asm", "--turbo-filter=*", "--always-opt"], "nocrankshaft": ["--nocrankshaft"]} VARIANTS = ["default", "stress", "turbofan", "nocrankshaft"] MODE_FLAGS = { "debug" : ["--nohard-abort", "--nodead-code-elimination", "--nofold-constants", "--enable-slow-asserts", "--debug-code", "--verify-heap"], "release" : ["--nohard-abort", "--nodead-code-elimination", "--nofold-constants"]} GC_STRESS_FLAGS = ["--gc-interval=500", "--stress-compaction", "--concurrent-recompilation-queue-length=64", "--concurrent-recompilation-delay=500", "--concurrent-recompilation"] SUPPORTED_ARCHS = ["android_arm", "android_arm64", "android_ia32", "arm", "ia32", "x87", "mips", "mipsel", "mips64el", "nacl_ia32", "nacl_x64", "x64", "x32", "arm64"] # Double the timeout for these: SLOW_ARCHS = ["android_arm", "android_arm64", "android_ia32", "arm", "mips", "mipsel", "mips64el", "nacl_ia32", "nacl_x64", "x87", "arm64"] def BuildOptions(): result = optparse.OptionParser() result.add_option("--arch", help=("The architecture to run tests for, " "'auto' or 'native' for auto-detect"), default="ia32,x64,arm") result.add_option("--arch-and-mode", help="Architecture and mode in the format 'arch.mode'", default=None) result.add_option("--asan", help="Regard test expectations for ASAN", default=False, action="store_true") result.add_option("--buildbot", help="Adapt to path structure used on buildbots", default=False, action="store_true") result.add_option("--cat", help="Print the source of the tests", default=False, action="store_true") result.add_option("--flaky-tests", help="Regard tests marked as flaky (run|skip|dontcare)", default="dontcare") result.add_option("--slow-tests", help="Regard slow tests (run|skip|dontcare)", default="dontcare") result.add_option("--pass-fail-tests", help="Regard pass|fail tests (run|skip|dontcare)", default="dontcare") result.add_option("--gc-stress", help="Switch on GC stress mode", default=False, action="store_true") result.add_option("--command-prefix", help="Prepended to each shell command used to run a test", default="") result.add_option("--download-data", help="Download missing test suite data", default=False, action="store_true") result.add_option("--extra-flags", help="Additional flags to pass to each test command", default="") result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true") result.add_option("-j", help="The number of parallel tasks to run", default=0, type="int") result.add_option("-m", "--mode", help="The test modes in which to run (comma-separated)", default="release,debug") result.add_option("--no-i18n", "--noi18n", help="Skip internationalization tests", default=False, action="store_true") result.add_option("--no-network", "--nonetwork", help="Don't distribute tests on the network", default=(utils.GuessOS() != "linux"), dest="no_network", action="store_true") result.add_option("--no-presubmit", "--nopresubmit", help='Skip presubmit checks', default=False, dest="no_presubmit", action="store_true") result.add_option("--no-snap", "--nosnap", help='Test a build compiled without snapshot.', default=False, dest="no_snap", action="store_true") result.add_option("--no-sorting", "--nosorting", help="Don't sort tests according to duration of last run.", default=False, dest="no_sorting", action="store_true") result.add_option("--no-stress", "--nostress", help="Don't run crankshaft --always-opt --stress-op test", default=False, dest="no_stress", 
action="store_true") result.add_option("--no-variants", "--novariants", help="Don't run any testing variants", default=False, dest="no_variants", action="store_true") result.add_option("--variants", help="Comma-separated list of testing variants") result.add_option("--outdir", help="Base directory with compile output", default="out") result.add_option("--predictable", help="Compare output of several reruns of each test", default=False, action="store_true") result.add_option("-p", "--progress", help=("The style of progress indicator" " (verbose, dots, color, mono)"), choices=progress.PROGRESS_INDICATORS.keys(), default="mono") result.add_option("--quickcheck", default=False, action="store_true", help=("Quick check mode (skip slow/flaky tests)")) result.add_option("--report", help="Print a summary of the tests to be run", default=False, action="store_true") result.add_option("--json-test-results", help="Path to a file for storing json results.") result.add_option("--rerun-failures-count", help=("Number of times to rerun each failing test case. " "Very slow tests will be rerun only once."), default=0, type="int") result.add_option("--rerun-failures-max", help="Maximum number of failing test cases to rerun.", default=100, type="int") result.add_option("--shard-count", help="Split testsuites into this number of shards", default=1, type="int") result.add_option("--shard-run", help="Run this shard from the split up tests.", default=1, type="int") result.add_option("--shell", help="DEPRECATED! use --shell-dir", default="") result.add_option("--shell-dir", help="Directory containing executables", default="") result.add_option("--dont-skip-slow-simulator-tests", help="Don't skip more slow tests when using a simulator.", default=False, action="store_true", dest="dont_skip_simulator_slow_tests") result.add_option("--stress-only", help="Only run tests with --always-opt --stress-opt", default=False, action="store_true") result.add_option("--time", help="Print timing information after running", default=False, action="store_true") result.add_option("-t", "--timeout", help="Timeout in seconds", default= -1, type="int") result.add_option("--tsan", help="Regard test expectations for TSAN", default=False, action="store_true") result.add_option("-v", "--verbose", help="Verbose output", default=False, action="store_true") result.add_option("--valgrind", help="Run tests through valgrind", default=False, action="store_true") result.add_option("--warn-unused", help="Report unused rules", default=False, action="store_true") result.add_option("--junitout", help="File name of the JUnit output") result.add_option("--junittestsuite", help="The testsuite name in the JUnit output file", default="v8tests") result.add_option("--random-seed", default=0, dest="random_seed", help="Default seed for initializing random generator") result.add_option("--msan", help="Regard test expectations for MSAN", default=False, action="store_true") return result def ProcessOptions(options): global VARIANT_FLAGS global VARIANTS # Architecture and mode related stuff. 
if options.arch_and_mode: options.arch_and_mode = [arch_and_mode.split(".") for arch_and_mode in options.arch_and_mode.split(",")] options.arch = ",".join([tokens[0] for tokens in options.arch_and_mode]) options.mode = ",".join([tokens[1] for tokens in options.arch_and_mode]) options.mode = options.mode.split(",") for mode in options.mode: if not mode.lower() in ["debug", "release", "optdebug"]: print "Unknown mode %s" % mode return False if options.arch in ["auto", "native"]: options.arch = ARCH_GUESS options.arch = options.arch.split(",") for arch in options.arch: if not arch in SUPPORTED_ARCHS: print "Unknown architecture %s" % arch return False # Store the final configuration in arch_and_mode list. Don't overwrite # predefined arch_and_mode since it is more expressive than arch and mode. if not options.arch_and_mode: options.arch_and_mode = itertools.product(options.arch, options.mode) # Special processing of other options, sorted alphabetically. if options.buildbot: # Buildbots run presubmit tests as a separate step. options.no_presubmit = True options.no_network = True if options.command_prefix: print("Specifying --command-prefix disables network distribution, " "running tests locally.") options.no_network = True options.command_prefix = shlex.split(options.command_prefix) options.extra_flags = shlex.split(options.extra_flags) if options.gc_stress: options.extra_flags += GC_STRESS_FLAGS if options.asan: options.extra_flags.append("--invoke-weak-callbacks") if options.tsan: VARIANTS = ["default"] suppressions_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sanitizers', 'tsan_suppressions.txt') tsan_options = '%s suppressions=%s' % ( os.environ.get('TSAN_OPTIONS', ''), suppressions_file) os.environ['TSAN_OPTIONS'] = tsan_options if options.j == 0: options.j = multiprocessing.cpu_count() while options.random_seed == 0: options.random_seed = random.SystemRandom().randint(-2147483648, 2147483647) def excl(*args): """Returns true if zero or one of multiple arguments are true.""" return reduce(lambda x, y: x + y, args) <= 1 if not excl(options.no_stress, options.stress_only, options.no_variants, bool(options.variants)): print("Use only one of --no-stress, --stress-only, --no-variants, " "or --variants.") return False if options.quickcheck: VARIANTS = ["default", "stress"] options.flaky_tests = "skip" options.slow_tests = "skip" options.pass_fail_tests = "skip" if options.no_stress: VARIANTS = ["default", "nocrankshaft"] if options.no_variants: VARIANTS = ["default"] if options.stress_only: VARIANTS = ["stress"] if options.variants: VARIANTS = options.variants.split(",") if not set(VARIANTS).issubset(VARIANT_FLAGS.keys()): print "All variants must be in %s" % str(VARIANT_FLAGS.keys()) return False if options.predictable: VARIANTS = ["default"] options.extra_flags.append("--predictable") options.extra_flags.append("--verify_predictable") options.extra_flags.append("--no-inline-new") if not options.shell_dir: if options.shell: print "Warning: --shell is deprecated, use --shell-dir instead." options.shell_dir = os.path.dirname(options.shell) if options.valgrind: run_valgrind = os.path.join("tools", "run-valgrind.py") # This is OK for distributed running, so we don't need to set no_network. 
options.command_prefix = (["python", "-u", run_valgrind] + options.command_prefix) def CheckTestMode(name, option): if not option in ["run", "skip", "dontcare"]: print "Unknown %s mode %s" % (name, option) return False return True if not CheckTestMode("flaky test", options.flaky_tests): return False if not CheckTestMode("slow test", options.slow_tests): return False if not CheckTestMode("pass|fail test", options.pass_fail_tests): return False if not options.no_i18n: DEFAULT_TESTS.append("intl") return True def ShardTests(tests, shard_count, shard_run): if shard_count < 2: return tests if shard_run < 1 or shard_run > shard_count: print "shard-run not a valid number, should be in [1:shard-count]" print "defaulting back to running all tests" return tests count = 0 shard = [] for test in tests: if count % shard_count == shard_run - 1: shard.append(test) count += 1 return shard def Main(): parser = BuildOptions() (options, args) = parser.parse_args() if not ProcessOptions(options): parser.print_help() return 1 exit_code = 0 workspace = os.path.abspath(join(os.path.dirname(sys.argv[0]), "..")) if not options.no_presubmit: print ">>> running presubmit tests" exit_code = subprocess.call( [sys.executable, join(workspace, "tools", "presubmit.py")]) suite_paths = utils.GetSuitePaths(join(workspace, "test")) # Expand arguments with grouped tests. The args should reflect the list of # suites as otherwise filters would break. def ExpandTestGroups(name): if name in TEST_MAP: return [suite for suite in TEST_MAP[arg]] else: return [name] args = reduce(lambda x, y: x + y, [ExpandTestGroups(arg) for arg in args], []) if len(args) == 0: suite_paths = [ s for s in DEFAULT_TESTS if s in suite_paths ] else: args_suites = OrderedDict() # Used as set for arg in args: args_suites[arg.split(os.path.sep)[0]] = True suite_paths = [ s for s in args_suites if s in suite_paths ] suites = [] for root in suite_paths: suite = testsuite.TestSuite.LoadTestSuite( os.path.join(workspace, "test", root)) if suite: suites.append(suite) if options.download_data: for s in suites: s.DownloadData() for (arch, mode) in options.arch_and_mode: try: code = Execute(arch, mode, args, options, suites, workspace) except KeyboardInterrupt: return 2 exit_code = exit_code or code return exit_code def Execute(arch, mode, args, options, suites, workspace): print(">>> Running tests for %s.%s" % (arch, mode)) shell_dir = options.shell_dir if not shell_dir: if options.buildbot: shell_dir = os.path.join(workspace, options.outdir, mode) mode = mode.lower() else: shell_dir = os.path.join(workspace, options.outdir, "%s.%s" % (arch, mode)) shell_dir = os.path.relpath(shell_dir) if mode == "optdebug": mode = "debug" # "optdebug" is just an alias. # Populate context object. mode_flags = MODE_FLAGS[mode] timeout = options.timeout if timeout == -1: # Simulators are slow, therefore allow a longer default timeout. if arch in SLOW_ARCHS: timeout = 2 * TIMEOUT_DEFAULT; else: timeout = TIMEOUT_DEFAULT; timeout *= TIMEOUT_SCALEFACTOR[mode] if options.predictable: # Predictable mode is slower. timeout *= 2 ctx = context.Context(arch, mode, shell_dir, mode_flags, options.verbose, timeout, options.isolates, options.command_prefix, options.extra_flags, options.no_i18n, options.random_seed, options.no_sorting, options.rerun_failures_count, options.rerun_failures_max, options.predictable) # TODO(all): Combine "simulator" and "simulator_run". 
simulator_run = not options.dont_skip_simulator_slow_tests and \ arch in ['arm64', 'arm', 'mips'] and ARCH_GUESS and arch != ARCH_GUESS # Find available test suites and read test cases from them. variables = { "arch": arch, "asan": options.asan, "deopt_fuzzer": False, "gc_stress": options.gc_stress, "isolates": options.isolates, "mode": mode, "no_i18n": options.no_i18n, "no_snap": options.no_snap, "simulator_run": simulator_run, "simulator": utils.UseSimulator(arch), "system": utils.GuessOS(), "tsan": options.tsan, "msan": options.msan, } all_tests = [] num_tests = 0 test_id = 0 for s in suites: s.ReadStatusFile(variables) s.ReadTestCases(ctx) if len(args) > 0: s.FilterTestCasesByArgs(args) all_tests += s.tests s.FilterTestCasesByStatus(options.warn_unused, options.flaky_tests, options.slow_tests, options.pass_fail_tests) if options.cat: verbose.PrintTestSource(s.tests) continue variant_flags = [VARIANT_FLAGS[var] for var in VARIANTS] s.tests = [ t.CopyAddingFlags(v) for t in s.tests for v in s.VariantFlags(t, variant_flags) ] s.tests = ShardTests(s.tests, options.shard_count, options.shard_run) num_tests += len(s.tests) for t in s.tests: t.id = test_id test_id += 1 if options.cat: return 0 # We're done here. if options.report: verbose.PrintReport(all_tests) if num_tests == 0: print "No tests to run." return 0 # Run the tests, either locally or distributed on the network. start_time = time.time() progress_indicator = progress.PROGRESS_INDICATORS[options.progress]() if options.junitout: progress_indicator = progress.JUnitTestProgressIndicator( progress_indicator, options.junitout, options.junittestsuite) if options.json_test_results: progress_indicator = progress.JsonTestProgressIndicator( progress_indicator, options.json_test_results, arch, mode) run_networked = not options.no_network if not run_networked: print("Network distribution disabled, running tests locally.") elif utils.GuessOS() != "linux": print("Network distribution is only supported on Linux, sorry!") run_networked = False peers = [] if run_networked: peers = network_execution.GetPeers() if not peers: print("No connection to distribution server; running tests locally.") run_networked = False elif len(peers) == 1: print("No other peers on the network; running tests locally.") run_networked = False elif num_tests <= 100: print("Less than 100 tests, running them locally.") run_networked = False if run_networked: runner = network_execution.NetworkedRunner(suites, progress_indicator, ctx, peers, workspace) else: runner = execution.Runner(suites, progress_indicator, ctx) exit_code = runner.Run(options.j) overall_duration = time.time() - start_time if options.time: verbose.PrintTestDurations(suites, overall_duration) return exit_code if __name__ == "__main__": sys.exit(Main())
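A quick illustration of the round-robin split implemented by ShardTests() above (test names invented): shard N of M, 1-based, receives every M-th test starting at index N-1.

tests = ['t1', 't2', 't3', 't4', 't5']
assert ShardTests(tests, 2, 1) == ['t1', 't3', 't5']   # shard 1 of 2
assert ShardTests(tests, 2, 2) == ['t2', 't4']         # shard 2 of 2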
agpl-3.0
ahmadiga/min_edx
common/djangoapps/enrollment/views.py
14
28709
""" The Enrollment API Views should be simple, lean HTTP endpoints for API access. This should consist primarily of authentication, request validation, and serialization. """ import logging from django.core.exceptions import ObjectDoesNotExist from django.utils.decorators import method_decorator from opaque_keys import InvalidKeyError from course_modes.models import CourseMode from lms.djangoapps.commerce.utils import audit_log from openedx.core.djangoapps.user_api.preferences.api import update_email_opt_in from openedx.core.lib.api.permissions import ApiKeyHeaderPermission, ApiKeyHeaderPermissionIsAuthenticated from rest_framework import status from rest_framework.response import Response from rest_framework.throttling import UserRateThrottle from rest_framework.views import APIView from opaque_keys.edx.keys import CourseKey from embargo import api as embargo_api from cors_csrf.authentication import SessionAuthenticationCrossDomainCsrf from cors_csrf.decorators import ensure_csrf_cookie_cross_domain from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from util.disable_rate_limit import can_disable_rate_limit from enrollment import api from enrollment.errors import ( CourseNotFoundError, CourseEnrollmentError, CourseModeNotFoundError, CourseEnrollmentExistsError ) from student.auth import user_has_role from student.models import User from student.roles import CourseStaffRole, GlobalStaff log = logging.getLogger(__name__) REQUIRED_ATTRIBUTES = { "credit": ["credit:provider_id"], } class EnrollmentCrossDomainSessionAuth(SessionAuthenticationAllowInactiveUser, SessionAuthenticationCrossDomainCsrf): """Session authentication that allows inactive users and cross-domain requests. """ pass class ApiKeyPermissionMixIn(object): """ This mixin is used to provide a convenience function for doing individual permission checks for the presence of API keys. """ def has_api_key_permissions(self, request): """ Checks to see if the request was made by a server with an API key. Args: request (Request): the request being made into the view Return: True if the request has been made with a valid API key False otherwise """ return ApiKeyHeaderPermission().has_permission(request, self) class EnrollmentUserThrottle(UserRateThrottle, ApiKeyPermissionMixIn): """Limit the number of requests users can make to the enrollment API.""" rate = '40/minute' def allow_request(self, request, view): return self.has_api_key_permissions(request) or super(EnrollmentUserThrottle, self).allow_request(request, view) @can_disable_rate_limit class EnrollmentView(APIView, ApiKeyPermissionMixIn): """ **Use Case** Get the user's enrollment status for a course. **Example Request** GET /api/enrollment/v1/enrollment/{username},{course_id} **Response Values** If the request for information about the user is successful, an HTTP 200 "OK" response is returned. The HTTP 200 response has the following values. * course_details: A collection that includes the following values. * course_end: The date and time when the course closes. If null, the course never ends. * course_id: The unique identifier for the course. * course_modes: An array of data about the enrollment modes supported for the course. If the request uses the parameter include_expired=1, the array also includes expired enrollment modes. Each enrollment mode collection includes the following values. * currency: The currency of the listed prices. * description: A description of this mode. 
* expiration_datetime: The date and time after which users cannot enroll in the course in this mode. * min_price: The minimum price for which a user can enroll in this mode. * name: The full name of the enrollment mode. * slug: The short name for the enrollment mode. * suggested_prices: A list of suggested prices for this enrollment mode. * course_end: The date and time at which the course closes. If null, the course never ends. * course_start: The date and time when the course opens. If null, the course opens immediately when it is created. * enrollment_end: The date and time after which users cannot enroll for the course. If null, the enrollment period never ends. * enrollment_start: The date and time when users can begin enrolling in the course. If null, enrollment opens immediately when the course is created. * invite_only: A value indicating whether students must be invited to enroll in the course. Possible values are true or false. * created: The date the user account was created. * is_active: Whether the enrollment is currently active. * mode: The enrollment mode of the user in this course. * user: The ID of the user. """ authentication_classes = OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser permission_classes = ApiKeyHeaderPermissionIsAuthenticated, throttle_classes = EnrollmentUserThrottle, # Since the course about page on the marketing site uses this API to auto-enroll users, # we need to support cross-domain CSRF. @method_decorator(ensure_csrf_cookie_cross_domain) def get(self, request, course_id=None, username=None): """Create, read, or update enrollment information for a user. HTTP Endpoint for all CRUD operations for a user course enrollment. Allows creation, reading, and updates of the current enrollment for a particular course. Args: request (Request): To get current course enrollment information, a GET request will return information for the current user and the specified course. course_id (str): URI element specifying the course location. Enrollment information will be returned, created, or updated for this particular course. username (str): The username associated with this enrollment request. Return: A JSON serialized representation of the course enrollment. """ username = username or request.user.username # TODO Implement proper permissions if request.user.username != username and not self.has_api_key_permissions(request) \ and not request.user.is_superuser: # Return a 404 instead of a 403 (Unauthorized). If one user is looking up # other users, do not let them deduce the existence of an enrollment. return Response(status=status.HTTP_404_NOT_FOUND) try: return Response(api.get_enrollment(username, course_id)) except CourseEnrollmentError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": ( u"An error occurred while retrieving enrollments for user " u"'{username}' in course '{course_id}'" ).format(username=username, course_id=course_id) } ) @can_disable_rate_limit class EnrollmentCourseDetailView(APIView): """ **Use Case** Get enrollment details for a course. Response values include the course schedule and enrollment modes supported by the course. Use the parameter include_expired=1 to include expired enrollment modes in the response. **Note:** Getting enrollment details for a course does not require authentication. 
**Example Requests** GET /api/enrollment/v1/course/{course_id} GET /api/enrollment/v1/course/{course_id}?include_expired=1 **Response Values** If the request is successful, an HTTP 200 "OK" response is returned along with a collection of course enrollments for the user or for the newly created enrollment. Each course enrollment contains the following values. * course_end: The date and time when the course closes. If null, the course never ends. * course_id: The unique identifier for the course. * course_modes: An array of data about the enrollment modes supported for the course. If the request uses the parameter include_expired=1, the array also includes expired enrollment modes. Each enrollment mode collection includes the following values. * currency: The currency of the listed prices. * description: A description of this mode. * expiration_datetime: The date and time after which users cannot enroll in the course in this mode. * min_price: The minimum price for which a user can enroll in this mode. * name: The full name of the enrollment mode. * slug: The short name for the enrollment mode. * suggested_prices: A list of suggested prices for this enrollment mode. * course_start: The date and time when the course opens. If null, the course opens immediately when it is created. * enrollment_end: The date and time after which users cannot enroll for the course. If null, the enrollment period never ends. * enrollment_start: The date and time when users can begin enrolling in the course. If null, enrollment opens immediately when the course is created. * invite_only: A value indicating whether students must be invited to enroll in the course. Possible values are true or false. """ authentication_classes = [] permission_classes = [] throttle_classes = EnrollmentUserThrottle, def get(self, request, course_id=None): """Read enrollment information for a particular course. HTTP Endpoint for retrieving course level enrollment information. Args: request (Request): To get current course enrollment information, a GET request will return information for the specified course. course_id (str): URI element specifying the course location. Enrollment information will be returned. Return: A JSON serialized representation of the course enrollment details. """ try: return Response(api.get_course_enrollment_details(course_id, bool(request.GET.get('include_expired', '')))) except CourseNotFoundError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": ( u"No course found for course ID '{course_id}'" ).format(course_id=course_id) } ) @can_disable_rate_limit class EnrollmentListView(APIView, ApiKeyPermissionMixIn): """ **Use Cases** * Get a list of all course enrollments for the currently signed in user. * Enroll the currently signed in user in a course. Currently a user can use this command only to enroll the user in honor mode. If honor mode is not supported for the course, the request fails and returns the available modes. This command can use a server-to-server call to enroll a user in other modes, such as "verified", "professional", or "credit". If the mode is not supported for the course, the request will fail and return the available modes. You can include other parameters as enrollment attributes for a specific course mode. For example, for credit mode, you can include the following parameters to specify the credit provider attribute. 
* namespace: credit
            * name: provider_id
            * value: institution_name

    **Example Requests**

        GET /api/enrollment/v1/enrollment

        POST /api/enrollment/v1/enrollment {
            "mode": "credit",
            "course_details": {"course_id": "edX/DemoX/Demo_Course"},
            "enrollment_attributes": [{"namespace": "credit", "name": "provider_id", "value": "hogwarts"}]
        }

    **POST Parameters**

        A POST request can include the following parameters.

        * course_details: A collection that includes the following
          information.

            * course_id: The unique identifier for the course.

        * email_opt_in: Optional. A Boolean value that indicates whether
          the user wants to receive email from the organization that runs
          this course.

        * enrollment_attributes: A dictionary that contains the following
          values.

            * namespace: Namespace of the attribute
            * name: Name of the attribute
            * value: Value of the attribute

        * is_active: Optional. A Boolean value that indicates whether the
          enrollment is active. Only server-to-server requests can
          deactivate an enrollment.

        * mode: Optional. The course mode for the enrollment. Individual
          users cannot upgrade their enrollment mode from "honor". Only
          server-to-server requests can enroll with other modes.

        * user: Optional. The username of the currently logged in user.
          You cannot use the command to enroll a different user.

    **GET Response Values**

        If an unspecified error occurs when the user tries to obtain a
        learner's enrollments, the request returns an HTTP 400 "Bad Request"
        response.

        If the user does not have permission to view enrollment data for
        the requested learner, the request returns an HTTP 404 "Not Found"
        response.

    **POST Response Values**

        If the user does not specify a course ID, the specified course does
        not exist, or the is_active status is invalid, the request returns
        an HTTP 400 "Bad Request" response.

        If a user who is not an admin tries to upgrade a learner's course
        mode, the request returns an HTTP 403 "Forbidden" response.

        If the specified user does not exist, the request returns an HTTP
        406 "Not Acceptable" response.

    **GET and POST Response Values**

        If the request is successful, an HTTP 200 "OK" response is returned
        along with a collection of course enrollments for the user or for
        the newly created enrollment. Each course enrollment contains the
        following values.

        * course_details: A collection that includes the following values.

            * course_end: The date and time when the course closes. If
              null, the course never ends.

            * course_id: The unique identifier for the course.

            * course_modes: An array of data about the enrollment modes
              supported for the course. If the request uses the parameter
              include_expired=1, the array also includes expired enrollment
              modes.

              Each enrollment mode collection includes the following values.

                * currency: The currency of the listed prices.

                * description: A description of this mode.

                * expiration_datetime: The date and time after which users
                  cannot enroll in the course in this mode.

                * min_price: The minimum price for which a user can enroll
                  in this mode.

                * name: The full name of the enrollment mode.

                * slug: The short name for the enrollment mode.
* suggested_prices: A list of suggested prices for this enrollment mode. * course_start: The date and time when the course opens. If null, the course opens immediately when it is created. * enrollment_end: The date and time after which users cannot enroll for the course. If null, the enrollment period never ends. * enrollment_start: The date and time when users can begin enrolling in the course. If null, enrollment opens immediately when the course is created. * invite_only: A value indicating whether students must be invited to enroll in the course. Possible values are true or false. * created: The date the user account was created. * is_active: Whether the enrollment is currently active. * mode: The enrollment mode of the user in this course. * user: The username of the user. """ authentication_classes = OAuth2AuthenticationAllowInactiveUser, EnrollmentCrossDomainSessionAuth permission_classes = ApiKeyHeaderPermissionIsAuthenticated, throttle_classes = EnrollmentUserThrottle, # Since the course about page on the marketing site # uses this API to auto-enroll users, we need to support # cross-domain CSRF. @method_decorator(ensure_csrf_cookie_cross_domain) def get(self, request): """Gets a list of all course enrollments for a user. Returns a list for the currently logged in user, or for the user named by the 'user' GET parameter. If the username does not match that of the currently logged in user, only courses for which the currently logged in user has the Staff or Admin role are listed. As a result, a course team member can find out which of his or her own courses a particular learner is enrolled in. Only the Staff or Admin role (granted on the Django administrative console as the staff or instructor permission) in individual courses gives the requesting user access to enrollment data. Permissions granted at the organizational level do not give a user access to enrollment data for all of that organization's courses. Users who have the global staff permission can access all enrollment data for all courses. """ username = request.GET.get('user', request.user.username) try: enrollment_data = api.get_enrollments(username) except CourseEnrollmentError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": ( u"An error occurred while retrieving enrollments for user '{username}'" ).format(username=username) } ) if username == request.user.username or GlobalStaff().has_user(request.user) or \ self.has_api_key_permissions(request): return Response(enrollment_data) filtered_data = [] for enrollment in enrollment_data: course_key = CourseKey.from_string(enrollment["course_details"]["course_id"]) if user_has_role(request.user, CourseStaffRole(course_key)): filtered_data.append(enrollment) return Response(filtered_data) def post(self, request): """Enrolls the currently logged-in user in a course. Server-to-server calls may deactivate or modify the mode of existing enrollments. All other requests go through `add_enrollment()`, which allows creation of new and reactivation of old enrollments. """ # Get the User, Course ID, and Mode from the request. 
username = request.data.get('user', request.user.username) course_id = request.data.get('course_details', {}).get('course_id') if not course_id: return Response( status=status.HTTP_400_BAD_REQUEST, data={"message": u"Course ID must be specified to create a new enrollment."} ) try: course_id = CourseKey.from_string(course_id) except InvalidKeyError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": u"No course '{course_id}' found for enrollment".format(course_id=course_id) } ) mode = request.data.get('mode', CourseMode.HONOR) has_api_key_permissions = self.has_api_key_permissions(request) # Check that the user specified is either the same user, or this is a server-to-server request. if not username: username = request.user.username if username != request.user.username and not has_api_key_permissions: # Return a 404 instead of a 403 (Unauthorized). If one user is looking up # other users, do not let them deduce the existence of an enrollment. return Response(status=status.HTTP_404_NOT_FOUND) if mode != CourseMode.HONOR and not has_api_key_permissions: return Response( status=status.HTTP_403_FORBIDDEN, data={ "message": u"User does not have permission to create enrollment with mode [{mode}].".format( mode=mode ) } ) try: # Lookup the user, instead of using request.user, since request.user may not match the username POSTed. user = User.objects.get(username=username) except ObjectDoesNotExist: return Response( status=status.HTTP_406_NOT_ACCEPTABLE, data={ 'message': u'The user {} does not exist.'.format(username) } ) embargo_response = embargo_api.get_embargo_response(request, course_id, user) if embargo_response: return embargo_response try: is_active = request.data.get('is_active') # Check if the requested activation status is None or a Boolean if is_active is not None and not isinstance(is_active, bool): return Response( status=status.HTTP_400_BAD_REQUEST, data={ 'message': (u"'{value}' is an invalid enrollment activation status.").format(value=is_active) } ) enrollment_attributes = request.data.get('enrollment_attributes') enrollment = api.get_enrollment(username, unicode(course_id)) mode_changed = enrollment and mode is not None and enrollment['mode'] != mode active_changed = enrollment and is_active is not None and enrollment['is_active'] != is_active missing_attrs = [] if enrollment_attributes: actual_attrs = [ u"{namespace}:{name}".format(**attr) for attr in enrollment_attributes ] missing_attrs = set(REQUIRED_ATTRIBUTES.get(mode, [])) - set(actual_attrs) if has_api_key_permissions and (mode_changed or active_changed): if mode_changed and active_changed and not is_active: # if the requester wanted to deactivate but specified the wrong mode, fail # the request (on the assumption that the requester had outdated information # about the currently active enrollment). msg = u"Enrollment mode mismatch: active mode={}, requested mode={}. Won't deactivate.".format( enrollment["mode"], mode ) log.warning(msg) return Response(status=status.HTTP_400_BAD_REQUEST, data={"message": msg}) if len(missing_attrs) > 0: msg = u"Missing enrollment attributes: requested mode={} required attributes={}".format( mode, REQUIRED_ATTRIBUTES.get(mode) ) log.warning(msg) return Response(status=status.HTTP_400_BAD_REQUEST, data={"message": msg}) response = api.update_enrollment( username, unicode(course_id), mode=mode, is_active=is_active, enrollment_attributes=enrollment_attributes ) else: # Will reactivate inactive enrollments. 
response = api.add_enrollment(username, unicode(course_id), mode=mode, is_active=is_active) email_opt_in = request.data.get('email_opt_in', None) if email_opt_in is not None: org = course_id.org update_email_opt_in(request.user, org, email_opt_in) return Response(response) except CourseModeNotFoundError as error: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": ( u"The course mode '{mode}' is not available for course '{course_id}'." ).format(mode=mode, course_id=course_id), "course_details": error.data }) except CourseNotFoundError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": u"No course '{course_id}' found for enrollment".format(course_id=course_id) } ) except CourseEnrollmentExistsError as error: return Response(data=error.enrollment) except CourseEnrollmentError: return Response( status=status.HTTP_400_BAD_REQUEST, data={ "message": ( u"An error occurred while creating the new course enrollment for user " u"'{username}' in course '{course_id}'" ).format(username=username, course_id=course_id) } ) finally: # Assumes that the ecommerce service uses an API key to authenticate. if has_api_key_permissions: current_enrollment = api.get_enrollment(username, unicode(course_id)) audit_log( 'enrollment_change_requested', course_id=unicode(course_id), requested_mode=mode, actual_mode=current_enrollment['mode'] if current_enrollment else None, requested_activation=is_active, actual_activation=current_enrollment['is_active'] if current_enrollment else None, user_id=user.id )
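# ---------------------------------------------------------------------------
# Illustrative client sketch (not part of this module): a server-to-server
# enrollment call against the endpoint documented above, using `requests`.
# The host, the API key value, and the 'X-Edx-Api-Key' header name are
# assumptions for illustration; the JSON body mirrors the docstring example.
def _example_server_to_server_enroll():
    import json
    import requests
    payload = {
        'user': 'some_learner',
        'mode': 'credit',
        'is_active': True,
        'course_details': {'course_id': 'edX/DemoX/Demo_Course'},
        'enrollment_attributes': [
            {'namespace': 'credit', 'name': 'provider_id', 'value': 'hogwarts'},
        ],
    }
    return requests.post(
        'https://lms.example.com/api/enrollment/v1/enrollment',  # assumed host
        data=json.dumps(payload),
        headers={'Content-Type': 'application/json',
                 'X-Edx-Api-Key': 'shared-secret-here'},  # assumed key header
    )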
agpl-3.0
jmcarp/osf.io
scripts/staff_public_regs.py
25
1355
# -*- coding: utf-8 -*- """Get public registrations for staff members. python -m scripts.staff_public_regs """ from collections import defaultdict import logging from modularodm import Q from website.models import Node, User from website.app import init_app logger = logging.getLogger('staff_public_regs') STAFF_GUIDS = [ 'jk5cv', # Jeff 'cdi38', # Brian 'edb8y', # Johanna 'hsey5', # Courtney '5hdme', # Melissa ] def main(): init_app(set_backends=True, routes=False) staff_registrations = defaultdict(list) users = [User.load(each) for each in STAFF_GUIDS] for registration in Node.find(Q('is_registration', 'eq', True) & Q('is_public', 'eq', True)): for user in users: if registration in user.node__contributed: staff_registrations[user._id].append(registration) for uid in staff_registrations: user = User.load(uid) user_regs = staff_registrations[uid] logger.info('{} ({}) on {} Public Registrations:'.format( user.fullname, user._id, len(user_regs)) ) for registration in user_regs: logger.info('\t{} ({}): {}'.format(registration.title, registration._id, registration.absolute_url) ) if __name__ == '__main__': main()
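# Illustrative sketch (not from the OSF codebase): the same modularodm `Q`
# composition used in main() can be narrowed further; 'registered_date'
# below is an assumed field name used only to show how extra filters
# combine with `&`.
def _example_public_registrations_since(year):
    import datetime
    cutoff = datetime.datetime(year, 1, 1)
    return Node.find(
        Q('is_registration', 'eq', True) &
        Q('is_public', 'eq', True) &
        Q('registered_date', 'gte', cutoff)  # assumed field name
    )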
apache-2.0
JGarcia-Panach/odoo
addons/hw_scanner/__openerp__.py
220
1738
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Barcode Scanner Hardware Driver', 'version': '1.0', 'category': 'Hardware Drivers', 'sequence': 6, 'summary': 'Hardware Driver for Barcode Scanners', 'website': 'https://www.odoo.com/page/point-of-sale', 'description': """ Barcode Scanner Hardware Driver ================================ This module allows the web client to access a remotely installed barcode scanner, and is used by the posbox to provide barcode scanner support to the point of sale module. """, 'author': 'OpenERP SA', 'depends': ['hw_proxy'], 'external_dependencies': {'python': ['evdev']}, 'test': [ ], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
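# Illustrative note (kept as a comment so the manifest still evaluates as a
# single dict expression): the 'evdev' dependency above hints at how such a
# driver reads scanners, which present as keyboard-like input devices. A
# generic evdev read loop looks roughly like the sketch below; the device
# path is a placeholder, and this is not the module's actual implementation.
#
#     from evdev import InputDevice, categorize, ecodes
#
#     device = InputDevice('/dev/input/event3')  # placeholder device node
#     for event in device.read_loop():
#         if event.type == ecodes.EV_KEY:
#             key = categorize(event)
#             if key.keystate == key.key_down:
#                 print(key.keycode)  # e.g. 'KEY_1' per scanned digit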
agpl-3.0
pjdufour/petrarch
petrarch/tests/test_petrarch.py
2
8081
from petrarch import petrarch, PETRglobals, PETRreader, utilities config = petrarch.utilities._get_data('data/config/', 'PETR_config.ini') print("reading config") petrarch.PETRreader.parse_Config(config) print("reading dicts") petrarch.read_dictionaries() petrarch.start_logger() def test_version(): assert petrarch.get_version() == "0.4.0" def test_read(): assert "RUSSIA" in petrarch.PETRglobals.ActorDict ###################################### # # Full sentence tests # ###################################### def test_simple(): text = "Germany invaded France" parse = "(ROOT (S (NP (NNP Germany)) (VP (VBD invaded) (NP (NNP France)))))" parsed = utilities._format_parsed_str(parse) dict = {u'test123': {u'sents': {u'0': {u'content': text, u'parsed': parse}}, u'meta': {u'date': u'20010101'}}} return_dict = petrarch.do_coding(dict,None) print(return_dict) assert return_dict['test123']['sents']['0']['events'] == [['DEU','FRA','192']] def test_simple2(): text = "Germany arrested France" parse = "(ROOT (S (NP (NNP Germany)) (VP (VBD arrested) (NP (NNP France)))))" parsed = utilities._format_parsed_str(parse) dict = {u'test123': {u'sents': {u'0': {u'content': text, u'parsed': parse}}, u'meta': {u'date': u'20010101'}}} return_dict = petrarch.do_coding(dict,None) print(return_dict) assert return_dict['test123']['sents']['0']['events'] == [['DEU','FRA','173']] def test_complex1(): text = "A Tunisian court has jailed a Nigerian student for two years for helping young militants join an armed Islamic group in Lebanon, his lawyer said Wednesday." parse = """( (S (S (NP (DT A) (NNP Tunisian) (NN court)) (VP (AUXZ has) (VP (VBN jailed) (NP (DT a) (JJ Nigerian) (NN student)) (PP (IN for) (NP (CD two) (NNS years))) (PP (IN for) (S (VP (VBG helping) (S (NP (JJ young) (NNS militants)) (VP (VB join) (NP (NP (DT an) (JJ armed) (JJ Islamic) (NN group)) (PP (IN in) (NP (NNP Lebanon)))))))))))) (, ,) (NP (PRP$ his) (NN lawyer)) (VP (VBD said) (NP (NNP Wednesday))) (. .)))""" parsed = utilities._format_parsed_str(parse) dict = {u'test123': {u'sents': {u'0': {u'content': text, u'parsed': parse}}, u'meta': {u'date': u'20010101'}}} return_dict = petrarch.do_coding(dict,None) assert return_dict['test123']['sents']['0']['events'] == [['TUNJUD','NGAEDU','173']] def test_nested(): # In PETRARCH 0.4.0 this event should only code "US claimed that ISIL." # Nested sentences are intentionally not coded, but this can be changed if needed. 
# (this would be changed at the end of check_verbs where the index is reevaluated) text = "The US claimed that ISIL had attacked Iraq and taken the city of Mosul" parse = """(ROOT (S (NP (DT The) (NNP US)) (VP (VBD claimed) (SBAR (IN that) (S (NP (NNP ISIL)) (VP (VBD had) (VP (VP (VBN attacked) (NP (NNP Iraq))) (CC and) (VP (VBN taken) (NP (NP (DT the) (NN city)) (PP (IN of) (NP (NNP Mosul))))))))))))""" parsed = utilities._format_parsed_str(parse) dict = {u'test123': {u'sents': {u'0': {u'content': text, u'parsed': parse}}, u'meta': {u'date': u'20150101'}}} return_dict = petrarch.do_coding(dict,None) assert return_dict['test123']['sents']['0']['events'] == [['USA','IMGMUSISI','112']] def test_actor_order(): text = "US troops from Syria have just invaded Northern Iraq" parse = """(ROOT (S (NP (NP (NNP US) (NNS troops)) (PP (IN from) (NP (NNP Syria)))) (VP (VBP have) (ADVP (RB just)) (VP (VBN invaded) (NP (JJ Northern) (NNP Iraq))))))""" parsed = utilities._format_parsed_str(parse) dict = {u'test123': {u'sents': {u'0': {u'content': text, u'parsed': parse}}, u'meta': {u'date': u'20150101'}}} return_dict = petrarch.do_coding(dict,None) assert return_dict['test123']['sents']['0']['events'] == [['USAMIL','IRQ','192']] ######################################### # # Individual function tests # ######################################### def test_check_balance(): petrarch.check_balance(['(','~']) try: petrarch.check_balance(['(','~','~']) assert False except: assert True list = [u'(', u'(S', u'(NP1', u'(NE', u'---', u'A', u'500-PAGE', u'REPORT', u'~NE', u'(VP1', u'(VBN', u'RELEASED', u'~VBN', u'(NE', u'---', u'TUESDAY', u'~NE', u'(PP', u'(IN', u'IN', u'~IN', u'(NE', u'---', u'THE', u'RWANDAN', u'CAPITAL', u'~NE', u'~PP', u'~VP1', u'~NP1', u'(VP2', u'(VP3', u'(VBD', u'ALLEGED', u'~VBD', u'(SBAR', u'(SBAR', u'(IN', u'THAT', u'~IN', u'(S', u'(NE', u'---', u'FRANCE', u'~NE', u'(VP4', u'(VBD', u'WAS', u'~VBD', u'(ADJP', u'(JJ', u'AWARE', u'~JJ', u'(PP', u'(IN', u'OF', u'~IN', u'(NE', u'---', u'PREPARATIONS', u'FOR', u'THE', u'GENOCIDE', u'~NE', u'~PP', u'~ADJP', u'~VP4', u'~S', u'~SBAR', u'(,', u',', u'~,', u'(CCP', u'AND', u'~CCP', u'(SBAR', u'(IN', u'THAT', u'~IN', u'(S', u'(NE', u'---', u'THE', u'FRENCH', u'MILITARY', u'IN', u'RWANDA', u'~NE', u'(VP5', u'(VBD', u'CONTRIBUTED', u'~VBD', u'(PP', u'(TO', u'TO', u'~TO', u'(S', u'(VP6', u'(VBG', u'PLANNING', u'~VBG', u'(NE', u'---', u'THE', u'MASSACRES', u'~NE', u'~VP6', u'~S', u'~PP', u'~VP5', u'~S', u'~SBAR', u'~SBAR', u'~VP3', u'(CCP', u'AND', u'~CCP', u'(VP7', u'(ADVP', u'(RB', u'ACTIVELY', u'~RB', u'~ADVP', u'(VBD', u'TOOK', u'~VBD', u'(NE', u'---', u'PART', u'IN', u'THE', u'KILLING', u'~NE', u'~VP7', u'~VP2', u'(.', u'.', u'~.', u'~S', u'~'] petrarch.check_balance(list) try: petrarch.check_balance(list[1:]) assert False except: assert True def test_read_treebank(): list = [u'(', u'(S', u'(NP1', u'(NE', u'---', u'A', u'500-PAGE', u'REPORT', u'~NE', u'(VP1', u'(VBN', u'RELEASED', u'~VBN', u'(NE', u'---', u'TUESDAY', u'~NE', u'(PP', u'(IN', u'IN', u'~IN', u'(NE', u'---', u'THE', u'RWANDAN', u'CAPITAL', u'~NE', u'~PP', u'~VP1', u'~NP1', u'(VP2', u'(VP3', u'(VBD', u'ALLEGED', u'~VBD', u'(SBAR', u'(SBAR', u'(IN', u'THAT', u'~IN', u'(S', u'(NE', u'---', u'FRANCE', u'~NE', u'(VP4', u'(VBD', u'WAS', u'~VBD', u'(ADJP', u'(JJ', u'AWARE', u'~JJ', u'(PP', u'(IN', u'OF', u'~IN', u'(NE', u'---', u'PREPARATIONS', u'FOR', u'THE', u'GENOCIDE', u'~NE', u'~PP', u'~ADJP', u'~VP4', u'~S', u'~SBAR', u'(,', u',', u'~,', u'(CCP', u'AND', u'~CCP', u'(SBAR', u'(IN', u'THAT', 
u'~IN', u'(S', u'(NE', u'---', u'THE', u'FRENCH', u'MILITARY', u'IN', u'RWANDA', u'~NE', u'(VP5', u'(VBD', u'CONTRIBUTED', u'~VBD', u'(PP', u'(TO', u'TO', u'~TO', u'(S', u'(VP6', u'(VBG', u'PLANNING', u'~VBG', u'(NE', u'---', u'THE', u'MASSACRES', u'~NE', u'~VP6', u'~S', u'~PP', u'~VP5', u'~S', u'~SBAR', u'~SBAR', u'~VP3', u'(CCP', u'AND', u'~CCP', u'(VP7', u'(ADVP', u'(RB', u'ACTIVELY', u'~RB', u'~ADVP', u'(VBD', u'TOOK', u'~VBD', u'(NE', u'---', u'PART', u'IN', u'THE', u'KILLING', u'~NE', u'~VP7', u'~VP2', u'(.', u'.', u'~.', u'~S', u'~'] sent = """( (S (NP (NP (DT A) (JJ 500-page) (NN report)) (VP (VBN released) (NP (NNP Tuesday)) (PP (IN in) (NP (DT the) (NNP Rwandan) (NN capital))))) (VP (VP (VBD alleged) (SBAR (SBAR (IN that) (S (NP (NNP France)) (VP (VBD was) (ADJP (JJ aware) (PP (IN of) (NP (NP (NNS preparations)) (PP (IN for) (NP (DT the) (NN genocide))))))))) (, ,) (CC and) (SBAR (IN that) (S (NP (NP (DT the) (JJ French) (NN military)) (PP (IN in) (NP (NNP Rwanda)))) (VP (VBD contributed) (PP (TO to) (S (VP (VBG planning) (NP (DT the) (NNS massacres)))))))))) (CC and) (VP (ADVP (RB actively)) (VBD took) (NP (NP (NN part)) (PP (IN in) (NP (DT the) (NN killing)))))) (. .)))""" sent = utilities._format_parsed_str(sent) plist, pstart = petrarch.read_TreeBank(sent) assert plist == list and pstart == 2
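# Illustrative refactoring sketch (not part of the test suite): the nested
# event dictionary above is rebuilt by hand in every full-sentence test; a
# helper makes the schema that do_coding() consumes explicit.
def _make_event_dict(text, parse, date=u'20010101', story_id=u'test123'):
    return {
        story_id: {
            u'sents': {u'0': {u'content': text, u'parsed': parse}},
            u'meta': {u'date': date},
        }
    }

# Usage, mirroring test_simple():
#   event_dict = _make_event_dict("Germany invaded France",
#       "(ROOT (S (NP (NNP Germany)) (VP (VBD invaded) (NP (NNP France)))))")
#   return_dict = petrarch.do_coding(event_dict, None)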
mit
MarceloCorpucci/lettucetutorial
specs/glue/register_fields_steps.py
1
2016
__author__ = 'corpu'

from lettuce import step, world
import webtest


class RegisterFieldSteps():

    # Background steps
    @step(u'Dado que la aplicacion ACL esta en "([^"]*)"')
    def dado_que_la_aplicacion_acl_esta_en_group1(step, url):
        world.webApp = webtest.TestApp(url)

    @step(u'Cuando accedo a la misma')
    def cuando_accedo_a_la_misma(step):
        world.response = world.webApp.get('/')

    # Opcion de registracion disponible
    @step(u'Entonces debe aparecer en pantalla la opcion de registracion')
    def entonces_debe_aparecer_en_pantalla_la_opcion_de_registracion(step):
        dom = str(world.response.html)
        assert '<li><a href="/register/" id="register_btn">Register</a></li>' in dom

    # Campos disponibles para registrarse
    @step(u'Entonces deben aparecer los campos de "([^"]*)", "([^"]*)", "([^"]*)"')
    def entonces_deben_aparecer_los_campos_de_group1_group2_group3(step, email_label, pass1_label, pass2_label):
        world.register_response = world.webApp.get('/register/')
        world.dom = str(world.register_response.body)
        assert email_label in world.dom
        assert '<input class="form-control" id="email" name="email" placeholder="Email address" type="text" value="">' in world.dom
        assert pass1_label in world.dom
        assert '<input class="form-control" id="password" name="password" placeholder="Password" type="password" value="">' in world.dom
        assert pass2_label in world.dom
        assert '<input class="form-control" id="password2" name="password2" placeholder="Repeat password" type="password" value="">' in world.dom

    @step(u'Y el boton "([^"]*)" para realizar la registracion')
    def y_el_boton_group1_para_realizar_la_registracion(step, register_button):
        assert register_button in world.dom
        assert '<button type="submit" class="btn btn-success">Register</button>' in world.dom
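# Illustrative reconstruction (not a file from this repository): a Spanish
# feature file these step definitions would bind to might read as follows.
# The labels and URL are assumptions chosen to match the step regexes above;
# the Spanish step text must stay as-is because lettuce matches it verbatim.
#
#   Característica: Registro de usuarios
#
#     Antecedentes:
#       Dado que la aplicacion ACL esta en "http://localhost:5000"
#       Cuando accedo a la misma
#
#     Escenario: Campos disponibles para registrarse
#       Entonces deben aparecer los campos de "Email address", "Password", "Repeat password"
#       Y el boton "Register" para realizar la registracion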
gpl-2.0
jgoclawski/django
tests/generic_inline_admin/tests.py
154
22749
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime from django.contrib import admin from django.contrib.admin.sites import AdminSite from django.contrib.auth.models import User from django.contrib.contenttypes.admin import GenericTabularInline from django.contrib.contenttypes.forms import generic_inlineformset_factory from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.forms.formsets import DEFAULT_MAX_NUM from django.forms.models import ModelForm from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from .admin import MediaInline, MediaPermanentInline, site as admin_site from .models import Category, Episode, EpisodePermanent, Media, PhoneNumber class TestDataMixin(object): @classmethod def setUpTestData(cls): # password = "secret" User.objects.create( pk=100, username='super', first_name='Super', last_name='User', email='[email protected]', password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True, is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10) ) # Set DEBUG to True to ensure {% include %} will raise exceptions. # That is how inlines are rendered and #9498 will bubble up if it is an issue. @override_settings( DEBUG=True, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF="generic_inline_admin.urls", ) class GenericAdminViewTest(TestDataMixin, TestCase): def setUp(self): self.client.login(username='super', password='secret') # Can't load content via a fixture (since the GenericForeignKey # relies on content type IDs, which will vary depending on what # other tests have been run), thus we do it here. e = Episode.objects.create(name='This Week in Django') self.episode_pk = e.pk m = Media(content_object=e, url='http://example.com/podcast.mp3') m.save() self.mp3_media_pk = m.pk m = Media(content_object=e, url='http://example.com/logo.png') m.save() self.png_media_pk = m.pk def test_basic_add_GET(self): """ A smoke test to ensure GET on the add_view works. """ response = self.client.get(reverse('admin:generic_inline_admin_episode_add')) self.assertEqual(response.status_code, 200) def test_basic_edit_GET(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get( reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,)) ) self.assertEqual(response.status_code, 200) def test_basic_add_POST(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "This Week in Django", # inline data "generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1", "generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0", "generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:generic_inline_admin_episode_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_basic_edit_POST(self): """ A smoke test to ensure POST on edit_view works. 
""" post_data = { "name": "This Week in Django", # inline data "generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "3", "generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "2", "generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0", "generic_inline_admin-media-content_type-object_id-0-id": "%d" % self.mp3_media_pk, "generic_inline_admin-media-content_type-object_id-0-url": "http://example.com/podcast.mp3", "generic_inline_admin-media-content_type-object_id-1-id": "%d" % self.png_media_pk, "generic_inline_admin-media-content_type-object_id-1-url": "http://example.com/logo.png", "generic_inline_admin-media-content_type-object_id-2-id": "", "generic_inline_admin-media-content_type-object_id-2-url": "", } url = reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,)) response = self.client.post(url, post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_generic_inline_formset(self): EpisodeMediaFormSet = generic_inlineformset_factory(Media, can_delete=False, exclude=['description', 'keywords'], extra=3) e = Episode.objects.get(name='This Week in Django') # Works with no queryset formset = EpisodeMediaFormSet(instance=e) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.mp3_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="url" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>') # A queryset can be used to alter display ordering formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.order_by('url')) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label 
for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.mp3_media_pk) self.assertHTMLEqual(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="url" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>') # Works with a queryset that omits items formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.filter(url__endswith=".png")) self.assertEqual(len(formset.forms), 4) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>') def test_generic_inline_formset_factory(self): # Regression test for #10522. inline_formset = generic_inlineformset_factory(Media, exclude=('url',)) # Regression test for #12340. e = Episode.objects.get(name='This Week in Django') formset = inline_formset(instance=e) self.assertTrue(formset.get_queryset().ordered) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF="generic_inline_admin.urls") class GenericInlineAdminParametersTest(TestDataMixin, TestCase): def setUp(self): self.client.login(username='super', password='secret') self.factory = RequestFactory() def _create_object(self, model): """ Create a model with an attached Media object via GFK. We can't load content via a fixture (since the GenericForeignKey relies on content type IDs, which will vary depending on what other tests have been run), thus we do it here. """ e = model.objects.create(name='This Week in Django') Media.objects.create(content_object=e, url='http://example.com/podcast.mp3') return e def test_no_param(self): """ With one initial form, extra (default) at 3, there should be 4 forms. 
""" e = self._create_object(Episode) response = self.client.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) formset = response.context['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 4) self.assertEqual(formset.initial_form_count(), 1) def test_extra_param(self): """ With extra=0, there should be one form. """ class ExtraInline(GenericTabularInline): model = Media extra = 0 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [ExtraInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 1) self.assertEqual(formset.initial_form_count(), 1) def testMaxNumParam(self): """ With extra=5 and max_num=2, there should be only 2 forms. """ class MaxNumInline(GenericTabularInline): model = Media extra = 5 max_num = 2 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [MaxNumInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 2) self.assertEqual(formset.initial_form_count(), 1) def test_min_num_param(self): """ With extra=3 and min_num=2, there should be five forms. """ class MinNumInline(GenericTabularInline): model = Media extra = 3 min_num = 2 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [MinNumInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 5) self.assertEqual(formset.initial_form_count(), 1) def test_get_extra(self): class GetExtraInline(GenericTabularInline): model = Media extra = 4 def get_extra(self, request, obj): return 2 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [GetExtraInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.extra, 2) def test_get_min_num(self): class GetMinNumInline(GenericTabularInline): model = Media min_num = 5 def get_min_num(self, request, obj): return 2 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [GetMinNumInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.min_num, 2) def test_get_max_num(self): class GetMaxNumInline(GenericTabularInline): model = 
Media extra = 5 def get_max_num(self, request, obj): return 2 modeladmin = admin.ModelAdmin(Episode, admin_site) modeladmin.inlines = [GetMaxNumInline] e = self._create_object(Episode) request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(e.pk)) formset = response.context_data['inline_admin_formsets'][0].formset self.assertEqual(formset.max_num, 2) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF="generic_inline_admin.urls") class GenericInlineAdminWithUniqueTogetherTest(TestDataMixin, TestCase): def setUp(self): self.client.login(username='super', password='secret') def test_add(self): category_id = Category.objects.create(name='male').pk post_data = { "name": "John Doe", # inline data "generic_inline_admin-phonenumber-content_type-object_id-TOTAL_FORMS": "1", "generic_inline_admin-phonenumber-content_type-object_id-INITIAL_FORMS": "0", "generic_inline_admin-phonenumber-content_type-object_id-MAX_NUM_FORMS": "0", "generic_inline_admin-phonenumber-content_type-object_id-0-id": "", "generic_inline_admin-phonenumber-content_type-object_id-0-phone_number": "555-555-5555", "generic_inline_admin-phonenumber-content_type-object_id-0-category": "%s" % category_id, } response = self.client.get(reverse('admin:generic_inline_admin_contact_add')) self.assertEqual(response.status_code, 200) response = self.client.post(reverse('admin:generic_inline_admin_contact_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_delete(self): from .models import Contact c = Contact.objects.create(name='foo') PhoneNumber.objects.create( object_id=c.id, content_type=ContentType.objects.get_for_model(Contact), phone_number="555-555-5555", ) response = self.client.post(reverse('admin:generic_inline_admin_contact_delete', args=[c.pk])) self.assertContains(response, 'Are you sure you want to delete') @override_settings(ROOT_URLCONF="generic_inline_admin.urls") class NoInlineDeletionTest(SimpleTestCase): def test_no_deletion(self): inline = MediaPermanentInline(EpisodePermanent, admin_site) fake_request = object() formset = inline.get_formset(fake_request) self.assertFalse(formset.can_delete) class MockRequest(object): pass class MockSuperUser(object): def has_perm(self, perm): return True request = MockRequest() request.user = MockSuperUser() @override_settings(ROOT_URLCONF="generic_inline_admin.urls") class GenericInlineModelAdminTest(SimpleTestCase): def setUp(self): self.site = AdminSite() def test_get_formset_kwargs(self): media_inline = MediaInline(Media, AdminSite()) # Create a formset with default arguments formset = media_inline.get_formset(request) self.assertEqual(formset.max_num, DEFAULT_MAX_NUM) self.assertEqual(formset.can_order, False) # Create a formset with custom keyword arguments formset = media_inline.get_formset(request, max_num=100, can_order=True) self.assertEqual(formset.max_num, 100) self.assertEqual(formset.can_order, True) def test_custom_form_meta_exclude_with_readonly(self): """ Ensure that the custom ModelForm's `Meta.exclude` is respected when used in conjunction with `GenericInlineModelAdmin.readonly_fields` and when no `ModelAdmin.exclude` is defined. 
""" class MediaForm(ModelForm): class Meta: model = Media exclude = ['url'] class MediaInline(GenericTabularInline): readonly_fields = ['description'] form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['keywords', 'id', 'DELETE']) def test_custom_form_meta_exclude(self): """ Ensure that the custom ModelForm's `Meta.exclude` is respected by `GenericInlineModelAdmin.get_formset`, and overridden if `ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined. Refs #15907. """ # First with `GenericInlineModelAdmin` ----------------- class MediaForm(ModelForm): class Meta: model = Media exclude = ['url'] class MediaInline(GenericTabularInline): exclude = ['description'] form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['url', 'keywords', 'id', 'DELETE']) # Then, only with `ModelForm` ----------------- class MediaInline(GenericTabularInline): form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['description', 'keywords', 'id', 'DELETE']) def test_get_fieldsets(self): # Test that get_fieldsets is called when figuring out form fields. # Refs #18681. class MediaForm(ModelForm): class Meta: model = Media fields = '__all__' class MediaInline(GenericTabularInline): form = MediaForm model = Media can_delete = False def get_fieldsets(self, request, obj=None): return [(None, {'fields': ['url', 'description']})] ma = MediaInline(Media, self.site) form = ma.get_formset(None).form self.assertEqual(form._meta.fields, ['url', 'description']) def test_get_formsets_with_inlines_returns_tuples(self): """ Ensure that get_formsets_with_inlines() returns the correct tuples. """ class MediaForm(ModelForm): class Meta: model = Media exclude = ['url'] class MediaInline(GenericTabularInline): form = MediaForm model = Media class AlternateInline(GenericTabularInline): form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ AlternateInline, MediaInline ] ma = EpisodeAdmin(Episode, self.site) inlines = ma.get_inline_instances(request) for (formset, inline), other_inline in zip(ma.get_formsets_with_inlines(request), inlines): self.assertIsInstance(formset, other_inline.get_formset(request).__class__)
bsd-3-clause
kyledewey/z3
scripts/mk_win_dist.py
1
8708
############################################
# Copyright (c) 2012 Microsoft Corporation
#
# Scripts for automatically generating
# Windows distribution zip files.
#
# Author: Leonardo de Moura (leonardo)
############################################
import os
import glob
import re
import getopt
import sys
import shutil
import subprocess
import zipfile
from mk_exception import *
from mk_project import *
import mk_util

BUILD_DIR='build-dist'
BUILD_X64_DIR=os.path.join('build-dist', 'x64')
BUILD_X86_DIR=os.path.join('build-dist', 'x86')
VERBOSE=True
DIST_DIR='dist'
FORCE_MK=False
DOTNET_ENABLED=True
JAVA_ENABLED=True
GIT_HASH=False

def set_verbose(flag):
    global VERBOSE
    VERBOSE = flag

def is_verbose():
    return VERBOSE

def mk_dir(d):
    if not os.path.exists(d):
        os.makedirs(d)

def set_build_dir(path):
    global BUILD_DIR, BUILD_X86_DIR, BUILD_X64_DIR
    BUILD_DIR = path
    BUILD_X86_DIR = os.path.join(path, 'x86')
    BUILD_X64_DIR = os.path.join(path, 'x64')
    mk_dir(BUILD_X86_DIR)
    mk_dir(BUILD_X64_DIR)

def display_help():
    print("mk_win_dist.py: Z3 Windows distribution generator\n")
    print("This script generates the zip files containing executables, dlls, header files for Windows.")
    print("It must be executed from the Z3 root directory.")
    print("\nOptions:")
    print("  -h, --help                    display this message.")
    print("  -s, --silent                  do not print verbose messages.")
    print("  -b <subdir>, --build=<subdir> subdirectory where x86 and x64 Z3 versions will be built (default: build-dist).")
    print("  -f, --force                   force script to regenerate Makefiles.")
    print("  --nodotnet                    do not include .NET bindings in the binary distribution files.")
    print("  --nojava                      do not include Java bindings in the binary distribution files.")
    print("  --githash                     include git hash in the Zip file.")
    exit(0)

# Parse configuration option for mk_make script
def parse_options():
    global FORCE_MK, JAVA_ENABLED, GIT_HASH, DOTNET_ENABLED
    path = BUILD_DIR
    options, remainder = getopt.gnu_getopt(sys.argv[1:], 'b:hsf', ['build=', 'help', 'silent', 'force', 'nojava', 'nodotnet', 'githash'])
    for opt, arg in options:
        if opt in ('-b', '--build'):
            if arg == 'src':
                raise MKException('The src directory should not be used to host the Makefile')
            path = arg
        elif opt in ('-s', '--silent'):
            set_verbose(False)
        elif opt in ('-h', '--help'):
            display_help()
        elif opt in ('-f', '--force'):
            FORCE_MK = True
        elif opt == '--nodotnet':
            DOTNET_ENABLED = False
        elif opt == '--nojava':
            JAVA_ENABLED = False
        elif opt == '--githash':
            GIT_HASH = True
        else:
            raise MKException("Invalid command line option '%s'" % opt)
    set_build_dir(path)

# Check whether build directory already exists or not
def check_build_dir(path):
    return os.path.exists(path) and os.path.exists(os.path.join(path, 'Makefile'))

# Create a build directory using mk_make.py
def mk_build_dir(path, x64):
    if not check_build_dir(path) or FORCE_MK:
        opts = ["python", os.path.join('scripts', 'mk_make.py'), "--parallel=24", "-b", path]
        if DOTNET_ENABLED:
            opts.append('--dotnet')
        if JAVA_ENABLED:
            opts.append('--java')
        if x64:
            opts.append('-x')
        if GIT_HASH:
            opts.append('--githash=%s' % mk_util.git_hash())
        if subprocess.call(opts) != 0:
            raise MKException("Failed to generate build directory at '%s'" % path)

# Create build directories
def mk_build_dirs():
    mk_build_dir(BUILD_X86_DIR, False)
    mk_build_dir(BUILD_X64_DIR, True)

# Check if on Visual Studio command prompt
def check_vc_cmd_prompt():
    try:
        DEVNULL = open(os.devnull, 'wb')
        subprocess.call(['cl'], stdout=DEVNULL, stderr=DEVNULL)
    except:
        raise MKException("You must execute the mk_win_dist.py script on a Visual Studio Command Prompt")
Studio Command Prompt") def exec_cmds(cmds): cmd_file = 'z3_tmp.cmd' f = open(cmd_file, 'w') for cmd in cmds: f.write(cmd) f.write('\n') f.close() res = 0 try: res = subprocess.call(cmd_file, shell=True) except: res = 1 try: os.erase(cmd_file) except: pass return res # Compile Z3 (if x64 == True, then it builds it in x64 mode). def mk_z3_core(x64): cmds = [] if x64: cmds.append('call "%VCINSTALLDIR%vcvarsall.bat" amd64') cmds.append('cd %s' % BUILD_X64_DIR) else: cmds.append('call "%VCINSTALLDIR%vcvarsall.bat" x86') cmds.append('cd %s' % BUILD_X86_DIR) cmds.append('nmake') if exec_cmds(cmds) != 0: raise MKException("Failed to make z3, x64: %s" % x64) def mk_z3(): mk_z3_core(False) mk_z3_core(True) def get_z3_name(x64): major, minor, build, revision = get_version() if x64: platform = "x64" else: platform = "x86" if GIT_HASH: return 'z3-%s.%s.%s.%s-%s-win' % (major, minor, build, mk_util.git_hash(), platform) else: return 'z3-%s.%s.%s-%s-win' % (major, minor, build, platform) def mk_dist_dir_core(x64): if x64: platform = "x64" build_path = BUILD_X64_DIR else: platform = "x86" build_path = BUILD_X86_DIR dist_path = os.path.join(DIST_DIR, get_z3_name(x64)) mk_dir(dist_path) mk_util.DOTNET_ENABLED = DOTNET_ENABLED mk_util.JAVA_ENABLED = JAVA_ENABLED mk_win_dist(build_path, dist_path) if is_verbose(): print("Generated %s distribution folder at '%s'" % (platform, dist_path)) def mk_dist_dir(): mk_dist_dir_core(False) mk_dist_dir_core(True) def get_dist_path(x64): return get_z3_name(x64) def mk_zip_core(x64): dist_path = get_dist_path(x64) old = os.getcwd() try: os.chdir(DIST_DIR) zfname = '%s.zip' % dist_path zipout = zipfile.ZipFile(zfname, 'w', zipfile.ZIP_DEFLATED) for root, dirs, files in os.walk(dist_path): for f in files: zipout.write(os.path.join(root, f)) if is_verbose(): print("Generated '%s'" % zfname) except: pass os.chdir(old) # Create a zip file for each platform def mk_zip(): mk_zip_core(False) mk_zip_core(True) VS_RUNTIME_PATS = [re.compile('vcomp.*\.dll'), re.compile('msvcp.*\.dll'), re.compile('msvcr.*\.dll')] VS_RUNTIME_FILES = [] def cp_vs_runtime_visitor(pattern, dir, files): global VS_RUNTIME_FILES for filename in files: for pat in VS_RUNTIME_PATS: if pat.match(filename): if fnmatch(filename, pattern): fname = os.path.join(dir, filename) if not os.path.isdir(fname): VS_RUNTIME_FILES.append(fname) break # Copy Visual Studio Runtime libraries def cp_vs_runtime_core(x64): global VS_RUNTIME_FILES if x64: platform = "x64" else: platform = "x86" vcdir = os.environ['VCINSTALLDIR'] path = '%sredist\\%s' % (vcdir, platform) VS_RUNTIME_FILES = [] os.walk(path, cp_vs_runtime_visitor, '*.dll') bin_dist_path = os.path.join(DIST_DIR, get_dist_path(x64), 'bin') for f in VS_RUNTIME_FILES: shutil.copy(f, bin_dist_path) if is_verbose(): print("Copied '%s' to '%s'" % (f, bin_dist_path)) def cp_vs_runtime(): cp_vs_runtime_core(True) cp_vs_runtime_core(False) def cp_license(): shutil.copy("LICENSE.txt", os.path.join(DIST_DIR, get_dist_path(True))) shutil.copy("LICENSE.txt", os.path.join(DIST_DIR, get_dist_path(False))) # Entry point def main(): if os.name != 'nt': raise MKException("This script is for Windows only") parse_options() check_vc_cmd_prompt() mk_build_dirs() mk_z3() init_project_def() mk_dist_dir() cp_license() cp_vs_runtime() mk_zip() main()
mit
liucode/tempest-master
tempest/services/object_storage/container_client.py
9
6222
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from xml.etree import ElementTree as etree

from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib

from tempest.common import service_client


class ContainerClient(service_client.ServiceClient):

    def create_container(
            self, container_name,
            metadata=None,
            remove_metadata=None,
            metadata_prefix='X-Container-Meta-',
            remove_metadata_prefix='X-Remove-Container-Meta-'):
        """Creates a container, with optional metadata passed in as a
        dictionary.
        """
        url = str(container_name)
        headers = {}

        if metadata is not None:
            for key in metadata:
                headers[metadata_prefix + key] = metadata[key]
        if remove_metadata is not None:
            for key in remove_metadata:
                headers[remove_metadata_prefix + key] = remove_metadata[key]

        resp, body = self.put(url, body=None, headers=headers)
        self.expected_success([201, 202], resp.status)
        return resp, body

    def delete_container(self, container_name):
        """Deletes the container (if it's empty)."""
        url = str(container_name)
        resp, body = self.delete(url)
        self.expected_success(204, resp.status)
        return resp, body

    def update_container_metadata(
            self, container_name,
            metadata=None,
            remove_metadata=None,
            metadata_prefix='X-Container-Meta-',
            remove_metadata_prefix='X-Remove-Container-Meta-'):
        """Updates arbitrary metadata on container."""
        url = str(container_name)
        headers = {}

        if metadata is not None:
            for key in metadata:
                headers[metadata_prefix + key] = metadata[key]
        if remove_metadata is not None:
            for key in remove_metadata:
                headers[remove_metadata_prefix + key] = remove_metadata[key]

        resp, body = self.post(url, body=None, headers=headers)
        self.expected_success(204, resp.status)
        return resp, body

    def delete_container_metadata(self, container_name, metadata,
                                  metadata_prefix='X-Remove-Container-Meta-'):
        """Deletes arbitrary metadata on container."""
        url = str(container_name)
        headers = {}

        if metadata is not None:
            for item in metadata:
                headers[metadata_prefix + item] = metadata[item]

        resp, body = self.post(url, body=None, headers=headers)
        self.expected_success(204, resp.status)
        return resp, body

    def list_container_metadata(self, container_name):
        """Retrieves container metadata headers."""
        url = str(container_name)
        resp, body = self.head(url)
        self.expected_success(204, resp.status)
        return resp, body

    def list_all_container_objects(self, container, params=None):
        """Returns the complete list of all objects in the container, even
        if the item count is beyond the 10,000-item listing limit. Does not
        require any parameters aside from the container name.
        """
        # TODO(dwalleck): Rewrite using json format to avoid newlines at end of
        # obj names. Set limit to API limit - 1 (max returned items = 9999)
        limit = 9999
        marker = None
        if params is not None:
            if 'limit' in params:
                limit = params['limit']
            if 'marker' in params:
                # Pass the marker through instead of clobbering the limit.
                marker = params['marker']

        list_params = {'limit': limit, 'format': 'json'}
        if marker is not None:
            list_params['marker'] = marker

        resp, objlist = self.list_container_contents(container,
                                                     params=list_params)
        self.expected_success(200, resp.status)
        return objlist

    def list_container_contents(self, container, params=None):
        """
        List the objects in a container, given the container name

        Returns the container object listing as a plain text list, or as
        xml or json if that option is specified via the 'format' argument.

        Optional Arguments:
        limit = integer
            For an integer value n, limits the number of results to at most
            n values.

        marker = 'string'
            Given a string value x, return object names greater in value
            than the specified marker.

        prefix = 'string'
            For a string value x, causes the results to be limited to names
            beginning with the substring x.

        format = 'json' or 'xml'
            Specify either json or xml to return the respective serialized
            response.
            If json, returns a list of json objects
            if xml, returns a string of xml

        path = 'string'
            For a string value x, return the object names nested in the
            pseudo path (assuming preconditions are met - see below).

        delimiter = 'character'
            For a character c, return all the object names nested in the
            container (without the need for the directory marker objects).
        """
        url = str(container)
        if params:
            url += '?%s' % urllib.urlencode(params)

        resp, body = self.get(url, headers={})
        if params and params.get('format') == 'json':
            body = json.loads(body)
        elif params and params.get('format') == 'xml':
            body = etree.fromstring(body)
        self.expected_success([200, 204], resp.status)
        return resp, body
apache-2.0
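create_container, update_container_metadata, and delete_container_metadata all reduce to the same step: turning plain dicts into prefixed Swift headers. That header-building step in isolation (the helper name is mine, not tempest's):

def build_container_headers(metadata=None, remove_metadata=None,
                            metadata_prefix='X-Container-Meta-',
                            remove_metadata_prefix='X-Remove-Container-Meta-'):
    # Each metadata key becomes one prefixed header; removal keys use the
    # X-Remove-* prefix so Swift drops them server-side.
    headers = {}
    for key, value in (metadata or {}).items():
        headers[metadata_prefix + key] = value
    for key, value in (remove_metadata or {}).items():
        headers[remove_metadata_prefix + key] = value
    return headers

print(build_container_headers({'color': 'blue'}, {'flavor': None}))
# {'X-Container-Meta-color': 'blue', 'X-Remove-Container-Meta-flavor': None}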
wildchildyn/autism-website
yanni_env/lib/python3.6/site-packages/pip/_vendor/requests/packages/chardet/langgreekmodel.py
2763
12628
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: Latin7_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 ) win1253_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 
30, 4, # e0 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 ) # Model Table: # total sequences: 100% # first 512 sequences: 98.2851% # first 1024 sequences:1.7001% # rest sequences: 0.0359% # negative sequences: 0.0148% GreekLangModel = ( 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, 3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, 2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, 2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, 2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, 0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, 3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, 3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, 2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, 0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, 0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, 0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, 0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, 0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, 0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, 0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, 0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, 0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, 0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, 0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, 0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, 0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, 0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, 0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, 0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, 0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, 0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, 0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, 0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, 0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, 0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, 0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, 0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, 0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, 0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, 0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, 0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, ) Latin7GreekModel = { 'charToOrderMap': Latin7_CharToOrderMap, 'precedenceMatrix': GreekLangModel, 'mTypicalPositiveRatio': 0.982851, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-7" } Win1253GreekModel = { 'charToOrderMap': win1253_CharToOrderMap, 'precedenceMatrix': GreekLangModel, 'mTypicalPositiveRatio': 0.982851, 'keepEnglishLetter': False, 'charsetName': "windows-1253" } # flake8: noqa
gpl-3.0
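The 256-entry char-to-order maps above are consumed one input byte at a time: the byte value indexes the table, and the resulting order feeds the bigram precedenceMatrix. A toy illustration of just the lookup step (not chardet's actual API):

def byte_orders(data, char_to_order_map):
    # Values 255/254/253/252 are the reserved classes documented above
    # (control, CR/LF, symbol, digit); everything else is a letter rank.
    return [char_to_order_map[b] for b in bytearray(data)]

# byte_orders(b'abc', Latin7_CharToOrderMap) -> [72, 70, 80]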
louyihua/edx-platform
lms/djangoapps/certificates/tests/test_signals.py
29
1324
""" Unit tests for enabling self-generated certificates by default for self-paced courses. """ from certificates import api as certs_api from certificates.models import CertificateGenerationConfiguration from certificates.signals import _listen_for_course_publish from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase class SelfGeneratedCertsSignalTest(ModuleStoreTestCase): """ Tests for enabling self-generated certificates by default for self-paced courses. """ def setUp(self): super(SelfGeneratedCertsSignalTest, self).setUp() SelfPacedConfiguration(enabled=True).save() self.course = CourseFactory.create(self_paced=True) # Enable the feature CertificateGenerationConfiguration.objects.create(enabled=True) def test_cert_generation_enabled_for_self_paced(self): """ Verify the signal enable the self-generated certificates by default for self-paced courses. """ self.assertFalse(certs_api.cert_generation_enabled(self.course.id)) _listen_for_course_publish('store', self.course.id) self.assertTrue(certs_api.cert_generation_enabled(self.course.id))
agpl-3.0
adelina-t/nova
nova/api/openstack/compute/contrib/hosts.py
48
14023
# Copyright (c) 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """The hosts admin extension.""" from oslo_log import log as logging import six import webob.exc from nova.api.openstack import extensions from nova import compute from nova import context as nova_context from nova import exception from nova.i18n import _ from nova.i18n import _LI from nova import objects LOG = logging.getLogger(__name__) authorize = extensions.extension_authorizer('compute', 'hosts') class HostController(object): """The Hosts API controller for the OpenStack API.""" def __init__(self): self.api = compute.HostAPI() super(HostController, self).__init__() def index(self, req): """Returns a dict in the format: | {'hosts': [{'host_name': 'some.host.name', | 'service': 'cells', | 'zone': 'internal'}, | {'host_name': 'some.other.host.name', | 'service': 'cells', | 'zone': 'internal'}, | {'host_name': 'some.celly.host.name', | 'service': 'cells', | 'zone': 'internal'}, | {'host_name': 'console1.host.com', | 'service': 'consoleauth', | 'zone': 'internal'}, | {'host_name': 'network1.host.com', | 'service': 'network', | 'zone': 'internal'}, | {'host_name': 'netwwork2.host.com', | 'service': 'network', | 'zone': 'internal'}, | {'host_name': 'compute1.host.com', | 'service': 'compute', | 'zone': 'nova'}, | {'host_name': 'compute2.host.com', | 'service': 'compute', | 'zone': 'nova'}, | {'host_name': 'sched1.host.com', | 'service': 'scheduler', | 'zone': 'internal'}, | {'host_name': 'sched2.host.com', | 'service': 'scheduler', | 'zone': 'internal'}, | {'host_name': 'vol1.host.com', | 'service': 'volume', | 'zone': 'internal'}]} """ context = req.environ['nova.context'] authorize(context) # NOTE(alex_xu): back-compatible with db layer hard-code admin # permission checks nova_context.require_admin_context(context) filters = {'disabled': False} zone = req.GET.get('zone', None) if zone: filters['availability_zone'] = zone services = self.api.service_get_all(context, filters=filters, set_zones=True) hosts = [] for service in services: hosts.append({'host_name': service['host'], 'service': service['topic'], 'zone': service['availability_zone']}) return {'hosts': hosts} def update(self, req, id, body): """Updates a specified body. :param body: example format {'status': 'enable', 'maintenance_mode': 'enable'} """ def read_enabled(orig_val, msg): """Checks a specified orig_val and returns True for 'enabled' and False for 'disabled'. :param orig_val: A string with either 'enable' or 'disable'. May be surrounded by whitespace, and case doesn't matter :param msg: The message to be passed to HTTPBadRequest. A single %s will be replaced with orig_val. """ val = orig_val.strip().lower() if val == "enable": return True elif val == "disable": return False else: raise webob.exc.HTTPBadRequest(explanation=msg % orig_val) context = req.environ['nova.context'] authorize(context) # NOTE(alex_xu): back-compatible with db layer hard-code admin # permission checks. 
This has to be left only for API v2.0 because # this version has to be stable even if it means that only admins # can call this method while the policy could be changed. nova_context.require_admin_context(context) # See what the user wants to 'update' params = {k.strip().lower(): v for k, v in six.iteritems(body)} orig_status = status = params.pop('status', None) orig_maint_mode = maint_mode = params.pop('maintenance_mode', None) # Validate the request if len(params) > 0: # Some extra param was passed. Fail. explanation = _("Invalid update setting: '%s'") % params.keys()[0] raise webob.exc.HTTPBadRequest(explanation=explanation) if orig_status is not None: status = read_enabled(orig_status, _("Invalid status: '%s'")) if orig_maint_mode is not None: maint_mode = read_enabled(orig_maint_mode, _("Invalid mode: '%s'")) if status is None and maint_mode is None: explanation = _("'status' or 'maintenance_mode' needed for " "host update") raise webob.exc.HTTPBadRequest(explanation=explanation) # Make the calls and merge the results result = {'host': id} if status is not None: result['status'] = self._set_enabled_status(context, id, status) if maint_mode is not None: result['maintenance_mode'] = self._set_host_maintenance(context, id, maint_mode) return result def _set_host_maintenance(self, context, host_name, mode=True): """Start/Stop host maintenance window. On start, it triggers guest VMs evacuation. """ LOG.info(_LI("Putting host %(host_name)s in maintenance mode " "%(mode)s."), {'host_name': host_name, 'mode': mode}) try: result = self.api.set_host_maintenance(context, host_name, mode) except NotImplementedError: msg = _("Virt driver does not implement host maintenance mode.") raise webob.exc.HTTPNotImplemented(explanation=msg) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) except exception.ComputeServiceUnavailable as e: raise webob.exc.HTTPBadRequest(explanation=e.format_message()) if result not in ("on_maintenance", "off_maintenance"): raise webob.exc.HTTPBadRequest(explanation=result) return result def _set_enabled_status(self, context, host_name, enabled): """Sets the specified host's ability to accept new instances. :param enabled: a boolean - if False no new VMs will be able to start on the host """ if enabled: LOG.info(_LI("Enabling host %s.") % host_name) else: LOG.info(_LI("Disabling host %s.") % host_name) try: result = self.api.set_host_enabled(context, host_name=host_name, enabled=enabled) except NotImplementedError: msg = _("Virt driver does not implement host disabled status.") raise webob.exc.HTTPNotImplemented(explanation=msg) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) except exception.ComputeServiceUnavailable as e: raise webob.exc.HTTPBadRequest(explanation=e.format_message()) if result not in ("enabled", "disabled"): raise webob.exc.HTTPBadRequest(explanation=result) return result def _host_power_action(self, req, host_name, action): """Reboots, shuts down or powers up the host.""" context = req.environ['nova.context'] authorize(context) # NOTE(alex_xu): back-compatible with db layer hard-code admin # permission checks. This has to be left only for API v2.0 because # this version has to be stable even if it means that only admins # can call this method while the policy could be changed. 
nova_context.require_admin_context(context) try: result = self.api.host_power_action(context, host_name=host_name, action=action) except NotImplementedError: msg = _("Virt driver does not implement host power management.") raise webob.exc.HTTPNotImplemented(explanation=msg) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) except exception.ComputeServiceUnavailable as e: raise webob.exc.HTTPBadRequest(explanation=e.format_message()) return {"host": host_name, "power_action": result} def startup(self, req, id): return self._host_power_action(req, host_name=id, action="startup") def shutdown(self, req, id): return self._host_power_action(req, host_name=id, action="shutdown") def reboot(self, req, id): return self._host_power_action(req, host_name=id, action="reboot") @staticmethod def _get_total_resources(host_name, compute_node): return {'resource': {'host': host_name, 'project': '(total)', 'cpu': compute_node['vcpus'], 'memory_mb': compute_node['memory_mb'], 'disk_gb': compute_node['local_gb']}} @staticmethod def _get_used_now_resources(host_name, compute_node): return {'resource': {'host': host_name, 'project': '(used_now)', 'cpu': compute_node['vcpus_used'], 'memory_mb': compute_node['memory_mb_used'], 'disk_gb': compute_node['local_gb_used']}} @staticmethod def _get_resource_totals_from_instances(host_name, instances): cpu_sum = 0 mem_sum = 0 hdd_sum = 0 for instance in instances: cpu_sum += instance['vcpus'] mem_sum += instance['memory_mb'] hdd_sum += instance['root_gb'] + instance['ephemeral_gb'] return {'resource': {'host': host_name, 'project': '(used_max)', 'cpu': cpu_sum, 'memory_mb': mem_sum, 'disk_gb': hdd_sum}} @staticmethod def _get_resources_by_project(host_name, instances): # Getting usage resource per project project_map = {} for instance in instances: resource = project_map.setdefault(instance['project_id'], {'host': host_name, 'project': instance['project_id'], 'cpu': 0, 'memory_mb': 0, 'disk_gb': 0}) resource['cpu'] += instance['vcpus'] resource['memory_mb'] += instance['memory_mb'] resource['disk_gb'] += (instance['root_gb'] + instance['ephemeral_gb']) return project_map def show(self, req, id): """Shows the physical/usage resource given by hosts. :param id: hostname :returns: expected to use HostShowTemplate. ex.:: {'host': {'resource':D},..} D: {'host': 'hostname','project': 'admin', 'cpu': 1, 'memory_mb': 2048, 'disk_gb': 30} """ context = req.environ['nova.context'] # NOTE(eliqiao): back-compatible with db layer hard-code admin # permission checks. This has to be left only for API v2.0 because # this version has to be stable even if it means that only admins # can call this method while the policy could be changed. 
nova_context.require_admin_context(context) host_name = id try: compute_node = ( objects.ComputeNode.get_first_node_by_host_for_old_compat( context, host_name)) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) instances = self.api.instance_get_all_by_host(context, host_name) resources = [self._get_total_resources(host_name, compute_node)] resources.append(self._get_used_now_resources(host_name, compute_node)) resources.append(self._get_resource_totals_from_instances(host_name, instances)) by_proj_resources = self._get_resources_by_project(host_name, instances) for resource in six.itervalues(by_proj_resources): resources.append({'resource': resource}) return {'host': resources} class Hosts(extensions.ExtensionDescriptor): """Admin-only host administration.""" name = "Hosts" alias = "os-hosts" namespace = "http://docs.openstack.org/compute/ext/hosts/api/v1.1" updated = "2011-06-29T00:00:00Z" def get_resources(self): resources = [extensions.ResourceExtension('os-hosts', HostController(), collection_actions={'update': 'PUT'}, member_actions={"startup": "GET", "shutdown": "GET", "reboot": "GET"})] return resources
apache-2.0
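update() folds its enable/disable parsing into the nested read_enabled helper: the check is case- and whitespace-insensitive, and anything else is a client error. The same rule as a standalone function (with webob's HTTPBadRequest swapped for a plain ValueError):

def read_enabled(orig_val):
    # Accept 'enable'/'disable' in any case, ignoring surrounding whitespace.
    val = orig_val.strip().lower()
    if val == 'enable':
        return True
    if val == 'disable':
        return False
    raise ValueError("Invalid update setting: %r" % orig_val)

assert read_enabled('  Enable ') is True
assert read_enabled('DISABLE') is False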
orbnauticus/Pique
pique/rawtty.py
1
5205
#!/usr/bin/python # # Copyright (c) 2010, Ryan Marquardt # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the project nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import collections import Queue import select import signal import sys import termios import threading ESCAPE = '\x1b' NAMES = { 'escape':'\x1b', 'eof':'\x04', 'left':'\x1b[D', 'shift+left':'\x1b[1;2D', 'right':'\x1b[C', 'shift+right':'\x1b[1;2C', 'down':'\x1b[B', 'shift+down':'\x1b[1;2B', 'up':'\x1b[A', 'shift+up':'\x1b[1;2A', 'home':'\x1bOH', 'end':'\x1bOF', 'insert':'\x1b[2~', 'shift+insert':'\x1b[2;2~', 'delete':'\x1b[3~', 'shift+delete':'\x1b[3;2~', 'page_up':'\x1b[5~', 'page_down':'\x1b[6~', 'f1':'\x1bOP', 'shift+f1':'\x1bO1;2P', 'f2':'\x1bOQ', 'shift+f2':'\x1bO1;2Q', 'f3':'\x1bOR', 'shift+f3':'\x1bO1;2R', 'f4':'\x1bOS', 'shift+f4':'\x1bO1;2S', 'f5':'\x1b[15~', 'shift+f5':'\x1b[15;2~', 'f6':'\x1b[17~', 'shift+f6':'\x1b[17;2~', 'f7':'\x1b[18~', 'shift+f7':'\x1b[18;2~', 'f8':'\x1b[19~', 'shift+f8':'\x1b[19;2~', 'f9':'\x1b[20~', 'shift+f9':'\x1b[20;2~', 'f10':'\x1b[21~', 'shift+f10':'\x1b[21;2~', 'f11':'\x1b[23~', 'shift+f11':'\x1b[23;2~', 'f12':'\x1b[24~', 'shift+f12':'\x1b[24;2~', 'bksp':'\x7f', 'tab':'\t', 'shift+tab':'\x1b[Z', 'enter':'\n', 'space':' ', } for k,v in NAMES.items(): NAMES[v] = k KNOWN_SEQUENCES = NAMES.values() class EOF(Exception): pass class rawtty(object): def __init__(self, fd=sys.stdin, echo=False, timeout=1, quit='eof'): self.fd = fd self.echo = echo self.timeout = timeout self.quit = quit self.old = termios.tcgetattr(self.fd.fileno()) self.q = Queue.Queue() def __enter__(self): self.start() def __exit__(self, type, value, traceback): self.restore() def start(self): try: signal.signal(signal.SIGINT, self._recv_interrupt) except: pass new = termios.tcgetattr(self.fd.fileno()) new[3] &= ~termios.ICANON if not self.echo: new[3] &= ~termios.ECHO termios.tcsetattr(self.fd.fileno(), termios.TCSANOW, new) def restore(self): termios.tcsetattr(self.fd.fileno(), termios.TCSADRAIN, self.old) try: signal.signal(signal.SIGINT, signal.SIG_DFL) except: pass def _recv_interrupt(self, sig, frame): self.q.put(KeyboardInterrupt) def __iter__(self): self.start() def 
readthread():
			try:
				c = True
				while c:
					c = self.fd.read(1)
					self.q.put(c)
			finally:
				self.q.put('')
		self.readthread = threading.Thread(target=readthread)
		self.readthread.daemon = True
		self.readthread.start()
		seq = self.q.get()
		while True:
			if not seq:
				raise EOF
			elif seq == KeyboardInterrupt:
				raise KeyboardInterrupt
			elif seq == ESCAPE:
				try:
					seq += self.q.get(timeout=self.timeout)
				except Queue.Empty:
					pass #Assume that only escape was pressed
				else:
					if not any(s.startswith(seq) for s in KNOWN_SEQUENCES):
						#Escape key, followed by another sequence
						yield 'escape'
						seq = seq[1:]
						continue
			else:
				#Probably not the escape key by itself
				#Continue reads until we have a full sequence or error
				while any(s.startswith(seq) for s in KNOWN_SEQUENCES):
					if seq not in KNOWN_SEQUENCES:
						seq += self.q.get()
					else:
						break
			if seq not in KNOWN_SEQUENCES:
				#No match
				raise IOError('Unrecognized Sequence %r' % seq)
			#print repr(seq), repr(NAMES.get(quit,quit)), seq == NAMES.get(quit,quit)
			if seq != NAMES.get(self.quit,self.quit):
				yield NAMES.get(seq,seq)
			else:
				return
			seq = self.q.get()

def getch(fd=sys.stdin, echo=False):
	with rawtty(fd):
		return fd.read(1)

if __name__=='__main__':
	print KNOWN_SEQUENCES
	while True:
		try:
			for key in rawtty():
				print repr(key)
		except IOError, e:
			print e
			break
		else:
			break
bsd-3-clause
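rawtty's start()/restore() pair is the standard termios dance: snapshot the terminal attributes, clear ICANON (and optionally ECHO) in the local-flags word, and put the snapshot back when done. The same idea as a tiny blocking getch, a minimal sketch rather than a drop-in replacement for the module above:

import sys
import termios

def getch():
    fd = sys.stdin.fileno()
    old = termios.tcgetattr(fd)
    new = termios.tcgetattr(fd)
    new[3] &= ~(termios.ICANON | termios.ECHO)  # index 3 is the lflags word
    termios.tcsetattr(fd, termios.TCSANOW, new)
    try:
        return sys.stdin.read(1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old)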
dmlc/tvm
python/tvm/contrib/rocm.py
3
5347
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Utility for ROCm backend""" import subprocess from os.path import join, exists import tvm._ffi from tvm._ffi.base import py_str import tvm.runtime import tvm.target from . import utils def find_lld(required=True): """Find ld.lld in system. Parameters ---------- required : bool Whether it is required, runtime error will be raised if the compiler is required. Returns ------- valid_list : list of str List of possible paths. Note ---- This function will first search ld.lld that matches the major llvm version that built with tvm """ lld_list = [] major = tvm.target.codegen.llvm_version_major(allow_none=True) if major is not None: lld_list += ["ld.lld-%d.0" % major] lld_list += ["ld.lld-%d" % major] lld_list += ["ld.lld"] valid_list = [utils.which(x) for x in lld_list] valid_list = [x for x in valid_list if x] if not valid_list and required: raise RuntimeError("cannot find ld.lld, candidates are: " + str(lld_list)) return valid_list def rocm_link(in_file, out_file, lld=None): """Link relocatable ELF object to shared ELF object using lld Parameters ---------- in_file : str Input file name (relocatable ELF object file) out_file : str Output file name (shared ELF object file) lld : str, optional The lld linker, if not specified, we will try to guess the matched clang version. """ # if our result has undefined symbols, it will fail to load # (hipModuleLoad/hipModuleLoadData), but with a somewhat opaque message # so we have ld.lld check this here. # If you get a complaint about missing symbols you might want to check the # list of bitcode files below. args = [ lld if lld is not None else find_lld()[0], "--no-undefined", "-shared", in_file, "-o", out_file, ] proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) (out, _) = proc.communicate() if proc.returncode != 0: msg = "Linking error using ld.lld:\n" msg += py_str(out) raise RuntimeError(msg) @tvm._ffi.register_func("tvm_callback_rocm_link") def callback_rocm_link(obj_bin): """Links object file generated from LLVM to HSA Code Object Parameters ---------- obj_bin : bytearray The object file Return ------ cobj_bin : bytearray The HSA Code Object """ tmp_dir = utils.tempdir() tmp_obj = tmp_dir.relpath("rocm_kernel.o") tmp_cobj = tmp_dir.relpath("rocm_kernel.co") with open(tmp_obj, "wb") as out_file: out_file.write(bytes(obj_bin)) rocm_link(tmp_obj, tmp_cobj) cobj_bin = bytearray(open(tmp_cobj, "rb").read()) return cobj_bin @tvm._ffi.register_func("tvm_callback_rocm_bitcode_path") def callback_rocm_bitcode_path(rocdl_dir=None): """Utility function to find ROCm device library bitcodes Parameters ---------- rocdl_dir : str The path to rocm library directory The default value is the standard location """ # seems link order matters. 
if rocdl_dir is None: if exists("/opt/rocm/amdgcn/bitcode/"): rocdl_dir = "/opt/rocm/amdgcn/bitcode/" # starting with rocm 3.9 else: rocdl_dir = "/opt/rocm/lib/" # until rocm 3.8 bitcode_names = [ "oclc_daz_opt_on", "ocml", "hc", "irif", # this does not exist in rocm 3.9, drop eventually "ockl", "oclc_correctly_rounded_sqrt_off", "oclc_correctly_rounded_sqrt_on", "oclc_daz_opt_off", "oclc_finite_only_off", "oclc_finite_only_on", "oclc_isa_version_803", # todo (t-vi): an alternative might be to scan for the "oclc_isa_version_900", # isa version files (if the linker throws out "oclc_isa_version_906", # the unneeded ones or we filter for the arch we need) "oclc_unsafe_math_off", "oclc_unsafe_math_on", "oclc_wavefrontsize64_on", ] bitcode_files = [] for n in bitcode_names: p = join(rocdl_dir, n + ".bc") # rocm >= 3.9 if not exists(p): # rocm <= 3.8 p = join(rocdl_dir, n + ".amdgcn.bc") if exists(p): bitcode_files.append(p) elif "isa_version" not in n and n not in {"irif"}: raise RuntimeError("could not find bitcode " + n) return tvm.runtime.convert(bitcode_files)
apache-2.0
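callback_rocm_bitcode_path probes two filename layouts because ROCm moved and renamed its device-library bitcode between 3.8 and 3.9. The probing logic reduced to a helper (the directory and name here are examples, not an exhaustive list):

from os.path import join, exists

def find_bitcode(rocdl_dir, name):
    # Try the rocm >= 3.9 layout first, then the pre-3.9 .amdgcn suffix.
    for candidate in (join(rocdl_dir, name + '.bc'),
                      join(rocdl_dir, name + '.amdgcn.bc')):
        if exists(candidate):
            return candidate
    return None

# find_bitcode('/opt/rocm/amdgcn/bitcode', 'ocml')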
Eddy0402/rt-thread
bsp/mb9bf506r/rtconfig.py
41
3310
import os

# toolchains options
ARCH='arm'
CPU='cortex-m3'
CROSS_TOOL='iar'

if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')

# cross_tool provides the cross compiler
# EXEC_PATH is the compiler execute path, for example, CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    EXEC_PATH = 'C:/Program Files/CodeSourcery/Sourcery G++ Lite/bin'
elif CROSS_TOOL == 'keil':
    PLATFORM = 'armcc'
    EXEC_PATH = 'C:/Keil'
elif CROSS_TOOL == 'iar':
    PLATFORM = 'iar'
    IAR_PATH = 'C:/Program Files/IAR Systems/Embedded Workbench 6.0 Evaluation'

if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')

BUILD = 'debug'

if PLATFORM == 'gcc':
    # toolchains
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'axf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'

    DEVICE = ' -mcpu=cortex-m3 -mthumb -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
    LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-fm3.map,-cref,-u,Reset_Handler -T rtthread-mb9bf506.ld'

    CPATH = ''
    LPATH = ''

    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'

    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'

elif PLATFORM == 'armcc':
    # toolchains
    CC = 'armcc'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'

    DEVICE = ' --device DARMSTM'
    CFLAGS = DEVICE + ' --apcs=interwork'
    AFLAGS = DEVICE
    LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-fm3.map --scatter rtthread-mb9bf506.sct'

    CFLAGS += ' -I' + EXEC_PATH + '/ARM/RV31/INC'
    LFLAGS += ' --libpath ' + EXEC_PATH + '/ARM/RV31/LIB'

    EXEC_PATH += '/arm/bin40/'

    if BUILD == 'debug':
        CFLAGS += ' -g -O0'
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'

    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'

elif PLATFORM == 'iar':
    # toolchains
    CC = 'iccarm'
    AS = 'iasmarm'
    AR = 'iarchive'
    LINK = 'ilinkarm'
    TARGET_EXT = 'out'

    CFLAGS = ''
    CFLAGS += ' --diag_suppress Pa050'
    CFLAGS += ' --no_cse'
    CFLAGS += ' --no_unroll'
    CFLAGS += ' --no_inline'
    CFLAGS += ' --no_code_motion'
    CFLAGS += ' --no_tbaa'
    CFLAGS += ' --no_clustering'
    CFLAGS += ' --no_scheduling'
    CFLAGS += ' --debug'
    CFLAGS += ' --endian=little'
    CFLAGS += ' --cpu=Cortex-M3'
    CFLAGS += ' -e'
    CFLAGS += ' --fpu=None'
    CFLAGS += ' --dlib_config "' + IAR_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
    CFLAGS += ' -Ol'
    CFLAGS += ' --use_c++_inline'

    AFLAGS = ''
    AFLAGS += ' -s+'
    AFLAGS += ' -w+'
    AFLAGS += ' -r'
    AFLAGS += ' --cpu Cortex-M3'
    AFLAGS += ' --fpu None'
    AFLAGS += ' -I"' + IAR_PATH + '/arm/INC"'

    LFLAGS = ' --config rtthread-mb9bf506.icf'
    LFLAGS += ' --semihosting'
    LFLAGS += ' --entry __iar_program_start'

    EXEC_PATH = IAR_PATH + '/arm/bin/'
    POST_ACTION = 'ielftool.exe --srec --verbose $TARGET rtthread.srec'
gpl-2.0
poojavade/Genomics_Docker
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/ipython-2.2.0-py2.7.egg/IPython/parallel/tests/test_mongodb.py
12
1563
"""Tests for mongodb backend Authors: * Min RK """ #------------------------------------------------------------------------------- # Copyright (C) 2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Imports #------------------------------------------------------------------------------- import os from unittest import TestCase from nose import SkipTest from pymongo import Connection from IPython.parallel.controller.mongodb import MongoDB from . import test_db conn_kwargs = {} if 'DB_IP' in os.environ: conn_kwargs['host'] = os.environ['DB_IP'] if 'DBA_MONGODB_ADMIN_URI' in os.environ: # On ShiningPanda, we need a username and password to connect. They are # passed in a mongodb:// URI. conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI'] if 'DB_PORT' in os.environ: conn_kwargs['port'] = int(os.environ['DB_PORT']) try: c = Connection(**conn_kwargs) except Exception: c=None class TestMongoBackend(test_db.TaskDBTest, TestCase): """MongoDB backend tests""" def create_db(self): try: return MongoDB(database='iptestdb', _connection=c) except Exception: raise SkipTest("Couldn't connect to mongodb") def teardown(self): if c is not None: c.drop_database('iptestdb')
apache-2.0
bob3000/thumbor_aws
vows/result_storage_vows.py
3
3453
#!/usr/bin/python
# -*- coding: utf-8 -*-

from pyvows import Vows, expect

from thumbor.context import Context
from tc_aws import Config
from fixtures.storage_fixture import IMAGE_BYTES, get_server

from boto.s3.connection import S3Connection
from moto import mock_s3

from tc_aws.result_storages.s3_storage import Storage

import logging
logging.getLogger('botocore').setLevel(logging.CRITICAL)

s3_bucket = 'thumbor-images-test'


class Request(object):
    url = None


@Vows.batch
class S3ResultStorageVows(Vows.Context):

    class CanStoreImage(Vows.Context):
        @mock_s3
        def topic(self):
            self.conn = S3Connection()
            self.conn.create_bucket(s3_bucket)

            config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket)
            ctx = Context(config=config, server=get_server('ACME-SEC'))
            ctx.request = Request
            ctx.request.url = 'my-image.jpg'

            storage = Storage(ctx)
            path = storage.put(IMAGE_BYTES)

            return path

        def should_be_in_catalog(self, topic):
            expect(topic).to_equal('my-image.jpg')

    class CanGetImage(Vows.Context):
        @Vows.async_topic
        @mock_s3
        def topic(self, callback):
            self.conn = S3Connection()
            self.conn.create_bucket(s3_bucket)

            config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket)
            ctx = Context(config=config, server=get_server('ACME-SEC'))
            ctx.request = Request
            ctx.request.url = 'my-image-2.jpg'

            storage = Storage(ctx)
            storage.put(IMAGE_BYTES)
            storage.get(callback=callback)

        def should_have_proper_bytes(self, topic):
            expect(topic.args[0]).not_to_be_null()
            expect(topic.args[0]).not_to_be_an_error()
            expect(topic.args[0]).to_equal(IMAGE_BYTES)

    class CanGetImageWithMetadata(Vows.Context):
        @Vows.async_topic
        @mock_s3
        def topic(self, callback):
            self.conn = S3Connection()
            self.conn.create_bucket(s3_bucket)

            config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket,
                            TC_AWS_STORE_METADATA=True)
            ctx = Context(config=config, server=get_server('ACME-SEC'))
            ctx.headers = {'Content-Type': 'image/webp', 'Some-Other-Header': 'doge-header'}
            ctx.request = Request
            ctx.request.url = 'my-image-meta.jpg'

            storage = Storage(ctx)
            storage.put(IMAGE_BYTES)

            file_abspath = storage._normalize_path(ctx.request.url)
            storage.storage.get(file_abspath, callback=callback)

        def should_have_proper_bytes(self, topic):
            expect(topic.args[0].content_type).to_include('image/webp')
            expect(topic.args[0].metadata).to_include('some-other-header')
            expect(topic.args[0].content_type).to_equal(IMAGE_BYTES)

    class HandlesStoragePrefix(Vows.Context):
        @mock_s3
        def topic(self):
            self.conn = S3Connection()
            self.conn.create_bucket(s3_bucket)

            config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket,
                            TC_AWS_RESULT_STORAGE_ROOT_PATH='tata')
            ctx = Context(config=config, server=get_server('ACME-SEC'))

            storage = Storage(ctx)
            return storage._normalize_path('toto')

        def should_return_the_same(self, topic):
            expect(topic).to_equal("tata/toto")
mit
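The HandlesStoragePrefix vow pins down one contract: with TC_AWS_RESULT_STORAGE_ROOT_PATH set, every storage key is prefixed with that root. The expected mapping, restated as a plain helper (mine, not tc_aws's actual _normalize_path):

def normalize_path(root_path, path):
    # 'tata' + 'toto' -> 'tata/toto'; an empty root leaves the key alone.
    return '/'.join(part for part in (root_path, path) if part)

assert normalize_path('tata', 'toto') == 'tata/toto'
assert normalize_path('', 'toto') == 'toto'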
auntieNeo/asterisk-testsuite
lib/python/asterisk/channel_test_condition.py
1
8676
#!/usr/bin/env python """Test condition for channels Copyright (C) 2011-2012, Digium, Inc. Matt Jordan <[email protected]> This program is free software, distributed under the terms of the GNU General Public License Version 2. """ from twisted.internet import defer from test_conditions import TestCondition import logging import unittest import re class ChannelTestCondition(TestCondition): """Test condition that checks for the existence of channels. If channels are detected and the number of active channels is greater than the configured amount, an error is raised. By default, the number of allowed active channels is 0. """ def __init__(self, test_config): """Constructor Keyword Arguments: test_config The TestConfig object for the test """ super(ChannelTestCondition, self).__init__(test_config) self.allowed_channels = 0 if ('allowedchannels' in test_config.config): self.allowed_channels = test_config.config['allowedchannels'] def evaluate(self, related_test_condition=None): """Evaluate this test condition Keyword Argument: related_test_condition The test condition that this condition is related to Returns: A deferred that will be called when evaluation is complete """ def __channel_callback(result): """Callback called from core show channels""" channel_expression = re.compile('^[A-Za-z0-9]+/') channel_tokens = result.output.strip().split('\n') active_channels = 0 referenced_channels = 0 for token in channel_tokens: if channel_expression.match(token): referenced_channels += 1 if 'active channels' in token: active_channel_tokens = token.partition(' ') active_channels = int(active_channel_tokens[0].strip()) if active_channels > self.allowed_channels: msg = ("Detected number of active channels %d is greater than " "the allowed %d on Asterisk %s" % (active_channels, self.allowed_channels, result.host)) super(ChannelTestCondition, self).fail_check(msg) elif referenced_channels > self.allowed_channels: msg = ("Channel leak detected - " "number of referenced channels %d is greater than " "the allowed %d on Asterisk %s" % (referenced_channels, self.allowed_channels, result.host)) super(ChannelTestCondition, self).fail_check(msg) return result def _raise_finished(result, finish_deferred): """Raise the deferred callback""" finish_deferred.callback(self) return result finish_deferred = defer.Deferred() # Set to pass and let a failure override super(ChannelTestCondition, self).pass_check() exec_list = [ast.cli_exec('core show channels'). 
addCallback(__channel_callback) for ast in self.ast] defer.DeferredList(exec_list).addCallback(_raise_finished, finish_deferred) return finish_deferred class AstMockOutput(object): """mock cli output base class""" def __init__(self): """Constructor""" self.host = "127.0.0.1" def MockDefer(self, output): """use real defer to mock deferred output""" self.output = output deferred = defer.Deferred() deferred.callback(self) return deferred class AstMockObjectInactive(AstMockOutput): """mock cli output showing no active channels""" def cli_exec(self, command): """presume command is core show channels and generate output""" output = "" output += "Channel Location State Application(Data)\n" output += "0 active channels\n" output += "0 active calls\n" output += "2 calls processed\n" output += "Asterisk ending (0).\n" return self.MockDefer(output) class AstMockObjectSingle(AstMockOutput): """mock cli output showing single active channel""" def cli_exec(self, command): """presume command is core show channels and generate output""" output = "" output += "Channel Location State Application(Data)\n" output += "Local/123@default-00 (None) Down ()\n" output += "1 active channels\n" output += "0 active calls\n" output += "2 calls processed\n" output += "Asterisk ending (0).\n" return self.MockDefer(output) class AstMockObjectMultiple(AstMockOutput): """mock cli output showing multiple active channels""" def cli_exec(self, command): """presume command is core show channels and generate output""" output = "" output += "Channel Location State Application(Data)\n" output += "PJSIP/123@default-00 (None) Down ()\n" output += "Local/123@default-00 (None) Down ()\n" output += "SIP/alice@default-00 (None) Down ()\n" output += "3 active channels\n" output += "0 active calls\n" output += "2 calls processed\n" output += "Asterisk ending (0).\n" return self.MockDefer(output) class AstMockObjectLeaked(AstMockOutput): """mock cli output showing leaked channel""" def cli_exec(self, command): """presume command is core show channels and generate output""" output = "" output += "Channel Location State Application(Data)\n" output += "Local/123@default-00 (None) Down ()\n" output += "0 active channels\n" output += "0 active calls\n" output += "2 calls processed\n" output += "Asterisk ending (0).\n" return self.MockDefer(output) class TestConfig(object): """Fake TestConfig object for unittest""" def __init__(self): self.class_type_name = "bogus" self.config = {} self.enabled = True self.pass_expected = True class ChannelTestConditionUnitTest(unittest.TestCase): """Unit Tests for ChannelTestCondition""" def test_evaluate_inactive(self): """test inactive channel condition""" obj = ChannelTestCondition(TestConfig()) obj.register_asterisk_instance(AstMockObjectInactive()) obj.evaluate() self.assertEqual(obj.get_status(), 'Passed') def test_evaluate_multiple_fail(self): """test multiple channel condition""" obj = ChannelTestCondition(TestConfig()) obj.register_asterisk_instance(AstMockObjectMultiple()) obj.evaluate() self.assertEqual(obj.get_status(), 'Failed') def test_evaluate_multiple_fail2(self): """test multiple channel condition""" obj = ChannelTestCondition(TestConfig()) obj.allowed_channels = 2 obj.register_asterisk_instance(AstMockObjectMultiple()) obj.evaluate() self.assertEqual(obj.get_status(), 'Failed') def test_evaluate_multiple_pass(self): """test multiple channel condition""" obj = ChannelTestCondition(TestConfig()) obj.allowed_channels = 3 obj.register_asterisk_instance(AstMockObjectMultiple()) obj.evaluate() 
self.assertEqual(obj.get_status(), 'Passed') def test_evaluate_single_fail(self): """test single channel condition""" obj = ChannelTestCondition(TestConfig()) obj.register_asterisk_instance(AstMockObjectSingle()) obj.evaluate() self.assertEqual(obj.get_status(), 'Failed') def test_evaluate_single_pass(self): """test single channel condition""" obj = ChannelTestCondition(TestConfig()) obj.allowed_channels = 1 obj.register_asterisk_instance(AstMockObjectSingle()) obj.evaluate() self.assertEqual(obj.get_status(), 'Passed') def test_evaluate_leaked(self): """test leaked channel condition""" obj = ChannelTestCondition(TestConfig()) obj.register_asterisk_instance(AstMockObjectLeaked()) obj.evaluate() self.assertEqual(obj.get_status(), 'Failed') def main(): """Run the unit tests""" logging.basicConfig(level=logging.DEBUG) unittest.main() if __name__ == "__main__": main()
gpl-2.0
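__channel_callback's leak detection hinges on comparing two numbers parsed from the same CLI dump: the count in the "N active channels" summary line and the number of lines that actually look like channels. That parse, lifted into a standalone function for illustration:

import re

def count_channels(cli_output):
    channel_expression = re.compile(r'^[A-Za-z0-9]+/')
    active = referenced = 0
    for token in cli_output.strip().split('\n'):
        if channel_expression.match(token):
            referenced += 1
        if 'active channels' in token:
            # partition(' ')[0] is the leading count, e.g. '0' in '0 active channels'
            active = int(token.partition(' ')[0].strip())
    return active, referenced

# The AstMockObjectLeaked output yields (0, 1): one referenced channel but
# zero reported active, which the condition flags as a leak.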
terbolous/SickRage
lib/hachoir_parser/audio/flac.py
95
5626
""" FLAC (audio) parser Documentation: * http://flac.sourceforge.net/format.html Author: Esteban Loiseau <baal AT tuxfamily.org> Creation date: 2008-04-09 """ from hachoir_parser import Parser from hachoir_core.field import FieldSet, String, Bit, Bits, UInt16, UInt24, RawBytes, Enum, NullBytes from hachoir_core.stream import BIG_ENDIAN, LITTLE_ENDIAN from hachoir_core.tools import createDict from hachoir_parser.container.ogg import parseVorbisComment class VorbisComment(FieldSet): endian = LITTLE_ENDIAN createFields = parseVorbisComment class StreamInfo(FieldSet): static_size = 34*8 def createFields(self): yield UInt16(self, "min_block_size", "The minimum block size (in samples) used in the stream") yield UInt16(self, "max_block_size", "The maximum block size (in samples) used in the stream") yield UInt24(self, "min_frame_size", "The minimum frame size (in bytes) used in the stream") yield UInt24(self, "max_frame_size", "The maximum frame size (in bytes) used in the stream") yield Bits(self, "sample_hertz", 20, "Sample rate in Hertz") yield Bits(self, "nb_channel", 3, "Number of channels minus one") yield Bits(self, "bits_per_sample", 5, "Bits per sample minus one") yield Bits(self, "total_samples", 36, "Total samples in stream") yield RawBytes(self, "md5sum", 16, "MD5 signature of the unencoded audio data") class SeekPoint(FieldSet): def createFields(self): yield Bits(self, "sample_number", 64, "Sample number") yield Bits(self, "offset", 64, "Offset in bytes") yield Bits(self, "nb_sample", 16) class SeekTable(FieldSet): def createFields(self): while not self.eof: yield SeekPoint(self, "point[]") class MetadataBlock(FieldSet): "Metadata block field: http://flac.sourceforge.net/format.html#metadata_block" BLOCK_TYPES = { 0: ("stream_info", u"Stream info", StreamInfo), 1: ("padding[]", u"Padding", None), 2: ("application[]", u"Application", None), 3: ("seek_table", u"Seek table", SeekTable), 4: ("comment", u"Vorbis comment", VorbisComment), 5: ("cue_sheet[]", u"Cue sheet", None), 6: ("picture[]", u"Picture", None), } BLOCK_TYPE_DESC = createDict(BLOCK_TYPES, 1) def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = 32 + self["metadata_length"].value * 8 try: key = self["block_type"].value self._name, self._description, self.handler = self.BLOCK_TYPES[key] except KeyError: self.handler = None def createFields(self): yield Bit(self, "last_metadata_block", "True if this is the last metadata block") yield Enum(Bits(self, "block_type", 7, "Metadata block header type"), self.BLOCK_TYPE_DESC) yield UInt24(self, "metadata_length", "Length of following metadata in bytes (doesn't include this header)") block_type = self["block_type"].value size = self["metadata_length"].value if not size: return try: handler = self.BLOCK_TYPES[block_type][2] except KeyError: handler = None if handler: yield handler(self, "content", size=size*8) elif self["block_type"].value == 1: yield NullBytes(self, "padding", size) else: yield RawBytes(self, "rawdata", size) class Metadata(FieldSet): def createFields(self): while not self.eof: field = MetadataBlock(self,"metadata_block[]") yield field if field["last_metadata_block"].value: break class Frame(FieldSet): SAMPLE_RATES = { 0: "get from STREAMINFO metadata block", 1: "88.2kHz", 2: "176.4kHz", 3: "192kHz", 4: "8kHz", 5: "16kHz", 6: "22.05kHz", 7: "24kHz", 8: "32kHz", 9: "44.1kHz", 10: "48kHz", 11: "96kHz", 12: "get 8 bit sample rate (in kHz) from end of header", 13: "get 16 bit sample rate (in Hz) from end of header", 14: "get 16 bit sample 
rate (in tens of Hz) from end of header", } def createFields(self): yield Bits(self, "sync", 14, "Sync code: 11111111111110") yield Bit(self, "reserved[]") yield Bit(self, "blocking_strategy") yield Bits(self, "block_size", 4) yield Enum(Bits(self, "sample_rate", 4), self.SAMPLE_RATES) yield Bits(self, "channel_assign", 4) yield Bits(self, "sample_size", 3) yield Bit(self, "reserved[]") # FIXME: Finish frame header parser class Frames(FieldSet): def createFields(self): while not self.eof: yield Frame(self, "frame[]") # FIXME: Parse all frames return class FlacParser(Parser): "Parse FLAC audio files: FLAC is a lossless audio codec" MAGIC = "fLaC\x00" PARSER_TAGS = { "id": "flac", "category": "audio", "file_ext": ("flac",), "mime": (u"audio/x-flac",), "magic": ((MAGIC, 0),), "min_size": 4*8, "description": "FLAC audio", } endian = BIG_ENDIAN def validate(self): if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC: return u"Invalid magic string" return True def createFields(self): yield String(self, "signature", 4,charset="ASCII", description="FLAC signature: fLaC string") yield Metadata(self,"metadata") yield Frames(self,"frames")
gpl-3.0
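MetadataBlock sizes itself from a fixed 4-byte header: a 1-bit last-block flag, a 7-bit block type, and a 24-bit big-endian length (hence self._size = 32 + length * 8). Decoding that header with only the standard library, independent of hachoir:

import struct

def parse_block_header(header4):
    # Big-endian 32-bit word: top bit = last-block flag, next 7 bits = type,
    # low 24 bits = payload length in bytes.
    word, = struct.unpack('>I', header4)
    return {'last': bool(word >> 31),
            'block_type': (word >> 24) & 0x7f,
            'length': word & 0xffffff}

# parse_block_header(b'\x84\x00\x00\x28')
# -> {'last': True, 'block_type': 4, 'length': 40}  (a final Vorbis comment block)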
toshywoshy/ansible
lib/ansible/modules/network/fortios/fortios_wireless_controller_ble_profile.py
7
13328
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_wireless_controller_ble_profile short_description: Configure Bluetooth Low Energy profile in Fortinet's FortiOS and FortiGate. description: - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the user to set and modify wireless_controller feature and ble_profile category. Examples include all parameters and values need to be adjusted to datasources before usage. Tested with FOS v6.0.5 version_added: "2.9" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate IP address. type: str required: false username: description: - FortiOS or FortiGate username. type: str required: false password: description: - FortiOS or FortiGate password. type: str default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. type: str default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol. type: bool default: true ssl_verify: description: - Ensures FortiGate certificate must be verified by a proper CA. type: bool default: true state: description: - Indicates whether to create or remove the object. type: str required: true choices: - present - absent wireless_controller_ble_profile: description: - Configure Bluetooth Low Energy profile. default: null type: dict suboptions: advertising: description: - Advertising type. type: str choices: - ibeacon - eddystone-uid - eddystone-url beacon_interval: description: - Beacon interval . type: int ble_scanning: description: - Enable/disable Bluetooth Low Energy (BLE) scanning. type: str choices: - enable - disable comment: description: - Comment. type: str eddystone_instance: description: - Eddystone instance ID. type: str eddystone_namespace: description: - Eddystone namespace ID. type: str eddystone_url: description: - Eddystone URL. type: str eddystone_url_encode_hex: description: - Eddystone encoded URL hexadecimal string type: str ibeacon_uuid: description: - Universally Unique Identifier (UUID; automatically assigned but can be manually reset). type: str major_id: description: - Major ID. type: int minor_id: description: - Minor ID. type: int name: description: - Bluetooth Low Energy profile name. required: true type: str txpower: description: - Transmit power level . 
type: str choices: - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 ''' EXAMPLES = ''' - hosts: localhost vars: host: "192.168.122.40" username: "admin" password: "" vdom: "root" ssl_verify: "False" tasks: - name: Configure Bluetooth Low Energy profile. fortios_wireless_controller_ble_profile: host: "{{ host }}" username: "{{ username }}" password: "{{ password }}" vdom: "{{ vdom }}" https: "False" state: "present" wireless_controller_ble_profile: advertising: "ibeacon" beacon_interval: "4" ble_scanning: "enable" comment: "Comment." eddystone_instance: "<your_own_value>" eddystone_namespace: "<your_own_value>" eddystone_url: "<your_own_value>" eddystone_url_encode_hex: "<your_own_value>" ibeacon_uuid: "<your_own_value>" major_id: "12" minor_id: "13" name: "default_name_14" txpower: "0" ''' RETURN = ''' build: description: Build number of the fortigate image returned: always type: str sample: '1547' http_method: description: Last method used to provision the content into FortiGate returned: always type: str sample: 'PUT' http_status: description: Last result given by FortiGate on last operation applied returned: always type: str sample: "200" mkey: description: Master key (id) used in the last call to FortiGate returned: success type: str sample: "id" name: description: Name of the table used to fulfill the request returned: always type: str sample: "urlfilter" path: description: Path of the table used to fulfill the request returned: always type: str sample: "webfilter" revision: description: Internal revision number returned: always type: str sample: "17.0.2.10658" serial: description: Serial number of the unit returned: always type: str sample: "FGVMEVYYQT3AB5352" status: description: Indication of the operation's result returned: always type: str sample: "success" vdom: description: Virtual domain used returned: always type: str sample: "root" version: description: Version of the FortiGate returned: always type: str sample: "v5.6.3" ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.connection import Connection from ansible.module_utils.network.fortios.fortios import FortiOSHandler from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG def login(data, fos): host = data['host'] username = data['username'] password = data['password'] ssl_verify = data['ssl_verify'] fos.debug('on') if 'https' in data and not data['https']: fos.https('off') else: fos.https('on') fos.login(host, username, password, verify=ssl_verify) def filter_wireless_controller_ble_profile_data(json): option_list = ['advertising', 'beacon_interval', 'ble_scanning', 'comment', 'eddystone_instance', 'eddystone_namespace', 'eddystone_url', 'eddystone_url_encode_hex', 'ibeacon_uuid', 'major_id', 'minor_id', 'name', 'txpower'] dictionary = {} for attribute in option_list: if attribute in json and json[attribute] is not None: dictionary[attribute] = json[attribute] return dictionary def underscore_to_hyphen(data): if isinstance(data, list): for i, elem in enumerate(data): data[i] = underscore_to_hyphen(elem) elif isinstance(data, dict): new_data = {} for k, v in data.items(): new_data[k.replace('_', '-')] = underscore_to_hyphen(v) data = new_data return data def wireless_controller_ble_profile(data, fos): vdom = data['vdom'] state = data['state'] wireless_controller_ble_profile_data = data['wireless_controller_ble_profile'] filtered_data = underscore_to_hyphen(filter_wireless_controller_ble_profile_data(wireless_controller_ble_profile_data)) if state == "present": 
return fos.set('wireless-controller', 'ble-profile', data=filtered_data, vdom=vdom) elif state == "absent": return fos.delete('wireless-controller', 'ble-profile', mkey=filtered_data['name'], vdom=vdom) def is_successful_status(status): return status['status'] == "success" or \ status['http_method'] == "DELETE" and status['http_status'] == 404 def fortios_wireless_controller(data, fos): if data['wireless_controller_ble_profile']: resp = wireless_controller_ble_profile(data, fos) return not is_successful_status(resp), \ resp['status'] == "success", \ resp def main(): fields = { "host": {"required": False, "type": "str"}, "username": {"required": False, "type": "str"}, "password": {"required": False, "type": "str", "default": "", "no_log": True}, "vdom": {"required": False, "type": "str", "default": "root"}, "https": {"required": False, "type": "bool", "default": True}, "ssl_verify": {"required": False, "type": "bool", "default": True}, "state": {"required": True, "type": "str", "choices": ["present", "absent"]}, "wireless_controller_ble_profile": { "required": False, "type": "dict", "default": None, "options": { "advertising": {"required": False, "type": "str", "choices": ["ibeacon", "eddystone-uid", "eddystone-url"]}, "beacon_interval": {"required": False, "type": "int"}, "ble_scanning": {"required": False, "type": "str", "choices": ["enable", "disable"]}, "comment": {"required": False, "type": "str"}, "eddystone_instance": {"required": False, "type": "str"}, "eddystone_namespace": {"required": False, "type": "str"}, "eddystone_url": {"required": False, "type": "str"}, "eddystone_url_encode_hex": {"required": False, "type": "str"}, "ibeacon_uuid": {"required": False, "type": "str"}, "major_id": {"required": False, "type": "int"}, "minor_id": {"required": False, "type": "int"}, "name": {"required": True, "type": "str"}, "txpower": {"required": False, "type": "str", "choices": ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"]} } } } module = AnsibleModule(argument_spec=fields, supports_check_mode=False) # legacy_mode refers to using fortiosapi instead of HTTPAPI legacy_mode = 'host' in module.params and module.params['host'] is not None and \ 'username' in module.params and module.params['username'] is not None and \ 'password' in module.params and module.params['password'] is not None if not legacy_mode: if module._socket_path: connection = Connection(module._socket_path) fos = FortiOSHandler(connection) is_error, has_changed, result = fortios_wireless_controller(module.params, fos) else: module.fail_json(**FAIL_SOCKET_MSG) else: try: from fortiosapi import FortiOSAPI except ImportError: module.fail_json(msg="fortiosapi module is required") fos = FortiOSAPI() login(module.params, fos) is_error, has_changed, result = fortios_wireless_controller(module.params, fos) fos.logout() if not is_error: module.exit_json(changed=has_changed, meta=result) else: module.fail_json(msg="Error in repo", meta=result) if __name__ == '__main__': main()
gpl-3.0
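A quick, self-contained sketch of the data shaping this module performs before calling the FortiOS API: filter_wireless_controller_ble_profile_data drops anything outside its option list, and underscore_to_hyphen rewrites key names into the hyphenated form FOS expects. The helper below is a trimmed, illustrative copy (shorter option list, hypothetical parameter values), not the module itself.

def filter_ble_profile_data(json):
    # Trimmed option list; the real module enumerates all thirteen keys.
    option_list = ['advertising', 'beacon_interval', 'name', 'txpower']
    return {k: json[k] for k in option_list if json.get(k) is not None}

def underscore_to_hyphen(data):
    # Recursively rewrite dict keys, mirroring the module's helper above.
    if isinstance(data, list):
        return [underscore_to_hyphen(e) for e in data]
    if isinstance(data, dict):
        return {k.replace('_', '-'): underscore_to_hyphen(v) for k, v in data.items()}
    return data

params = {'name': 'office-ble', 'beacon_interval': 100, 'txpower': '5',
          'unknown_key': 'dropped'}
print(underscore_to_hyphen(filter_ble_profile_data(params)))
# -> {'beacon-interval': 100, 'name': 'office-ble', 'txpower': '5'}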
home-assistant/home-assistant
homeassistant/components/guardian/switch.py
2
7888
"""Switches for the Elexa Guardian integration.""" from __future__ import annotations from aioguardian import Client from aioguardian.errors import GuardianError import voluptuous as vol from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_FILENAME, CONF_PORT, CONF_URL from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import ValveControllerEntity from .const import ( API_VALVE_STATUS, CONF_UID, DATA_CLIENT, DATA_COORDINATOR, DATA_PAIRED_SENSOR_MANAGER, DOMAIN, LOGGER, ) ATTR_AVG_CURRENT = "average_current" ATTR_INST_CURRENT = "instantaneous_current" ATTR_INST_CURRENT_DDT = "instantaneous_current_ddt" ATTR_TRAVEL_COUNT = "travel_count" SERVICE_DISABLE_AP = "disable_ap" SERVICE_ENABLE_AP = "enable_ap" SERVICE_PAIR_SENSOR = "pair_sensor" SERVICE_REBOOT = "reboot" SERVICE_RESET_VALVE_DIAGNOSTICS = "reset_valve_diagnostics" SERVICE_UNPAIR_SENSOR = "unpair_sensor" SERVICE_UPGRADE_FIRMWARE = "upgrade_firmware" async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Guardian switches based on a config entry.""" platform = entity_platform.async_get_current_platform() for service_name, schema, method in [ (SERVICE_DISABLE_AP, {}, "async_disable_ap"), (SERVICE_ENABLE_AP, {}, "async_enable_ap"), (SERVICE_PAIR_SENSOR, {vol.Required(CONF_UID): cv.string}, "async_pair_sensor"), (SERVICE_REBOOT, {}, "async_reboot"), (SERVICE_RESET_VALVE_DIAGNOSTICS, {}, "async_reset_valve_diagnostics"), ( SERVICE_UPGRADE_FIRMWARE, { vol.Optional(CONF_URL): cv.url, vol.Optional(CONF_PORT): cv.port, vol.Optional(CONF_FILENAME): cv.string, }, "async_upgrade_firmware", ), ( SERVICE_UNPAIR_SENSOR, {vol.Required(CONF_UID): cv.string}, "async_unpair_sensor", ), ]: platform.async_register_entity_service(service_name, schema, method) async_add_entities( [ ValveControllerSwitch( entry, hass.data[DOMAIN][DATA_CLIENT][entry.entry_id], hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id], ) ] ) class ValveControllerSwitch(ValveControllerEntity, SwitchEntity): """Define a switch to open/close the Guardian valve.""" def __init__( self, entry: ConfigEntry, client: Client, coordinators: dict[str, DataUpdateCoordinator], ) -> None: """Initialize.""" super().__init__( entry, coordinators, "valve", "Valve Controller", None, "mdi:water" ) self._client = client self._is_on = True @property def available(self) -> bool: """Return whether the entity is available.""" return self.coordinators[API_VALVE_STATUS].last_update_success @property def is_on(self) -> bool: """Return True if the valve is open.""" return self._is_on async def _async_continue_entity_setup(self): """Register API interest (and related tasks) when the entity is added.""" self.async_add_coordinator_update_listener(API_VALVE_STATUS) @callback def _async_update_from_latest_data(self) -> None: """Update the entity.""" self._is_on = self.coordinators[API_VALVE_STATUS].data["state"] in ( "start_opening", "opening", "finish_opening", "opened", ) self._attrs.update( { ATTR_AVG_CURRENT: self.coordinators[API_VALVE_STATUS].data[ "average_current" ], ATTR_INST_CURRENT: self.coordinators[API_VALVE_STATUS].data[ "instantaneous_current" ], ATTR_INST_CURRENT_DDT: 
self.coordinators[API_VALVE_STATUS].data[ "instantaneous_current_ddt" ], ATTR_TRAVEL_COUNT: self.coordinators[API_VALVE_STATUS].data[ "travel_count" ], } ) async def async_disable_ap(self): """Disable the device's onboard access point.""" try: async with self._client: await self._client.wifi.disable_ap() except GuardianError as err: LOGGER.error("Error while disabling valve controller AP: %s", err) async def async_enable_ap(self): """Enable the device's onboard access point.""" try: async with self._client: await self._client.wifi.enable_ap() except GuardianError as err: LOGGER.error("Error while enabling valve controller AP: %s", err) async def async_pair_sensor(self, *, uid): """Add a new paired sensor.""" try: async with self._client: await self._client.sensor.pair_sensor(uid) except GuardianError as err: LOGGER.error("Error while adding paired sensor: %s", err) return await self.hass.data[DOMAIN][DATA_PAIRED_SENSOR_MANAGER][ self._entry.entry_id ].async_pair_sensor(uid) async def async_reboot(self): """Reboot the device.""" try: async with self._client: await self._client.system.reboot() except GuardianError as err: LOGGER.error("Error while rebooting valve controller: %s", err) async def async_reset_valve_diagnostics(self): """Fully reset system motor diagnostics.""" try: async with self._client: await self._client.valve.reset() except GuardianError as err: LOGGER.error("Error while resetting valve diagnostics: %s", err) async def async_unpair_sensor(self, *, uid): """Add a new paired sensor.""" try: async with self._client: await self._client.sensor.unpair_sensor(uid) except GuardianError as err: LOGGER.error("Error while removing paired sensor: %s", err) return await self.hass.data[DOMAIN][DATA_PAIRED_SENSOR_MANAGER][ self._entry.entry_id ].async_unpair_sensor(uid) async def async_upgrade_firmware(self, *, url, port, filename): """Upgrade the device firmware.""" try: async with self._client: await self._client.system.upgrade_firmware( url=url, port=port, filename=filename, ) except GuardianError as err: LOGGER.error("Error while upgrading firmware: %s", err) async def async_turn_off(self, **kwargs) -> None: """Turn the valve off (closed).""" try: async with self._client: await self._client.valve.close() except GuardianError as err: LOGGER.error("Error while closing the valve: %s", err) return self._is_on = False self.async_write_ha_state() async def async_turn_on(self, **kwargs) -> None: """Turn the valve on (open).""" try: async with self._client: await self._client.valve.open() except GuardianError as err: LOGGER.error("Error while opening the valve: %s", err) return self._is_on = True self.async_write_ha_state()
apache-2.0
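The switch's on/off state is derived rather than stored on the device: _async_update_from_latest_data above treats any state on the opening side of the valve cycle as "on". A standalone sketch of that mapping, using the state strings from the code (valve_is_on is an illustrative name, not part of the integration):

# States reported by the valve status coordinator that count as "open".
OPEN_STATES = ("start_opening", "opening", "finish_opening", "opened")

def valve_is_on(state: str) -> bool:
    # Mirrors the membership test in _async_update_from_latest_data.
    return state in OPEN_STATES

for state in ("opened", "closing", "start_opening", "closed"):
    print(state, "->", valve_is_on(state))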
nitzmahone/ansible-modules-extras
network/haproxy.py
26
13390
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Ravi Bhure <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: haproxy version_added: "1.9" short_description: Enable, disable, and set weights for HAProxy backend servers using socket commands. description: - Enable, disable, and set weights for HAProxy backend servers using socket commands. notes: - Enable and disable commands are restricted and can only be issued on sockets configured for level 'admin'. For example, you can add the line 'stats socket /var/run/haproxy.sock level admin' to the general section of haproxy.cfg. See http://haproxy.1wt.eu/download/1.5/doc/configuration.txt. options: backend: description: - Name of the HAProxy backend pool. required: false default: auto-detected host: description: - Name of the backend host to change. required: true default: null shutdown_sessions: description: - When disabling a server, immediately terminate all the sessions attached to the specified server. This can be used to terminate long-running sessions after a server is put into maintenance mode. required: false default: false socket: description: - Path to the HAProxy socket file. required: false default: /var/run/haproxy.sock state: description: - Desired state of the provided backend host. required: true default: null choices: [ "enabled", "disabled" ] fail_on_not_found: description: - Fail whenever trying to enable/disable a backend host that does not exist required: false default: false version_added: "2.2" wait: description: - Wait until the server reports a status of 'UP' when `state=enabled`, or status of 'MAINT' when `state=disabled`. required: false default: false version_added: "2.0" wait_interval: description: - Number of seconds to wait between retries. required: false default: 5 version_added: "2.0" wait_retries: description: - Number of times to check for status after changing the state. required: false default: 25 version_added: "2.0" weight: description: - The value passed in argument. If the value ends with the `%` sign, then the new weight will be relative to the initially configured weight. Relative weights are only permitted between 0 and 100% and absolute weights are permitted between 0 and 256. 
required: false default: null ''' EXAMPLES = ''' # disable server in 'www' backend pool - haproxy: state=disabled host={{ inventory_hostname }} backend=www # disable server without backend pool name (apply to all available backend pool) - haproxy: state=disabled host={{ inventory_hostname }} # disable server, provide socket file - haproxy: state=disabled host={{ inventory_hostname }} socket=/var/run/haproxy.sock backend=www # disable server, provide socket file, wait until status reports in maintenance - haproxy: state=disabled host={{ inventory_hostname }} socket=/var/run/haproxy.sock backend=www wait=yes # disable backend server in 'www' backend pool and drop open sessions to it - haproxy: state=disabled host={{ inventory_hostname }} backend=www socket=/var/run/haproxy.sock shutdown_sessions=true # disable server without backend pool name (apply to all available backend pool) but fail when the backend host is not found - haproxy: state=disabled host={{ inventory_hostname }} fail_on_not_found=yes # enable server in 'www' backend pool - haproxy: state=enabled host={{ inventory_hostname }} backend=www # enable server in 'www' backend pool wait until healthy - haproxy: state=enabled host={{ inventory_hostname }} backend=www wait=yes # enable server in 'www' backend pool wait until healthy. Retry 10 times with intervals of 5 seconds to retrieve the health - haproxy: state=enabled host={{ inventory_hostname }} backend=www wait=yes wait_retries=10 wait_interval=5 # enable server in 'www' backend pool with change server(s) weight - haproxy: state=enabled host={{ inventory_hostname }} socket=/var/run/haproxy.sock weight=10 backend=www author: "Ravi Bhure (@ravibhure)" ''' import socket import csv import time from string import Template DEFAULT_SOCKET_LOCATION="/var/run/haproxy.sock" RECV_SIZE = 1024 ACTION_CHOICES = ['enabled', 'disabled'] WAIT_RETRIES=25 WAIT_INTERVAL=5 ###################################################################### class TimeoutException(Exception): pass class HAProxy(object): """ Used for communicating with HAProxy through its local UNIX socket interface. Perform common tasks in Haproxy related to enable server and disable server. The complete set of external commands Haproxy handles is documented on their website: http://haproxy.1wt.eu/download/1.5/doc/configuration.txt#Unix Socket commands """ def __init__(self, module): self.module = module self.state = self.module.params['state'] self.host = self.module.params['host'] self.backend = self.module.params['backend'] self.weight = self.module.params['weight'] self.socket = self.module.params['socket'] self.shutdown_sessions = self.module.params['shutdown_sessions'] self.fail_on_not_found = self.module.params['fail_on_not_found'] self.wait = self.module.params['wait'] self.wait_retries = self.module.params['wait_retries'] self.wait_interval = self.module.params['wait_interval'] self.command_results = {} def execute(self, cmd, timeout=200, capture_output=True): """ Executes a HAProxy command by sending a message to a HAProxy's local UNIX socket and waiting up to 'timeout' milliseconds for the response. 
""" self.client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.client.connect(self.socket) self.client.sendall('%s\n' % cmd) result = '' buf = '' buf = self.client.recv(RECV_SIZE) while buf: result += buf buf = self.client.recv(RECV_SIZE) if capture_output: self.capture_command_output(cmd, result.strip()) self.client.close() return result def capture_command_output(self, cmd, output): """ Capture the output for a command """ if not 'command' in self.command_results.keys(): self.command_results['command'] = [] self.command_results['command'].append(cmd) if not 'output' in self.command_results.keys(): self.command_results['output'] = [] self.command_results['output'].append(output) def discover_all_backends(self): """ Discover all entries with svname = 'BACKEND' and return a list of their corresponding pxnames """ data = self.execute('show stat', 200, False).lstrip('# ') r = csv.DictReader(data.splitlines()) return map(lambda d: d['pxname'], filter(lambda d: d['svname'] == 'BACKEND', r)) def execute_for_backends(self, cmd, pxname, svname, wait_for_status = None): """ Run some command on the specified backends. If no backends are provided they will be discovered automatically (all backends) """ # Discover backends if none are given if pxname is None: backends = self.discover_all_backends() else: backends = [pxname] # Run the command for each requested backend for backend in backends: # Fail when backends were not found state = self.get_state_for(backend, svname) if (self.fail_on_not_found or self.wait) and state is None: self.module.fail_json(msg="The specified backend '%s/%s' was not found!" % (backend, svname)) self.execute(Template(cmd).substitute(pxname = backend, svname = svname)) if self.wait: self.wait_until_status(backend, svname, wait_for_status) def get_state_for(self, pxname, svname): """ Find the state of specific services. When pxname is not set, get all backends for a specific host. Returns a list of dictionaries containing the status and weight for those services. """ data = self.execute('show stat', 200, False).lstrip('# ') r = csv.DictReader(data.splitlines()) state = map(lambda d: { 'status': d['status'], 'weight': d['weight'] }, filter(lambda d: (pxname is None or d['pxname'] == pxname) and d['svname'] == svname, r)) return state or None def wait_until_status(self, pxname, svname, status): """ Wait for a service to reach the specified status. Try RETRIES times with INTERVAL seconds of sleep in between. If the service has not reached the expected status in that time, the module will fail. If the service was not found, the module will fail. """ for i in range(1, self.wait_retries): state = self.get_state_for(pxname, svname) # We can assume there will only be 1 element in state because both svname and pxname are always set when we get here if state[0]['status'] == status: return True else: time.sleep(self.wait_interval) self.module.fail_json(msg="server %s/%s not status '%s' after %d retries. Aborting." % (pxname, svname, status, self.wait_retries)) def enabled(self, host, backend, weight): """ Enabled action, marks server to UP and checks are re-enabled, also supports to get current weight for server (default) and set the weight for haproxy backend server when provides. """ cmd = "get weight $pxname/$svname; enable server $pxname/$svname" if weight: cmd += "; set weight $pxname/$svname %s" % weight self.execute_for_backends(cmd, backend, host, 'UP') def disabled(self, host, backend, shutdown_sessions): """ Disabled action, marks server to DOWN for maintenance. 
In this mode, no more checks will be performed on the server until it leaves maintenance, also it shutdown sessions while disabling backend host server. """ cmd = "get weight $pxname/$svname; disable server $pxname/$svname" if shutdown_sessions: cmd += "; shutdown sessions server $pxname/$svname" self.execute_for_backends(cmd, backend, host, 'MAINT') def act(self): """ Figure out what you want to do from ansible, and then do it. """ # Get the state before the run state_before = self.get_state_for(self.backend, self.host) self.command_results['state_before'] = state_before # toggle enable/disbale server if self.state == 'enabled': self.enabled(self.host, self.backend, self.weight) elif self.state == 'disabled': self.disabled(self.host, self.backend, self.shutdown_sessions) else: self.module.fail_json(msg="unknown state specified: '%s'" % self.state) # Get the state after the run state_after = self.get_state_for(self.backend, self.host) self.command_results['state_after'] = state_after # Report change status if state_before != state_after: self.command_results['changed'] = True self.module.exit_json(**self.command_results) else: self.command_results['changed'] = False self.module.exit_json(**self.command_results) def main(): # load ansible module object module = AnsibleModule( argument_spec = dict( state = dict(required=True, default=None, choices=ACTION_CHOICES), host=dict(required=True, default=None), backend=dict(required=False, default=None), weight=dict(required=False, default=None), socket = dict(required=False, default=DEFAULT_SOCKET_LOCATION), shutdown_sessions=dict(required=False, default=False, type='bool'), fail_on_not_found=dict(required=False, default=False, type='bool'), wait=dict(required=False, default=False, type='bool'), wait_retries=dict(required=False, default=WAIT_RETRIES, type='int'), wait_interval=dict(required=False, default=WAIT_INTERVAL, type='int'), ), ) if not socket: module.fail_json(msg="unable to locate haproxy socket") ansible_haproxy = HAProxy(module) ansible_haproxy.act() # import module snippets from ansible.module_utils.basic import * main()
gpl-3.0
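Outside Ansible, the socket protocol this module wraps is easy to exercise directly. Below is a minimal sketch (assuming a local admin-level socket at the default path) of the same round-trip that execute() and get_state_for() perform: send one command per connection, read until EOF, and parse the CSV that 'show stat' returns. show_stat is an illustrative helper name.

import csv
import socket

def show_stat(sock_path='/var/run/haproxy.sock'):
    # One command per connection, exactly as HAProxy.execute() does.
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(sock_path)
    client.sendall(b'show stat\n')
    chunks = []
    buf = client.recv(1024)
    while buf:
        chunks.append(buf)
        buf = client.recv(1024)
    client.close()
    # The response starts with '# pxname,svname,...'; strip the comment marker.
    data = b''.join(chunks).decode().lstrip('# ')
    return list(csv.DictReader(data.splitlines()))

# for row in show_stat():
#     print(row['pxname'], row['svname'], row['status'], row['weight'])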
robmcl4/Coinex
arbitrage.py
1
16316
""" arbitrage.py Check for arbitrage opportunities. USAGE: python arbitrage.py [--all] --all Display all arbitrage opportunities, not just profitable ones """ from models import * from decimal import * import utils import sys # the coinex transaction fee TRANSAC_FEE = 0.002 # the minimum amount of to_currency required for a transaction MIN_TRANSAC = 0.01 class SmartExchange(Exchange): """ Defines a SmartExchange, which can tell the current trading price via a weighted average """ def __init__(self, exc): """ Make a new SmartExchange around the given exchange """ self._loaded = exc._loaded self.id = exc.id self.from_currency = exc.from_currency self.to_currency = exc.to_currency def get_orders(self): """ Memoize getting the orders """ if hasattr(self, '_orders'): return self._orders self._orders = super().get_orders() return self._orders def get_best_offer(self, target_cur): """ Memoize getting the best offer for a currency """ if hasattr(self, '_best_offers'): if target_cur.id in self._best_offers: return self._best_offers[target_cur.id] else: self._best_offers = dict() ret = super().get_best_offer(target_cur) self._best_offers[target_cur.id] = ret return ret def convert_to_other(self, amt, target_cur): """ Convert the given amount of coin to the target currency using the most fiar trade, returns the amount of the new currency """ amt = Decimal(amt) if target_cur == self.to_currency: return amt / self.get_best_offer(target_cur).rate elif target_cur == self.from_currency: return amt * self.get_best_offer(target_cur).rate else: raise ValueError( 'Unsupported currency for this exchange ' + target_cur.abbreviation ) def is_enough(self, amt, cur): """ Returns True if the given amt is enough to be traded. amt: a Decimal of the amount to check cur: the currency of amt Otherwise returns False """ if cur == self.to_currency: return amt > MIN_TRANSAC elif cur == self.from_currency: new_amt = amt / self.get_best_offer(self.to_currency).rate return new_amt > MIN_TRANSAC else: raise ValueError("Invalid currency") def max_currency(self, target_cur): """ Returns a Decimal of the maximum amount of currency that can be exchanged into target_cur in units of the currency that is not target_cur NOTE: this accounts for the transaction fee """ tfee = Decimal(1 - TRANSAC_FEE) if target_cur == self.to_currency: # we need to end up with units of from_currency best_order = self.get_lowest_ask() # filter out non-asks orders = filter( lambda x: x.bid is False, self.get_orders() ) # filter out orders not of the same rate # maybe multiple orders exist? 
orders = filter( lambda x: x.rate == best_order.rate, orders ) # we need to return in units of from_currency # amount is in units of to_currency # order.rate is in from_currency per to_currency ret = Decimal(0) for order in orders: ret += (order.amount - order.filled) * order.rate return Decimal(ret * tfee) elif target_cur == self.from_currency: best_order = self.get_highest_bid() # filter out non-bids orders = filter( lambda x: x.bid is True, self.get_orders() ) # filter out orders not of the same rate orders = filter( lambda x: x.rate == best_order.rate, orders ) # we need to return in units of to_currency # balance.amount is in units of to_currency # order.rate is in from_currency per to_currency ret = Decimal(0) for order in orders: ret += order.amount - order.filled return Decimal(ret * tfee) raise ValueError( 'Unsupported currency for this exchange ' + target_cur.abbreviation ) class ArbitrageChain: """ Defines the series of exchanges through which an arbitrage can be run """ def __init__(self, ex1, ex2, ex3): self._roi = None self.ex1 = ex1 self.ex2 = ex2 self.ex3 = ex3 self.cur1 = ex1.from_currency self.cur2 = ex1.to_currency if ex2.to_currency == ex1.to_currency: self.cur3 = ex2.from_currency elif ex2.from_currency == ex1.to_currency: self.cur3 = ex2.to_currency else: raise ValueError("Unsupported 2nd exchange combination") # verify the third exchange's validity ex3_curs = [ex3.to_currency, ex3.from_currency] if not self.cur1 in ex3_curs or not self.cur3 in ex3_curs: raise ValueError("Unsupported 3rd exchange combination") def get_roi(self): """ Get the return on investment. Returns a Decimal of the ROI or None if this chain cannot be executed NOTE: 100% is returned as Decimal(1.0) NOTE: this is memoized """ if self._roi is not None: return self._roi tfee = Decimal(1 - TRANSAC_FEE) # we are starting with 1 unit of ex1.from_currency amt = Decimal(1) # make sure it is enough to convert if not self.ex1.is_enough(amt, self.cur1): return None # now convert to cur2 amt = (self.ex1.convert_to_other(amt, self.cur2)) * tfee # again make sure it is enough to convert if not self.ex2.is_enough(amt, self.cur2): return None # now convert to cur3 amt = (self.ex2.convert_to_other(amt, self.cur3)) * tfee # again make sure it is enough to convert if not self.ex3.is_enough(amt, self.cur3): return None # now convert back to cur1 amt = (self.ex3.convert_to_other(amt, self.cur1)) * tfee # let's see what we got back! 
return the ROI self._roi = Decimal(amt - Decimal(1)) return self._roi def get_max_transfer(self): """ Get the max that can be transferred through this chain returns in units of currency 1 NOTE: this is memoized """ if hasattr(self, '_max_transfer'): return self._max_transfer tfee = Decimal(1 - TRANSAC_FEE) # max3 is currently in units of cur3, convert to cur1 backward max3 = self.ex3.max_currency(target_cur=self.cur1) # max3 is now in units of cur2 max3 = self.ex2.convert_to_other(amt=max3, target_cur=self.cur2) / tfee # max3 is now in units of cur1 max3 = self.ex1.convert_to_other(amt=max3, target_cur=self.cur1) / tfee # max2 is currently in units of cur2, convert to cur1 backward max2 = self.ex2.max_currency(target_cur=self.cur3) # max2 is now in units of cur1 max2 = self.ex1.convert_to_other(amt=max2, target_cur=self.cur1) / tfee # max1 is currently in units of cur1 max1 = self.ex1.max_currency(target_cur=self.cur2) ret = min(max1, max2, max3) self._max_transfer = ret return ret def get_min_transfer(self): """ Get the least amount of cur1 that can be put through the system NOTE: this is memoized """ if hasattr(self, '_min_transfer'): return self._min_transfer tfee = Decimal(1 - TRANSAC_FEE) # get the minimum of cur1 we can trade if self.cur1 == self.ex3.to_currency: # min1 is in units of cur1, convert backward through the chain min1 = Decimal(MIN_TRANSAC) min1 = self.ex3.convert_to_other(amt=min1, target_cur=self.cur3) min1 /= tfee min1 = self.ex2.convert_to_other(amt=min1, target_cur=self.cur2) min1 /= tfee min1 = self.ex1.convert_to_other(amt=min1, target_cur=self.cur1) min1 /= tfee else: min1 = Decimal(0) if self.cur3 in [self.ex2.to_currency, self.ex3.to_currency]: min3 = Decimal(MIN_TRANSAC) min3 = self.ex2.convert_to_other(amt=min3, target_cur=self.cur2) min3 /= tfee min3 = self.ex1.convert_to_other(amt=min3, target_cur=self.cur1) min3 /= tfee else: min3 = Decimal(0) if self.cur2 in [self.ex1.to_currency, self.ex2.to_currency]: min2 = Decimal(MIN_TRANSAC) min2 = self.ex1.convert_to_other(amt=min2, target_cur=self.cur1) min2 /= tfee else: min2 = Decimal(0) ret = max(min1, min2, min3) self._min_transfer = ret return ret def can_execute(self): """ Returns true if the user currently has some of the first currency and this chain's max is greater than the min. NOTE: this memoizes the wallet balances """ if self.get_min_transfer() >= self.get_max_transfer(): return False if not hasattr(ArbitrageChain, '_bals') or not ArbitrageChain._bals: ArbitrageChain._bals = Wallet.get_balances() for bal in ArbitrageChain._bals: if bal.currency == self.cur1 and bal.amount > 0: print("{0} {1}".format(bal.currency.abbreviation, bal.amount)) return True return False def perform_chain_operation(self, amt, target_cur, exchange): """ Trade the given amount (of not target_cur) over the exchange. 
Returns the amount of target_cur that we now have """ tfee = Decimal(1 - Decimal(TRANSAC_FEE)) from_cur = exchange.from_currency if exchange.from_currency == target_cur: from_cur = exchange.to_currency best = exchange.get_best_offer(target_cur) print('Buying {0} of {1}'.format( str(amt), target_cur.abbreviation )) # amount must always be in terms of the 'to_currency', # convert if needed if exchange.to_currency != from_cur: amt = self.ex1.convert_to_other(amt, target_cur) ordr = best.get_compliment(max_amt=amt) try: ordr.submit() except Exception as e: if hasattr(e, 'read'): print(e.read()) raise e if (ordr.complete is not True): print("waiting for order to complete") ordr = utils.wait_for_order_to_complete(ordr.id) amt *= tfee print("now have {0} of {1}".format( str(amt), target_cur.abbreviation )) return amt def execute(self): """ Perform the trades necessary to complete this chain """ while True: try: amt = input("How much currency to use? ({0}) ".format( self.cur1.abbreviation )) amt = Decimal(amt) break except InvalidOperation: print("Invalid amount. Enter again.") amt = self.perform_chain_operation( amt, self.cur2, self.ex1 ) amt = self.perform_chain_operation( amt, self.cur3, self.ex2 ) amt = self.perform_chain_operation( amt, self.cur1, self.ex3 ) # reset the record of balances ArbitrageChain._bals = None print("finished") def __str__(self): ret = '' ret += self.cur1.abbreviation.rjust(4) ret += ' -> ' ret += self.cur2.abbreviation.rjust(4) ret += ' -> ' ret += self.cur3.abbreviation.rjust(4) ret += ' -> ' ret += self.cur1.abbreviation.rjust(4) roi = self.get_roi() if roi: ret += ' ({0})%'.format(str(roi * 100)) else: ret += ' (Not Exchangeable)' ret += ' ({0} to {1} {2})'.format( str(self.get_max_transfer()), str(self.get_min_transfer()), self.cur1.abbreviation ) ret += '\n' def describe_exchange(ex, to_currency): return '-> {0} {1}/{2}'.format( ex.get_best_offer(to_currency).rate, ex.from_currency.abbreviation, ex.to_currency.abbreviation ) ret += describe_exchange(self.ex1, self.cur2) + '\n' ret += describe_exchange(self.ex2, self.cur3) + '\n' ret += describe_exchange(self.ex3, self.cur1) return ret def offer_execute_chain(chain): """ Ask the user if they would like to execute a given chain. If they answer positively, the chain is executed """ answer = input("Would you like to execute this chain? 
(y/N) ") if answer.lower() in ['y', 'yes']: chain.execute() else: print("Not executing chain") def valid(exc, cur1, cur2=None, exclude=None, exclude_cur=None): """ Find if the given exc satisfies currency 1 (currency 2) (and is not exclude) (and currency is not exclude) """ if exclude is not None and exc == exclude: return False curs = [exc.to_currency, exc.from_currency] if exclude_cur is not None and exclude_cur in curs: return False if cur2 is not None: return cur1 in curs and cur2 in curs return cur1 in curs def get_chains(): """ Get a list of all arbitrage chains """ excs = Exchange.get_all() ret = [] for ex1 in excs: exld = ex1.from_currency viable1 = filter( lambda x: valid(x, ex1.to_currency, exclude=ex1, exclude_cur=exld), excs ) for ex2 in viable1: if ex2.to_currency == ex1.to_currency: cur = ex2.from_currency else: cur = ex2.to_currency viable2 = filter( lambda x: valid(x, cur, ex1.from_currency, ex2), excs ) for ex3 in viable2: ex1 = SmartExchange(ex1) ex2 = SmartExchange(ex2) ex3 = SmartExchange(ex3) ret.append(ArbitrageChain(ex1, ex2, ex3)) return ret def get_profitable_chains(len_cb=None, iter_cb=None): """ Get alist of all profitable arbitrage chains """ chains = get_chains() if len_cb: len_cb(len(chains)) for chain in chains: if iter_cb: iter_cb() roi = chain.get_roi() if roi and roi > 0: yield chain def show_all(): """ Print out all possible arbitrages, regardless of profit """ print("-------Getting All Chains-------") chains = get_chains() for chain in chains: print(str(chain)) if chain.can_execute(): offer_execute_chain(chain) else: print('This chain cannot be executed') print('Found {0} arbitrage chains'.format(len(chains))) def show_profitable(): """ Print out only profitable arbitrages """ print("-------Getting Profitable Chains-------") chains = get_profitable_chains() n = 0 for chain in chains: print(str(chain)) if chain.can_execute(): offer_execute_chain(chain) else: print('This chain cannot be executed') n += 1 print('Found {0} arbitrage chains'.format(n)) def main(): try: if '--all' in sys.argv: show_all() else: show_profitable() except KeyboardInterrupt: print("Exiting") if __name__ == '__main__': main()
apache-2.0
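The profitability test in ArbitrageChain.get_roi() is compound multiplication with a fee on every leg. A standalone sketch of that arithmetic (chain_roi and the rates are hypothetical): with the 0.2% fee charged on each of the three trades, the raw rate product has to beat roughly 1/0.998**3 ≈ 1.006 before the ROI turns positive.

from decimal import Decimal

TRANSAC_FEE = Decimal('0.002')

def chain_roi(rates):
    # Start with one unit of cur1 and pay the fee on every conversion,
    # as get_roi() does across its three exchanges.
    tfee = Decimal(1) - TRANSAC_FEE
    amt = Decimal(1)
    for rate in rates:
        amt = amt * Decimal(str(rate)) * tfee
    return amt - Decimal(1)

print(chain_roi([1.01, 1.002, 0.999]))  # ~0.005, i.e. about 0.5% profit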
KingsCross/Quadrotor
Tools/autotest/dump_logs.py
229
1762
#!/usr/bin/env python
# dump flash logs from SITL
# Andrew Tridgell, April 2013

import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal

sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
import util

############## main program #############
parser = optparse.OptionParser(sys.argv[0])
parser.add_option("--cli", action='store_true', default=False, help='put us in the CLI menu in logs')

opts, args = parser.parse_args()

os.environ['PYTHONUNBUFFERED'] = '1'

def dump_logs(atype):
    '''dump DataFlash logs'''
    logfile = '%s.log' % atype
    print("Dumping logs for %s to %s" % (atype, logfile))
    sil = util.start_SIL(atype)
    log = open(logfile, mode='w')
    mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
    mavproxy.send('\n\n\n')
    print("navigating menus")
    mavproxy.expect(']')
    mavproxy.send("logs\n")
    if opts.cli:
        mavproxy.interact()
        return
    mavproxy.expect("logs enabled:")
    lognums = []
    i = mavproxy.expect(["No logs", "(\d+) logs"])
    if i == 0:
        numlogs = 0
    else:
        numlogs = int(mavproxy.match.group(1))
    for i in range(numlogs):
        mavproxy.expect("Log (\d+)")
        lognums.append(int(mavproxy.match.group(1)))
    mavproxy.expect("Log]")
    for i in range(numlogs):
        print("Dumping log %u (i=%u)" % (lognums[i], i))
        mavproxy.send("dump %u\n" % lognums[i])
        mavproxy.expect("logs enabled:", timeout=120)
        mavproxy.expect("Log]")
    util.pexpect_close(mavproxy)
    util.pexpect_close(sil)
    log.close()
    print("Saved log for %s to %s" % (atype, logfile))
    return True

vehicle = os.path.basename(os.getcwd())
dump_logs(vehicle)
gpl-3.0
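The script leans on two pexpect behaviors worth noting: expect() with a list returns the index of whichever pattern matched, and a regex capture is then available on child.match. A tiny POSIX-only sketch of that pattern outside SITL, with echo standing in for the MAVProxy log menu:

import pexpect

# 'echo' stands in for the MAVProxy "N logs" menu output.
child = pexpect.spawn('echo "3 logs"')
i = child.expect(["No logs", r"(\d+) logs"])
numlogs = 0 if i == 0 else int(child.match.group(1))
print(numlogs)  # -> 3
child.close()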
seaotterman/tensorflow
tensorflow/contrib/linalg/python/ops/linear_operator_addition.py
30
15248
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Add one or more `LinearOperators` efficiently.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import six from tensorflow.contrib.linalg.python.ops import linear_operator from tensorflow.contrib.linalg.python.ops import linear_operator_diag from tensorflow.contrib.linalg.python.ops import linear_operator_full_matrix from tensorflow.contrib.linalg.python.ops import linear_operator_identity from tensorflow.contrib.linalg.python.ops import linear_operator_tril from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops __all__ = [] def add_operators(operators, operator_name=None, addition_tiers=None, name=None): """Efficiently add one or more linear operators. Given operators `[A1, A2,...]`, this `Op` returns a possibly shorter list of operators `[B1, B2,...]` such that ```sum_k Ak.apply(x) = sum_k Bk.apply(x).``` The operators `Bk` result by adding some of the `Ak`, as allowed by `addition_tiers`. Example of efficient adding of diagonal operators. ```python A1 = LinearOperatorDiag(diag=[1., 1.], name="A1") A2 = LinearOperatorDiag(diag=[2., 2.], name="A2") # Use two tiers, the first contains an Adder that returns Diag. Since both # A1 and A2 are Diag, they can use this Adder. The second tier will not be # used. addition_tiers = [ [_AddAndReturnDiag()], [_AddAndReturnMatrix()]] B_list = add_operators([A1, A2], addition_tiers=addition_tiers) len(B_list) ==> 1 B_list[0].__class__.__name__ ==> 'LinearOperatorDiag' B_list[0].to_dense() ==> [[3., 0.], [0., 3.]] B_list[0].name ==> 'Add/A1__A2/' ``` Args: operators: Iterable of `LinearOperator` objects with same `dtype`, domain and range dimensions, and broadcastable batch shapes. operator_name: String name for returned `LinearOperator`. Defaults to concatenation of "Add/A__B/" that indicates the order of addition steps. addition_tiers: List tiers, like `[tier_0, tier_1, ...]`, where `tier_i` is a list of `Adder` objects. This function attempts to do all additions in tier `i` before trying tier `i + 1`. name: A name for this `Op`. Defaults to `add_operators`. Returns: Subclass of `LinearOperator`. Class and order of addition may change as new (and better) addition strategies emerge. Raises: ValueError: If `operators` argument is empty. ValueError: If shapes are incompatible. """ # Default setting if addition_tiers is None: addition_tiers = _DEFAULT_ADDITION_TIERS # Argument checking. check_ops.assert_proper_iterable(operators) operators = list(reversed(operators)) if len(operators) < 1: raise ValueError( "Argument 'operators' must contain at least one operator. 
" "Found: %s" % operators) if not all( isinstance(op, linear_operator.LinearOperator) for op in operators): raise TypeError( "Argument 'operators' must contain only LinearOperator instances. " "Found: %s" % operators) _static_check_for_same_dimensions(operators) _static_check_for_broadcastable_batch_shape(operators) graph_parents = [] for operator in operators: graph_parents.extend(operator.graph_parents) with ops.name_scope(name or "add_operators", values=graph_parents): # Additions done in one of the tiers. Try tier 0, 1,... ops_to_try_at_next_tier = list(operators) for tier in addition_tiers: ops_to_try_at_this_tier = ops_to_try_at_next_tier ops_to_try_at_next_tier = [] while ops_to_try_at_this_tier: op1 = ops_to_try_at_this_tier.pop() op2, adder = _pop_a_match_at_tier(op1, ops_to_try_at_this_tier, tier) if op2 is not None: # Will try to add the result of this again at this same tier. new_operator = adder.add(op1, op2, operator_name) ops_to_try_at_this_tier.append(new_operator) else: ops_to_try_at_next_tier.append(op1) return ops_to_try_at_next_tier def _pop_a_match_at_tier(op1, operator_list, tier): # Search from the back of list to the front in order to create nice default # order of operations. for i in range(1, len(operator_list) + 1): op2 = operator_list[-i] for adder in tier: if adder.can_add(op1, op2): return operator_list.pop(-i), adder return None, None def _infer_hints_allowing_override(op1, op2, hints): """Infer hints from op1 and op2. hints argument is an override. Args: op1: LinearOperator op2: LinearOperator hints: _Hints object holding "is_X" boolean hints to use for returned operator. If some hint is None, try to set using op1 and op2. If the hint is provided, ignore op1 and op2 hints. This allows an override of previous hints, but does not allow forbidden hints (e.g. you still cannot say a real diagonal operator is not self-adjoint. Returns: _Hints object. """ hints = hints or _Hints() # If A, B are self-adjoint, then so is A + B. if hints.is_self_adjoint is None: is_self_adjoint = op1.is_self_adjoint and op2.is_self_adjoint else: is_self_adjoint = hints.is_self_adjoint # If A, B are positive definite, then so is A + B. if hints.is_positive_definite is None: is_positive_definite = op1.is_positive_definite and op2.is_positive_definite else: is_positive_definite = hints.is_positive_definite # A positive definite operator is always non-singular. if is_positive_definite and hints.is_positive_definite is None: is_non_singular = True else: is_non_singular = hints.is_non_singular return _Hints( is_non_singular=is_non_singular, is_self_adjoint=is_self_adjoint, is_positive_definite=is_positive_definite) def _static_check_for_same_dimensions(operators): """ValueError if operators determined to have different dimensions.""" if len(operators) < 2: return domain_dimensions = [(op.name, op.domain_dimension.value) for op in operators if op.domain_dimension.value is not None] if len(set(value for name, value in domain_dimensions)) > 1: raise ValueError("Operators must have the same domain dimension. Found: %s" % domain_dimensions) range_dimensions = [(op.name, op.range_dimension.value) for op in operators if op.range_dimension.value is not None] if len(set(value for name, value in range_dimensions)) > 1: raise ValueError("Operators must have the same range dimension. 
Found: %s" % range_dimensions) def _static_check_for_broadcastable_batch_shape(operators): """ValueError if operators determined to have non-broadcastable shapes.""" if len(operators) < 2: return # This will fail if they cannot be broadcast together. batch_shape = operators[0].batch_shape for op in operators[1:]: batch_shape = array_ops.broadcast_static_shape(batch_shape, op.batch_shape) class _Hints(object): """Holds 'is_X' flags that every LinearOperator is initialized with.""" def __init__(self, is_non_singular=None, is_positive_definite=None, is_self_adjoint=None): self.is_non_singular = is_non_singular self.is_positive_definite = is_positive_definite self.is_self_adjoint = is_self_adjoint ################################################################################ # Classes to add two linear operators. ################################################################################ @six.add_metaclass(abc.ABCMeta) class _Adder(object): """Abstract base class to add two operators. Each `Adder` acts independently, adding everything it can, paying no attention as to whether another `Adder` could have done the addition more efficiently. """ @property def name(self): return self.__class__.__name__ @abc.abstractmethod def can_add(self, op1, op2): """Returns `True` if this `Adder` can add `op1` and `op2`. Else `False`.""" pass @abc.abstractmethod def _add(self, op1, op2, operator_name, hints): # Derived classes can assume op1 and op2 have been validated, e.g. they have # the same dtype, and their domain/range dimensions match. pass def add(self, op1, op2, operator_name, hints=None): """Return new `LinearOperator` acting like `op1 + op2`. Args: op1: `LinearOperator` op2: `LinearOperator`, with `shape` and `dtype` such that adding to `op1` is allowed. operator_name: `String` name to give to returned `LinearOperator` hints: `_Hints` object. Returned `LinearOperator` will be created with these hints. Returns: `LinearOperator` """ updated_hints = _infer_hints_allowing_override(op1, op2, hints) if operator_name is None: operator_name = "Add/" + op1.name + "__" + op2.name + "/" values = op1.graph_parents + op2.graph_parents scope_name = self.name if scope_name.startswith("_"): scope_name = scope_name[1:] with ops.name_scope(scope_name, values=values): return self._add(op1, op2, operator_name, updated_hints) class _AddAndReturnScaledIdentity(_Adder): """Handles additions resulting in an Identity family member. The Identity (`LinearOperatorScaledIdentity`, `LinearOperatorIdentity`) family is closed under addition. This `Adder` respects that, and returns an Identity """ def can_add(self, op1, op2): types = {_type(op1), _type(op2)} return not types.difference(_IDENTITY_FAMILY) def _add(self, op1, op2, operator_name, hints): # Will build a LinearOperatorScaledIdentity. 
if _type(op1) == _SCALED_IDENTITY: multiplier_1 = op1.multiplier else: multiplier_1 = array_ops.ones(op1.batch_shape_tensor(), dtype=op1.dtype) if _type(op2) == _SCALED_IDENTITY: multiplier_2 = op2.multiplier else: multiplier_2 = array_ops.ones(op2.batch_shape_tensor(), dtype=op2.dtype) return linear_operator_identity.LinearOperatorScaledIdentity( num_rows=op1.range_dimension_tensor(), multiplier=multiplier_1 + multiplier_2, is_non_singular=hints.is_non_singular, is_self_adjoint=hints.is_self_adjoint, is_positive_definite=hints.is_positive_definite, name=operator_name) class _AddAndReturnDiag(_Adder): """Handles additions resulting in a Diag operator.""" def can_add(self, op1, op2): types = {_type(op1), _type(op2)} return not types.difference(_DIAG_LIKE) def _add(self, op1, op2, operator_name, hints): return linear_operator_diag.LinearOperatorDiag( diag=op1.diag_part() + op2.diag_part(), is_non_singular=hints.is_non_singular, is_self_adjoint=hints.is_self_adjoint, is_positive_definite=hints.is_positive_definite, name=operator_name) class _AddAndReturnTriL(_Adder): """Handles additions resulting in a TriL operator.""" def can_add(self, op1, op2): types = {_type(op1), _type(op2)} return not types.difference(_DIAG_LIKE.union({_TRIL})) def _add(self, op1, op2, operator_name, hints): if _type(op1) in _EFFICIENT_ADD_TO_TENSOR: op_add_to_tensor, op_other = op1, op2 else: op_add_to_tensor, op_other = op2, op1 return linear_operator_tril.LinearOperatorTriL( tril=op_add_to_tensor.add_to_tensor(op_other.to_dense()), is_non_singular=hints.is_non_singular, is_self_adjoint=hints.is_self_adjoint, is_positive_definite=hints.is_positive_definite, name=operator_name) class _AddAndReturnMatrix(_Adder): """"Handles additions resulting in a `LinearOperatorFullMatrix`.""" def can_add(self, op1, op2): # pylint: disable=unused-argument return isinstance(op1, linear_operator.LinearOperator) and isinstance( op2, linear_operator.LinearOperator) def _add(self, op1, op2, operator_name, hints): if _type(op1) in _EFFICIENT_ADD_TO_TENSOR: op_add_to_tensor, op_other = op1, op2 else: op_add_to_tensor, op_other = op2, op1 return linear_operator_full_matrix.LinearOperatorFullMatrix( matrix=op_add_to_tensor.add_to_tensor(op_other.to_dense()), is_non_singular=hints.is_non_singular, is_self_adjoint=hints.is_self_adjoint, is_positive_definite=hints.is_positive_definite, name=operator_name) ################################################################################ # Constants designating types of LinearOperators ################################################################################ # Type name constants for LinearOperator classes. _IDENTITY = "identity" _SCALED_IDENTITY = "scaled_identity" _DIAG = "diag" _TRIL = "tril" _MATRIX = "matrix" # Groups of operators. _DIAG_LIKE = {_DIAG, _IDENTITY, _SCALED_IDENTITY} _IDENTITY_FAMILY = {_IDENTITY, _SCALED_IDENTITY} # operators with an efficient .add_to_tensor() method. _EFFICIENT_ADD_TO_TENSOR = _DIAG_LIKE def _type(operator): """Returns the type name constant (e.g. 
_TRIL) for operator.""" if isinstance(operator, linear_operator_diag.LinearOperatorDiag): return _DIAG if isinstance(operator, linear_operator_tril.LinearOperatorTriL): return _TRIL if isinstance(operator, linear_operator_full_matrix.LinearOperatorFullMatrix): return _MATRIX if isinstance(operator, linear_operator_identity.LinearOperatorIdentity): return _IDENTITY if isinstance(operator, linear_operator_identity.LinearOperatorScaledIdentity): return _SCALED_IDENTITY raise TypeError("Operator type unknown: %s" % operator) ################################################################################ # Addition tiers: # We attempt to use Adders in tier K before K+1. # # Organize tiers to # (i) reduce O(..) complexity of forming final operator, and # (ii) produce the "most efficient" final operator. # Dev notes: # * Results of addition at tier K will be added at tier K or higher. # * Tiers may change, and we warn the user that it may change. ################################################################################ # Note that the final tier, _AddAndReturnMatrix, will convert everything to a # dense matrix. So it is sometimes very inefficient. _DEFAULT_ADDITION_TIERS = [ [_AddAndReturnScaledIdentity()], [_AddAndReturnDiag()], [_AddAndReturnTriL()], [_AddAndReturnMatrix()], ]
apache-2.0
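For the common case the default tiers already do the right thing; the module's own docstring example can be exercised roughly as below (TF 1.x-era contrib layout, matching the path above). Two diagonal operators collapse to a single LinearOperatorDiag whose diagonal is the elementwise sum:

from tensorflow.contrib.linalg.python.ops import linear_operator_addition
from tensorflow.contrib.linalg.python.ops import linear_operator_diag

A1 = linear_operator_diag.LinearOperatorDiag(diag=[1., 1.], name="A1")
A2 = linear_operator_diag.LinearOperatorDiag(diag=[2., 2.], name="A2")

# The _AddAndReturnDiag tier fires before the dense fallback.
B_list = linear_operator_addition.add_operators([A1, A2])
print(len(B_list))                   # -> 1
print(B_list[0].__class__.__name__)  # -> 'LinearOperatorDiag'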
vicnet/weboob
modules/meslieuxparis/module.py
1
1444
# -*- coding: utf-8 -*-

# Copyright(C) 2018 Vincent A
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from weboob.tools.backend import Module
from weboob.capabilities.contact import CapDirectory

from .browser import MeslieuxparisBrowser

__all__ = ['MeslieuxparisModule']


class MeslieuxparisModule(Module, CapDirectory):
    NAME = 'meslieuxparis'
    DESCRIPTION = 'MesLieux public Paris places'
    MAINTAINER = 'Vincent A'
    EMAIL = '[email protected]'
    LICENSE = 'AGPLv3+'
    VERSION = '1.6'

    BROWSER = MeslieuxparisBrowser

    def search_contacts(self, query, sortby):
        if query.city and query.city.lower() != 'paris':
            return []
        return self.browser.search_contacts(query.name.lower())
lgpl-3.0
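The only logic in search_contacts() is the city guard: the module serves Paris exclusively, so any other city short-circuits to an empty result. A standalone sketch of that guard; QueryStub is a hypothetical stand-in for weboob's real query object, which the code above only touches through .city and .name.

class QueryStub:
    def __init__(self, name, city=None):
        self.name = name
        self.city = city

def city_allows_search(query):
    # Mirrors the guard at the top of MeslieuxparisModule.search_contacts.
    return not (query.city and query.city.lower() != 'paris')

print(city_allows_search(QueryStub('louvre', 'Paris')))  # -> True
print(city_allows_search(QueryStub('louvre', 'Lyon')))   # -> False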
PLyczkowski/Sticky-Keymap
2.74/python/lib/encodings/mac_centeuro.py
257
14102
""" Python Character Mapping Codec mac_centeuro generated from 'MAPPINGS/VENDORS/APPLE/CENTEURO.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='mac-centeuro', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( '\x00' # 0x00 -> CONTROL CHARACTER '\x01' # 0x01 -> CONTROL CHARACTER '\x02' # 0x02 -> CONTROL CHARACTER '\x03' # 0x03 -> CONTROL CHARACTER '\x04' # 0x04 -> CONTROL CHARACTER '\x05' # 0x05 -> CONTROL CHARACTER '\x06' # 0x06 -> CONTROL CHARACTER '\x07' # 0x07 -> CONTROL CHARACTER '\x08' # 0x08 -> CONTROL CHARACTER '\t' # 0x09 -> CONTROL CHARACTER '\n' # 0x0A -> CONTROL CHARACTER '\x0b' # 0x0B -> CONTROL CHARACTER '\x0c' # 0x0C -> CONTROL CHARACTER '\r' # 0x0D -> CONTROL CHARACTER '\x0e' # 0x0E -> CONTROL CHARACTER '\x0f' # 0x0F -> CONTROL CHARACTER '\x10' # 0x10 -> CONTROL CHARACTER '\x11' # 0x11 -> CONTROL CHARACTER '\x12' # 0x12 -> CONTROL CHARACTER '\x13' # 0x13 -> CONTROL CHARACTER '\x14' # 0x14 -> CONTROL CHARACTER '\x15' # 0x15 -> CONTROL CHARACTER '\x16' # 0x16 -> CONTROL CHARACTER '\x17' # 0x17 -> CONTROL CHARACTER '\x18' # 0x18 -> CONTROL CHARACTER '\x19' # 0x19 -> CONTROL CHARACTER '\x1a' # 0x1A -> CONTROL CHARACTER '\x1b' # 0x1B -> CONTROL CHARACTER '\x1c' # 0x1C -> CONTROL CHARACTER '\x1d' # 0x1D -> CONTROL CHARACTER '\x1e' # 0x1E -> CONTROL CHARACTER '\x1f' # 0x1F -> CONTROL CHARACTER ' ' # 0x20 -> SPACE '!' # 0x21 -> EXCLAMATION MARK '"' # 0x22 -> QUOTATION MARK '#' # 0x23 -> NUMBER SIGN '$' # 0x24 -> DOLLAR SIGN '%' # 0x25 -> PERCENT SIGN '&' # 0x26 -> AMPERSAND "'" # 0x27 -> APOSTROPHE '(' # 0x28 -> LEFT PARENTHESIS ')' # 0x29 -> RIGHT PARENTHESIS '*' # 0x2A -> ASTERISK '+' # 0x2B -> PLUS SIGN ',' # 0x2C -> COMMA '-' # 0x2D -> HYPHEN-MINUS '.' # 0x2E -> FULL STOP '/' # 0x2F -> SOLIDUS '0' # 0x30 -> DIGIT ZERO '1' # 0x31 -> DIGIT ONE '2' # 0x32 -> DIGIT TWO '3' # 0x33 -> DIGIT THREE '4' # 0x34 -> DIGIT FOUR '5' # 0x35 -> DIGIT FIVE '6' # 0x36 -> DIGIT SIX '7' # 0x37 -> DIGIT SEVEN '8' # 0x38 -> DIGIT EIGHT '9' # 0x39 -> DIGIT NINE ':' # 0x3A -> COLON ';' # 0x3B -> SEMICOLON '<' # 0x3C -> LESS-THAN SIGN '=' # 0x3D -> EQUALS SIGN '>' # 0x3E -> GREATER-THAN SIGN '?' 
# 0x3F -> QUESTION MARK '@' # 0x40 -> COMMERCIAL AT 'A' # 0x41 -> LATIN CAPITAL LETTER A 'B' # 0x42 -> LATIN CAPITAL LETTER B 'C' # 0x43 -> LATIN CAPITAL LETTER C 'D' # 0x44 -> LATIN CAPITAL LETTER D 'E' # 0x45 -> LATIN CAPITAL LETTER E 'F' # 0x46 -> LATIN CAPITAL LETTER F 'G' # 0x47 -> LATIN CAPITAL LETTER G 'H' # 0x48 -> LATIN CAPITAL LETTER H 'I' # 0x49 -> LATIN CAPITAL LETTER I 'J' # 0x4A -> LATIN CAPITAL LETTER J 'K' # 0x4B -> LATIN CAPITAL LETTER K 'L' # 0x4C -> LATIN CAPITAL LETTER L 'M' # 0x4D -> LATIN CAPITAL LETTER M 'N' # 0x4E -> LATIN CAPITAL LETTER N 'O' # 0x4F -> LATIN CAPITAL LETTER O 'P' # 0x50 -> LATIN CAPITAL LETTER P 'Q' # 0x51 -> LATIN CAPITAL LETTER Q 'R' # 0x52 -> LATIN CAPITAL LETTER R 'S' # 0x53 -> LATIN CAPITAL LETTER S 'T' # 0x54 -> LATIN CAPITAL LETTER T 'U' # 0x55 -> LATIN CAPITAL LETTER U 'V' # 0x56 -> LATIN CAPITAL LETTER V 'W' # 0x57 -> LATIN CAPITAL LETTER W 'X' # 0x58 -> LATIN CAPITAL LETTER X 'Y' # 0x59 -> LATIN CAPITAL LETTER Y 'Z' # 0x5A -> LATIN CAPITAL LETTER Z '[' # 0x5B -> LEFT SQUARE BRACKET '\\' # 0x5C -> REVERSE SOLIDUS ']' # 0x5D -> RIGHT SQUARE BRACKET '^' # 0x5E -> CIRCUMFLEX ACCENT '_' # 0x5F -> LOW LINE '`' # 0x60 -> GRAVE ACCENT 'a' # 0x61 -> LATIN SMALL LETTER A 'b' # 0x62 -> LATIN SMALL LETTER B 'c' # 0x63 -> LATIN SMALL LETTER C 'd' # 0x64 -> LATIN SMALL LETTER D 'e' # 0x65 -> LATIN SMALL LETTER E 'f' # 0x66 -> LATIN SMALL LETTER F 'g' # 0x67 -> LATIN SMALL LETTER G 'h' # 0x68 -> LATIN SMALL LETTER H 'i' # 0x69 -> LATIN SMALL LETTER I 'j' # 0x6A -> LATIN SMALL LETTER J 'k' # 0x6B -> LATIN SMALL LETTER K 'l' # 0x6C -> LATIN SMALL LETTER L 'm' # 0x6D -> LATIN SMALL LETTER M 'n' # 0x6E -> LATIN SMALL LETTER N 'o' # 0x6F -> LATIN SMALL LETTER O 'p' # 0x70 -> LATIN SMALL LETTER P 'q' # 0x71 -> LATIN SMALL LETTER Q 'r' # 0x72 -> LATIN SMALL LETTER R 's' # 0x73 -> LATIN SMALL LETTER S 't' # 0x74 -> LATIN SMALL LETTER T 'u' # 0x75 -> LATIN SMALL LETTER U 'v' # 0x76 -> LATIN SMALL LETTER V 'w' # 0x77 -> LATIN SMALL LETTER W 'x' # 0x78 -> LATIN SMALL LETTER X 'y' # 0x79 -> LATIN SMALL LETTER Y 'z' # 0x7A -> LATIN SMALL LETTER Z '{' # 0x7B -> LEFT CURLY BRACKET '|' # 0x7C -> VERTICAL LINE '}' # 0x7D -> RIGHT CURLY BRACKET '~' # 0x7E -> TILDE '\x7f' # 0x7F -> CONTROL CHARACTER '\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS '\u0100' # 0x81 -> LATIN CAPITAL LETTER A WITH MACRON '\u0101' # 0x82 -> LATIN SMALL LETTER A WITH MACRON '\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE '\u0104' # 0x84 -> LATIN CAPITAL LETTER A WITH OGONEK '\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS '\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS '\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE '\u0105' # 0x88 -> LATIN SMALL LETTER A WITH OGONEK '\u010c' # 0x89 -> LATIN CAPITAL LETTER C WITH CARON '\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS '\u010d' # 0x8B -> LATIN SMALL LETTER C WITH CARON '\u0106' # 0x8C -> LATIN CAPITAL LETTER C WITH ACUTE '\u0107' # 0x8D -> LATIN SMALL LETTER C WITH ACUTE '\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE '\u0179' # 0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE '\u017a' # 0x90 -> LATIN SMALL LETTER Z WITH ACUTE '\u010e' # 0x91 -> LATIN CAPITAL LETTER D WITH CARON '\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE '\u010f' # 0x93 -> LATIN SMALL LETTER D WITH CARON '\u0112' # 0x94 -> LATIN CAPITAL LETTER E WITH MACRON '\u0113' # 0x95 -> LATIN SMALL LETTER E WITH MACRON '\u0116' # 0x96 -> LATIN CAPITAL LETTER E WITH DOT ABOVE '\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE '\u0117' # 0x98 -> LATIN SMALL LETTER E 
WITH DOT ABOVE '\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX '\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS '\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE '\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE '\u011a' # 0x9D -> LATIN CAPITAL LETTER E WITH CARON '\u011b' # 0x9E -> LATIN SMALL LETTER E WITH CARON '\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS '\u2020' # 0xA0 -> DAGGER '\xb0' # 0xA1 -> DEGREE SIGN '\u0118' # 0xA2 -> LATIN CAPITAL LETTER E WITH OGONEK '\xa3' # 0xA3 -> POUND SIGN '\xa7' # 0xA4 -> SECTION SIGN '\u2022' # 0xA5 -> BULLET '\xb6' # 0xA6 -> PILCROW SIGN '\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S '\xae' # 0xA8 -> REGISTERED SIGN '\xa9' # 0xA9 -> COPYRIGHT SIGN '\u2122' # 0xAA -> TRADE MARK SIGN '\u0119' # 0xAB -> LATIN SMALL LETTER E WITH OGONEK '\xa8' # 0xAC -> DIAERESIS '\u2260' # 0xAD -> NOT EQUAL TO '\u0123' # 0xAE -> LATIN SMALL LETTER G WITH CEDILLA '\u012e' # 0xAF -> LATIN CAPITAL LETTER I WITH OGONEK '\u012f' # 0xB0 -> LATIN SMALL LETTER I WITH OGONEK '\u012a' # 0xB1 -> LATIN CAPITAL LETTER I WITH MACRON '\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO '\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO '\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON '\u0136' # 0xB5 -> LATIN CAPITAL LETTER K WITH CEDILLA '\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL '\u2211' # 0xB7 -> N-ARY SUMMATION '\u0142' # 0xB8 -> LATIN SMALL LETTER L WITH STROKE '\u013b' # 0xB9 -> LATIN CAPITAL LETTER L WITH CEDILLA '\u013c' # 0xBA -> LATIN SMALL LETTER L WITH CEDILLA '\u013d' # 0xBB -> LATIN CAPITAL LETTER L WITH CARON '\u013e' # 0xBC -> LATIN SMALL LETTER L WITH CARON '\u0139' # 0xBD -> LATIN CAPITAL LETTER L WITH ACUTE '\u013a' # 0xBE -> LATIN SMALL LETTER L WITH ACUTE '\u0145' # 0xBF -> LATIN CAPITAL LETTER N WITH CEDILLA '\u0146' # 0xC0 -> LATIN SMALL LETTER N WITH CEDILLA '\u0143' # 0xC1 -> LATIN CAPITAL LETTER N WITH ACUTE '\xac' # 0xC2 -> NOT SIGN '\u221a' # 0xC3 -> SQUARE ROOT '\u0144' # 0xC4 -> LATIN SMALL LETTER N WITH ACUTE '\u0147' # 0xC5 -> LATIN CAPITAL LETTER N WITH CARON '\u2206' # 0xC6 -> INCREMENT '\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK '\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK '\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS '\xa0' # 0xCA -> NO-BREAK SPACE '\u0148' # 0xCB -> LATIN SMALL LETTER N WITH CARON '\u0150' # 0xCC -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE '\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE '\u0151' # 0xCE -> LATIN SMALL LETTER O WITH DOUBLE ACUTE '\u014c' # 0xCF -> LATIN CAPITAL LETTER O WITH MACRON '\u2013' # 0xD0 -> EN DASH '\u2014' # 0xD1 -> EM DASH '\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK '\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK '\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK '\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK '\xf7' # 0xD6 -> DIVISION SIGN '\u25ca' # 0xD7 -> LOZENGE '\u014d' # 0xD8 -> LATIN SMALL LETTER O WITH MACRON '\u0154' # 0xD9 -> LATIN CAPITAL LETTER R WITH ACUTE '\u0155' # 0xDA -> LATIN SMALL LETTER R WITH ACUTE '\u0158' # 0xDB -> LATIN CAPITAL LETTER R WITH CARON '\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK '\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK '\u0159' # 0xDE -> LATIN SMALL LETTER R WITH CARON '\u0156' # 0xDF -> LATIN CAPITAL LETTER R WITH CEDILLA '\u0157' # 0xE0 -> LATIN SMALL LETTER R WITH CEDILLA '\u0160' # 0xE1 -> LATIN CAPITAL LETTER S WITH CARON '\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK '\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK '\u0161' # 0xE4 -> LATIN SMALL LETTER S WITH CARON '\u015a' # 0xE5 -> LATIN CAPITAL LETTER S WITH 
ACUTE '\u015b' # 0xE6 -> LATIN SMALL LETTER S WITH ACUTE '\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE '\u0164' # 0xE8 -> LATIN CAPITAL LETTER T WITH CARON '\u0165' # 0xE9 -> LATIN SMALL LETTER T WITH CARON '\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE '\u017d' # 0xEB -> LATIN CAPITAL LETTER Z WITH CARON '\u017e' # 0xEC -> LATIN SMALL LETTER Z WITH CARON '\u016a' # 0xED -> LATIN CAPITAL LETTER U WITH MACRON '\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE '\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX '\u016b' # 0xF0 -> LATIN SMALL LETTER U WITH MACRON '\u016e' # 0xF1 -> LATIN CAPITAL LETTER U WITH RING ABOVE '\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE '\u016f' # 0xF3 -> LATIN SMALL LETTER U WITH RING ABOVE '\u0170' # 0xF4 -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE '\u0171' # 0xF5 -> LATIN SMALL LETTER U WITH DOUBLE ACUTE '\u0172' # 0xF6 -> LATIN CAPITAL LETTER U WITH OGONEK '\u0173' # 0xF7 -> LATIN SMALL LETTER U WITH OGONEK '\xdd' # 0xF8 -> LATIN CAPITAL LETTER Y WITH ACUTE '\xfd' # 0xF9 -> LATIN SMALL LETTER Y WITH ACUTE '\u0137' # 0xFA -> LATIN SMALL LETTER K WITH CEDILLA '\u017b' # 0xFB -> LATIN CAPITAL LETTER Z WITH DOT ABOVE '\u0141' # 0xFC -> LATIN CAPITAL LETTER L WITH STROKE '\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE '\u0122' # 0xFE -> LATIN CAPITAL LETTER G WITH CEDILLA '\u02c7' # 0xFF -> CARON ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
gpl-2.0
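The codec above is almost entirely data: a 256-entry decoding table plus its inverse built by codecs.charmap_build. A small sketch of how such tables are consumed (Python 3), using a toy table instead of the full CENTEURO mapping:

# Toy charmap table: bytes 0x00-0x7F map to ASCII, 0x80 -> 'Ä' (as in CENTEURO);
# '\ufffe' marks undefined positions, the convention gencodec.py uses.
import codecs

decoding_table = ''.join(chr(i) for i in range(0x80)) + '\xc4' + '\ufffe' * 127
encoding_table = codecs.charmap_build(decoding_table)

text, consumed = codecs.charmap_decode(b'A\x80', 'strict', decoding_table)
print(text)  # 'AÄ'
raw, _ = codecs.charmap_encode('AÄ', 'strict', encoding_table)
print(raw)   # b'A\x80'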
lcostantino/healing-os
healing/openstack/common/log.py
1
26305
# Copyright 2011 OpenStack Foundation. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """OpenStack logging handler. This module adds to logging functionality by adding the option to specify a context object when calling the various log methods. If the context object is not specified, default formatting is used. Additionally, an instance uuid may be passed as part of the log message, which is intended to make it easier for admins to find messages related to a specific instance. It also allows setting of formatting information through conf. """ import inspect import itertools import logging import logging.config import logging.handlers import os import re import sys import traceback from oslo.config import cfg import six from six import moves from healing.openstack.common.gettextutils import _ from healing.openstack.common import importutils from healing.openstack.common import jsonutils from healing.openstack.common import local _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" _SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] # NOTE(ldbragst): Let's build a list of regex objects using the list of # _SANITIZE_KEYS we already have. This way, we only have to add the new key # to the list of _SANITIZE_KEYS and we can generate regular expressions # for XML and JSON automatically. _SANITIZE_PATTERNS = [] _FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', r'(<%(key)s>).*?(</%(key)s>)', r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])', r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])' '.*?([\'"])', r'(%(key)s\s*--?[A-z]+\s*).*?([\s])'] for key in _SANITIZE_KEYS: for pattern in _FORMAT_PATTERNS: reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) _SANITIZE_PATTERNS.append(reg_ex) common_cli_opts = [ cfg.BoolOpt('debug', short='d', default=False, help='Print debugging output (set logging level to ' 'DEBUG instead of default WARNING level).'), cfg.BoolOpt('verbose', short='v', default=False, help='Print more verbose output (set logging level to ' 'INFO instead of default WARNING level).'), ] logging_cli_opts = [ cfg.StrOpt('log-config-append', metavar='PATH', deprecated_name='log-config', help='The name of a logging configuration file. This file ' 'is appended to any existing logging configuration ' 'files. For details about logging configuration files, ' 'see the Python logging module documentation.'), cfg.StrOpt('log-format', default=None, metavar='FORMAT', help='DEPRECATED. ' 'A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' 'This option is deprecated. Please use ' 'logging_context_format_string and ' 'logging_default_format_string instead.'), cfg.StrOpt('log-date-format', default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', help='Format string for %%(asctime)s in log records. 
' 'Default: %(default)s .'), cfg.StrOpt('log-file', metavar='PATH', deprecated_name='logfile', help='(Optional) Name of log file to output to. ' 'If no default is set, logging will go to stdout.'), cfg.StrOpt('log-dir', deprecated_name='logdir', help='(Optional) The base directory used for relative ' '--log-file paths.'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging. ' 'Existing syslog format is DEPRECATED during I, ' 'and will change in J to honor RFC5424.'), cfg.BoolOpt('use-syslog-rfc-format', # TODO(bogdando) remove or use True after existing # syslog format deprecation in J default=False, help='(Optional) Enables or disables syslog rfc5424 format ' 'for logging. If enabled, prefixes the MSG part of the ' 'syslog message with APP-NAME (RFC5424). The ' 'format without the APP-NAME is deprecated in I, ' 'and will be removed in J.'), cfg.StrOpt('syslog-log-facility', default='LOG_USER', help='Syslog facility to receive log lines.') ] generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, help='Log output to standard error.') ] log_opts = [ cfg.StrOpt('logging_context_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [%(request_id)s %(user_identity)s] ' '%(instance)s%(message)s', help='Format string to use for log messages with context.'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [-] %(instance)s%(message)s', help='Format string to use for log messages without context.'), cfg.StrOpt('logging_debug_format_suffix', default='%(funcName)s %(pathname)s:%(lineno)d', help='Data to append to log format when level is DEBUG.'), cfg.StrOpt('logging_exception_prefix', default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' '%(instance)s', help='Prefix each line of exception output with this format.'), cfg.ListOpt('default_log_levels', default=[ 'amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN' ], help='List of logger=LEVEL pairs.'), cfg.BoolOpt('publish_errors', default=False, help='Enables or disables publication of error events.'), cfg.BoolOpt('fatal_deprecations', default=False, help='Enables or disables fatal status of deprecations.'), # NOTE(mikal): there are two options here because sometimes we are handed # a full instance (and could include more information), and other times we # are just handed a UUID for the instance. cfg.StrOpt('instance_format', default='[instance: %(uuid)s] ', help='The format for an instance that is passed with the log ' 'message. '), cfg.StrOpt('instance_uuid_format', default='[instance: %(uuid)s] ', help='The format for an instance UUID that is passed with the ' 'log message. '), ] CONF = cfg.CONF CONF.register_cli_opts(common_cli_opts) CONF.register_cli_opts(logging_cli_opts) CONF.register_opts(generic_log_opts) CONF.register_opts(log_opts) # our new audit level # NOTE(jkoelker) Since we synthesized an audit level, make the logging # module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1 logging.addLevelName(logging.AUDIT, 'AUDIT') try: NullHandler = logging.NullHandler except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None def _dictify_context(context): if context is None: return None if not isinstance(context, dict) and getattr(context, 'to_dict', None): context = context.to_dict() return context def _get_binary_name(): return os.path.basename(inspect.stack()[-1][1]) def _get_log_file_path(binary=None): logfile = CONF.log_file logdir = CONF.log_dir if logfile and not logdir: return logfile if logfile and logdir: return os.path.join(logdir, logfile) if logdir: binary = binary or _get_binary_name() return '%s.log' % (os.path.join(logdir, binary),) return None def mask_password(message, secret="***"): """Replace password with 'secret' in message. :param message: The string which includes security information. :param secret: value with which to replace passwords. :returns: The unicode value of message with the password fields masked. For example: >>> mask_password("'adminPass' : 'aaaaa'") "'adminPass' : '***'" >>> mask_password("'admin_pass' : 'aaaaa'") "'admin_pass' : '***'" >>> mask_password('"password" : "aaaaa"') '"password" : "***"' >>> mask_password("'original_password' : 'aaaaa'") "'original_password' : '***'" >>> mask_password("u'original_password' : u'aaaaa'") "u'original_password' : u'***'" """ message = six.text_type(message) # NOTE(ldbragst): Check to see if anything in message contains any key # specified in _SANITIZE_KEYS, if not then just return the message since # we don't have to mask any passwords. if not any(key in message for key in _SANITIZE_KEYS): return message secret = r'\g<1>' + secret + r'\g<2>' for pattern in _SANITIZE_PATTERNS: message = re.sub(pattern, secret, message) return message class BaseLoggerAdapter(logging.LoggerAdapter): def audit(self, msg, *args, **kwargs): self.log(logging.AUDIT, msg, *args, **kwargs) class LazyAdapter(BaseLoggerAdapter): def __init__(self, name='unknown', version='unknown'): self._logger = None self.extra = {} self.name = name self.version = version @property def logger(self): if not self._logger: self._logger = getLogger(self.name, self.version) return self._logger class ContextAdapter(BaseLoggerAdapter): warn = logging.LoggerAdapter.warning def __init__(self, logger, project_name, version_string): self.logger = logger self.project = project_name self.version = version_string self._deprecated_messages_sent = dict() @property def handlers(self): return self.logger.handlers def deprecated(self, msg, *args, **kwargs): """Call this method when a deprecated feature is used. If the system is configured for fatal deprecations then the message is logged at the 'critical' level and :class:`DeprecatedConfig` will be raised. Otherwise, the message will be logged (once) at the 'warn' level. :raises: :class:`DeprecatedConfig` if the system is configured for fatal deprecations. """ stdmsg = _("Deprecated: %s") % msg if CONF.fatal_deprecations: self.critical(stdmsg, *args, **kwargs) raise DeprecatedConfig(msg=stdmsg) # Using a list because a tuple with dict can't be stored in a set. sent_args = self._deprecated_messages_sent.setdefault(msg, list()) if args in sent_args: # Already logged this message, so don't log it again. 
return sent_args.append(args) self.warn(stdmsg, *args, **kwargs) def process(self, msg, kwargs): # NOTE(mrodden): catch any Message/other object and # coerce to unicode before they can get # to the python logging and possibly # cause string encoding trouble if not isinstance(msg, six.string_types): msg = six.text_type(msg) if 'extra' not in kwargs: kwargs['extra'] = {} extra = kwargs['extra'] context = kwargs.pop('context', None) if not context: context = getattr(local.store, 'context', None) if context: extra.update(_dictify_context(context)) instance = kwargs.pop('instance', None) instance_uuid = (extra.get('instance_uuid') or kwargs.pop('instance_uuid', None)) instance_extra = '' if instance: instance_extra = CONF.instance_format % instance elif instance_uuid: instance_extra = (CONF.instance_uuid_format % {'uuid': instance_uuid}) extra['instance'] = instance_extra extra.setdefault('user_identity', kwargs.pop('user_identity', None)) extra['project'] = self.project extra['version'] = self.version extra['extra'] = extra.copy() return msg, kwargs class JSONFormatter(logging.Formatter): def __init__(self, fmt=None, datefmt=None): # NOTE(jkoelker) we ignore the fmt argument, but its still there # since logging.config.fileConfig passes it. self.datefmt = datefmt def formatException(self, ei, strip_newlines=True): lines = traceback.format_exception(*ei) if strip_newlines: lines = [moves.filter( lambda x: x, line.rstrip().splitlines()) for line in lines] lines = list(itertools.chain(*lines)) return lines def format(self, record): message = {'message': record.getMessage(), 'asctime': self.formatTime(record, self.datefmt), 'name': record.name, 'msg': record.msg, 'args': record.args, 'levelname': record.levelname, 'levelno': record.levelno, 'pathname': record.pathname, 'filename': record.filename, 'module': record.module, 'lineno': record.lineno, 'funcname': record.funcName, 'created': record.created, 'msecs': record.msecs, 'relative_created': record.relativeCreated, 'thread': record.thread, 'thread_name': record.threadName, 'process_name': record.processName, 'process': record.process, 'traceback': None} if hasattr(record, 'extra'): message['extra'] = record.extra if record.exc_info: message['traceback'] = self.formatException(record.exc_info) return jsonutils.dumps(message) def _create_logging_excepthook(product_name): def logging_excepthook(exc_type, value, tb): extra = {} if CONF.verbose or CONF.debug: extra['exc_info'] = (exc_type, value, tb) getLogger(product_name).critical( "".join(traceback.format_exception_only(exc_type, value)), **extra) return logging_excepthook class LogConfigError(Exception): message = _('Error loading logging config %(log_config)s: %(err_msg)s') def __init__(self, log_config, err_msg): self.log_config = log_config self.err_msg = err_msg def __str__(self): return self.message % dict(log_config=self.log_config, err_msg=self.err_msg) def _load_log_config(log_config_append): try: logging.config.fileConfig(log_config_append, disable_existing_loggers=False) except moves.configparser.Error as exc: raise LogConfigError(log_config_append, six.text_type(exc)) def setup(product_name, version='unknown'): """Setup logging.""" if CONF.log_config_append: _load_log_config(CONF.log_config_append) else: _setup_logging_from_conf(product_name, version) sys.excepthook = _create_logging_excepthook(product_name) def set_defaults(logging_context_format_string): cfg.set_defaults(log_opts, logging_context_format_string= logging_context_format_string) def _find_facility_from_conf(): 
facility_names = logging.handlers.SysLogHandler.facility_names facility = getattr(logging.handlers.SysLogHandler, CONF.syslog_log_facility, None) if facility is None and CONF.syslog_log_facility in facility_names: facility = facility_names.get(CONF.syslog_log_facility) if facility is None: valid_facilities = facility_names.keys() consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] valid_facilities.extend(consts) raise TypeError(_('syslog facility must be one of: %s') % ', '.join("'%s'" % fac for fac in valid_facilities)) return facility class RFCSysLogHandler(logging.handlers.SysLogHandler): def __init__(self, *args, **kwargs): self.binary_name = _get_binary_name() # Do not use super() unless type(logging.handlers.SysLogHandler) # is 'type' (Python 2.7). # Use old style calls, if the type is 'classobj' (Python 2.6) logging.handlers.SysLogHandler.__init__(self, *args, **kwargs) def format(self, record): # Do not use super() unless type(logging.handlers.SysLogHandler) # is 'type' (Python 2.7). # Use old style calls, if the type is 'classobj' (Python 2.6) msg = logging.handlers.SysLogHandler.format(self, record) msg = self.binary_name + ' ' + msg return msg def _setup_logging_from_conf(project, version): log_root = getLogger(None).logger for handler in log_root.handlers: log_root.removeHandler(handler) if CONF.use_syslog: facility = _find_facility_from_conf() # TODO(bogdando) use the format provided by RFCSysLogHandler # after existing syslog format deprecation in J if CONF.use_syslog_rfc_format: syslog = RFCSysLogHandler(address='/dev/log', facility=facility) else: syslog = logging.handlers.SysLogHandler(address='/dev/log', facility=facility) log_root.addHandler(syslog) logpath = _get_log_file_path() if logpath: filelog = logging.handlers.WatchedFileHandler(logpath) log_root.addHandler(filelog) if CONF.use_stderr: streamlog = ColorHandler() log_root.addHandler(streamlog) elif not logpath: # pass sys.stdout as a positional argument # python2.6 calls the argument strm, in 2.7 it's stream streamlog = logging.StreamHandler(sys.stdout) log_root.addHandler(streamlog) if CONF.publish_errors: handler = importutils.import_object( "healing.openstack.common.log_handler.PublishErrorsHandler", logging.ERROR) log_root.addHandler(handler) datefmt = CONF.log_date_format for handler in log_root.handlers: # NOTE(alaski): CONF.log_format overrides everything currently. This # should be deprecated in favor of context aware formatting. 
if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) log_root.info('Deprecated: log_format is now deprecated and will ' 'be removed in the next release') else: handler.setFormatter(ContextFormatter(project=project, version=version, datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) elif CONF.verbose: log_root.setLevel(logging.INFO) else: log_root.setLevel(logging.WARNING) for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') level = logging.getLevelName(level_name) logger = logging.getLogger(mod) logger.setLevel(level) _loggers = {} def getLogger(name='unknown', version='unknown'): if name not in _loggers: _loggers[name] = ContextAdapter(logging.getLogger(name), name, version) return _loggers[name] def getLazyLogger(name='unknown', version='unknown'): """Returns lazy logger. Creates a pass-through logger that does not create the real logger until it is really needed and delegates all calls to the real logger once it is created. """ return LazyAdapter(name, version) class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" def __init__(self, logger, level=logging.INFO): self.logger = logger self.level = level def write(self, msg): self.logger.log(self.level, msg.rstrip()) class ContextFormatter(logging.Formatter): """A context.RequestContext aware formatter configured through flags. The flags used to set format strings are: logging_context_format_string and logging_default_format_string. You can also specify logging_debug_format_suffix to append extra formatting if the log level is debug. For information about what variables are available for the formatter see: http://docs.python.org/library/logging.html#formatter If available, uses the context value stored in TLS - local.store.context """ def __init__(self, *args, **kwargs): """Initialize ContextFormatter instance Takes additional keyword arguments which can be used in the message format string. 
:keyword project: project name :type project: string :keyword version: project version :type version: string """ self.project = kwargs.pop('project', 'unknown') self.version = kwargs.pop('version', 'unknown') logging.Formatter.__init__(self, *args, **kwargs) def format(self, record): """Uses contextstring if request_id is set, otherwise default.""" # store project info record.project = self.project record.version = self.version # store request info context = getattr(local.store, 'context', None) if context: d = _dictify_context(context) for k, v in d.items(): setattr(record, k, v) # NOTE(sdague): default the fancier formatting params # to an empty string so we don't throw an exception if # they get used for key in ('instance', 'color', 'user_identity'): if key not in record.__dict__: record.__dict__[key] = '' if record.__dict__.get('request_id'): self._fmt = CONF.logging_context_format_string else: self._fmt = CONF.logging_default_format_string if (record.levelno == logging.DEBUG and CONF.logging_debug_format_suffix): self._fmt += " " + CONF.logging_debug_format_suffix # Cache this on the record, Logger will respect our formatted copy if record.exc_info: record.exc_text = self.formatException(record.exc_info, record) return logging.Formatter.format(self, record) def formatException(self, exc_info, record=None): """Format exception output with CONF.logging_exception_prefix.""" if not record: return logging.Formatter.formatException(self, exc_info) stringbuffer = moves.StringIO() traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, stringbuffer) lines = stringbuffer.getvalue().split('\n') stringbuffer.close() if CONF.logging_exception_prefix.find('%(asctime)') != -1: record.asctime = self.formatTime(record, self.datefmt) formatted_lines = [] for line in lines: pl = CONF.logging_exception_prefix % record.__dict__ fl = '%s%s' % (pl, line) formatted_lines.append(fl) return '\n'.join(formatted_lines) class ColorHandler(logging.StreamHandler): LEVEL_COLORS = { logging.DEBUG: '\033[00;32m', # GREEN logging.INFO: '\033[00;36m', # CYAN logging.AUDIT: '\033[01;36m', # BOLD CYAN logging.WARN: '\033[01;33m', # BOLD YELLOW logging.ERROR: '\033[01;31m', # BOLD RED logging.CRITICAL: '\033[01;31m', # BOLD RED } def format(self, record): record.color = self.LEVEL_COLORS[record.levelno] return logging.StreamHandler.format(self, record) class DeprecatedConfig(Exception): message = _("Fatal call to deprecated config: %(msg)s") def __init__(self, msg): super(Exception, self).__init__(self.message % dict(msg=msg))
apache-2.0
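mask_password above hinges on precompiled per-key regexes whose two capture groups preserve the delimiters around the secret. A self-contained sketch of that technique, using a subset of the patterns from the module:

# Build one compiled regex per sensitive key; \g<1>/\g<2> keep the delimiters.
import re

_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password']
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
                    r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])']
_PATTERNS = [re.compile(p % {'key': k}, re.DOTALL)
             for k in _SANITIZE_KEYS for p in _FORMAT_PATTERNS]

def mask_password(message, secret='***'):
    # Cheap containment test first, exactly as in the module above.
    if not any(key in message for key in _SANITIZE_KEYS):
        return message
    repl = r'\g<1>' + secret + r'\g<2>'
    for pattern in _PATTERNS:
        message = pattern.sub(repl, message)
    return message

print(mask_password("'adminPass' : 'aaaaa'"))  # 'adminPass' : '***'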
abhattad4/Digi-Menu
digimenu2/django/utils/html.py
36
15061
"""HTML utilities suitable for global use.""" from __future__ import unicode_literals import re import sys import warnings from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.encoding import force_str, force_text from django.utils.functional import allow_lazy from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS from django.utils.safestring import SafeData, SafeText, mark_safe from django.utils.six.moves.urllib.parse import ( parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit, ) from django.utils.text import normalize_newlines from .html_parser import HTMLParseError, HTMLParser # Configuration for urlize() function. TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)', '"', '\'', '!'] WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('&lt;', '&gt;'), ('"', '"'), ('\'', '\'')] # List of possible strings used for bullets in bulleted lists. DOTS = ['&middot;', '*', '\u2022', '&#149;', '&bull;', '&#8226;'] unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)') word_split_re = re.compile(r'(\s+)') simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE) simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$', re.IGNORECASE) simple_email_re = re.compile(r'^\S+@\S+\.\S+$') link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+') html_gunk_re = re.compile( r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|' '<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE) hard_coded_bullets_re = re.compile( r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join(re.escape(x) for x in DOTS), re.DOTALL) trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z') def escape(text): """ Returns the given text with ampersands, quotes and angle brackets encoded for use in HTML. This function always escapes its input, even if it's already escaped and marked as such. This may result in double-escaping. If this is a concern, use conditional_escape() instead. """ return mark_safe(force_text(text).replace('&', '&amp;').replace('<', '&lt;') .replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;')) escape = allow_lazy(escape, six.text_type, SafeText) _js_escapes = { ord('\\'): '\\u005C', ord('\''): '\\u0027', ord('"'): '\\u0022', ord('>'): '\\u003E', ord('<'): '\\u003C', ord('&'): '\\u0026', ord('='): '\\u003D', ord('-'): '\\u002D', ord(';'): '\\u003B', ord('\u2028'): '\\u2028', ord('\u2029'): '\\u2029' } # Escape every ASCII character with a value less than 32. _js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32)) def escapejs(value): """Hex encodes characters for use in JavaScript strings.""" return mark_safe(force_text(value).translate(_js_escapes)) escapejs = allow_lazy(escapejs, six.text_type, SafeText) def conditional_escape(text): """ Similar to escape(), except that it doesn't operate on pre-escaped strings. This function relies on the __html__ convention used both by Django's SafeData class and by third-party libraries like markupsafe. """ if hasattr(text, '__html__'): return text.__html__() else: return escape(text) def format_html(format_string, *args, **kwargs): """ Similar to str.format, but passes all arguments through conditional_escape, and calls 'mark_safe' on the result. This function should be used instead of str.format or % interpolation to build up small HTML fragments. 
""" args_safe = map(conditional_escape, args) kwargs_safe = {k: conditional_escape(v) for (k, v) in six.iteritems(kwargs)} return mark_safe(format_string.format(*args_safe, **kwargs_safe)) def format_html_join(sep, format_string, args_generator): """ A wrapper of format_html, for the common case of a group of arguments that need to be formatted using the same format string, and then joined using 'sep'. 'sep' is also passed through conditional_escape. 'args_generator' should be an iterator that returns the sequence of 'args' that will be passed to format_html. Example: format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name) for u in users)) """ return mark_safe(conditional_escape(sep).join( format_html(format_string, *tuple(args)) for args in args_generator)) def linebreaks(value, autoescape=False): """Converts newlines into <p> and <br />s.""" value = normalize_newlines(value) paras = re.split('\n{2,}', value) if autoescape: paras = ['<p>%s</p>' % escape(p).replace('\n', '<br />') for p in paras] else: paras = ['<p>%s</p>' % p.replace('\n', '<br />') for p in paras] return '\n\n'.join(paras) linebreaks = allow_lazy(linebreaks, six.text_type) class MLStripper(HTMLParser): def __init__(self): # The strict parameter was added in Python 3.2 with a default of True. # The default changed to False in Python 3.3 and was deprecated. if sys.version_info[:2] == (3, 2): HTMLParser.__init__(self, strict=False) else: HTMLParser.__init__(self) self.reset() self.fed = [] def handle_data(self, d): self.fed.append(d) def handle_entityref(self, name): self.fed.append('&%s;' % name) def handle_charref(self, name): self.fed.append('&#%s;' % name) def get_data(self): return ''.join(self.fed) def _strip_once(value): """ Internal tag stripping utility used by strip_tags. """ s = MLStripper() try: s.feed(value) except HTMLParseError: return value try: s.close() except (HTMLParseError, UnboundLocalError): # UnboundLocalError because of http://bugs.python.org/issue17802 # on Python 3.2, triggered by strict=False mode of HTMLParser return s.get_data() + s.rawdata else: return s.get_data() def strip_tags(value): """Returns the given HTML with all tags stripped.""" # Note: in typical case this loop executes _strip_once once. Loop condition # is redundant, but helps to reduce number of executions of _strip_once. while '<' in value and '>' in value: new_value = _strip_once(value) if len(new_value) >= len(value): # _strip_once was not able to detect more tags or length increased # due to http://bugs.python.org/issue20288 # (affects Python 2 < 2.7.7 and Python 3 < 3.3.5) break value = new_value return value strip_tags = allow_lazy(strip_tags) def remove_tags(html, tags): """Returns the given HTML with given tags removed.""" warnings.warn( "django.utils.html.remove_tags() and the removetags template filter " "are deprecated. 
Consider using the bleach library instead.", RemovedInDjango20Warning, stacklevel=3 ) tags = [re.escape(tag) for tag in tags.split()] tags_re = '(%s)' % '|'.join(tags) starttag_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % tags_re, re.U) endtag_re = re.compile('</%s>' % tags_re) html = starttag_re.sub('', html) html = endtag_re.sub('', html) return html remove_tags = allow_lazy(remove_tags, six.text_type) def strip_spaces_between_tags(value): """Returns the given HTML with spaces between tags removed.""" return re.sub(r'>\s+<', '><', force_text(value)) strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, six.text_type) def strip_entities(value): """Returns the given HTML with all entities (&something;) stripped.""" warnings.warn( "django.utils.html.strip_entities() is deprecated.", RemovedInDjango20Warning, stacklevel=2 ) return re.sub(r'&(?:\w+|#\d+);', '', force_text(value)) strip_entities = allow_lazy(strip_entities, six.text_type) def smart_urlquote(url): "Quotes a URL if it isn't already quoted." def unquote_quote(segment): segment = unquote(force_str(segment)) # Tilde is part of RFC3986 Unreserved Characters # http://tools.ietf.org/html/rfc3986#section-2.3 # See also http://bugs.python.org/issue16285 segment = quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + str('~')) return force_text(segment) # Handle IDN before quoting. try: scheme, netloc, path, query, fragment = urlsplit(url) except ValueError: # invalid IPv6 URL (normally square brackets in hostname part). return unquote_quote(url) try: netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE except UnicodeError: # invalid domain part return unquote_quote(url) if query: # Separately unquoting key/value, so as to not mix querystring separators # included in query values. See #22267. query_parts = [(unquote(force_str(q[0])), unquote(force_str(q[1]))) for q in parse_qsl(query, keep_blank_values=True)] # urlencode will take care of quoting query = urlencode(query_parts) path = unquote_quote(path) fragment = unquote_quote(fragment) return urlunsplit((scheme, netloc, path, query, fragment)) def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): """ Converts any URLs in text into clickable links. Works on http://, https://, www. links, and also on links ending in one of the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org). Links can have trailing punctuation (periods, commas, close-parens) and leading punctuation (opening parens) and it'll still do the right thing. If trim_url_limit is not None, the URLs in the link text longer than this limit will be truncated to trim_url_limit-3 characters and appended with an ellipsis. If nofollow is True, the links will get a rel="nofollow" attribute. If autoescape is True, the link text and URLs will be autoescaped. """ safe_input = isinstance(text, SafeData) def trim_url(x, limit=trim_url_limit): if limit is None or len(x) <= limit: return x return '%s...' % x[:max(0, limit - 3)] def unescape(text, trail): """ If input URL is HTML-escaped, unescape it so as we can safely feed it to smart_urlquote. 
For example: http://example.com?x=1&amp;y=&lt;2&gt; => http://example.com?x=1&y=<2> """ unescaped = (text + trail).replace( '&amp;', '&').replace('&lt;', '<').replace( '&gt;', '>').replace('&quot;', '"').replace('&#39;', "'") if trail and unescaped.endswith(trail): # Remove trail for unescaped if it was not consumed by unescape unescaped = unescaped[:-len(trail)] elif trail == ';': # Trail was consumed by unescape (as end-of-entity marker), move it to text text += trail trail = '' return text, unescaped, trail words = word_split_re.split(force_text(text)) for i, word in enumerate(words): if '.' in word or '@' in word or ':' in word: # Deal with punctuation. lead, middle, trail = '', word, '' for punctuation in TRAILING_PUNCTUATION: if middle.endswith(punctuation): middle = middle[:-len(punctuation)] trail = punctuation + trail for opening, closing in WRAPPING_PUNCTUATION: if middle.startswith(opening): middle = middle[len(opening):] lead = lead + opening # Keep parentheses at the end only if they're balanced. if (middle.endswith(closing) and middle.count(closing) == middle.count(opening) + 1): middle = middle[:-len(closing)] trail = closing + trail # Make URL we want to point to. url = None nofollow_attr = ' rel="nofollow"' if nofollow else '' if simple_url_re.match(middle): middle, middle_unescaped, trail = unescape(middle, trail) url = smart_urlquote(middle_unescaped) elif simple_url_2_re.match(middle): middle, middle_unescaped, trail = unescape(middle, trail) url = smart_urlquote('http://%s' % middle_unescaped) elif ':' not in middle and simple_email_re.match(middle): local, domain = middle.rsplit('@', 1) try: domain = domain.encode('idna').decode('ascii') except UnicodeError: continue url = 'mailto:%s@%s' % (local, domain) nofollow_attr = '' # Make link. if url: trimmed = trim_url(middle) if autoescape and not safe_input: lead, trail = escape(lead), escape(trail) trimmed = escape(trimmed) middle = '<a href="%s"%s>%s</a>' % (escape(url), nofollow_attr, trimmed) words[i] = mark_safe('%s%s%s' % (lead, middle, trail)) else: if safe_input: words[i] = mark_safe(word) elif autoescape: words[i] = escape(word) elif safe_input: words[i] = mark_safe(word) elif autoescape: words[i] = escape(word) return ''.join(words) urlize = allow_lazy(urlize, six.text_type) def avoid_wrapping(value): """ Avoid text wrapping in the middle of a phrase by adding non-breaking spaces where there previously were normal spaces. """ return value.replace(" ", "\xa0") def html_safe(klass): """ A decorator that defines the __html__ method. This helps non-Django templates to detect classes whose __str__ methods return SafeText. """ if '__html__' in klass.__dict__: raise ValueError( "can't apply @html_safe to %s because it defines " "__html__()." % klass.__name__ ) if six.PY2: if '__unicode__' not in klass.__dict__: raise ValueError( "can't apply @html_safe to %s because it doesn't " "define __unicode__()." % klass.__name__ ) klass_unicode = klass.__unicode__ klass.__unicode__ = lambda self: mark_safe(klass_unicode(self)) klass.__html__ = lambda self: unicode(self) else: if '__str__' not in klass.__dict__: raise ValueError( "can't apply @html_safe to %s because it doesn't " "define __str__()." % klass.__name__ ) klass_str = klass.__str__ klass.__str__ = lambda self: mark_safe(klass_str(self)) klass.__html__ = lambda self: str(self) return klass
bsd-3-clause
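Several utilities above rely on the __html__ convention: conditional_escape trusts any object exposing __html__ and escapes everything else. A dependency-free sketch of that contract:

# Objects with __html__ are treated as pre-escaped; plain strings get escaped.
class SafeString(str):
    def __html__(self):
        return self

def escape(text):
    return (str(text).replace('&', '&amp;').replace('<', '&lt;')
            .replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))

def conditional_escape(text):
    return text.__html__() if hasattr(text, '__html__') else escape(text)

print(escape('<b>"hi"</b>'))                  # &lt;b&gt;&quot;hi&quot;&lt;/b&gt;
print(conditional_escape(SafeString('<b>')))  # <b>  (already marked safe)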
OpenAcademy-OpenStack/nova-scheduler
nova/tests/utils.py
1
6545
# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import errno import platform import socket import sys from oslo.config import cfg from nova.compute import flavors import nova.context import nova.db from nova import exception from nova.image import glance from nova.network import minidns from nova.network import model as network_model from nova.objects import instance as instance_obj CONF = cfg.CONF CONF.import_opt('use_ipv6', 'nova.netconf') def get_test_admin_context(): return nova.context.get_admin_context() def get_test_image_info(context, instance_ref): if not context: context = get_test_admin_context() image_ref = instance_ref['image_ref'] image_service, image_id = glance.get_remote_image_service(context, image_ref) return image_service.show(context, image_id) def get_test_flavor(context=None, options=None): options = options or {} if not context: context = get_test_admin_context() test_flavor = {'name': 'kinda.big', 'flavorid': 'someid', 'memory_mb': 2048, 'vcpus': 4, 'root_gb': 40, 'ephemeral_gb': 80, 'swap': 1024} test_flavor.update(options) try: flavor_ref = nova.db.flavor_create(context, test_flavor) except (exception.FlavorExists, exception.FlavorIdExists): flavor_ref = nova.db.flavor_get_by_name(context, 'kinda.big') return flavor_ref def get_test_instance(context=None, flavor=None, obj=False): if not context: context = get_test_admin_context() if not flavor: flavor = get_test_flavor(context) metadata = {} flavors.save_flavor_info(metadata, flavor, '') test_instance = {'memory_kb': '2048000', 'basepath': '/some/path', 'bridge_name': 'br100', 'vcpus': 4, 'root_gb': 40, 'project_id': 'fake', 'bridge': 'br101', 'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175', 'instance_type_id': '5', 'system_metadata': metadata, 'extra_specs': {}} if obj: instance = instance_obj.Instance(context, **test_instance) instance.create() else: instance = nova.db.instance_create(context, test_instance) return instance def get_test_network_info(count=1): ipv6 = CONF.use_ipv6 fake = 'fake' fake_ip = '0.0.0.0' fake_netmask = '255.255.255.255' fake_vlan = 100 fake_bridge_interface = 'eth0' def current(): subnet_4 = network_model.Subnet(cidr=fake_ip, dns=[network_model.IP(fake_ip), network_model.IP(fake_ip)], gateway=network_model.IP(fake_ip), ips=[network_model.IP(fake_ip), network_model.IP(fake_ip)], routes=None, dhcp_server=fake_ip) subnet_6 = network_model.Subnet(cidr=fake_ip, gateway=network_model.IP(fake_ip), ips=[network_model.IP(fake_ip), network_model.IP(fake_ip), network_model.IP(fake_ip)], routes=None, version=6) subnets = [subnet_4] if ipv6: subnets.append(subnet_6) network = network_model.Network(id=None, bridge=fake, label=None, subnets=subnets, vlan=fake_vlan, bridge_interface=fake_bridge_interface, injected=False) vif = network_model.VIF(id='vif-xxx-yyy-zzz', address=fake, network=network, type=network_model.VIF_TYPE_BRIDGE, devname=None, ovs_interfaceid=None) return vif return network_model.NetworkInfo([current() for x in xrange(0, count)]) def is_osx(): return 
platform.mac_ver()[0] != '' test_dns_managers = [] def dns_manager(): global test_dns_managers manager = minidns.MiniDNS() test_dns_managers.append(manager) return manager def cleanup_dns_managers(): global test_dns_managers for manager in test_dns_managers: manager.delete_dns_file() test_dns_managers = [] def killer_xml_body(): return (("""<!DOCTYPE x [ <!ENTITY a "%(a)s"> <!ENTITY b "%(b)s"> <!ENTITY c "%(c)s">]> <foo> <bar> <v1>%(d)s</v1> </bar> </foo>""") % { 'a': 'A' * 10, 'b': '&a;' * 10, 'c': '&b;' * 10, 'd': '&c;' * 9999, }).strip() def is_ipv6_supported(): has_ipv6_support = socket.has_ipv6 try: s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) except socket.error as e: if e.errno == errno.EAFNOSUPPORT: has_ipv6_support = False else: raise # check if there is at least one interface with ipv6 if has_ipv6_support and sys.platform.startswith('linux'): try: with open('/proc/net/if_inet6') as f: if not f.read(): has_ipv6_support = False except IOError: has_ipv6_support = False return has_ipv6_support
apache-2.0
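is_ipv6_supported() above probes for IPv6 by attempting to create an AF_INET6 socket and treating EAFNOSUPPORT as the only benign failure. The same probe in isolation:

# Try to open an IPv6 socket; only EAFNOSUPPORT means "no IPv6 here".
import errno
import socket

def ipv6_supported():
    if not socket.has_ipv6:
        return False
    try:
        s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        s.close()
    except socket.error as e:
        if e.errno == errno.EAFNOSUPPORT:
            return False
        raise  # any other error is unexpected
    return True

print(ipv6_supported())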
lilchurro/vent
vent/menu.py
2
5236
#!/usr/bin/env python2.7 # -*- coding: utf-8 -*- import curses import npyscreen import time from threading import Thread from vent.api.actions import Action from vent.api.plugins import Plugin from vent.helpers.meta import Version from vent.helpers.paths import PathDirs from vent.menus.help import HelpForm from vent.menus.main import MainForm from vent.menus.tutorial_forms import TutorialAddingFilesForm from vent.menus.tutorial_forms import TutorialAddingPluginsForm from vent.menus.tutorial_forms import TutorialBackgroundForm from vent.menus.tutorial_forms import TutorialBuildingCoresForm from vent.menus.tutorial_forms import TutorialGettingSetupForm from vent.menus.tutorial_forms import TutorialIntroForm from vent.menus.tutorial_forms import TutorialStartingCoresForm from vent.menus.tutorial_forms import TutorialTerminologyForm from vent.menus.tutorial_forms import TutorialTroubleshootingForm class VentApp(npyscreen.NPSAppManaged): """ Main menu app for vent CLI """ keypress_timeout_default = 10 repo_value = {} paths = PathDirs() first_time = paths.ensure_file(paths.init_file) if first_time[0] and first_time[1] != "exists": npyscreen.NPSAppManaged.STARTING_FORM = "TUTORIALINTRO" else: npyscreen.NPSAppManaged.STARTING_FORM = "MAIN" def onStart(self): """ Override onStart method for npyscreen """ curses.mousemask(0) self.paths.host_config() version = Version() # setup initial runtime stuff if self.first_time[0] and self.first_time[1] != "exists": plugins = Plugin() actions = Action() thr = Thread(target=MainForm.t_status, args=(), kwargs={'core': True}) thr.start() while thr.is_alive(): npyscreen.notify_wait("Please wait while Vent initializes...1/4", title="Setting up things...") time.sleep(1) thr.join() thr = Thread(target=MainForm.t_status, args=(), kwargs={'core': False}) thr.start() while thr.is_alive(): npyscreen.notify_wait("Please wait while Vent initializes...2/4", title="Setting up things...") time.sleep(1) thr.join() thr = Thread(target=plugins.auto_install, args=(), kwargs={}) thr.start() while thr.is_alive(): npyscreen.notify_wait("Please wait while Vent initializes...3/4", title="Setting up things...") time.sleep(1) thr.join() thr = Thread(target=actions.startup, args=(), kwargs={}) thr.start() while thr.is_alive(): npyscreen.notify_wait("Please wait while Vent initializes...4/4", title="Setting up things...") time.sleep(1) thr.join() quit_s = "\t"*4 + "^Q to quit" tab_esc = "\t"*4 + "TAB to close menu popup" self.addForm("MAIN", MainForm, name="Vent " + version + "\t\t\t\t\t^T for help" + quit_s + tab_esc, color="IMPORTANT") self.addForm("HELP", HelpForm, name="Help\t\t\t\t\t\t\t\t^T to toggle previous" + quit_s, color="DANGER") self.addForm("TUTORIALINTRO", TutorialIntroForm, name="Vent Tutorial" + quit_s, color="DANGER") self.addForm("TUTORIALBACKGROUND", TutorialBackgroundForm, name="About Vent" + quit_s, color="DANGER") self.addForm("TUTORIALTERMINOLOGY", TutorialTerminologyForm, name="About Vent" + quit_s, color="DANGER") self.addForm("TUTORIALGETTINGSETUP", TutorialGettingSetupForm, name="About Vent" + quit_s, color="DANGER") self.addForm("TUTORIALBUILDINGCORES", TutorialBuildingCoresForm, name="Working with Cores" + quit_s, color="DANGER") self.addForm("TUTORIALSTARTINGCORES", TutorialStartingCoresForm, name="Working with Cores" + quit_s, color="DANGER") self.addForm("TUTORIALADDINGPLUGINS", TutorialAddingPluginsForm, name="Working with Plugins" + quit_s, color="DANGER") self.addForm("TUTORIALADDINGFILES", TutorialAddingFilesForm, name="Files" + quit_s, color="DANGER") 
self.addForm("TUTORIALTROUBLESHOOTING", TutorialTroubleshootingForm, name="Troubleshooting" + quit_s, color="DANGER") def change_form(self, name): """ Changes the form (window) that is displayed """ self.switchForm(name)
apache-2.0
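The menu app above follows the standard npyscreen pattern: subclass NPSAppManaged, register forms in onStart, and let the form registered as "MAIN" drive the event loop. A minimal runnable skeleton of that pattern (a sketch, assuming npyscreen is installed and a real terminal):

import npyscreen

class MainForm(npyscreen.Form):
    def create(self):
        # Widgets are added in create(), called once when the form is built.
        self.add(npyscreen.TitleText, name='Status:', value='ready')

class App(npyscreen.NPSAppManaged):
    def onStart(self):
        # The form registered under "MAIN" is displayed on startup.
        self.addForm('MAIN', MainForm, name='Demo')

if __name__ == '__main__':
    App().run()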
obsh/tornado
demos/chat/chatdemo.py
98
4426
#!/usr/bin/env python # # Copyright 2009 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import tornado.escape import tornado.ioloop import tornado.web import os.path import uuid from tornado.concurrent import Future from tornado import gen from tornado.options import define, options, parse_command_line define("port", default=8888, help="run on the given port", type=int) define("debug", default=False, help="run in debug mode") class MessageBuffer(object): def __init__(self): self.waiters = set() self.cache = [] self.cache_size = 200 def wait_for_messages(self, cursor=None): # Construct a Future to return to our caller. This allows # wait_for_messages to be yielded from a coroutine even though # it is not a coroutine itself. We will set the result of the # Future when results are available. result_future = Future() if cursor: new_count = 0 for msg in reversed(self.cache): if msg["id"] == cursor: break new_count += 1 if new_count: result_future.set_result(self.cache[-new_count:]) return result_future self.waiters.add(result_future) return result_future def cancel_wait(self, future): self.waiters.remove(future) # Set an empty result to unblock any coroutines waiting. future.set_result([]) def new_messages(self, messages): logging.info("Sending new message to %r listeners", len(self.waiters)) for future in self.waiters: future.set_result(messages) self.waiters = set() self.cache.extend(messages) if len(self.cache) > self.cache_size: self.cache = self.cache[-self.cache_size:] # Making this a non-singleton is left as an exercise for the reader. global_message_buffer = MessageBuffer() class MainHandler(tornado.web.RequestHandler): def get(self): self.render("index.html", messages=global_message_buffer.cache) class MessageNewHandler(tornado.web.RequestHandler): def post(self): message = { "id": str(uuid.uuid4()), "body": self.get_argument("body"), } # to_basestring is necessary for Python 3's json encoder, # which doesn't accept byte strings. 
message["html"] = tornado.escape.to_basestring( self.render_string("message.html", message=message)) if self.get_argument("next", None): self.redirect(self.get_argument("next")) else: self.write(message) global_message_buffer.new_messages([message]) class MessageUpdatesHandler(tornado.web.RequestHandler): @gen.coroutine def post(self): cursor = self.get_argument("cursor", None) # Save the future returned by wait_for_messages so we can cancel # it in wait_for_messages self.future = global_message_buffer.wait_for_messages(cursor=cursor) messages = yield self.future if self.request.connection.stream.closed(): return self.write(dict(messages=messages)) def on_connection_close(self): global_message_buffer.cancel_wait(self.future) def main(): parse_command_line() app = tornado.web.Application( [ (r"/", MainHandler), (r"/a/message/new", MessageNewHandler), (r"/a/message/updates", MessageUpdatesHandler), ], cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__", template_path=os.path.join(os.path.dirname(__file__), "templates"), static_path=os.path.join(os.path.dirname(__file__), "static"), xsrf_cookies=True, debug=options.debug, ) app.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
apache-2.0
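The demo's long polling rests on MessageBuffer handing out unresolved Futures and resolving them all when messages arrive. The core of that mechanism in isolation (a sketch against Tornado 4.x's tornado.concurrent.Future):

from tornado.concurrent import Future

class Buffer(object):
    def __init__(self):
        self.waiters = set()

    def wait_for_messages(self):
        # Hand the caller an unresolved Future; a coroutine can yield it.
        future = Future()
        self.waiters.add(future)
        return future

    def new_messages(self, messages):
        # Publishing resolves every pending Future, waking all waiters.
        for future in self.waiters:
            future.set_result(messages)
        self.waiters = set()

buf = Buffer()
f = buf.wait_for_messages()
buf.new_messages(['hello'])
print(f.result())  # ['hello']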
battlecat/Spirit
spirit/comment/models.py
2
2911
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.urlresolvers import reverse from django.conf import settings from django.db.models import F from django.utils import timezone from .managers import CommentQuerySet COMMENT, MOVED, CLOSED, UNCLOSED, PINNED, UNPINNED = range(6) ACTION = ( (COMMENT, _("comment")), (MOVED, _("topic moved")), (CLOSED, _("topic closed")), (UNCLOSED, _("topic unclosed")), (PINNED, _("topic pinned")), (UNPINNED, _("topic unpinned")), ) class Comment(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='st_comments') topic = models.ForeignKey('spirit_topic.Topic') comment = models.TextField(_("comment")) comment_html = models.TextField(_("comment html")) action = models.IntegerField(_("action"), choices=ACTION, default=COMMENT) date = models.DateTimeField(default=timezone.now) is_removed = models.BooleanField(default=False) is_modified = models.BooleanField(default=False) ip_address = models.GenericIPAddressField(blank=True, null=True) modified_count = models.PositiveIntegerField(_("modified count"), default=0) likes_count = models.PositiveIntegerField(_("likes count"), default=0) objects = CommentQuerySet.as_manager() class Meta: ordering = ['-date', '-pk'] verbose_name = _("comment") verbose_name_plural = _("comments") def get_absolute_url(self): return reverse('spirit:comment:find', kwargs={'pk': str(self.id), }) @property def like(self): # *likes* is dynamically created by manager.with_likes() try: assert len(self.likes) <= 1, "Panic, too many likes" return self.likes[0] except (AttributeError, IndexError): return def increase_modified_count(self): Comment.objects\ .filter(pk=self.pk)\ .update(modified_count=F('modified_count') + 1) def increase_likes_count(self): Comment.objects\ .filter(pk=self.pk)\ .update(likes_count=F('likes_count') + 1) def decrease_likes_count(self): Comment.objects\ .filter(pk=self.pk, likes_count__gt=0)\ .update(likes_count=F('likes_count') - 1) @classmethod def create_moderation_action(cls, user, topic, action): # TODO: better comment_html text (map to actions), use default language return cls.objects.create( user=user, topic=topic, action=action, comment="action", comment_html="action" ) @classmethod def get_last_for_topic(cls, topic_id): return (cls.objects .filter(topic_id=topic_id) .order_by('pk') .last())
mit
Microvellum/Fluid-Designer
win64-vc/2.78/Python/bin/2.78/scripts/modules/blend_render_info.py
1
2789
#!/usr/bin/env python3 # ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # <pep8 compliant> # This module can get render info without running from inside blender. # # This struct wont change according to Ton. # Note that the size differs on 32/64bit # # typedef struct BHead { # int code, len; # void *old; # int SDNAnr, nr; # } BHead; def read_blend_rend_chunk(path): import struct blendfile = open(path, "rb") head = blendfile.read(7) if head[0:2] == b'\x1f\x8b': # gzip magic import gzip blendfile.seek(0) blendfile = gzip.open(blendfile, "rb") head = blendfile.read(7) if head != b'BLENDER': print("not a blend file:", path) blendfile.close() return [] is_64_bit = (blendfile.read(1) == b'-') # true for PPC, false for X86 is_big_endian = (blendfile.read(1) == b'V') # Now read the bhead chunk!!! blendfile.read(3) # skip the version scenes = [] sizeof_bhead = 24 if is_64_bit else 20 while blendfile.read(4) == b'REND': sizeof_bhead_left = sizeof_bhead - 4 struct.unpack('>i' if is_big_endian else '<i', blendfile.read(4))[0] sizeof_bhead_left -= 4 # We don't care about the rest of the bhead struct blendfile.read(sizeof_bhead_left) # Now we want the scene name, start and end frame. this is 32bites long start_frame, end_frame = struct.unpack('>2i' if is_big_endian else '<2i', blendfile.read(8)) scene_name = blendfile.read(64) scene_name = scene_name[:scene_name.index(b'\0')] try: scene_name = str(scene_name, "utf8") except TypeError: pass scenes.append((start_frame, end_frame, scene_name)) blendfile.close() return scenes def main(): import sys for arg in sys.argv[1:]: if arg.lower().endswith('.blend'): for value in read_blend_rend_chunk(arg): print("%d %d %s" % value) if __name__ == '__main__': main()
gpl-3.0
Pexego/odoo
addons/note/tests/test_note.py
427
1686
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.tests import common class TestNote(common.TransactionCase): def test_bug_lp_1156215(self): """ensure any users can create new users""" cr, uid = self.cr, self.uid IMD = self.registry('ir.model.data') Users = self.registry('res.users') _, demo_user = IMD.get_object_reference(cr, uid, 'base', 'user_demo') _, group_id = IMD.get_object_reference(cr, uid, 'base', 'group_erp_manager') Users.write(cr, uid, [demo_user], { 'groups_id': [(4, group_id)], }) # must not fail Users.create(cr, demo_user, { 'name': 'test bug lp:1156215', 'login': 'lp_1156215', })
agpl-3.0
joe-antognini/matasano
set1.py
1
6125
#! /usr/bin/env python # # Matasano Crypto Pals # Set 1 # import binascii import base64 from freqs import * from Crypto.Cipher import AES def hex2base64(a): '''Convert a string in hex to a string in base64.''' data = binascii.unhexlify(a) return base64.b64encode(data) def fixedXOR(b1, b2): int1 = int(b1.encode('hex'), base=16) int2 = int(b2.encode('hex'), base=16) ret = hex(int1 ^ int2)[2:].rstrip('L') if len(ret) % 2 == 1: ret = '0' + ret return ret.decode('hex') def score_string(s): '''Return a score associated with how likely the string is given English character frequencies.''' rare_char_score = -4 nonascii_score = -100 score = 0 for char in s: if char in en_log_freqs: score += en_log_freqs[char] elif 31 < ord(char) < 127: score += rare_char_score else: score += nonascii_score return score def encrypt_single_key_xor(instring, key): '''Encrypt a string against a repeating single character.''' repeated_char = len(instring) * key return fixedXOR(instring, repeated_char) def decrypt_single_key_xor(instring, return_key=False): '''Decode a string that has been encrypted against a single character.''' maxscore = None best_string = None best_key = None for i in xrange(32, 128): s = encrypt_single_key_xor(instring, chr(i)) score = score_string(s) if score > maxscore: maxscore = score best_string = s best_key = chr(i) if return_key: return (best_string, best_key) else: return best_string def detect_single_char_xor(filename): '''Find the line in the file that has been encrypted with single key XOR.''' maxscore = None best_line = None with open(filename) as infile: lines = infile.readlines() for raw_line in lines: line = raw_line.strip().decode('hex') decoded_line = decrypt_single_key_xor(line) score = score_string(decoded_line) if score > maxscore: best_line = decoded_line maxscore = score return best_line def repeating_key_xor(s, key): '''Encrypt a string s with the repeating key.''' key_len = len(key) enc_blocks = [] for i in range(key_len): decrypt_block = encrypt_single_key_xor(s[i::key_len], key[i]) enc_blocks.append(decrypt_block) enc_block = [elem for sublist in zip(*enc_blocks) for elem in sublist] enc_block = '' for i in xrange(len(enc_blocks[0])): for block in enc_blocks: try: enc_block += block[i] except IndexError: pass return enc_block.encode('hex') def hamming_distance(b1, b2): '''Compute the Hamming distance between s1 and s2.''' int1 = int(b1.encode('hex'), base=16) int2 = int(b2.encode('hex'), base=16) xored = int1 ^ int2 hamming_distance = 0 while xored: if xored % 2 == 1: hamming_distance += 1 xored >>= 1 return hamming_distance def find_keysize(data): '''Find the keysize used to encrypt the data.''' nblocks = 12 max_keysize = 40 normalized_hamdists = [] min_keysize = 1 for keysize in range(min_keysize, min(max_keysize+1, len(data)/nblocks)): hamdists = [] for i in range(nblocks): block1 = data[keysize*i:keysize*(i+1)] block2 = data[keysize*(i+1):keysize*(i+2)] hamdists.append(hamming_distance(block1, block2) / float(keysize)) mean_hamdist = sum(hamdists) / len(hamdists) normalized_hamdists.append(mean_hamdist) return normalized_hamdists.index(min(normalized_hamdists)) + min_keysize def break_rep_key_xor(data, return_key=False): '''Break repeating key xor.''' keysize = find_keysize(data) decrypt_blocks = [] key = '' for i in range(keysize): block = data[i::keysize] decrypt_block, decrypt_key = decrypt_single_key_xor(block, return_key=True) decrypt_blocks.append(decrypt_block) key += decrypt_key decrypted_message = '' for i in range(len(decrypt_blocks[0])): for decrypt_block in 
decrypt_blocks: try: decrypted_message += decrypt_block[i] except IndexError: pass if return_key: return decrypted_message, key else: return decrypted_message if __name__ == '__main__': # Challenge 1 STRING1_1 = '49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d' RESULT1_1 = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t' assert hex2base64(STRING1_1) == RESULT1_1 print "Challenge 1 test passed" print # Challenge 2 STRING1_2A = '1c0111001f010100061a024b53535009181c' STRING1_2B = '686974207468652062756c6c277320657965' RESULT1_2 = '746865206b696420646f6e277420706c6179' assert fixedXOR(STRING1_2A.decode('hex'), STRING1_2B.decode('hex')).encode('hex') == RESULT1_2 print "Challenge 2 test passed" print # Challenge 3 STRING1_3 = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' print "Challenge 3 solution:" print decrypt_single_key_xor(STRING1_3.decode('hex')) print # Challenge 4 print "Challenge 4 solution:" print detect_single_char_xor('set1-4.txt') # Challenge 5 STRING1_5A = 'Burning \'em, if you ain\'t quick and nimble\n' STRING1_5B = 'I go crazy when I hear a cymbal' STRING1_5 = STRING1_5A + STRING1_5B STRING1_5KEY = 'ICE' RESULT1_5A = '0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272' RESULT1_5B = 'a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f' RESULT1_5 = RESULT1_5A + RESULT1_5B assert repeating_key_xor(STRING1_5, STRING1_5KEY) == RESULT1_5 print "Challenge 5 test passed" print # Challenge 6 B64_1_6 = '' with open('set1-6.txt') as infile: for line in infile: B64_1_6 += line.strip() DATA1_6 = base64.b64decode(B64_1_6) print "Challenge 6 solution:" print break_rep_key_xor(DATA1_6) print # Challenge 7 KEY1_7 = 'YELLOW SUBMARINE' DATA_64_1_7 = '' with open('set1-7.txt') as infile: for line in infile: DATA_64_1_7 += line.strip() DATA1_7 = base64.b64decode(DATA_64_1_7) CIPHER = AES.new(KEY1_7, AES.MODE_ECB) print "Challenge 7 solution:" print CIPHER.decrypt(DATA1_7)
mit
XiaodunServerGroup/medicalmooc
common/lib/sandbox-packages/eia.py
79
1979
""" Standard resistor codes. http://en.wikipedia.org/wiki/Electronic_color_code """ E6 = [10, 15, 22, 33, 47, 68] E12 = [10, 12, 15, 18, 22, 27, 33, 39, 47, 56, 68, 82] E24 = [10, 12, 15, 18, 22, 27, 33, 39, 47, 56, 68, 82, 11, 13, 16, 20, 24, 30, 36, 43, 51, 62, 75, 91] E48 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 105, 127, 154, 187, 226, 274, 332, 402, 487, 590, 715, 866, 110, 133, 162, 196, 237, 287, 348, 422, 511, 619, 750, 909, 115, 140, 169, 205, 249, 301, 365, 442, 536, 649, 787, 953] E96 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 102, 124, 150, 182, 221, 267, 324, 392, 475, 576, 698, 845, 105, 127, 154, 187, 226, 274, 332, 402, 487, 590, 715, 866, 107, 130, 158, 191, 232, 280, 340, 412, 499, 604, 732, 887, 110, 133, 162, 196, 237, 287, 348, 422, 511, 619, 750, 909, 113, 137, 165, 200, 243, 294, 357, 432, 523, 634, 768, 931, 115, 140, 169, 205, 249, 301, 365, 442, 536, 649, 787, 953, 118, 143, 174, 210, 255, 309, 374, 453, 549, 665, 806, 976] E192 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 101, 123, 149, 180, 218, 264, 320, 388, 470, 569, 690, 835, 102, 124, 150, 182, 221, 267, 324, 392, 475, 576, 698, 845, 104, 126, 152, 184, 223, 271, 328, 397, 481, 583, 706, 856, 105, 127, 154, 187, 226, 274, 332, 402, 487, 590, 715, 866, 106, 129, 156, 189, 229, 277, 336, 407, 493, 597, 723, 876, 107, 130, 158, 191, 232, 280, 340, 412, 499, 604, 732, 887, 109, 132, 160, 193, 234, 284, 344, 417, 505, 612, 741, 898, 110, 133, 162, 196, 237, 287, 348, 422, 511, 619, 750, 909, 111, 135, 164, 198, 240, 291, 352, 427, 517, 626, 759, 920, 113, 137, 165, 200, 243, 294, 357, 432, 523, 634, 768, 931, 114, 138, 167, 203, 246, 298, 361, 437, 530, 642, 777, 942, 115, 140, 169, 205, 249, 301, 365, 442, 536, 649, 787, 953, 117, 142, 172, 208, 252, 305, 370, 448, 542, 657, 796, 965, 118, 143, 174, 210, 255, 309, 374, 453, 549, 665, 806, 976, 120, 145, 176, 213, 258, 312, 379, 459, 556, 673, 816, 988]
agpl-3.0
markoshorro/gem5
src/arch/x86/isa/insts/system/control_registers.py
91
3073
# Copyright (c) 2009 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black microcode = ''' def macroop CLTS { rdcr t1, regIdx(0), dataSize=8 andi t1, t1, 0xF7, dataSize=1 wrcr regIdx(0), t1, dataSize=8 }; def macroop LMSW_R { rdcr t1, regIdx(0), dataSize=8 # This logic sets MP, EM, and TS to whatever is in the operand. It will # set PE but not clear it. limm t2, "~ULL(0xe)", dataSize=8 and t1, t1, t2, dataSize=8 andi t2, reg, 0xf, dataSize=8 or t1, t1, t2, dataSize=8 wrcr regIdx(0), t1, dataSize=8 }; def macroop LMSW_M { ld t3, seg, sib, disp, dataSize=2 rdcr t1, regIdx(0), dataSize=8 # This logic sets MP, EM, and TS to whatever is in the operand. It will # set PE but not clear it. limm t2, "~ULL(0xe)", dataSize=8 and t1, t1, t2, dataSize=8 andi t2, t3, 0xf, dataSize=8 or t1, t1, t2, dataSize=8 wrcr regIdx(0), t1, dataSize=8 }; def macroop LMSW_P { rdip t7, dataSize=asz ld t3, seg, riprel, disp, dataSize=2 rdcr t1, regIdx(0), dataSize=8 # This logic sets MP, EM, and TS to whatever is in the operand. It will # set PE but not clear it. limm t2, "~ULL(0xe)", dataSize=8 and t1, t1, t2, dataSize=8 andi t2, t3, 0xf, dataSize=8 or t1, t1, t2, dataSize=8 wrcr regIdx(0), t1, dataSize=8 }; def macroop SMSW_R { rdcr reg, regIdx(0) }; def macroop SMSW_M { rdcr t1, regIdx(0) st t1, seg, sib, disp, dataSize=2 }; def macroop SMSW_P { rdcr t1, regIdx(0) rdip t7, dataSize=asz st t1, seg, riprel, disp, dataSize=2 }; '''
bsd-3-clause
jymannob/CouchPotatoServer
couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/brightcove.py
8
12227
# encoding: utf-8 from __future__ import unicode_literals import re import json import xml.etree.ElementTree from .common import InfoExtractor from ..utils import ( compat_urllib_parse, find_xpath_attr, fix_xml_ampersands, compat_urlparse, compat_str, compat_urllib_request, compat_parse_qs, determine_ext, ExtractorError, unsmuggle_url, unescapeHTML, ) class BrightcoveIE(InfoExtractor): _VALID_URL = r'https?://.*brightcove\.com/(services|viewer).*\?(?P<query>.*)' _FEDERATED_URL_TEMPLATE = 'http://c.brightcove.com/services/viewer/htmlFederated?%s' _TESTS = [ { # From http://www.8tv.cat/8aldia/videos/xavier-sala-i-martin-aquesta-tarda-a-8-al-dia/ 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1654948606001&flashID=myExperience&%40videoPlayer=2371591881001', 'md5': '5423e113865d26e40624dce2e4b45d95', 'note': 'Test Brightcove downloads and detection in GenericIE', 'info_dict': { 'id': '2371591881001', 'ext': 'mp4', 'title': 'Xavier Sala i Martín: “Un banc que no presta és un banc zombi que no serveix per a res”', 'uploader': '8TV', 'description': 'md5:a950cc4285c43e44d763d036710cd9cd', } }, { # From http://medianetwork.oracle.com/video/player/1785452137001 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1217746023001&flashID=myPlayer&%40videoPlayer=1785452137001', 'info_dict': { 'id': '1785452137001', 'ext': 'flv', 'title': 'JVMLS 2012: Arrays 2.0 - Opportunities and Challenges', 'description': 'John Rose speaks at the JVM Language Summit, August 1, 2012.', 'uploader': 'Oracle', }, }, { # From http://mashable.com/2013/10/26/thermoelectric-bracelet-lets-you-control-your-body-temperature/ 'url': 'http://c.brightcove.com/services/viewer/federated_f9?&playerID=1265504713001&publisherID=AQ%7E%7E%2CAAABBzUwv1E%7E%2CxP-xFHVUstiMFlNYfvF4G9yFnNaqCw_9&videoID=2750934548001', 'info_dict': { 'id': '2750934548001', 'ext': 'mp4', 'title': 'This Bracelet Acts as a Personal Thermostat', 'description': 'md5:547b78c64f4112766ccf4e151c20b6a0', 'uploader': 'Mashable', }, }, { # test that the default referer works # from http://national.ballet.ca/interact/video/Lost_in_Motion_II/ 'url': 'http://link.brightcove.com/services/player/bcpid756015033001?bckey=AQ~~,AAAApYJi_Ck~,GxhXCegT1Dp39ilhXuxMJxasUhVNZiil&bctid=2878862109001', 'info_dict': { 'id': '2878862109001', 'ext': 'mp4', 'title': 'Lost in Motion II', 'description': 'md5:363109c02998fee92ec02211bd8000df', 'uploader': 'National Ballet of Canada', }, }, { # test flv videos served by akamaihd.net # From http://www.redbull.com/en/bike/stories/1331655643987/replay-uci-dh-world-cup-2014-from-fort-william 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?%40videoPlayer=ref%3ABC2996102916001&linkBaseURL=http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fvideos%2F1331655630249%2Freplay-uci-fort-william-2014-dh&playerKey=AQ%7E%7E%2CAAAApYJ7UqE%7E%2Cxqr_zXk0I-zzNndy8NlHogrCb5QdyZRf&playerID=1398061561001#__youtubedl_smuggle=%7B%22Referer%22%3A+%22http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fstories%2F1331655643987%2Freplay-uci-dh-world-cup-2014-from-fort-william%22%7D', # The md5 checksum changes on each download 'info_dict': { 'id': '2996102916001', 'ext': 'flv', 'title': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals', 'uploader': 'Red Bull TV', 'description': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals', }, }, ] @classmethod def _build_brighcove_url(cls, object_str): """ Build a Brightcove url from a xml string containing <object class="BrightcoveExperience">{params}</object> """ # Fix up 
some stupid HTML, see https://github.com/rg3/youtube-dl/issues/1553 object_str = re.sub(r'(<param name="[^"]+" value="[^"]+")>', lambda m: m.group(1) + '/>', object_str) # Fix up some stupid XML, see https://github.com/rg3/youtube-dl/issues/1608 object_str = object_str.replace('<--', '<!--') object_str = fix_xml_ampersands(object_str) object_doc = xml.etree.ElementTree.fromstring(object_str.encode('utf-8')) fv_el = find_xpath_attr(object_doc, './param', 'name', 'flashVars') if fv_el is not None: flashvars = dict( (k, v[0]) for k, v in compat_parse_qs(fv_el.attrib['value']).items()) else: flashvars = {} def find_param(name): if name in flashvars: return flashvars[name] node = find_xpath_attr(object_doc, './param', 'name', name) if node is not None: return node.attrib['value'] return None params = {} playerID = find_param('playerID') if playerID is None: raise ExtractorError('Cannot find player ID') params['playerID'] = playerID playerKey = find_param('playerKey') # Not all pages define this value if playerKey is not None: params['playerKey'] = playerKey # The three fields hold the id of the video videoPlayer = find_param('@videoPlayer') or find_param('videoId') or find_param('videoID') if videoPlayer is not None: params['@videoPlayer'] = videoPlayer linkBase = find_param('linkBaseURL') if linkBase is not None: params['linkBaseURL'] = linkBase data = compat_urllib_parse.urlencode(params) return cls._FEDERATED_URL_TEMPLATE % data @classmethod def _extract_brightcove_url(cls, webpage): """Try to extract the brightcove url from the webpage, returns None if it can't be found """ urls = cls._extract_brightcove_urls(webpage) return urls[0] if urls else None @classmethod def _extract_brightcove_urls(cls, webpage): """Return a list of all Brightcove URLs from the webpage """ url_m = re.search(r'<meta\s+property="og:video"\s+content="(http://c.brightcove.com/[^"]+)"', webpage) if url_m: url = unescapeHTML(url_m.group(1)) # Some sites don't add it, we can't download with this url, for example: # http://www.ktvu.com/videos/news/raw-video-caltrain-releases-video-of-man-almost/vCTZdY/ if 'playerKey' in url: return [url] matches = re.findall( r'''(?sx)<object (?: [^>]+?class=[\'"][^>]*?BrightcoveExperience.*?[\'"] | [^>]*?>\s*<param\s+name="movie"\s+value="https?://[^/]*brightcove\.com/ ).+?</object>''', webpage) return [cls._build_brighcove_url(m) for m in matches] def _real_extract(self, url): url, smuggled_data = unsmuggle_url(url, {}) # Change the 'videoId' and others field to '@videoPlayer' url = re.sub(r'(?<=[?&])(videoI(d|D)|bctid)', '%40videoPlayer', url) # Change bckey (used by bcove.me urls) to playerKey url = re.sub(r'(?<=[?&])bckey', 'playerKey', url) mobj = re.match(self._VALID_URL, url) query_str = mobj.group('query') query = compat_urlparse.parse_qs(query_str) videoPlayer = query.get('@videoPlayer') if videoPlayer: # We set the original url as the default 'Referer' header referer = smuggled_data.get('Referer', url) return self._get_video_info( videoPlayer[0], query_str, query, referer=referer) else: player_key = query['playerKey'] return self._get_playlist_info(player_key[0]) def _get_video_info(self, video_id, query_str, query, referer=None): request_url = self._FEDERATED_URL_TEMPLATE % query_str req = compat_urllib_request.Request(request_url) linkBase = query.get('linkBaseURL') if linkBase is not None: referer = linkBase[0] if referer is not None: req.add_header('Referer', referer) webpage = self._download_webpage(req, video_id) self.report_extraction(video_id) info = 
self._search_regex(r'var experienceJSON = ({.*});', webpage, 'json') info = json.loads(info)['data'] video_info = info['programmedContent']['videoPlayer']['mediaDTO'] video_info['_youtubedl_adServerURL'] = info.get('adServerURL') return self._extract_video_info(video_info) def _get_playlist_info(self, player_key): info_url = 'http://c.brightcove.com/services/json/experience/runtime/?command=get_programming_for_experience&playerKey=%s' % player_key playlist_info = self._download_webpage( info_url, player_key, 'Downloading playlist information') json_data = json.loads(playlist_info) if 'videoList' not in json_data: raise ExtractorError('Empty playlist') playlist_info = json_data['videoList'] videos = [self._extract_video_info(video_info) for video_info in playlist_info['mediaCollectionDTO']['videoDTOs']] return self.playlist_result(videos, playlist_id=playlist_info['id'], playlist_title=playlist_info['mediaCollectionDTO']['displayName']) def _extract_video_info(self, video_info): info = { 'id': compat_str(video_info['id']), 'title': video_info['displayName'].strip(), 'description': video_info.get('shortDescription'), 'thumbnail': video_info.get('videoStillURL') or video_info.get('thumbnailURL'), 'uploader': video_info.get('publisherName'), } renditions = video_info.get('renditions') if renditions: formats = [] for rend in renditions: url = rend['defaultURL'] if rend['remote']: # This type of renditions are served through akamaihd.net, # but they don't use f4m manifests url = url.replace('control/', '') + '?&v=3.3.0&fp=13&r=FEEFJ&g=RTSJIMBMPFPB' ext = 'flv' else: ext = determine_ext(url) size = rend.get('size') formats.append({ 'url': url, 'ext': ext, 'height': rend.get('frameHeight'), 'width': rend.get('frameWidth'), 'filesize': size if size != 0 else None, }) self._sort_formats(formats) info['formats'] = formats elif video_info.get('FLVFullLengthURL') is not None: info.update({ 'url': video_info['FLVFullLengthURL'], }) if self._downloader.params.get('include_ads', False): adServerURL = video_info.get('_youtubedl_adServerURL') if adServerURL: ad_info = { '_type': 'url', 'url': adServerURL, } if 'url' in info: return { '_type': 'playlist', 'title': info['title'], 'entries': [ad_info, info], } else: return ad_info if 'url' not in info and not info.get('formats'): raise ExtractorError('Unable to extract video url for %s' % info['id']) return info
gpl-3.0
DANCEcollaborative/forum-xblock
XBlock Integration Files/xdjangobb/xblock/lib/python2.7/site-packages/django/contrib/gis/tests/inspectapp/tests.py
134
5128
from __future__ import absolute_import import os from django.db import connections from django.test import TestCase from django.contrib.gis.gdal import Driver from django.contrib.gis.geometry.test_data import TEST_DATA from django.contrib.gis.utils.ogrinspect import ogrinspect from .models import AllOGRFields class OGRInspectTest(TestCase): def test_poly(self): shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp') model_def = ogrinspect(shp_file, 'MyModel') expected = [ '# This is an auto-generated Django model module created by ogrinspect.', 'from django.contrib.gis.db import models', '', 'class MyModel(models.Model):', ' float = models.FloatField()', ' int = models.FloatField()', ' str = models.CharField(max_length=80)', ' geom = models.PolygonField(srid=-1)', ' objects = models.GeoManager()', ] self.assertEqual(model_def, '\n'.join(expected)) def test_date_field(self): shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp') model_def = ogrinspect(shp_file, 'City') expected = [ '# This is an auto-generated Django model module created by ogrinspect.', 'from django.contrib.gis.db import models', '', 'class City(models.Model):', ' name = models.CharField(max_length=80)', ' population = models.FloatField()', ' density = models.FloatField()', ' created = models.DateField()', ' geom = models.PointField(srid=-1)', ' objects = models.GeoManager()', ] self.assertEqual(model_def, '\n'.join(expected)) def test_time_field(self): # Only possible to test this on PostGIS at the momemnt. MySQL # complains about permissions, and SpatiaLite/Oracle are # insanely difficult to get support compiled in for in GDAL. if not connections['default'].ops.postgis: return # Getting the database identifier used by OGR, if None returned # GDAL does not have the support compiled in. ogr_db = get_ogr_db_string() if not ogr_db: return # writing shapefules via GDAL currently does not support writing OGRTime # fields, so we need to actually use a database model_def = ogrinspect(ogr_db, 'Measurement', layer_key=AllOGRFields._meta.db_table, decimal=['f_decimal']) self.assertTrue(model_def.startswith( '# This is an auto-generated Django model module created by ogrinspect.\n' 'from django.contrib.gis.db import models\n' '\n' 'class Measurement(models.Model):\n' )) # The ordering of model fields might vary depending on several factors (version of GDAL, etc.) self.assertIn(' f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', model_def) self.assertIn(' f_int = models.IntegerField()', model_def) self.assertIn(' f_datetime = models.DateTimeField()', model_def) self.assertIn(' f_time = models.TimeField()', model_def) self.assertIn(' f_float = models.FloatField()', model_def) self.assertIn(' f_char = models.CharField(max_length=10)', model_def) self.assertIn(' f_date = models.DateField()', model_def) self.assertTrue(model_def.endswith( ' geom = models.PolygonField()\n' ' objects = models.GeoManager()' )) def get_ogr_db_string(): # Construct the DB string that GDAL will use to inspect the database. # GDAL will create its own connection to the database, so we re-use the # connection settings from the Django test. This approach is a bit fragile # and cannot work on any other database other than PostgreSQL at the moment. db = connections.databases['default'] # Map from the django backend into the OGR driver name and database identifier # http://www.gdal.org/ogr/ogr_formats.html # # TODO: Support Oracle (OCI), MySQL, and SpatiaLite. 
drivers = { 'django.contrib.gis.db.backends.postgis': ('PostgreSQL', 'PG'), } drv_name, db_str = drivers[db['ENGINE']] # Ensure that GDAL library has driver support for the database. try: Driver(drv_name) except: return None # Build the params of the OGR database connection string # TODO: connection strings are database-dependent, thus if # we ever test other backends, this will need to change. params = ["dbname='%s'" % db['NAME']] def add(key, template): value = db.get(key, None) # Don't add the parameter if it is not in django's settings if value: params.append(template % value) add('HOST', "host='%s'") add('PORT', "port='%s'") add('USER', "user='%s'") add('PASSWORD', "password='%s'") return '%s:%s' % (db_str, ' '.join(params))
mit
saikrishnar/Forced-Alignment
Post_1/scripts/run_kaldi/utils/reverse_arpa.py
13
5824
# -*- coding: utf-8 -*- # Copyright 2012 Mirko Hannemann BUT, [email protected] import sys import codecs # for UTF-8/unicode if len(sys.argv) != 2: print 'usage: reverse_arpa arpa.in' sys.exit() arpaname = sys.argv[1] #\data\ #ngram 1=4 #ngram 2=2 #ngram 3=2 # #\1-grams: #-5.234679 a -3.3 #-3.456783 b #0.0000000 <s> -2.5 #-4.333333 </s> # #\2-grams: #-1.45678 a b -3.23 #-1.30490 <s> a -4.2 # #\3-grams: #-0.34958 <s> a b #-0.23940 a b </s> #\end\ # read language model in ARPA format try: file = codecs.open(arpaname, "r", "utf-8") except IOError: print 'file not found: ' + arpaname sys.exit() text=file.readline() while (text and text[:6] != "\\data\\"): text=file.readline() if not text: print "invalid ARPA file" sys.exit() #print text, while (text and text[:5] != "ngram"): text=file.readline() # get ngram counts cngrams=[] n=0 while (text and text[:5] == "ngram"): ind = text.split("=") counts = int(ind[1].strip()) r = ind[0].split() read_n = int(r[1].strip()) if read_n != n+1: print "invalid ARPA file:", text sys.exit() n = read_n cngrams.append(counts) #print text, text=file.readline() # read all n-grams order by order sentprob = 0.0 # sentence begin unigram ngrams=[] inf=float("inf") for n in range(1,len(cngrams)+1): # unigrams, bigrams, trigrams while (text and "-grams:" not in text): text=file.readline() if n != int(text[1]): print "invalid ARPA file:", text sys.exit() #print text,cngrams[n-1] this_ngrams={} # stores all read ngrams for ng in range(cngrams[n-1]): while (text and len(text.split())<2): text=file.readline() if (not text) or ((len(text.split())==1) and (("-grams:" in text) or (text[:5] == "\\end\\"))): break if (not text) or ((len(text.split())==1) and (("-grams:" in text) or (text[:5] == "\\end\\"))): break # to deal with incorrect ARPA files entry = text.split() prob = float(entry[0]) if len(entry)>n+1: back = float(entry[-1]) words = entry[1:n+1] else: back = 0.0 words = entry[1:] ngram = " ".join(words) if (n==1) and words[0]=="<s>": sentprob = prob prob = 0.0 this_ngrams[ngram] = (prob,back) #print prob,ngram.encode("utf-8"),back for x in range(n-1,0,-1): # add all missing backoff ngrams for reversed lm l_ngram = " ".join(words[:x]) # shortened ngram r_ngram = " ".join(words[1:1+x]) # shortened ngram with offset one if l_ngram not in ngrams[x-1]: # create missing ngram ngrams[x-1][l_ngram] = (0.0,inf) #print ngram, "create 0.0", l_ngram, "inf" if r_ngram not in ngrams[x-1]: # create missing ngram ngrams[x-1][r_ngram] = (0.0,inf) #print ngram, "create 0.0", r_ngram, "inf",x,n,h_ngram # add all missing backoff ngrams for forward lm h_ngram = " ".join(words[n-x:]) # shortened history if h_ngram not in ngrams[x-1]: # create missing ngram ngrams[x-1][h_ngram] = (0.0,inf) #print "create inf", h_ngram, "0.0" text=file.readline() if (not text) or ((len(text.split())==1) and (("-grams:" in text) or (text[:5] == "\\end\\"))): break ngrams.append(this_ngrams) while (text and text[:5] != "\\end\\"): text=file.readline() if not text: print "invalid ARPA file" sys.exit() file.close() #print text, #fourgram "maxent" model (b(ABCD)=0): #p(A)+b(A) A 0 #p(AB)+b(AB)-b(A)-p(B) AB 0 #p(ABC)+b(ABC)-b(AB)-p(BC) ABC 0 #p(ABCD)+b(ABCD)-b(ABC)-p(BCD) ABCD 0 #fourgram reverse ARPA model (b(ABCD)=0): #p(A)+b(A) A 0 #p(AB)+b(AB)-p(B)+p(A) BA 0 #p(ABC)+b(ABC)-p(BC)+p(AB)-p(B)+p(A) CBA 0 #p(ABCD)+b(ABCD)-p(BCD)+p(ABC)-p(BC)+p(AB)-p(B)+p(A) DCBA 0 # compute new reversed ARPA model print "\\data\\" for n in range(1,len(cngrams)+1): # unigrams, bigrams, trigrams print "ngram 
"+str(n)+"="+str(len(ngrams[n-1].keys())) offset = 0.0 for n in range(1,len(cngrams)+1): # unigrams, bigrams, trigrams print "\\"+str(n)+"-grams:" keys = ngrams[n-1].keys() keys.sort() for ngram in keys: prob = ngrams[n-1][ngram] # reverse word order words = ngram.split() rstr = " ".join(reversed(words)) # swap <s> and </s> rev_ngram = rstr.replace("<s>","<temp>").replace("</s>","<s>").replace("<temp>","</s>") revprob = prob[0] if (prob[1] != inf): # only backoff weights from not newly created ngrams revprob = revprob + prob[1] #print prob[0],prob[1] # sum all missing terms in decreasing ngram order for x in range(n-1,0,-1): l_ngram = " ".join(words[:x]) # shortened ngram if l_ngram not in ngrams[x-1]: sys.stderr.write(rev_ngram+": not found "+l_ngram+"\n") p_l = ngrams[x-1][l_ngram][0] #print p_l,l_ngram revprob = revprob + p_l r_ngram = " ".join(words[1:1+x]) # shortened ngram with offset one if r_ngram not in ngrams[x-1]: sys.stderr.write(rev_ngram+": not found "+r_ngram+"\n") p_r = ngrams[x-1][r_ngram][0] #print -p_r,r_ngram revprob = revprob - p_r if n != len(cngrams): #not highest order back = 0.0 if rev_ngram[:3] == "<s>": # special handling since arpa2fst ignores <s> weight if n == 1: offset = revprob # remember <s> weight revprob = sentprob # apply <s> weight from forward model back = offset elif n == 2: revprob = revprob + offset # add <s> weight to bigrams starting with <s> if (prob[1] != inf): # only backoff weights from not newly created ngrams print revprob,rev_ngram.encode("utf-8"),back else: print revprob,rev_ngram.encode("utf-8"),"-100000.0" else: # highest order - no backoff weights if (n==2) and (rev_ngram[:3] == "<s>"): revprob = revprob + offset print revprob,rev_ngram.encode("utf-8") print "\\end\\"
gpl-2.0
atodorov/anaconda
pyanaconda/modules/storage/devicetree/devicetree_interface.py
6
1430
# # DBus interface for the device tree module # # Copyright (C) 2019 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from dasbus.server.interface import dbus_class from pyanaconda.modules.storage.devicetree.handler_interface import DeviceTreeHandlerInterface from pyanaconda.modules.storage.devicetree.viewer_interface import DeviceTreeViewerInterface __all__ = ["DeviceTreeInterface"] @dbus_class class DeviceTreeInterface(DeviceTreeViewerInterface, DeviceTreeHandlerInterface): """DBus interface for the device tree module.""" pass
gpl-2.0
WoLpH/celery
celery/tests/test_task/test_task_builtins.py
1
1421
from __future__ import with_statement import warnings from celery.task import ping, PingTask, backend_cleanup from celery.tests.compat import catch_warnings from celery.tests.utils import unittest def some_func(i): return i * i class test_deprecated(unittest.TestCase): def test_ping(self): warnings.resetwarnings() with catch_warnings(record=True) as log: prev = PingTask.app.conf.CELERY_ALWAYS_EAGER PingTask.app.conf.CELERY_ALWAYS_EAGER = True try: pong = ping() warning = log[0].message self.assertEqual(pong, "pong") self.assertIsInstance(warning, DeprecationWarning) self.assertIn("ping task has been deprecated", warning.args[0]) finally: PingTask.app.conf.CELERY_ALWAYS_EAGER = prev def test_TaskSet_import_from_task_base(self): warnings.resetwarnings() with catch_warnings(record=True) as log: from celery.task.base import TaskSet, subtask TaskSet() subtask(PingTask) for w in (log[0].message, log[1].message): self.assertIsInstance(w, DeprecationWarning) self.assertIn("is deprecated", w.args[0]) class test_backend_cleanup(unittest.TestCase): def test_run(self): backend_cleanup.apply()
bsd-3-clause
ThrowDice/watu
friendfeed.py
1
14114
#!/usr/bin/env python # # Copyright 2008 FriendFeed # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Methods to interact with the FriendFeed API Detailed documentation is available at http://friendfeed.com/api/. Many parts of the FriendFeed API require authentication. To support authentication, FriendFeed gives users a "remote key" that they give to third party applications to access FriendFeed. The user's nickname and that remote key are passed as arguments to the constructor of the FriendFeed class, and the credentials are automatically passed to all called methods. For example: session = friendfeed.FriendFeed(nickname, remote_key) entry = session.publish_message("Testing the FriendFeed API") Users can get their remote key from http://friendfeed.com/remotekey. You should direct users who don't know their remote key to that page. For guidelines on user interface and terminology, check out http://friendfeed.com/api/guidelines. """ import base64 import datetime import time import urllib from google.appengine.api import urlfetch #import urllib2 # We require a JSON parsing library. These seem to be the most popular. try: import cjson parse_json = lambda s: cjson.decode(s.decode("utf-8"), True) except ImportError: try: from django.utils import simplejson parse_json = lambda s: simplejson.loads(s.decode("utf-8")) except ImportError: import json parse_json = lambda s: _unicodify(json.read(s)) class FriendFeed(object): def __init__(self, auth_nickname=None, auth_key=None): """Creates a new FriendFeed session for the given user. The credentials are optional for some operations, but required for private feeds and all operations that write data, like publish_link. """ self.auth_nickname = auth_nickname self.auth_key = auth_key def fetch_public_feed(self, **kwargs): """Returns the public feed with everyone's public entries. Authentication is not required. """ return self._fetch_feed("/api/feed/public", **kwargs) def fetch_user_feed(self, nickname, **kwargs): """Returns the entries shared by the user with the given nickname. Authentication is required if the user's feed is not public. """ return self._fetch_feed( "/api/feed/user/" + urllib.quote_plus(nickname), **kwargs) def fetch_user_comments_feed(self, nickname, **kwargs): """Returns the entries the given user has commented on.""" return self._fetch_feed( "/api/feed/user/" + urllib.quote_plus(nickname) + "/comments", **kwargs) def fetch_user_likes_feed(self, nickname, **kwargs): """Returns the entries the given user has "liked".""" return self._fetch_feed( "/api/feed/user/" + urllib.quote_plus(nickname) + "/likes", **kwargs) def fetch_user_discussion_feed(self, nickname, **kwargs): """Returns the entries the given user has commented on or "liked".""" return self._fetch_feed( "/api/feed/user/" + urllib.quote_plus(nickname) + "/discussion", **kwargs) def fetch_multi_user_feed(self, nicknames, **kwargs): """Returns a merged feed with all of the given users' entries. Authentication is required if any one of the users' feeds is not public. 
""" return self._fetch_feed("/api/feed/user", nickname=",".join(nicknames), **kwargs) def fetch_home_feed(self, **kwargs): """Returns the entries the authenticated user sees on their home page. Authentication is always required. """ return self._fetch_feed("/api/feed/home", **kwargs) def search(self, q, **kwargs): """Searches over entries in FriendFeed. If the request is authenticated, the default scope is over all of the entries in the authenticated user's Friends Feed. If the request is not authenticated, the default scope is over all public entries. The query syntax is the same syntax as http://friendfeed.com/advancedsearch """ kwargs["q"] = q return self._fetch_feed("/api/feed/search", **kwargs) def publish_message(self, message, **kwargs): """Publishes the given message to the authenticated user's feed. See publish_link for additional options. """ return self.publish_link(title=message, link=None, **kwargs) def publish_link(self, title, link, comment=None, image_urls=[], images=[], via=None, audio_urls=[], audio=[], room=None): """Publishes the given link/title to the authenticated user's feed. Authentication is always required. image_urls is a list of URLs that will be downloaded and included as thumbnails beneath the link. The thumbnails will all link to the destination link. If you would prefer that the images link somewhere else, you can specify images[] instead, which should be a list of dicts of the form {"url": ..., "link": ...}. The thumbnail with the given url will link to the specified link. audio_urls is a list of MP3 URLs that will show up as a play button beneath the link. You can optionally supply audio[] instead, which should be a list of dicts of the form {"url": ..., "title": ...}. The given title will appear when the audio file is played. We return the parsed/published entry as returned from the server, which includes the final thumbnail URLs as well as the ID for the new entry. Example: session = friendfeed.FriendFeed(nickname, remote_key) entry = session.publish_link( title="Testing the FriendFeed API", link="http://friendfeed.com/", image_urls=[ "http://friendfeed.com/static/images/jim-superman.jpg", "http://friendfeed.com/static/images/logo.png", ], ) print "Posted images at http://friendfeed.com/e/%s" % entry["id"] """ post_args = {"title": title} if link: post_args["link"] = link if comment: post_args["comment"] = comment if via: post_args["via"] = via images = images[:] for image_url in image_urls: images.append({"url": image_url}) for i, image in enumerate(images): post_args["image%d_url" % i] = image["url"] if image.get("link"): post_args["image%d_link" % i] = image["link"] audio = audio[:] for audio_url in audio_urls: audio.append({"url": audio_url}) for i, clip in enumerate(audio): post_args["audio%d_url" % i] = clip["url"] if clip.get("title"): post_args["audio%d_title" % i] = clip["title"] if room: post_args["room"] = room feed = self._fetch_feed("/api/share", post_args=post_args) return feed["entries"][0] def add_comment(self, entry_id, body, via=None): """Adds the given comment to the entry with the given ID. We return the ID of the new comment, which can be used to edit or delete the comment. 
""" args = { "entry": entry_id, "body": body } if via: args["via"] = via result = self._fetch("/api/comment", args) return result["id"] def edit_comment(self, entry_id, comment_id, body): """Updates the comment with the given ID.""" self._fetch("/api/comment", { "entry": entry_id, "comment": comment_id, "body": body }) def delete_comment(self, entry_id, comment_id): """Deletes the comment with the given ID.""" self._fetch("/api/comment/delete", { "entry": entry_id, "comment": comment_id, }) def undelete_comment(self, entry_id, comment_id): """Un-deletes the comment with the given ID.""" self._fetch("/api/comment/delete", { "entry": entry_id, "comment": comment_id, "undelete": 1, }) def add_like(self, entry_id): """'Likes' the entry with the given ID.""" self._fetch("/api/like", { "entry": entry_id, }) def delete_like(self, entry_id): """Deletes the 'Like' for the entry with the given ID (if any).""" self._fetch("/api/like/delete", { "entry": entry_id, }) def _fetch_feed(self, uri, post_args=None, **kwargs): """Publishes to the given URI and parses the returned JSON feed.""" # Parse all the dates in the result JSON result = self._fetch(uri, post_args, **kwargs) rfc3339_date = "%Y-%m-%dT%H:%M:%SZ" date_properties = frozenset(("updated", "published")) for entry in result.get("entries", []): entry["updated"] = self._parse_date(entry["updated"]) entry["published"] = self._parse_date(entry["published"]) for comment in entry.get("comments", []): comment["date"] = self._parse_date(comment["date"]) for like in entry.get("likes", []): like["date"] = self._parse_date(like["date"]) return result def _fetch(self, uri, post_args, **url_args): url_args["format"] = "json" args = urllib.urlencode(url_args) url = "http://friendfeed.com" + uri + "?" + args #if post_args is not None: # request = urllib2.Request(url, urllib.urlencode(post_args)) #else: # request = urllib2.Request(url) #if self.auth_nickname and self.auth_key: # pair = "%s:%s" % (self.auth_nickname, self.auth_key) # token = base64.b64encode(pair) # request.add_header("Authorization", "Basic %s" % token) #stream = urllib2.urlopen(request) #data = stream.read() #stream.close() if self.auth_nickname and self.auth_key: pair = "%s:%s" % (self.auth_nickname,self.auth_key) token = base64.b64encode(pair) au="Basic %s" % token if post_args is not None: form_data = urllib.urlencode(post_args) result=urlfetch.fetch(url=url,payload=form_data,\ method=urlfetch.POST,\ headers={'Authorization':au}) else: result=urlfetch.fetch(url=url,\ method=urlfetch.GET,\ headers={'Authorization':au}) else: if post_args is not None: form_data = urllib.urlencode(post_args) result=urlfetch.fetch(url=url,payload=form_data,\ method=urlfetch.POST) else: result=urlfetch.fetch(url=url,\ method=urlfetch.GET) data=result.content return parse_json(data) def _parse_date(self, date_str): rfc3339_date = "%Y-%m-%dT%H:%M:%SZ" return datetime.datetime(*time.strptime(date_str, rfc3339_date)[:6]) def _unicodify(json): """Makes all strings in the given JSON-like structure unicode.""" if isinstance(json, str): return json.decode("utf-8") elif isinstance(json, dict): for name in json: json[name] = _unicodify(json[name]) elif isinstance(json, list): for part in json: _unicodify(part) return json def _example(): # Fill in a nickname and a valid remote key below for authenticated # actions like posting an entry and reading a protected feed # session = FriendFeed(auth_nickname=nickname, auth_key=remote_key) session = FriendFeed() feed = session.fetch_public_feed() # feed = 
session.fetch_user_feed("bret") # feed = session.fetch_user_feed("paul", service="twitter") # feed = session.fetch_user_discussion_feed("bret") # feed = session.fetch_multi_user_feed(["bret", "paul", "jim"]) # feed = session.search("who:bret friendfeed") for entry in feed["entries"]: print entry["published"].strftime("%m/%d/%Y"), entry["title"] if session.auth_nickname and session.auth_key: # The feed that the authenticated user would see on their home page feed = session.fetch_home_feed() # Post a message on this user's feed entry = session.publish_message("Testing the FriendFeed API") print "Posted new message at http://friendfeed.com/e/%s" % entry["id"] # Post a link on this user's feed entry = session.publish_link(title="Testing the FriendFeed API", link="http://friendfeed.com/") print "Posted new link at http://friendfeed.com/e/%s" % entry["id"] # Post a link with two thumbnails on this user's feed entry = session.publish_link( title="Testing the FriendFeed API", link="http://friendfeed.com/", image_urls=[ "http://friendfeed.com/static/images/jim-superman.jpg", "http://friendfeed.com/static/images/logo.png", ], ) print "Posted images at http://friendfeed.com/e/%s" % entry["id"] if __name__ == "__main__": _example()
gpl-3.0