repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
---|---|---|---|---|---|
ivar1234/babu.repo
|
lib/ansible/plugins/action/include_vars.py
|
49
|
2027
|
# (c) 2013-2014, Benno Joy <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    """Load variables from a YAML/JSON file into ``ansible_facts``.

    The raw-parameter path is resolved against the role's ``vars``
    directory when running inside a role, otherwise against the play's
    base directory.
    """

    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        task_vars = task_vars if task_vars is not None else dict()
        result = super(ActionModule, self).run(tmp, task_vars)

        requested = self._task.args.get('_raw_params')
        role = self._task._role
        base_dir = role._role_path if role else self._loader.get_basedir()
        source = self._loader.path_dwim_relative(base_dir, 'vars', requested)

        if not os.path.exists(source):
            # Report failure with the resolved path so the user can see
            # where the lookup actually happened.
            result['failed'] = True
            result['msg'] = "Source file not found."
            result['file'] = source
            return result

        (data, show_content) = self._loader._get_file_contents(source)
        data = self._loader.load(data, show_content)
        if data is None:
            data = {}
        if not isinstance(data, dict):
            raise AnsibleError("%s must be stored as a dictionary/hash" % source)
        result['ansible_facts'] = data
        # Vault-protected content must not be logged.
        result['_ansible_no_log'] = not show_content
        return result
|
gpl-3.0
|
Linutronix/linux
|
tools/testing/kunit/kunit_parser.py
|
239
|
10790
|
# SPDX-License-Identifier: GPL-2.0
#
# Parses test results from a kernel dmesg log.
#
# Copyright (C) 2019, Google LLC.
# Author: Felix Guo <[email protected]>
# Author: Brendan Higgins <[email protected]>
import re
from collections import namedtuple
from datetime import datetime
from enum import Enum, auto
from functools import reduce
from typing import Iterable, Iterator, List, Optional, Tuple
TestResult = namedtuple('TestResult', ['status','suites','log'])
class TestSuite(object):
    """A parsed TAP subtest group: suite status, name, and its test cases."""

    def __init__(self) -> None:
        self.status = TestStatus.SUCCESS
        self.name = ''
        self.cases = []  # type: List[TestCase]

    def __str__(self) -> str:
        return 'TestSuite(%s,%s,%s)' % (self.status, self.name, self.cases)

    def __repr__(self) -> str:
        return self.__str__()
class TestCase(object):
    """A single parsed test case: status, name, and its captured log lines."""

    def __init__(self) -> None:
        self.status = TestStatus.SUCCESS
        self.name = ''
        self.log = []  # type: List[str]

    def __str__(self) -> str:
        return 'TestCase(%s,%s,%s)' % (self.status, self.name, self.log)

    def __repr__(self) -> str:
        return self.__str__()
class TestStatus(Enum):
    """Outcome of a test case, a suite, or the whole run."""
    SUCCESS = auto()
    FAILURE = auto()
    TEST_CRASHED = auto()  # kernel crashed or output ended mid-test
    NO_TESTS = auto()  # no TAP header / no suites were found
    FAILURE_TO_PARSE_TESTS = auto()  # TAP header present but plan unusable
# First line of KUnit output: a TAP version header (possibly prefixed,
# e.g. by console timestamps).
kunit_start_re = re.compile(r'TAP version [0-9]+$')
# Lines indicating the kernel has moved past (or died during) the tests.
kunit_end_re = re.compile('(List of all partitions:|'
                          'Kernel panic - not syncing: VFS:)')

def isolate_kunit_output(kernel_output) -> Iterator[str]:
    """Yield only the KUnit TAP lines from raw kernel console output.

    The prefix (if any) preceding the TAP header is measured once and
    stripped from every yielded line.
    """
    in_tap = False
    prefix_len = 0
    for raw in kernel_output:
        text = raw.rstrip()  # line always has a trailing \n
        if kunit_start_re.search(text):
            prefix_len = len(text.split('TAP version')[0])
            in_tap = True
            yield text[prefix_len:] if prefix_len > 0 else text
        elif kunit_end_re.search(text):
            break
        elif in_tap:
            yield text[prefix_len:] if prefix_len > 0 else text
def raw_output(kernel_output) -> None:
    """Echo the kernel output verbatim, normalizing trailing whitespace."""
    for raw_line in kernel_output:
        print(raw_line.rstrip())
# Full-width separator used between suite reports.
DIVIDER = '=' * 60

# ANSI escape sequence resetting all terminal attributes.
RESET = '\033[0;0m'

def red(text) -> str:
    """Wrap *text* in ANSI bold red."""
    return ''.join(['\033[1;31m', text, RESET])

def yellow(text) -> str:
    """Wrap *text* in ANSI bold yellow."""
    return ''.join(['\033[1;33m', text, RESET])

def green(text) -> str:
    """Wrap *text* in ANSI bold green."""
    return ''.join(['\033[1;32m', text, RESET])
def print_with_timestamp(message) -> None:
    """Print *message* prefixed with the current wall-clock time."""
    stamp = datetime.now().strftime('%H:%M:%S')
    print('[%s] %s' % (stamp, message))

def format_suite_divider(message) -> str:
    """Return *message* framed by short '=' runs."""
    return '======== ' + message + ' ========'

def print_suite_divider(message) -> None:
    """Print a full-width divider, then the framed suite title."""
    print_with_timestamp(DIVIDER)
    print_with_timestamp(format_suite_divider(message))

def print_log(log) -> None:
    """Timestamp-print every entry of *log*."""
    for entry in log:
        print_with_timestamp(entry)
# Any line that is part of the TAP protocol proper (version header,
# ok/not ok results, plans, or '#' diagnostics), as opposed to
# interleaved kernel noise.
TAP_ENTRIES = re.compile(r'^(TAP|[\s]*ok|[\s]*not ok|[\s]*[0-9]+\.\.[0-9]+|[\s]*#).*$')

def consume_non_diagnostic(lines: List[str]) -> None:
    """Discard leading lines until the next TAP entry (or the list ends)."""
    while lines:
        if TAP_ENTRIES.match(lines[0]):
            break
        lines.pop(0)

def save_non_diagnostic(lines: List[str], test_case: TestCase) -> None:
    """Like consume_non_diagnostic, but append the dropped lines to the
    test case's log instead of throwing them away."""
    while lines:
        if TAP_ENTRIES.match(lines[0]):
            break
        test_case.log.append(lines.pop(0))
# Parsed form of an 'ok'/'not ok' line (not used by the parser below).
OkNotOkResult = namedtuple('OkNotOkResult', ['is_ok','description', 'text'])
# Indented '  ok N - name' result line of an individual test case.
OK_NOT_OK_SUBTEST = re.compile(r'^[\s]+(ok|not ok) [0-9]+ - (.*)$')
# Column-0 'ok N - name' result line of a whole suite ("module").
OK_NOT_OK_MODULE = re.compile(r'^(ok|not ok) ([0-9]+) - (.*)$')
def parse_ok_not_ok_test_case(lines: List[str], test_case: TestCase) -> bool:
    """Parse the '(not) ok N - name' result line for *test_case*.

    Consumes input up to and including the result line, records the
    result line in the case log, and sets the case name and status.
    Returns True when a result was found (or when the input ended, which
    is treated as a crash), False when no result line matched.
    """
    save_non_diagnostic(lines, test_case)
    if not lines:
        # Output ended before the result line: the kernel crashed mid-test.
        test_case.status = TestStatus.TEST_CRASHED
        return True
    line = lines[0]
    match = OK_NOT_OK_SUBTEST.match(line)
    while not match and lines:
        line = lines.pop(0)
        match = OK_NOT_OK_SUBTEST.match(line)
    if match:
        # BUGFIX: when the match came from the loop above, the matched
        # line was already popped; the old code unconditionally popped
        # again, logging the wrong line and raising IndexError when the
        # match was the final line. Pop only if it is still at the front.
        if lines and lines[0] is line:
            lines.pop(0)
        test_case.log.append(line)
        test_case.name = match.group(2)
        if test_case.status == TestStatus.TEST_CRASHED:
            # A crash diagnostic seen earlier takes precedence over ok/not ok.
            return True
        if match.group(1) == 'ok':
            test_case.status = TestStatus.SUCCESS
        else:
            test_case.status = TestStatus.FAILURE
        return True
    else:
        return False
# Indented '# ...' diagnostic line belonging to the current subtest.
SUBTEST_DIAGNOSTIC = re.compile(r'^[\s]+# (.*)$')
# Diagnostic emitted by KUnit when a test case crashed the kernel.
DIAGNOSTIC_CRASH_MESSAGE = re.compile(r'^[\s]+# .*?: kunit test case crashed!$')

def parse_diagnostic(lines: List[str], test_case: TestCase) -> bool:
    """Consume one diagnostic line into *test_case*'s log, if present.

    Marks the case crashed when the diagnostic is the KUnit crash
    message. Returns True iff a diagnostic line was consumed.
    """
    save_non_diagnostic(lines, test_case)
    if not lines:
        return False
    candidate = lines[0]
    if not SUBTEST_DIAGNOSTIC.match(candidate):
        return False
    test_case.log.append(lines.pop(0))
    if DIAGNOSTIC_CRASH_MESSAGE.match(candidate):
        test_case.status = TestStatus.TEST_CRASHED
    return True
def parse_test_case(lines: List[str]) -> Optional[TestCase]:
    """Parse one test case: its diagnostics, then its ok/not ok line.

    Returns the populated TestCase, or None when no result line was found.
    """
    test_case = TestCase()
    save_non_diagnostic(lines, test_case)
    while parse_diagnostic(lines, test_case):
        pass
    return test_case if parse_ok_not_ok_test_case(lines, test_case) else None
# '# Subtest: <suite name>' header announcing a nested suite.
SUBTEST_HEADER = re.compile(r'^[\s]+# Subtest: (.*)$')

def parse_subtest_header(lines: List[str]) -> Optional[str]:
    """Pop and return the suite name from a '# Subtest:' header, else None."""
    consume_non_diagnostic(lines)
    if not lines:
        return None
    header = SUBTEST_HEADER.match(lines[0])
    if not header:
        return None
    lines.pop(0)
    return header.group(1)
# Indented '1..N' plan giving the number of cases in a subtest.
SUBTEST_PLAN = re.compile(r'[\s]+[0-9]+\.\.([0-9]+)')

def parse_subtest_plan(lines: List[str]) -> Optional[int]:
    """Pop and return the expected case count from a subtest plan line.

    Returns None when the next line is not a plan or the input ended.
    """
    consume_non_diagnostic(lines)
    # BUGFIX: guard against exhausted input; lines[0] raised IndexError
    # on truncated output (the sibling parse_subtest_header guards, this
    # function did not).
    if not lines:
        return None
    match = SUBTEST_PLAN.match(lines[0])
    if match:
        lines.pop(0)
        return int(match.group(1))
    else:
        return None
def max_status(left: TestStatus, right: TestStatus) -> TestStatus:
    """Combine two statuses, keeping the more severe of the pair.

    Severity: TEST_CRASHED > FAILURE > any other non-success > SUCCESS.
    """
    if TestStatus.TEST_CRASHED in (left, right):
        return TestStatus.TEST_CRASHED
    if TestStatus.FAILURE in (left, right):
        return TestStatus.FAILURE
    if left != TestStatus.SUCCESS:
        return left
    if right != TestStatus.SUCCESS:
        return right
    return TestStatus.SUCCESS
def parse_ok_not_ok_test_suite(lines: List[str],
                               test_suite: TestSuite,
                               expected_suite_index: int) -> bool:
    """Parse the top-level 'ok N - name' line that closes *test_suite*.

    Sets the suite status from the result and warns when the suite index
    differs from the expected one. Returns True on a parsed result line;
    on exhausted input the suite is marked crashed and False is returned.
    """
    consume_non_diagnostic(lines)
    if not lines:
        test_suite.status = TestStatus.TEST_CRASHED
        return False
    match = OK_NOT_OK_MODULE.match(lines[0])
    if not match:
        return False
    lines.pop(0)
    if match.group(1) == 'ok':
        test_suite.status = TestStatus.SUCCESS
    else:
        test_suite.status = TestStatus.FAILURE
    suite_index = int(match.group(2))
    if suite_index != expected_suite_index:
        # Out-of-order or missing suite: report but keep going.
        print_with_timestamp(
            red('[ERROR] ') + 'expected_suite_index ' +
            str(expected_suite_index) + ', but got ' +
            str(suite_index))
    return True
def bubble_up_errors(statuses: Iterable[TestStatus]) -> TestStatus:
    """Fold *statuses* into the single most severe status."""
    return reduce(max_status, statuses, TestStatus.SUCCESS)

def bubble_up_test_case_errors(test_suite: TestSuite) -> TestStatus:
    """Return the suite status combined with its worst test-case status."""
    case_statuses = (case.status for case in test_suite.cases)
    return max_status(bubble_up_errors(case_statuses), test_suite.status)
def parse_test_suite(lines: List[str], expected_suite_index: int) -> Optional[TestSuite]:
    """Parse one complete subtest suite from *lines*.

    Expects a '# Subtest:' header, a '1..N' plan, up to N test cases,
    and the suite's closing ok/not ok line. Returns the TestSuite
    (possibly marked crashed on truncated input), or None when the
    structure does not match.
    """
    if not lines:
        return None
    consume_non_diagnostic(lines)
    test_suite = TestSuite()
    test_suite.status = TestStatus.SUCCESS
    name = parse_subtest_header(lines)
    if not name:
        return None
    test_suite.name = name
    expected_test_case_num = parse_subtest_plan(lines)
    if expected_test_case_num is None:
        return None
    while expected_test_case_num > 0:
        test_case = parse_test_case(lines)
        if not test_case:
            # Fewer cases than the plan promised; the closing-line parse
            # below decides the suite's final status.
            break
        test_suite.cases.append(test_case)
        expected_test_case_num -= 1
    if parse_ok_not_ok_test_suite(lines, test_suite, expected_suite_index):
        # Suite result line found: fold per-case failures into the status.
        test_suite.status = bubble_up_test_case_errors(test_suite)
        return test_suite
    elif not lines:
        # Output truncated before the suite's closing line.
        print_with_timestamp(red('[ERROR] ') + 'ran out of lines before end token')
        return test_suite
    else:
        print('failed to parse end of suite' + lines[0])
        return None
# Exact TAP header expected at the start of KUnit output.
TAP_HEADER = re.compile(r'^TAP version 14$')

def parse_tap_header(lines: List[str]) -> bool:
    """Pop the 'TAP version 14' header line; return whether it was present."""
    consume_non_diagnostic(lines)
    # BUGFIX: guard against empty input; lines[0] raised IndexError when
    # the output contained no further TAP entries.
    if not lines:
        return False
    if TAP_HEADER.match(lines[0]):
        lines.pop(0)
        return True
    else:
        return False
# Top-level '1..N' plan giving the number of test suites.
TEST_PLAN = re.compile(r'[0-9]+\.\.([0-9]+)')

def parse_test_plan(lines: List[str]) -> Optional[int]:
    """Pop and return the expected suite count, or None if absent."""
    consume_non_diagnostic(lines)
    # BUGFIX: guard against truncated output; lines[0] raised IndexError.
    if not lines:
        return None
    match = TEST_PLAN.match(lines[0])
    if match:
        lines.pop(0)
        return int(match.group(1))
    else:
        return None
def bubble_up_suite_errors(test_suites: Iterable[TestSuite]) -> TestStatus:
    """Fold all suite statuses into the single most severe one."""
    return bubble_up_errors(suite.status for suite in test_suites)
def parse_test_result(lines: List[str]) -> TestResult:
    """Parse a complete KUnit TAP stream into a TestResult.

    Expects the TAP header, a top-level plan, then one suite per planned
    index; a trailing unplanned suite is reported as an error.
    """
    consume_non_diagnostic(lines)
    if not lines or not parse_tap_header(lines):
        return TestResult(TestStatus.NO_TESTS, [], lines)
    expected_test_suite_num = parse_test_plan(lines)
    if not expected_test_suite_num:
        return TestResult(TestStatus.FAILURE_TO_PARSE_TESTS, [], lines)
    test_suites = []
    for i in range(1, expected_test_suite_num + 1):
        test_suite = parse_test_suite(lines, i)
        if test_suite:
            test_suites.append(test_suite)
        else:
            # BUGFIX: when suite i fails to parse, i - 1 suites were
            # successfully parsed, not i - 2 (off-by-one in the message).
            print_with_timestamp(
                red('[ERROR] ') + ' expected ' +
                str(expected_test_suite_num) +
                ' test suites, but got ' + str(i - 1))
            break
    # Probe for an unexpected extra suite beyond the plan.
    test_suite = parse_test_suite(lines, -1)
    if test_suite:
        print_with_timestamp(red('[ERROR] ') +
                             'got unexpected test suite: ' + test_suite.name)
    if test_suites:
        return TestResult(bubble_up_suite_errors(test_suites), test_suites, lines)
    else:
        return TestResult(TestStatus.NO_TESTS, [], lines)
def print_and_count_results(test_result: TestResult) -> Tuple[int, int, int]:
    """Pretty-print every suite/case result and tally the outcomes.

    Returns (total_tests, failed_tests, crashed_tests). Failed and
    crashed cases additionally get their logs echoed in yellow.
    """
    total_tests = 0
    failed_tests = 0
    crashed_tests = 0
    for test_suite in test_result.suites:
        if test_suite.status == TestStatus.SUCCESS:
            print_suite_divider(green('[PASSED] ') + test_suite.name)
        elif test_suite.status == TestStatus.TEST_CRASHED:
            print_suite_divider(red('[CRASHED] ' + test_suite.name))
        else:
            print_suite_divider(red('[FAILED] ') + test_suite.name)
        for test_case in test_suite.cases:
            total_tests += 1
            if test_case.status == TestStatus.SUCCESS:
                print_with_timestamp(green('[PASSED] ') + test_case.name)
            elif test_case.status == TestStatus.TEST_CRASHED:
                crashed_tests += 1
                print_with_timestamp(red('[CRASHED] ' + test_case.name))
                print_log(map(yellow, test_case.log))
                print_with_timestamp('')
            else:
                failed_tests += 1
                print_with_timestamp(red('[FAILED] ') + test_case.name)
                print_log(map(yellow, test_case.log))
                print_with_timestamp('')
    return total_tests, failed_tests, crashed_tests
def parse_run_tests(kernel_output) -> TestResult:
    """Top-level entry point: parse *kernel_output* and print a summary.

    Isolates the KUnit TAP lines, parses them, prints per-test results
    (unless nothing could be parsed), and ends with a colored summary.
    """
    total_tests = 0
    failed_tests = 0
    crashed_tests = 0
    test_result = parse_test_result(list(isolate_kunit_output(kernel_output)))
    if test_result.status == TestStatus.NO_TESTS:
        print(red('[ERROR] ') + yellow('no tests run!'))
    elif test_result.status == TestStatus.FAILURE_TO_PARSE_TESTS:
        print(red('[ERROR] ') + yellow('could not parse test results!'))
    else:
        (total_tests,
         failed_tests,
         crashed_tests) = print_and_count_results(test_result)
    print_with_timestamp(DIVIDER)
    # Green only when everything passed; any failure/crash turns it red.
    fmt = green if test_result.status == TestStatus.SUCCESS else red
    print_with_timestamp(
        fmt('Testing complete. %d tests run. %d failed. %d crashed.' %
            (total_tests, failed_tests, crashed_tests)))
    return test_result
|
gpl-2.0
|
mogoweb/chromium-crosswalk
|
tools/valgrind/valgrind_test.py
|
1
|
46063
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs an exe through Valgrind and puts the intermediate files in a
directory.
"""
import datetime
import glob
import logging
import optparse
import os
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import common
import drmemory_analyze
import memcheck_analyze
import tsan_analyze
class BaseTool(object):
  """Abstract class for running Valgrind-, PIN-based and other dynamic
  error detector tools.
  Always subclass this and implement ToolCommand with framework- and
  tool-specific stuff.
  """

  def __init__(self):
    # Decide where logs/temp files live; on Windows the Low Integrity gpu
    # process constrains the writable locations.
    temp_parent_dir = None
    self.log_parent_dir = ""
    if common.IsWindows():
      # gpu process on Windows Vista+ runs at Low Integrity and can only
      # write to certain directories (http://crbug.com/119131)
      #
      # TODO(bruening): if scripts die in middle and don't clean up temp
      # dir, we'll accumulate files in profile dir. should remove
      # really old files automatically.
      profile = os.getenv("USERPROFILE")
      if profile:
        self.log_parent_dir = profile + "\\AppData\\LocalLow\\"
        if os.path.exists(self.log_parent_dir):
          self.log_parent_dir = common.NormalizeWindowsPath(self.log_parent_dir)
          temp_parent_dir = self.log_parent_dir
    # Generated every time (even when overridden)
    self.temp_dir = tempfile.mkdtemp(prefix="vg_logs_", dir=temp_parent_dir)
    self.log_dir = self.temp_dir # overridable by --keep_logs
    self.option_parser_hooks = []
    # TODO(glider): we may not need some of the env vars on some of the
    # platforms.
    # Environment forced onto the child process to make the tools reliable
    # (e.g. disable glib slice allocator, keep NSS libs loaded).
    self._env = {
      "G_SLICE" : "always-malloc",
      "NSS_DISABLE_UNLOAD" : "1",
      "NSS_DISABLE_ARENA_FREE_LIST" : "1",
      "GTEST_DEATH_TEST_USE_FORK": "1",
    }

  def ToolName(self):
    """Return the short tool name (e.g. "memcheck"); subclass must override."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def Analyze(self, check_sanity=False):
    """Analyze the tool's log output; subclass must override."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def RegisterOptionParserHook(self, hook):
    # Frameworks and tools can add their own flags to the parser.
    self.option_parser_hooks.append(hook)

  def CreateOptionParser(self):
    """Build the Chromium-specific option parser, then run registered hooks."""
    # Defines Chromium-specific flags.
    self._parser = optparse.OptionParser("usage: %prog [options] <program to "
                                         "test>")
    self._parser.disable_interspersed_args()
    self._parser.add_option("-t", "--timeout",
                            dest="timeout", metavar="TIMEOUT", default=10000,
                            help="timeout in seconds for the run (default 10000)")
    self._parser.add_option("", "--build_dir",
                            help="the location of the compiler output")
    self._parser.add_option("", "--source_dir",
                            help="path to top of source tree for this build"
                                 "(used to normalize source paths in baseline)")
    self._parser.add_option("", "--gtest_filter", default="",
                            help="which test case to run")
    self._parser.add_option("", "--gtest_repeat",
                            help="how many times to run each test")
    self._parser.add_option("", "--gtest_print_time", action="store_true",
                            default=False,
                            help="show how long each test takes")
    self._parser.add_option("", "--ignore_exit_code", action="store_true",
                            default=False,
                            help="ignore exit code of the test "
                                 "(e.g. test failures)")
    self._parser.add_option("", "--keep_logs", action="store_true",
                            default=False,
                            help="store memory tool logs in the <tool>.logs "
                                 "directory instead of /tmp.\nThis can be "
                                 "useful for tool developers/maintainers.\n"
                                 "Please note that the <tool>.logs directory "
                                 "will be clobbered on tool startup.")
    # To add framework- or tool-specific flags, please add a hook using
    # RegisterOptionParserHook in the corresponding subclass.
    # See ValgrindTool and ThreadSanitizerBase for examples.
    for hook in self.option_parser_hooks:
      hook(self, self._parser)

  def ParseArgv(self, args):
    """Split argv into known options, tool flags and the program to run."""
    self.CreateOptionParser()
    # self._tool_flags will store those tool flags which we don't parse
    # manually in this script.
    self._tool_flags = []
    known_args = []
    """ We assume that the first argument not starting with "-" is a program
    name and all the following flags should be passed to the program.
    TODO(timurrrr): customize optparse instead
    """
    while len(args) > 0 and args[0][:1] == "-":
      arg = args[0]
      if (arg == "--"):
        break
      if self._parser.has_option(arg.split("=")[0]):
        known_args += [arg]
      else:
        # Unknown flag: pass it through to the tool itself.
        self._tool_flags += [arg]
      args = args[1:]
    if len(args) > 0:
      known_args += args
    self._options, self._args = self._parser.parse_args(known_args)
    self._timeout = int(self._options.timeout)
    self._source_dir = self._options.source_dir
    if self._options.keep_logs:
      # log_parent_dir has trailing slash if non-empty
      self.log_dir = self.log_parent_dir + "%s.logs" % self.ToolName()
      if os.path.exists(self.log_dir):
        shutil.rmtree(self.log_dir)
      os.mkdir(self.log_dir)
      logging.info("Logs are in " + self.log_dir)
    self._ignore_exit_code = self._options.ignore_exit_code
    # Forward the gtest filtering/repetition options to the test binary.
    if self._options.gtest_filter != "":
      self._args.append("--gtest_filter=%s" % self._options.gtest_filter)
    if self._options.gtest_repeat:
      self._args.append("--gtest_repeat=%s" % self._options.gtest_repeat)
    if self._options.gtest_print_time:
      self._args.append("--gtest_print_time")
    return True

  def Setup(self, args):
    """Prepare for a run; returns False to abort. Default: just parse args."""
    return self.ParseArgv(args)

  def ToolCommand(self):
    """Return the full command line to run; subclass must override."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def Cleanup(self):
    # You may override it in the tool-specific subclass
    pass

  def Execute(self):
    """ Execute the app to be tested after successful instrumentation.
    Full execution command-line provided by subclassers via proc."""
    logging.info("starting execution...")
    proc = self.ToolCommand()
    for var in self._env:
      common.PutEnvAndLog(var, self._env[var])
    return common.RunSubprocess(proc, self._timeout)

  def RunTestsAndAnalyze(self, check_sanity):
    """Run the test binary, then the analyzer; return the combined retcode."""
    exec_retcode = self.Execute()
    analyze_retcode = self.Analyze(check_sanity)
    if analyze_retcode:
      # Tool reports take precedence over the test's own exit code.
      logging.error("Analyze failed.")
      logging.info("Search the log for '[ERROR]' to see the error reports.")
      return analyze_retcode
    if exec_retcode:
      if self._ignore_exit_code:
        logging.info("Test execution failed, but the exit code is ignored.")
      else:
        logging.error("Test execution failed.")
        return exec_retcode
    else:
      logging.info("Test execution completed successfully.")
    if not analyze_retcode:
      logging.info("Analysis completed successfully.")
    return 0

  def Main(self, args, check_sanity, min_runtime_in_seconds):
    """Call this to run through the whole process: Setup, Execute, Analyze"""
    start_time = datetime.datetime.now()
    retcode = -1
    if self.Setup(args):
      retcode = self.RunTestsAndAnalyze(check_sanity)
      shutil.rmtree(self.temp_dir, ignore_errors=True)
      self.Cleanup()
    else:
      logging.error("Setup failed")
    end_time = datetime.datetime.now()
    runtime_in_seconds = (end_time - start_time).seconds
    # NOTE: Python 2 integer division is intentional here.
    hours = runtime_in_seconds / 3600
    seconds = runtime_in_seconds % 3600
    minutes = seconds / 60
    seconds = seconds % 60
    logging.info("elapsed time: %02d:%02d:%02d" % (hours, minutes, seconds))
    if (min_runtime_in_seconds > 0 and
        runtime_in_seconds < min_runtime_in_seconds):
      # A suspiciously fast run usually means the tests did not start.
      logging.error("Layout tests finished too quickly. "
                    "It should have taken at least %d seconds. "
                    "Something went wrong?" % min_runtime_in_seconds)
      retcode = -1
    return retcode

  def Run(self, args, module, min_runtime_in_seconds=0):
    """Entry point used by callers; enables sanity checks for some modules."""
    MODULES_TO_SANITY_CHECK = ["base"]
    # TODO(timurrrr): this is a temporary workaround for http://crbug.com/47844
    if self.ToolName() == "tsan" and common.IsMac():
      MODULES_TO_SANITY_CHECK = []
    check_sanity = module in MODULES_TO_SANITY_CHECK
    return self.Main(args, check_sanity, min_runtime_in_seconds)
class ValgrindTool(BaseTool):
"""Abstract class for running Valgrind tools.
Always subclass this and implement ToolSpecificFlags() and
ExtendOptionParser() for tool-specific stuff.
"""
def __init__(self):
super(ValgrindTool, self).__init__()
self.RegisterOptionParserHook(ValgrindTool.ExtendOptionParser)
def UseXML(self):
# Override if tool prefers nonxml output
return True
def SelfContained(self):
# Returns true iff the tool is distibuted as a self-contained
# .sh script (e.g. ThreadSanitizer)
return False
def ExtendOptionParser(self, parser):
parser.add_option("", "--suppressions", default=[],
action="append",
help="path to a valgrind suppression file")
parser.add_option("", "--indirect", action="store_true",
default=False,
help="set BROWSER_WRAPPER rather than "
"running valgrind directly")
parser.add_option("", "--indirect_webkit_layout", action="store_true",
default=False,
help="set --wrapper rather than running Dr. Memory "
"directly.")
parser.add_option("", "--trace_children", action="store_true",
default=False,
help="also trace child processes")
parser.add_option("", "--num-callers",
dest="num_callers", default=30,
help="number of callers to show in stack traces")
parser.add_option("", "--generate_dsym", action="store_true",
default=False,
help="Generate .dSYM file on Mac if needed. Slow!")
def Setup(self, args):
if not BaseTool.Setup(self, args):
return False
if common.IsMac():
self.PrepareForTestMac()
return True
def PrepareForTestMac(self):
"""Runs dsymutil if needed.
Valgrind for Mac OS X requires that debugging information be in a .dSYM
bundle generated by dsymutil. It is not currently able to chase DWARF
data into .o files like gdb does, so executables without .dSYM bundles or
with the Chromium-specific "fake_dsym" bundles generated by
build/mac/strip_save_dsym won't give source file and line number
information in valgrind.
This function will run dsymutil if the .dSYM bundle is missing or if
it looks like a fake_dsym. A non-fake dsym that already exists is assumed
to be up-to-date.
"""
test_command = self._args[0]
dsym_bundle = self._args[0] + '.dSYM'
dsym_file = os.path.join(dsym_bundle, 'Contents', 'Resources', 'DWARF',
os.path.basename(test_command))
dsym_info_plist = os.path.join(dsym_bundle, 'Contents', 'Info.plist')
needs_dsymutil = True
saved_test_command = None
if os.path.exists(dsym_file) and os.path.exists(dsym_info_plist):
# Look for the special fake_dsym tag in dsym_info_plist.
dsym_info_plist_contents = open(dsym_info_plist).read()
if not re.search('^\s*<key>fake_dsym</key>$', dsym_info_plist_contents,
re.MULTILINE):
# fake_dsym is not set, this is a real .dSYM bundle produced by
# dsymutil. dsymutil does not need to be run again.
needs_dsymutil = False
else:
# fake_dsym is set. dsym_file is a copy of the original test_command
# before it was stripped. Copy it back to test_command so that
# dsymutil has unstripped input to work with. Move the stripped
# test_command out of the way, it will be restored when this is
# done.
saved_test_command = test_command + '.stripped'
os.rename(test_command, saved_test_command)
shutil.copyfile(dsym_file, test_command)
shutil.copymode(saved_test_command, test_command)
if needs_dsymutil:
if self._options.generate_dsym:
# Remove the .dSYM bundle if it exists.
shutil.rmtree(dsym_bundle, True)
dsymutil_command = ['dsymutil', test_command]
# dsymutil is crazy slow. Ideally we'd have a timeout here,
# but common.RunSubprocess' timeout is only checked
# after each line of output; dsymutil is silent
# until the end, and is then killed, which is silly.
common.RunSubprocess(dsymutil_command)
if saved_test_command:
os.rename(saved_test_command, test_command)
else:
logging.info("No real .dSYM for test_command. Line numbers will "
"not be shown. Either tell xcode to generate .dSYM "
"file, or use --generate_dsym option to this tool.")
def ToolCommand(self):
"""Get the valgrind command to run."""
# Note that self._args begins with the exe to be run.
tool_name = self.ToolName()
# Construct the valgrind command.
if self.SelfContained():
proc = ["valgrind-%s.sh" % tool_name]
else:
if 'CHROME_VALGRIND' in os.environ:
path = os.path.join(os.environ['CHROME_VALGRIND'], "bin", "valgrind")
else:
path = "valgrind"
proc = [path, "--tool=%s" % tool_name]
proc += ["--num-callers=%i" % int(self._options.num_callers)]
if self._options.trace_children:
proc += ["--trace-children=yes"]
proc += ["--trace-children-skip='*dbus-daemon*'"]
proc += ["--trace-children-skip='*dbus-launch*'"]
proc += ["--trace-children-skip='*perl*'"]
proc += ["--trace-children-skip='*python*'"]
# This is really Python, but for some reason Valgrind follows it.
proc += ["--trace-children-skip='*lsb_release*'"]
proc += self.ToolSpecificFlags()
proc += self._tool_flags
suppression_count = 0
for suppression_file in self._options.suppressions:
if os.path.exists(suppression_file):
suppression_count += 1
proc += ["--suppressions=%s" % suppression_file]
if not suppression_count:
logging.warning("WARNING: NOT USING SUPPRESSIONS!")
logfilename = self.log_dir + ("/%s." % tool_name) + "%p"
if self.UseXML():
proc += ["--xml=yes", "--xml-file=" + logfilename]
else:
proc += ["--log-file=" + logfilename]
# The Valgrind command is constructed.
# Valgrind doesn't play nice with the Chrome sandbox. Empty this env var
# set by runtest.py to disable the sandbox.
if os.environ.get("CHROME_DEVEL_SANDBOX", None):
logging.info("Removing CHROME_DEVEL_SANDBOX fron environment")
os.environ["CHROME_DEVEL_SANDBOX"] = ''
# Handle --indirect_webkit_layout separately.
if self._options.indirect_webkit_layout:
# Need to create the wrapper before modifying |proc|.
wrapper = self.CreateBrowserWrapper(proc, webkit=True)
proc = self._args
proc.append("--wrapper")
proc.append(wrapper)
return proc
if self._options.indirect:
wrapper = self.CreateBrowserWrapper(proc)
os.environ["BROWSER_WRAPPER"] = wrapper
logging.info('export BROWSER_WRAPPER=' + wrapper)
proc = []
proc += self._args
return proc
def ToolSpecificFlags(self):
raise NotImplementedError, "This method should be implemented " \
"in the tool-specific subclass"
def CreateBrowserWrapper(self, proc, webkit=False):
"""The program being run invokes Python or something else that can't stand
to be valgrinded, and also invokes the Chrome browser. In this case, use a
magic wrapper to only valgrind the Chrome browser. Build the wrapper here.
Returns the path to the wrapper. It's up to the caller to use the wrapper
appropriately.
"""
command = " ".join(proc)
# Add the PID of the browser wrapper to the logfile names so we can
# separate log files for different UI tests at the analyze stage.
command = command.replace("%p", "$$.%p")
(fd, indirect_fname) = tempfile.mkstemp(dir=self.log_dir,
prefix="browser_wrapper.",
text=True)
f = os.fdopen(fd, "w")
f.write('#!/bin/bash\n'
'echo "Started Valgrind wrapper for this test, PID=$$" >&2\n')
f.write('DIR=`dirname $0`\n'
'TESTNAME_FILE=$DIR/testcase.$$.name\n\n')
if webkit:
# Webkit layout_tests pass the URL as the first line of stdin.
f.write('tee $TESTNAME_FILE | %s "$@"\n' % command)
else:
# Try to get the test case name by looking at the program arguments.
# i.e. Chromium ui_tests used --test-name arg.
# TODO(timurrrr): This doesn't handle "--test-name Test.Name"
# TODO(timurrrr): ui_tests are dead. Where do we use the non-webkit
# wrapper now? browser_tests? What do they do?
f.write('for arg in $@\ndo\n'
' if [[ "$arg" =~ --test-name=(.*) ]]\n then\n'
' echo ${BASH_REMATCH[1]} >$TESTNAME_FILE\n'
' fi\n'
'done\n\n'
'%s "$@"\n' % command)
f.close()
os.chmod(indirect_fname, stat.S_IRUSR|stat.S_IXUSR)
return indirect_fname
def CreateAnalyzer(self):
raise NotImplementedError, "This method should be implemented " \
"in the tool-specific subclass"
def GetAnalyzeResults(self, check_sanity=False):
# Glob all the files in the log directory
filenames = glob.glob(self.log_dir + "/" + self.ToolName() + ".*")
# If we have browser wrapper, the logfiles are named as
# "toolname.wrapper_PID.valgrind_PID".
# Let's extract the list of wrapper_PIDs and name it ppids
ppids = set([int(f.split(".")[-2]) \
for f in filenames if re.search("\.[0-9]+\.[0-9]+$", f)])
analyzer = self.CreateAnalyzer()
if len(ppids) == 0:
# Fast path - no browser wrapper was set.
return analyzer.Report(filenames, None, check_sanity)
ret = 0
for ppid in ppids:
testcase_name = None
try:
f = open(self.log_dir + ("/testcase.%d.name" % ppid))
testcase_name = f.read().strip()
f.close()
wk_layout_prefix="third_party/WebKit/LayoutTests/"
wk_prefix_at = testcase_name.rfind(wk_layout_prefix)
if wk_prefix_at != -1:
testcase_name = testcase_name[wk_prefix_at + len(wk_layout_prefix):]
except IOError:
pass
print "====================================================="
print " Below is the report for valgrind wrapper PID=%d." % ppid
if testcase_name:
print " It was used while running the `%s` test." % testcase_name
else:
print " You can find the corresponding test"
print " by searching the above log for 'PID=%d'" % ppid
sys.stdout.flush()
ppid_filenames = [f for f in filenames \
if re.search("\.%d\.[0-9]+$" % ppid, f)]
# check_sanity won't work with browser wrappers
assert check_sanity == False
ret |= analyzer.Report(ppid_filenames, testcase_name)
print "====================================================="
sys.stdout.flush()
if ret != 0:
print ""
print "The Valgrind reports are grouped by test names."
print "Each test has its PID printed in the log when the test was run"
print "and at the beginning of its Valgrind report."
print "Hint: you can search for the reports by Ctrl+F -> `=#`"
sys.stdout.flush()
return ret
# TODO(timurrrr): Split into a separate file.
class Memcheck(ValgrindTool):
  """Memcheck
  Dynamic memory error detector for Linux & Mac
  http://valgrind.org/info/tools.html#memcheck
  """

  def __init__(self):
    super(Memcheck, self).__init__()
    self.RegisterOptionParserHook(Memcheck.ExtendOptionParser)

  def ToolName(self):
    """Short tool name used for log file prefixes and --tool=."""
    return "memcheck"

  def ExtendOptionParser(self, parser):
    """Add Memcheck-specific command-line options."""
    parser.add_option("--leak-check", "--leak_check", type="string",
                      default="yes", # --leak-check=yes is equivalent of =full
                      help="perform leak checking at the end of the run")
    parser.add_option("", "--show_all_leaks", action="store_true",
                      default=False,
                      help="also show less blatant leaks")
    parser.add_option("", "--track_origins", action="store_true",
                      default=False,
                      help="Show whence uninitialized bytes came. 30% slower.")

  def ToolSpecificFlags(self):
    """Translate the parsed options into Memcheck command-line flags."""
    ret = ["--gen-suppressions=all", "--demangle=no"]
    ret += ["--leak-check=%s" % self._options.leak_check]
    if self._options.show_all_leaks:
      ret += ["--show-reachable=yes"]
    else:
      ret += ["--show-possibly-lost=no"]
    if self._options.track_origins:
      ret += ["--track-origins=yes"]
    # TODO(glider): this is a temporary workaround for http://crbug.com/51716
    # Let's see whether it helps.
    if common.IsMac():
      ret += ["--smc-check=all"]
    return ret

  def CreateAnalyzer(self):
    """Build the Memcheck log analyzer (gdb-assisted symbolization on Mac)."""
    use_gdb = common.IsMac()
    return memcheck_analyze.MemcheckAnalyzer(self._source_dir,
                                             self._options.show_all_leaks,
                                             use_gdb=use_gdb)

  def Analyze(self, check_sanity=False):
    """Run the analyzer over the logs; point the user at docs on failure."""
    ret = self.GetAnalyzeResults(check_sanity)
    if ret != 0:
      logging.info("Please see http://dev.chromium.org/developers/how-tos/"
                   "using-valgrind for the info on Memcheck/Valgrind")
    return ret
class PinTool(BaseTool):
  """Abstract class for running PIN tools.
  Always subclass this and implement ToolSpecificFlags() and
  ExtendOptionParser() for tool-specific stuff.
  """

  def PrepareForTest(self):
    # No-op by default; subclasses may override.
    pass

  def ToolSpecificFlags(self):
    """Return extra PIN flags; subclass must override."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def ToolCommand(self):
    """Get the PIN command to run."""
    # Construct the PIN command.
    pin_cmd = os.getenv("PIN_COMMAND")
    if not pin_cmd:
      raise RuntimeError, "Please set PIN_COMMAND environment variable " \
                          "with the path to pin.exe"
    proc = pin_cmd.split(" ")
    proc += self.ToolSpecificFlags()
    # The PIN command is constructed.
    # PIN requires -- to separate PIN flags from the executable name.
    # self._args begins with the exe to be run.
    proc += ["--"]
    proc += self._args
    return proc
class ThreadSanitizerBase(object):
  """ThreadSanitizer

  Dynamic data race detector for Linux, Mac and Windows.
  http://code.google.com/p/data-race-test/wiki/ThreadSanitizer

  Since TSan works on both Valgrind (Linux, Mac) and PIN (Windows), we need
  to have multiple inheritance
  """

  INFO_MESSAGE="Please see http://dev.chromium.org/developers/how-tos/" \
               "using-valgrind/threadsanitizer for the info on " \
               "ThreadSanitizer"

  def __init__(self):
    super(ThreadSanitizerBase, self).__init__()
    self.RegisterOptionParserHook(ThreadSanitizerBase.ExtendOptionParser)

  def ToolName(self):
    return "tsan"

  def UseXML(self):
    return False

  def SelfContained(self):
    return True

  def ExtendOptionParser(self, parser):
    """Register the TSan-specific command-line options."""
    parser.add_option("", "--hybrid", default="no",
                      dest="hybrid",
                      help="Finds more data races, may give false positive "
                      "reports unless the code is annotated")
    # Typo fix: help text previously read "Show the the stack traces".
    parser.add_option("", "--announce-threads", default="yes",
                      dest="announce_threads",
                      help="Show the stack traces of thread creation")
    parser.add_option("", "--free-is-write", default="no",
                      dest="free_is_write",
                      help="Treat free()/operator delete as memory write. "
                      "This helps finding more data races, but (currently) "
                      "this may give false positive reports on std::string "
                      "internals, see http://code.google.com/p/data-race-test"
                      "/issues/detail?id=40")

  def EvalBoolFlag(self, flag_value):
    """Map a textual yes/no flag value onto True/False; raise otherwise."""
    if (flag_value in ["1", "true", "yes"]):
      return True
    elif (flag_value in ["0", "false", "no"]):
      return False
    # Call-style raise keeps this line valid on both Python 2 and 3
    # (it was the Python-2-only "raise RuntimeError, ..." form).
    raise RuntimeError("Can't parse flag value (%s)" % flag_value)

  def ToolSpecificFlags(self):
    """Build the TSan command line: ignore lists, path cutting, modes."""
    ret = []

    ignore_files = ["ignores.txt"]
    for platform_suffix in common.PlatformNames():
      ignore_files.append("ignores_%s.txt" % platform_suffix)
    for ignore_file in ignore_files:
      fullname = os.path.join(self._source_dir,
          "tools", "valgrind", "tsan", ignore_file)
      if os.path.exists(fullname):
        fullname = common.NormalizeWindowsPath(fullname)
        ret += ["--ignore=%s" % fullname]

    # This should shorten filepaths for local builds.
    ret += ["--file-prefix-to-cut=%s/" % self._source_dir]
    # This should shorten filepaths on bots.
    ret += ["--file-prefix-to-cut=build/src/"]
    ret += ["--file-prefix-to-cut=out/Release/../../"]
    # This should shorten filepaths for functions intercepted in TSan.
    ret += ["--file-prefix-to-cut=scripts/tsan/tsan/"]
    ret += ["--file-prefix-to-cut=src/tsan/tsan/"]

    ret += ["--gen-suppressions=true"]

    if self.EvalBoolFlag(self._options.hybrid):
      ret += ["--hybrid=yes"] # "no" is the default value for TSAN
    if self.EvalBoolFlag(self._options.announce_threads):
      ret += ["--announce-threads"]
    if self.EvalBoolFlag(self._options.free_is_write):
      ret += ["--free-is-write=yes"]
    else:
      ret += ["--free-is-write=no"]

    # --show-pc flag is needed for parsing the error logs on Darwin.
    # NOTE(review): platform_suffix here is the leftover value of the loop
    # variable above (i.e. the LAST entry of common.PlatformNames()), not an
    # explicit "am I on Mac" check -- confirm this is intentional.
    if platform_suffix == 'mac':
      ret += ["--show-pc=yes"]
    ret += ["--show-pid=no"]

    boring_callers = common.BoringCallers(mangled=False, use_re_wildcards=False)
    # TODO(timurrrr): In fact, we want "starting from .." instead of "below .."
    for bc in boring_callers:
      ret += ["--cut_stack_below=%s" % bc]

    return ret
class ThreadSanitizerPosix(ThreadSanitizerBase, ValgrindTool):
  """ThreadSanitizer running on top of Valgrind (Linux and Mac)."""

  def ToolSpecificFlags(self):
    flags = ThreadSanitizerBase.ToolSpecificFlags(self)
    # The -v flag is needed for printing the list of used suppressions and
    # obtaining addresses for loaded shared libraries on Mac.
    flags.append("-v")
    return flags

  def CreateAnalyzer(self):
    """Build the TSan log analyzer; gdb symbolization is Mac-only."""
    gdb_symbolization = common.IsMac()
    return tsan_analyze.TsanAnalyzer(self._source_dir, gdb_symbolization)

  def Analyze(self, check_sanity=False):
    retcode = self.GetAnalyzeResults(check_sanity)
    if retcode != 0:
      logging.info(self.INFO_MESSAGE)
    return retcode
class ThreadSanitizerWindows(ThreadSanitizerBase, PinTool):
  """ThreadSanitizer running on top of PIN (Windows)."""

  def __init__(self):
    super(ThreadSanitizerWindows, self).__init__()
    self.RegisterOptionParserHook(ThreadSanitizerWindows.ExtendOptionParser)

  def ExtendOptionParser(self, parser):
    # May be passed multiple times; each path is checked for existence below.
    parser.add_option("", "--suppressions", default=[],
                      action="append",
                      help="path to TSan suppression file")

  def ToolSpecificFlags(self):
    # Force the Windows heap allocator; exported for the child process.
    add_env = {
      "CHROME_ALLOCATOR" : "WINHEAP",
    }
    for k,v in add_env.iteritems():  # Python 2 only (dict.iteritems).
      logging.info("export %s=%s", k, v)
      os.putenv(k, v)
    proc = ThreadSanitizerBase.ToolSpecificFlags(self)
    # On PIN, ThreadSanitizer has its own suppression mechanism
    # and --log-file flag which work exactly on Valgrind.
    suppression_count = 0
    for suppression_file in self._options.suppressions:
      if os.path.exists(suppression_file):
        suppression_count += 1
        suppression_file = common.NormalizeWindowsPath(suppression_file)
        proc += ["--suppressions=%s" % suppression_file]
    if not suppression_count:
      logging.warning("WARNING: NOT USING SUPPRESSIONS!")
    # "%p" is expanded by the tool to the process id (one log per process).
    logfilename = self.log_dir + "/tsan.%p"
    proc += ["--log-file=" + common.NormalizeWindowsPath(logfilename)]
    # TODO(timurrrr): Add flags for Valgrind trace children analog when we
    # start running complex tests (e.g. UI) under TSan/Win.
    return proc

  def Analyze(self, check_sanity=False):
    # Collect every per-process log produced by the run above.
    filenames = glob.glob(self.log_dir + "/tsan.*")
    analyzer = tsan_analyze.TsanAnalyzer(self._source_dir)
    ret = analyzer.Report(filenames, None, check_sanity)
    if ret != 0:
      logging.info(self.INFO_MESSAGE)
    return ret
class DrMemory(BaseTool):
  """Dr.Memory
  Dynamic memory error detector for Windows.

  http://dev.chromium.org/developers/how-tos/using-drmemory

  It is not very mature at the moment, some things might not work properly.
  """

  def __init__(self, full_mode, pattern_mode):
    # full_mode: enable full (slower, more thorough) checking.
    # pattern_mode: use pattern-based uninitialized-read detection.
    super(DrMemory, self).__init__()
    self.full_mode = full_mode
    self.pattern_mode = pattern_mode
    self.RegisterOptionParserHook(DrMemory.ExtendOptionParser)

  def ToolName(self):
    return "drmemory"

  def ExtendOptionParser(self, parser):
    """Register Dr. Memory-specific command-line options."""
    parser.add_option("", "--suppressions", default=[],
                      action="append",
                      help="path to a drmemory suppression file")
    parser.add_option("", "--follow_python", action="store_true",
                      default=False, dest="follow_python",
                      help="Monitor python child processes. If off, neither "
                      "python children nor any children of python children "
                      "will be monitored.")
    parser.add_option("", "--indirect", action="store_true",
                      default=False,
                      help="set BROWSER_WRAPPER rather than "
                      "running Dr. Memory directly on the harness")
    parser.add_option("", "--indirect_webkit_layout", action="store_true",
                      default=False,
                      help="set --wrapper rather than running valgrind "
                      "directly.")
    parser.add_option("", "--use_debug", action="store_true",
                      default=False, dest="use_debug",
                      help="Run Dr. Memory debug build")
    parser.add_option("", "--trace_children", action="store_true",
                      default=True,
                      help="TODO: default value differs from Valgrind")

  def ToolCommand(self):
    """Get the tool command to run."""
    # WINHEAP is what Dr. Memory supports as there are issues w/ both
    # jemalloc (http://code.google.com/p/drmemory/issues/detail?id=320) and
    # tcmalloc (http://code.google.com/p/drmemory/issues/detail?id=314)
    add_env = {
      "CHROME_ALLOCATOR" : "WINHEAP",
      "JSIMD_FORCEMMX"   : "1",  # http://code.google.com/p/drmemory/issues/detail?id=540
    }
    for k,v in add_env.iteritems():  # Python 2 only (dict.iteritems).
      logging.info("export %s=%s", k, v)
      os.putenv(k, v)

    drmem_cmd = os.getenv("DRMEMORY_COMMAND")
    if not drmem_cmd:
      raise RuntimeError, "Please set DRMEMORY_COMMAND environment variable " \
                          "with the path to drmemory.exe"
    proc = drmem_cmd.split(" ")

    # By default, don't run python (this will exclude python's children as well)
    # to reduce runtime.  We're not really interested in spending time finding
    # bugs in the python implementation.
    # With file-based config we must update the file every time, and
    # it will affect simultaneous drmem uses by this user.  While file-based
    # config has many advantages, here we may want this-instance-only
    # (http://code.google.com/p/drmemory/issues/detail?id=334).
    drconfig_cmd = [ proc[0].replace("drmemory.exe", "drconfig.exe") ]
    drconfig_cmd += ["-quiet"] # suppress errors about no 64-bit libs
    run_drconfig = True
    if self._options.follow_python:
      logging.info("Following python children")
      # -unreg fails if not already registered so query for that first
      query_cmd = drconfig_cmd + ["-isreg", "python.exe"]
      query_proc = subprocess.Popen(query_cmd, stdout=subprocess.PIPE,
                                    shell=True)
      (query_out, query_err) = query_proc.communicate()
      if re.search("exe not registered", query_out):
        run_drconfig = False # all set
      else:
        drconfig_cmd += ["-unreg", "python.exe"]
    else:
      logging.info("Excluding python children")
      drconfig_cmd += ["-reg", "python.exe", "-norun"]
    if run_drconfig:
      drconfig_retcode = common.RunSubprocess(drconfig_cmd, self._timeout)
      if drconfig_retcode:
        logging.error("Configuring whether to follow python children failed " \
                      "with %d.", drconfig_retcode)
        raise RuntimeError, "Configuring python children failed "

    suppression_count = 0
    supp_files = self._options.suppressions
    if self.full_mode:
      # Full mode also picks up the "_full" variant of each suppression file.
      supp_files += [s.replace(".txt", "_full.txt") for s in supp_files]
    for suppression_file in supp_files:
      if os.path.exists(suppression_file):
        suppression_count += 1
        proc += ["-suppress", common.NormalizeWindowsPath(suppression_file)]
    if not suppression_count:
      logging.warning("WARNING: NOT USING SUPPRESSIONS!")

    # Un-comment to dump Dr.Memory events on error
    #proc += ["-dr_ops", "-dumpcore_mask", "-dr_ops", "0x8bff"]

    # Un-comment and comment next line to debug Dr.Memory
    #proc += ["-dr_ops", "-no_hide"]
    #proc += ["-dr_ops", "-msgbox_mask", "-dr_ops", "15"]
    #Proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "15"]
    # Ensure we see messages about Dr. Memory crashing!
    proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "12"]

    if self._options.use_debug:
      proc += ["-debug"]

    proc += ["-logdir", common.NormalizeWindowsPath(self.log_dir)]

    # NOTE(review): if neither self.log_parent_dir nor
    # self._options.build_dir is set, symcache_dir is never assigned and the
    # "if symcache_dir:" check below raises NameError -- latent bug.
    if self.log_parent_dir:
      # gpu process on Windows Vista+ runs at Low Integrity and can only
      # write to certain directories (http://crbug.com/119131)
      symcache_dir = os.path.join(self.log_parent_dir, "drmemory.symcache")
    elif self._options.build_dir:
      # The other case is only possible with -t cmdline.
      # Anyways, if we omit -symcache_dir the -logdir's value is used which
      # should be fine.
      symcache_dir = os.path.join(self._options.build_dir, "drmemory.symcache")
    if symcache_dir:
      if not os.path.exists(symcache_dir):
        try:
          os.mkdir(symcache_dir)
        except OSError:
          logging.warning("Can't create symcache dir?")
      if os.path.exists(symcache_dir):
        proc += ["-symcache_dir", common.NormalizeWindowsPath(symcache_dir)]

    # Use -no_summary to suppress DrMemory's summary and init-time
    # notifications.  We generate our own with drmemory_analyze.py.
    proc += ["-batch", "-no_summary"]

    # Un-comment to disable interleaved output.  Will also suppress error
    # messages normally printed to stderr.
    #proc += ["-quiet", "-no_results_to_stderr"]

    proc += ["-callstack_max_frames", "40"]

    # disable leak scan for now
    proc += ["-no_count_leaks", "-no_leak_scan"]

    # make callstacks easier to read
    proc += ["-callstack_srcfile_prefix",
             "build\\src,chromium\\src,crt_build\\self_x86"]
    proc += ["-callstack_modname_hide",
             "*drmemory*,chrome.dll"]

    boring_callers = common.BoringCallers(mangled=False, use_re_wildcards=False)
    # TODO(timurrrr): In fact, we want "starting from .." instead of "below .."
    proc += ["-callstack_truncate_below", ",".join(boring_callers)]

    if self.pattern_mode:
      proc += ["-pattern", "0xf1fd", "-no_count_leaks", "-redzone_size", "0x20"]
    elif not self.full_mode:
      proc += ["-light"]

    proc += self._tool_flags

    # DrM i#850/851: The new -callstack_use_top_fp_selectively has bugs.
    proc += ["-no_callstack_use_top_fp_selectively"]

    # Dr.Memory requires -- to separate tool flags from the executable name.
    proc += ["--"]

    if self._options.indirect or self._options.indirect_webkit_layout:
      # TODO(timurrrr): reuse for TSan on Windows
      wrapper_path = os.path.join(self._source_dir,
                                  "tools", "valgrind", "browser_wrapper_win.py")
      wrapper = " ".join(["python", wrapper_path] + proc)
      self.CreateBrowserWrapper(wrapper)
      logging.info("browser wrapper = " + " ".join(proc))
      if self._options.indirect_webkit_layout:
        proc = self._args
        # Layout tests want forward slashes.
        wrapper = wrapper.replace('\\', '/')
        proc += ["--wrapper", wrapper]
        return proc
      else:
        proc = []

    # Note that self._args begins with the name of the exe to be run.
    self._args[0] = common.NormalizeWindowsPath(self._args[0])
    proc += self._args
    return proc

  def CreateBrowserWrapper(self, command):
    # The test harness reads BROWSER_WRAPPER to wrap browser invocations.
    os.putenv("BROWSER_WRAPPER", command)

  def Analyze(self, check_sanity=False):
    # Use one analyzer for all the log files to avoid printing duplicate reports
    #
    # TODO(timurrrr): unify this with Valgrind and other tools when we have
    # http://code.google.com/p/drmemory/issues/detail?id=684
    analyzer = drmemory_analyze.DrMemoryAnalyzer()

    ret = 0
    if not self._options.indirect and not self._options.indirect_webkit_layout:
      filenames = glob.glob(self.log_dir + "/*/results.txt")
      ret = analyzer.Report(filenames, None, check_sanity)
    else:
      testcases = glob.glob(self.log_dir + "/testcase.*.logs")
      # If we have browser wrapper, the per-test logdirs are named as
      # "testcase.wrapper_PID.name".
      # Let's extract the list of wrapper_PIDs and name it ppids.
      # NOTE: ppids may contain '_', i.e. they are not ints!
      ppids = set([f.split(".")[-2] for f in testcases])
      for ppid in ppids:
        testcase_name = None
        try:
          f = open("%s/testcase.%s.name" % (self.log_dir, ppid))
          testcase_name = f.read().strip()
          f.close()
        except IOError:
          pass
        print "====================================================="
        print " Below is the report for drmemory wrapper PID=%s." % ppid
        if testcase_name:
          print " It was used while running the `%s` test." % testcase_name
        else:
          # TODO(timurrrr): hm, the PID line is suppressed on Windows...
          print " You can find the corresponding test"
          print " by searching the above log for 'PID=%s'" % ppid
        sys.stdout.flush()
        ppid_filenames = glob.glob("%s/testcase.%s.logs/*/results.txt" %
                                   (self.log_dir, ppid))
        ret |= analyzer.Report(ppid_filenames, testcase_name, False)
        print "====================================================="
        sys.stdout.flush()

    logging.info("Please see http://dev.chromium.org/developers/how-tos/"
                 "using-drmemory for the info on Dr. Memory")
    return ret
# RaceVerifier support. See
# http://code.google.com/p/data-race-test/wiki/RaceVerifier for more details.
class ThreadSanitizerRV1Analyzer(tsan_analyze.TsanAnalyzer):
  """ TsanAnalyzer that saves race reports to a file. """

  # Pass 2 (ThreadSanitizerRV2Mixin) reads this file back and deletes it.
  TMP_FILE = "rvlog.tmp"

  def __init__(self, source_dir, use_gdb):
    super(ThreadSanitizerRV1Analyzer, self).__init__(source_dir, use_gdb)
    self.out = open(self.TMP_FILE, "w")

  def Report(self, files, testcase, check_sanity=False):
    # Dump each report to the temp file instead of printing it.
    reports = self.GetReports(files)
    for report in reports:
      print >>self.out, report  # Python 2 "print to file" syntax.
    if len(reports) > 0:
      logging.info("RaceVerifier pass 1 of 2, found %i reports" % len(reports))
      # -1 signals the caller that a verification pass is needed.
      return -1
    return 0

  def CloseOutputFile(self):
    self.out.close()
class ThreadSanitizerRV1Mixin(object):
  """RaceVerifier first pass.

  Runs ThreadSanitizer as usual, but hides race reports and collects them in a
  temporary file"""

  def __init__(self):
    super(ThreadSanitizerRV1Mixin, self).__init__()
    self.RegisterOptionParserHook(ThreadSanitizerRV1Mixin.ExtendOptionParser)

  def ExtendOptionParser(self, parser):
    # Hybrid mode finds more candidate races; pass 2 verifies them anyway.
    parser.set_defaults(hybrid="yes")

  def CreateAnalyzer(self):
    """Keep a handle on the analyzer so Cleanup() can close its file."""
    gdb_symbolization = common.IsMac()
    self.analyzer = ThreadSanitizerRV1Analyzer(self._source_dir,
                                               gdb_symbolization)
    return self.analyzer

  def Cleanup(self):
    super(ThreadSanitizerRV1Mixin, self).Cleanup()
    self.analyzer.CloseOutputFile()
class ThreadSanitizerRV2Mixin(object):
  """RaceVerifier second pass."""

  def __init__(self):
    super(ThreadSanitizerRV2Mixin, self).__init__()
    self.RegisterOptionParserHook(ThreadSanitizerRV2Mixin.ExtendOptionParser)

  def ExtendOptionParser(self, parser):
    parser.add_option("", "--race-verifier-sleep-ms",
                      dest="race_verifier_sleep_ms", default=10,
                      help="duration of RaceVerifier delays")

  def ToolSpecificFlags(self):
    """Append the race-verifier flags to the regular TSan flags."""
    flags = super(ThreadSanitizerRV2Mixin, self).ToolSpecificFlags()
    sleep_ms = int(self._options.race_verifier_sleep_ms)
    flags += ['--race-verifier=%s' % ThreadSanitizerRV1Analyzer.TMP_FILE,
              '--race-verifier-sleep-ms=%d' % sleep_ms]
    return flags

  def Cleanup(self):
    super(ThreadSanitizerRV2Mixin, self).Cleanup()
    # Remove the race log written by pass 1.
    os.unlink(ThreadSanitizerRV1Analyzer.TMP_FILE)
# RaceVerifier pass 1 on the Valgrind-based TSan (Linux/Mac).
class ThreadSanitizerRV1Posix(ThreadSanitizerRV1Mixin, ThreadSanitizerPosix):
  pass
# RaceVerifier pass 2 on the Valgrind-based TSan (Linux/Mac).
class ThreadSanitizerRV2Posix(ThreadSanitizerRV2Mixin, ThreadSanitizerPosix):
  pass
# RaceVerifier pass 1 on the PIN-based TSan (Windows).
class ThreadSanitizerRV1Windows(ThreadSanitizerRV1Mixin,
                                ThreadSanitizerWindows):
  pass
# RaceVerifier pass 2 on the PIN-based TSan (Windows).
class ThreadSanitizerRV2Windows(ThreadSanitizerRV2Mixin,
                                ThreadSanitizerWindows):
  pass
class RaceVerifier(object):
  """Runs tests under RaceVerifier/Valgrind."""

  MORE_INFO_URL = "http://code.google.com/p/data-race-test/wiki/RaceVerifier"

  def RV1Factory(self):
    """Build the pass-1 tool for the current platform."""
    if common.IsWindows():
      return ThreadSanitizerRV1Windows()
    return ThreadSanitizerRV1Posix()

  def RV2Factory(self):
    """Build the pass-2 tool for the current platform."""
    if common.IsWindows():
      return ThreadSanitizerRV2Windows()
    return ThreadSanitizerRV2Posix()

  def ToolName(self):
    return "tsan"

  def Main(self, args, check_sanity, min_runtime_in_seconds):
    """Run pass 1; if it found candidate races, confirm them with pass 2."""
    logging.info("Running a TSan + RaceVerifier test. For more information, " +
                 "see " + self.MORE_INFO_URL)
    pass1 = self.RV1Factory()
    ret = pass1.Main(args, check_sanity, min_runtime_in_seconds)
    # Verify race reports, if there are any.
    if ret != -1:
      logging.info("No reports, skipping RaceVerifier second pass")
    else:
      logging.info("Starting pass 2 of 2. Running the same binary in " +
                   "RaceVerifier mode to confirm possible race reports.")
      logging.info("For more information, see " + self.MORE_INFO_URL)
      pass2 = self.RV2Factory()
      ret = pass2.Main(args, check_sanity, min_runtime_in_seconds)
    logging.info("Please see " + self.MORE_INFO_URL + " for more information " +
                 "on RaceVerifier")
    return ret

  def Run(self, args, module, min_runtime_in_seconds=0):
    return self.Main(args, False, min_runtime_in_seconds)
class EmbeddedTool(BaseTool):
  """Abstract base for tools compiled directly into the test binary."""
  # TODO(glider): need to override Execute() and support process chaining here.

  def ToolCommand(self):
    """No wrapper needed: run the test arguments unchanged."""
    return self._args
class Asan(EmbeddedTool):
  """AddressSanitizer, a memory error detector.

  More information at
  http://dev.chromium.org/developers/testing/addresssanitizer
  """

  def __init__(self):
    super(Asan, self).__init__()
    self._timeout = 1200
    if common.IsMac():
      # Ask dyld to load the binary without position-independent execution.
      self._env["DYLD_NO_PIE"] = "1"

  def ToolName(self):
    return "asan"

  def ToolCommand(self):
    """Prefix the test command line with the asan wrapper script."""
    # TODO(glider): use pipes instead of the ugly wrapper here once they
    # are supported.
    procs = [os.path.join(self._source_dir, "tools", "valgrind",
                          "asan", "asan_wrapper.sh")]
    procs.extend(self._args)
    return procs

  def Analyze(self, unused_check_sanity):
    # BUG FIX: the first parameter was misspelled "sels"; it receives the
    # instance positionally, so name it "self" like every other method.
    # ASan reports in-process, so there is nothing to analyze afterwards.
    return 0
class ToolFactory:
  """Maps a tool name from the command line to a tool instance."""

  def Create(self, tool_name):
    if tool_name == "memcheck":
      return Memcheck()
    if tool_name == "tsan":
      if common.IsWindows():
        return ThreadSanitizerWindows()
      else:
        return ThreadSanitizerPosix()
    if tool_name == "drmemory" or tool_name == "drmemory_light":
      # TODO(timurrrr): remove support for "drmemory" when buildbots are
      # switched to drmemory_light OR make drmemory==drmemory_full the default
      # mode when the tool is mature enough.
      return DrMemory(False, False)
    if tool_name == "drmemory_full":
      return DrMemory(True, False)
    if tool_name == "drmemory_pattern":
      return DrMemory(False, True)
    if tool_name == "tsan_rv":
      return RaceVerifier()
    if tool_name == "asan":
      return Asan()
    try:
      platform_name = common.PlatformNames()[0]
    # NOTE(review): NotImplementedError is a builtin; this works only if the
    # common module (re-)exposes that name -- confirm, otherwise this except
    # clause itself raises AttributeError.
    except common.NotImplementedError:
      platform_name = sys.platform + "(Unknown)"
    # Call-style raise (instead of the Python-2-only "raise X, msg") keeps
    # this line valid on both Python 2 and 3.
    raise RuntimeError("Unknown tool (tool=%s, platform=%s)" % (tool_name,
                                                                platform_name))
def CreateTool(tool):
  """Convenience wrapper: instantiate the named tool via ToolFactory."""
  factory = ToolFactory()
  return factory.Create(tool)
|
bsd-3-clause
|
bancek/egradebook
|
src/lib/django/forms/models.py
|
152
|
42947
|
"""
Helper functions for creating Form classes from Django models
and database field objects.
"""
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.datastructures import SortedDict
from django.utils.text import get_text_list, capfirst
from django.utils.translation import ugettext_lazy as _, ugettext
from django.core.exceptions import ValidationError, NON_FIELD_ERRORS, \
FieldError
from django.core.validators import EMPTY_VALUES
from util import ErrorList
from forms import BaseForm, get_declared_fields
from fields import Field, ChoiceField
from widgets import SelectMultiple, HiddenInput, MultipleHiddenInput
from widgets import media_property
from formsets import BaseFormSet, formset_factory
# Public API of this module, as exposed by ``from ... import *``.
__all__ = (
    'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
    'save_instance', 'ModelChoiceField', 'ModelMultipleChoiceField',
)
def construct_instance(form, instance, fields=None, exclude=None):
    """
    Constructs and returns a model instance from the bound ``form``'s
    ``cleaned_data``, but does not save the returned instance to the
    database.
    """
    from django.db import models
    opts = instance._meta
    cleaned_data = form.cleaned_data
    deferred_file_fields = []
    for field in opts.fields:
        if (not field.editable or isinstance(field, models.AutoField)
                or field.name not in cleaned_data):
            continue
        if fields is not None and field.name not in fields:
            continue
        if exclude and field.name in exclude:
            continue
        if isinstance(field, models.FileField):
            # Defer saving file-type fields until after the other fields, so a
            # callable upload_to can use the values from other fields.
            deferred_file_fields.append(field)
        else:
            field.save_form_data(instance, cleaned_data[field.name])
    for field in deferred_file_fields:
        field.save_form_data(instance, cleaned_data[field.name])
    return instance
def save_instance(form, instance, fields=None, fail_message='saved',
                  commit=True, exclude=None, construct=True):
    """
    Saves bound Form ``form``'s cleaned_data into model instance ``instance``.

    If commit=True, then the changes to ``instance`` will be saved to the
    database. Returns ``instance``.

    If construct=False, assume ``instance`` has already been constructed and
    just needs to be saved.
    """
    if construct:
        instance = construct_instance(form, instance, fields, exclude)
    opts = instance._meta
    if form.errors:
        raise ValueError("The %s could not be %s because the data didn't"
                         " validate." % (opts.object_name, fail_message))

    # Wrap up the saving of m2m data as a function.
    def save_m2m():
        cleaned_data = form.cleaned_data
        for f in opts.many_to_many:
            if fields and f.name not in fields:
                continue
            if f.name in cleaned_data:
                f.save_form_data(instance, cleaned_data[f.name])

    if not commit:
        # We're not committing. Add a method to the form to allow deferred
        # saving of m2m data.
        form.save_m2m = save_m2m
    else:
        # If we are committing, save the instance and the m2m data immediately.
        instance.save()
        save_m2m()
    return instance
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
    """
    Returns a dict containing the data in ``instance`` suitable for passing as
    a Form's ``initial`` keyword argument.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned dict.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned dict, even if they are listed in
    the ``fields`` argument.
    """
    # avoid a circular import
    from django.db.models.fields.related import ManyToManyField
    opts = instance._meta
    data = {}
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        if not isinstance(f, ManyToManyField):
            data[f.name] = f.value_from_object(instance)
        elif instance.pk is None:
            # If the object doesn't have a primary key yet,
            # f.value_from_object would raise an exception; just use an
            # empty list for its m2m fields.
            data[f.name] = []
        else:
            # MultipleChoiceWidget needs a list of pks, not object instances.
            data[f.name] = [obj.pk for obj in f.value_from_object(instance)]
    return data
def fields_for_model(model, fields=None, exclude=None, widgets=None, formfield_callback=None):
    """
    Returns a ``SortedDict`` containing form fields for the given model.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned fields.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned fields, even if they are listed
    in the ``fields`` argument.
    """
    field_list = []
    ignored = []
    opts = model._meta
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields is not None and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        kwargs = {}
        if widgets and f.name in widgets:
            kwargs['widget'] = widgets[f.name]
        if formfield_callback is None:
            formfield = f.formfield(**kwargs)
        elif not callable(formfield_callback):
            raise TypeError('formfield_callback must be a function or callable')
        else:
            formfield = formfield_callback(f, **kwargs)
        # A falsy formfield means the model field opted out of form editing.
        if formfield:
            field_list.append((f.name, formfield))
        else:
            ignored.append(f.name)
    field_dict = SortedDict(field_list)
    if fields:
        # Re-order to match ``fields`` while dropping excluded/ignored names.
        field_dict = SortedDict(
            [(f, field_dict.get(f)) for f in fields
             if (not exclude or f not in exclude) and (f not in ignored)]
        )
    return field_dict
class ModelFormOptions(object):
    """Typed view over a ModelForm's inner ``Meta`` class; missing
    attributes default to None."""

    def __init__(self, options=None):
        for attr in ('model', 'fields', 'exclude', 'widgets'):
            setattr(self, attr, getattr(options, attr, None))
class ModelFormMetaclass(type):
    """Metaclass that builds ``base_fields`` for ModelForm subclasses from
    the model declared on the inner ``Meta`` class."""

    def __new__(cls, name, bases, attrs):
        formfield_callback = attrs.pop('formfield_callback', None)
        try:
            parents = [b for b in bases if issubclass(b, ModelForm)]
        except NameError:
            # We are defining ModelForm itself.
            parents = None
        declared_fields = get_declared_fields(bases, attrs, False)
        new_class = super(ModelFormMetaclass, cls).__new__(cls, name, bases,
                attrs)
        if not parents:
            # ModelForm itself: no Meta processing needed.
            return new_class

        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
        if opts.model:
            # If a model is defined, extract form fields from it.
            fields = fields_for_model(opts.model, opts.fields,
                                      opts.exclude, opts.widgets, formfield_callback)
            # make sure opts.fields doesn't specify an invalid field
            # (iteritems is Python 2 only).
            none_model_fields = [k for k, v in fields.iteritems() if not v]
            missing_fields = set(none_model_fields) - \
                             set(declared_fields.keys())
            if missing_fields:
                message = 'Unknown field(s) (%s) specified for %s'
                message = message % (', '.join(missing_fields),
                                     opts.model.__name__)
                raise FieldError(message)
            # Override default model fields with any custom declared ones
            # (plus, include all the other declared fields).
            fields.update(declared_fields)
        else:
            fields = declared_fields
        new_class.declared_fields = declared_fields
        new_class.base_fields = fields
        return new_class
class BaseModelForm(BaseForm):
    """Form bound to a model instance: initial data comes from the instance,
    validation runs the model's field/unique checks, save() writes back."""

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        opts = self._meta
        if instance is None:
            if opts.model is None:
                raise ValueError('ModelForm has no model class specified.')
            # if we didn't get an instance, instantiate a new one
            self.instance = opts.model()
            object_data = {}
        else:
            self.instance = instance
            object_data = model_to_dict(instance, opts.fields, opts.exclude)
        # if initial was provided, it should override the values from instance
        if initial is not None:
            object_data.update(initial)
        # self._validate_unique will be set to True by BaseModelForm.clean().
        # It is False by default so overriding self.clean() and failing to call
        # super will stop validate_unique from being called.
        self._validate_unique = False
        super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
                                            error_class, label_suffix, empty_permitted)

    def _update_errors(self, message_dict):
        # Merge a ValidationError's message_dict into self._errors.
        for k, v in message_dict.items():
            if k != NON_FIELD_ERRORS:
                self._errors.setdefault(k, self.error_class()).extend(v)
                # Remove the data from the cleaned_data dict since it was invalid
                if k in self.cleaned_data:
                    del self.cleaned_data[k]
        if NON_FIELD_ERRORS in message_dict:
            messages = message_dict[NON_FIELD_ERRORS]
            self._errors.setdefault(NON_FIELD_ERRORS, self.error_class()).extend(messages)

    def _get_validation_exclusions(self):
        """
        For backwards-compatibility, several types of fields need to be
        excluded from model validation. See the following tickets for
        details: #12507, #12521, #12553
        """
        exclude = []
        # Build up a list of fields that should be excluded from model field
        # validation and unique checks.
        for f in self.instance._meta.fields:
            field = f.name
            # Exclude fields that aren't on the form. The developer may be
            # adding these values to the model after form validation.
            if field not in self.fields:
                exclude.append(f.name)

            # Don't perform model validation on fields that were defined
            # manually on the form and excluded via the ModelForm's Meta
            # class.  See #12901.
            elif self._meta.fields and field not in self._meta.fields:
                exclude.append(f.name)
            elif self._meta.exclude and field in self._meta.exclude:
                exclude.append(f.name)

            # Exclude fields that failed form validation. There's no need for
            # the model fields to validate them as well.
            elif field in self._errors.keys():
                exclude.append(f.name)

            # Exclude empty fields that are not required by the form, if the
            # underlying model field is required. This keeps the model field
            # from raising a required error. Note: don't exclude the field from
            # validaton if the model field allows blanks. If it does, the blank
            # value may be included in a unique check, so cannot be excluded
            # from validation.
            else:
                form_field = self.fields[field]
                field_value = self.cleaned_data.get(field, None)
                if not f.blank and not form_field.required and field_value in EMPTY_VALUES:
                    exclude.append(f.name)
        return exclude

    def clean(self):
        # Enable validate_unique(); see the note in __init__.
        self._validate_unique = True
        return self.cleaned_data

    def _post_clean(self):
        """Run model-level validation after the form's own field cleaning."""
        opts = self._meta
        # Update the model instance with self.cleaned_data.
        self.instance = construct_instance(self, self.instance, opts.fields, opts.exclude)

        exclude = self._get_validation_exclusions()

        # Foreign Keys being used to represent inline relationships
        # are excluded from basic field value validation. This is for two
        # reasons: firstly, the value may not be supplied (#12507; the
        # case of providing new values to the admin); secondly the
        # object being referred to may not yet fully exist (#12749).
        # However, these fields *must* be included in uniqueness checks,
        # so this can't be part of _get_validation_exclusions().
        for f_name, field in self.fields.items():
            if isinstance(field, InlineForeignKeyField):
                exclude.append(f_name)

        # Clean the model instance's fields.
        # ("except X, e" is Python 2 only syntax.)
        try:
            self.instance.clean_fields(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)

        # Call the model instance's clean method.
        try:
            self.instance.clean()
        except ValidationError, e:
            self._update_errors({NON_FIELD_ERRORS: e.messages})

        # Validate uniqueness if needed.
        if self._validate_unique:
            self.validate_unique()

    def validate_unique(self):
        """
        Calls the instance's validate_unique() method and updates the form's
        validation errors if any were raised.
        """
        exclude = self._get_validation_exclusions()
        try:
            self.instance.validate_unique(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)

    def save(self, commit=True):
        """
        Saves this ``form``'s cleaned_data into model instance
        ``self.instance``.

        If commit=True, then the changes to ``instance`` will be saved to the
        database. Returns ``instance``.
        """
        if self.instance.pk is None:
            fail_message = 'created'
        else:
            fail_message = 'changed'
        return save_instance(self, self.instance, self._meta.fields,
                             fail_message, commit, construct=False)

    save.alters_data = True
class ModelForm(BaseModelForm):
    # Python 2 metaclass declaration: ModelFormMetaclass reads the inner
    # Meta class and builds base_fields from the model's fields.
    __metaclass__ = ModelFormMetaclass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
                      formfield_callback=None):
    """Return a ModelForm subclass bound to ``model``.

    ``fields`` / ``exclude`` restrict which model fields the form exposes;
    ``formfield_callback`` customizes formfield construction. The generated
    class inherits from ``form`` (default: ModelForm).
    """
    # Build the attribute dict for a throwaway inner Meta class.
    # FIXME: ideally, we should be able to construct a ModelForm without
    # creating and passing in a temporary inner class.
    meta_attrs = {'model': model}
    if fields is not None:
        meta_attrs['fields'] = fields
    if exclude is not None:
        meta_attrs['exclude'] = exclude
    # Inherit from the parent form's inner Meta, when it has one, so its
    # options carry over.
    if hasattr(form, 'Meta'):
        meta_bases = (form.Meta, object)
    else:
        meta_bases = (object,)
    Meta = type('Meta', meta_bases, meta_attrs)
    # Name the generated class after the model for readable reprs.
    return ModelFormMetaclass(model.__name__ + 'Form', (form,),
                              {'Meta': Meta,
                               'formfield_callback': formfield_callback})
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
    """
    A ``FormSet`` for editing a queryset and/or adding new objects to it.
    """
    model = None

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 queryset=None, **kwargs):
        self.queryset = queryset
        defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
        defaults.update(kwargs)
        super(BaseModelFormSet, self).__init__(**defaults)

    def initial_form_count(self):
        """Returns the number of forms that are required in this FormSet."""
        if not (self.data or self.files):
            # Unbound: one initial form per object in the queryset.
            return len(self.get_queryset())
        return super(BaseModelFormSet, self).initial_form_count()

    def _existing_object(self, pk):
        # Lazily build and cache a pk -> instance map for the queryset.
        if not hasattr(self, '_object_dict'):
            self._object_dict = dict([(o.pk, o) for o in self.get_queryset()])
        return self._object_dict.get(pk)

    def _construct_form(self, i, **kwargs):
        """Builds form ``i``, attaching the matching model instance for
        initial (pre-existing) forms."""
        if self.is_bound and i < self.initial_form_count():
            # Import goes here instead of module-level because importing
            # django.db has side effects.
            from django.db import connections
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            pk_field = self.model._meta.pk
            pk = pk_field.get_db_prep_lookup('exact', pk,
                connection=connections[self.get_queryset().db])
            if isinstance(pk, list):
                pk = pk[0]
            kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and not kwargs.get('instance'):
            kwargs['instance'] = self.get_queryset()[i]
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)

    def get_queryset(self):
        """Returns the (cached) queryset backing the initial forms, with a
        deterministic ordering."""
        if not hasattr(self, '_queryset'):
            if self.queryset is not None:
                qs = self.queryset
            else:
                qs = self.model._default_manager.get_query_set()
            # If the queryset isn't already ordered we need to add an
            # artificial ordering here to make sure that all formsets
            # constructed from this queryset have the same form order.
            if not qs.ordered:
                qs = qs.order_by(self.model._meta.pk.name)
            # Removed queryset limiting here. As per discussion re: #13023
            # on django-dev, max_num should not prevent existing
            # related objects/inlines from being displayed.
            self._queryset = qs
        return self._queryset

    def save_new(self, form, commit=True):
        """Saves and returns a new model instance for the given form."""
        return form.save(commit=commit)

    def save_existing(self, form, instance, commit=True):
        """Saves and returns an existing model instance for the given form."""
        return form.save(commit=commit)

    def save(self, commit=True):
        """Saves model instances for every form, adding and changing instances
        as necessary, and returns the list of instances.
        """
        if not commit:
            # Defer m2m saving: callers save the instances themselves and
            # then call formset.save_m2m().
            self.saved_forms = []
            def save_m2m():
                for form in self.saved_forms:
                    form.save_m2m()
            self.save_m2m = save_m2m
        return self.save_existing_objects(commit) + self.save_new_objects(commit)

    def clean(self):
        # Formset-wide validation consists solely of cross-form uniqueness.
        self.validate_unique()

    def validate_unique(self):
        """Checks that no two forms in the formset violate the model's
        unique / unique_together / unique_for_* constraints, marking the
        duplicate forms invalid and raising ValidationError at the end."""
        # Collect unique_checks and date_checks to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            exclude = form._get_validation_exclusions()
            unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
            all_unique_checks = all_unique_checks.union(set(unique_checks))
            all_date_checks = all_date_checks.union(set(date_checks))
        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid
                if not hasattr(form, "cleaned_data"):
                    continue
                # get data for each field of each of unique_check
                row_data = tuple([form.cleaned_data[field] for field in unique_check if field in form.cleaned_data])
                if row_data and not None in row_data:
                    # if we've aready seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    # mark the data as seen
                    seen_data.add(row_data)
        # iterate over each of the date checks now
        for date_check in all_date_checks:
            seen_data = set()
            uclass, lookup, field, unique_for = date_check
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid.
                # Bug fix: test the *form*, not ``self`` -- the formset never
                # has a cleaned_data attribute, so the original check was
                # always true and invalid forms were not skipped here.
                if not hasattr(form, 'cleaned_data'):
                    continue
                # see if we have data for both fields
                if (form.cleaned_data and form.cleaned_data[field] is not None
                    and form.cleaned_data[unique_for] is not None):
                    # if it's a date lookup we need to get the data for all the fields
                    if lookup == 'date':
                        date = form.cleaned_data[unique_for]
                        date_data = (date.year, date.month, date.day)
                    # otherwise it's just the attribute on the date/datetime
                    # object
                    else:
                        date_data = (getattr(form.cleaned_data[unique_for], lookup),)
                    data = (form.cleaned_data[field],) + date_data
                    # if we've aready seen it then we have a uniqueness failure
                    if data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_date_error_message(date_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    seen_data.add(data)
        if errors:
            raise ValidationError(errors)

    def get_unique_error_message(self, unique_check):
        """Human-readable message for a duplicate row on the given fields."""
        if len(unique_check) == 1:
            return ugettext("Please correct the duplicate data for %(field)s.") % {
                "field": unique_check[0],
            }
        else:
            return ugettext("Please correct the duplicate data for %(field)s, "
                "which must be unique.") % {
                "field": get_text_list(unique_check, unicode(_("and"))),
            }

    def get_date_error_message(self, date_check):
        """Human-readable message for a unique_for_date/month/year failure."""
        return ugettext("Please correct the duplicate data for %(field_name)s "
            "which must be unique for the %(lookup)s in %(date_field)s.") % {
            'field_name': date_check[2],
            'date_field': date_check[3],
            'lookup': unicode(date_check[1]),
        }

    def get_form_error(self):
        # Generic marker attached to each duplicate form's non-field errors.
        return ugettext("Please correct the duplicate values below.")

    def save_existing_objects(self, commit=True):
        """Saves changed initial forms, deletes forms marked for deletion,
        and returns the list of saved instances."""
        self.changed_objects = []
        self.deleted_objects = []
        if not self.get_queryset():
            return []
        saved_instances = []
        for form in self.initial_forms:
            pk_name = self._pk_field.name
            raw_pk_value = form._raw_value(pk_name)
            # clean() for different types of PK fields can sometimes return
            # the model instance, and sometimes the PK. Handle either.
            pk_value = form.fields[pk_name].clean(raw_pk_value)
            pk_value = getattr(pk_value, 'pk', pk_value)
            obj = self._existing_object(pk_value)
            if self.can_delete and self._should_delete_form(form):
                self.deleted_objects.append(obj)
                obj.delete()
                continue
            if form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances

    def save_new_objects(self, commit=True):
        """Saves the extra (add) forms that contain data and returns the new
        instances."""
        self.new_objects = []
        for form in self.extra_forms:
            if not form.has_changed():
                continue
            # If someone has marked an add form for deletion, don't save the
            # object.
            if self.can_delete and self._should_delete_form(form):
                continue
            self.new_objects.append(self.save_new(form, commit=commit))
            if not commit:
                self.saved_forms.append(form)
        return self.new_objects

    def add_fields(self, form, index):
        """Add a hidden field for the object's primary key."""
        from django.db.models import AutoField, OneToOneField, ForeignKey
        self._pk_field = pk = self.model._meta.pk
        # If a pk isn't editable, then it won't be on the form, so we need to
        # add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason, auto_created pk fields and AutoField's editable attribute is
        # True, so check for that as well.
        def pk_is_not_editable(pk):
            return ((not pk.editable) or (pk.auto_created or isinstance(pk, AutoField))
                or (pk.rel and pk.rel.parent_link and pk_is_not_editable(pk.rel.to._meta.pk)))
        if pk_is_not_editable(pk) or pk.name not in form.fields:
            if form.is_bound:
                pk_value = form.instance.pk
            else:
                try:
                    if index is not None:
                        pk_value = self.get_queryset()[index].pk
                    else:
                        pk_value = None
                except IndexError:
                    pk_value = None
            if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
                qs = pk.rel.to._default_manager.get_query_set()
            else:
                qs = self.model._default_manager.get_query_set()
            qs = qs.using(form.instance._state.db)
            form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=HiddenInput)
        super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
                         formset=BaseModelFormSet,
                         extra=1, can_delete=False, can_order=False,
                         max_num=None, fields=None, exclude=None):
    """
    Returns a FormSet class for the given Django model class.
    """
    # First derive the per-row ModelForm, then wrap it in a formset class
    # and remember which model it edits.
    inner_form = modelform_factory(model, form=form, fields=fields,
                                   exclude=exclude,
                                   formfield_callback=formfield_callback)
    formset_class = formset_factory(inner_form, formset, extra=extra,
                                    max_num=max_num, can_order=can_order,
                                    can_delete=can_delete)
    formset_class.model = model
    return formset_class
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
    """A formset for child objects related to a parent."""
    def __init__(self, data=None, files=None, instance=None,
            save_as_new=False, prefix=None, queryset=None):
        # NOTE: self.fk (the ForeignKey from self.model to the parent) is
        # attached to the class by inlineformset_factory().
        from django.db.models.fields.related import RelatedObject
        if instance is None:
            # No parent yet: use an empty, unsaved parent instance.
            self.instance = self.fk.rel.to()
        else:
            self.instance = instance
        self.save_as_new = save_as_new
        # is there a better way to get the object descriptor?
        self.rel_name = RelatedObject(self.fk.rel.to, self.model, self.fk).get_accessor_name()
        if queryset is None:
            queryset = self.model._default_manager
        # Limit the rows to children of this parent instance.
        qs = queryset.filter(**{self.fk.name: self.instance})
        super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
                                                queryset=qs)
    def initial_form_count(self):
        # "Save as new" treats every form as a new object, so no initials.
        if self.save_as_new:
            return 0
        return super(BaseInlineFormSet, self).initial_form_count()
    def _construct_form(self, i, **kwargs):
        form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
        if self.save_as_new:
            # Remove the primary key from the form's data, we are only
            # creating new instances
            form.data[form.add_prefix(self._pk_field.name)] = None
            # Remove the foreign key from the form's data
            form.data[form.add_prefix(self.fk.name)] = None
        # Set the fk value here so that the form can do it's validation.
        setattr(form.instance, self.fk.get_attname(), self.instance.pk)
        return form
    #@classmethod
    def get_default_prefix(cls):
        # Derive the form prefix from the reverse accessor name; '+' (the
        # "no reverse accessor" marker) is stripped out.
        from django.db.models.fields.related import RelatedObject
        return RelatedObject(cls.fk.rel.to, cls.model, cls.fk).get_accessor_name().replace('+','')
    # Pre-decorator-syntax classmethod declaration (kept for file style).
    get_default_prefix = classmethod(get_default_prefix)
    def save_new(self, form, commit=True):
        # Use commit=False so we can assign the parent key afterwards, then
        # save the object.
        obj = form.save(commit=False)
        pk_value = getattr(self.instance, self.fk.rel.field_name)
        setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
        if commit:
            obj.save()
        # form.save_m2m() can be called via the formset later on if commit=False
        if commit and hasattr(form, 'save_m2m'):
            form.save_m2m()
        return obj
    def add_fields(self, form, index):
        """Replace the pk/fk field with an InlineForeignKeyField pinned to
        the parent instance."""
        super(BaseInlineFormSet, self).add_fields(form, index)
        if self._pk_field == self.fk:
            name = self._pk_field.name
            kwargs = {'pk_field': True}
        else:
            # The foreign key field might not be on the form, so we poke at the
            # Model field to get the label, since we need that for error messages.
            name = self.fk.name
            kwargs = {
                'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
            }
            if self.fk.rel.field_name != self.fk.rel.to._meta.pk.name:
                kwargs['to_field'] = self.fk.rel.field_name
        form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)
        # Add the generated field to form._meta.fields if it's defined to make
        # sure validation isn't skipped on that field.
        if form._meta.fields:
            if isinstance(form._meta.fields, tuple):
                form._meta.fields = list(form._meta.fields)
            form._meta.fields.append(self.fk.name)
    def get_unique_error_message(self, unique_check):
        # The fk value is identical on every form, so leave it out of the
        # duplicate-data message.
        unique_check = [field for field in unique_check if field != self.fk.name]
        return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
    """
    Finds and returns the ForeignKey from model to parent if there is one
    (returns None if can_fail is True and no such field exists). If fk_name is
    provided, assume it is the name of the ForeignKey field. Unless can_fail
    is True, an exception is raised if there is no ForeignKey from model to
    parent_model.
    """
    # avoid circular import
    from django.db.models import ForeignKey
    opts = model._meta
    parents = parent_model._meta.get_parent_list()
    if fk_name:
        # Look the field up by its explicit name and verify it really is a
        # ForeignKey pointing at parent_model (or one of its parents).
        named = [f for f in opts.fields if f.name == fk_name]
        if not named:
            raise Exception("%s has no field named '%s'" % (model, fk_name))
        fk = named[0]
        if not isinstance(fk, ForeignKey) or \
                (fk.rel.to != parent_model and fk.rel.to not in parents):
            raise Exception("fk_name '%s' is not a ForeignKey to %s" % (fk_name, parent_model))
        return fk
    # No explicit name: discover the single FK that targets parent_model or
    # one of its parents.
    candidates = [
        f for f in opts.fields
        if isinstance(f, ForeignKey)
        and (f.rel.to == parent_model or f.rel.to in parents)
    ]
    if len(candidates) == 1:
        return candidates[0]
    if not candidates:
        if can_fail:
            return None
        raise Exception("%s has no ForeignKey to %s" % (model, parent_model))
    raise Exception("%s has more than 1 ForeignKey to %s" % (model, parent_model))
def inlineformset_factory(parent_model, model, form=ModelForm,
                          formset=BaseInlineFormSet, fk_name=None,
                          fields=None, exclude=None,
                          extra=3, can_order=False, can_delete=True, max_num=None,
                          formfield_callback=None):
    """
    Returns an ``InlineFormSet`` for the given kwargs.
    You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
    to ``parent_model``.
    """
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    # enforce a max_num=1 when the foreign key to the parent model is unique.
    if fk.unique:
        max_num = 1
    FormSet = modelformset_factory(model,
                                   form=form,
                                   formfield_callback=formfield_callback,
                                   formset=formset,
                                   extra=extra,
                                   can_delete=can_delete,
                                   can_order=can_order,
                                   fields=fields,
                                   exclude=exclude,
                                   max_num=max_num)
    # The formset needs the FK so it can pin each child to the parent.
    FormSet.fk = fk
    return FormSet
# Fields #####################################################################
class InlineForeignKeyHiddenInput(HiddenInput):
    # The parent link is fixed for the life of the inline form, so it can
    # never count as "changed" data.
    def _has_changed(self, initial, data):
        return False
class InlineForeignKeyField(Field):
    """
    A basic integer field that deals with validating the given value to a
    given parent instance in an inline.
    """
    default_error_messages = {
        'invalid_choice': _(u'The inline foreign key did not match the parent instance primary key.'),
    }
    def __init__(self, parent_instance, *args, **kwargs):
        # parent_instance: the model object every row of the inline must
        #   point at.
        # pk_field (kwarg): True when this field doubles as the row's pk.
        # to_field (kwarg): attribute of parent_instance the FK targets;
        #   defaults to the primary key.
        self.parent_instance = parent_instance
        self.pk_field = kwargs.pop("pk_field", False)
        self.to_field = kwargs.pop("to_field", None)
        if self.parent_instance is not None:
            if self.to_field:
                kwargs["initial"] = getattr(self.parent_instance, self.to_field)
            else:
                kwargs["initial"] = self.parent_instance.pk
        # The value is fixed, so render it hidden and never require input.
        kwargs["required"] = False
        kwargs["widget"] = InlineForeignKeyHiddenInput
        super(InlineForeignKeyField, self).__init__(*args, **kwargs)
    def clean(self, value):
        """Check the submitted value still matches the parent instance and
        return that instance (or None for an empty pk field)."""
        if value in EMPTY_VALUES:
            if self.pk_field:
                return None
            # if there is no value act as we did before.
            return self.parent_instance
        # ensure the we compare the values as equal types.
        if self.to_field:
            orig = getattr(self.parent_instance, self.to_field)
        else:
            orig = self.parent_instance.pk
        if force_unicode(value) != force_unicode(orig):
            raise ValidationError(self.error_messages['invalid_choice'])
        return self.parent_instance
class ModelChoiceIterator(object):
    """Lazily yields (value, label) choice pairs for a ModelChoiceField,
    re-evaluating the queryset on each iteration unless caching is enabled."""
    def __init__(self, field):
        self.field = field
        self.queryset = field.queryset
    def __iter__(self):
        if self.field.empty_label is not None:
            # The "no selection" option always comes first.
            yield (u"", self.field.empty_label)
        if self.field.cache_choices:
            # Build the choice list once and replay it on later iterations.
            if self.field.choice_cache is None:
                self.field.choice_cache = [
                    self.choice(obj) for obj in self.queryset.all()
                ]
            for choice in self.field.choice_cache:
                yield choice
        else:
            for obj in self.queryset.all():
                yield self.choice(obj)
    def __len__(self):
        # NOTE(review): counts only the queryset; the empty_label entry is
        # not included -- confirm callers expect that.
        return len(self.queryset)
    def choice(self, obj):
        # (submitted value, human-readable label) for one model instance.
        return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
    """A ChoiceField whose choices are a model QuerySet."""
    # This class is a subclass of ChoiceField for purity, but it doesn't
    # actually use any of ChoiceField's implementation.
    default_error_messages = {
        'invalid_choice': _(u'Select a valid choice. That choice is not one of'
            u' the available choices.'),
    }
    def __init__(self, queryset, empty_label=u"---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text=None, to_field_name=None, *args, **kwargs):
        # A required field that already has an initial value needs no
        # "empty" choice.
        if required and (initial is not None):
            self.empty_label = None
        else:
            self.empty_label = empty_label
        self.cache_choices = cache_choices
        # Call Field instead of ChoiceField __init__() because we don't need
        # ChoiceField.__init__().
        Field.__init__(self, required, widget, label, initial, help_text,
                       *args, **kwargs)
        self.queryset = queryset
        self.choice_cache = None
        self.to_field_name = to_field_name
    def __deepcopy__(self, memo):
        result = super(ChoiceField, self).__deepcopy__(memo)
        # Need to force a new ModelChoiceIterator to be created, bug #11183
        result.queryset = result.queryset
        return result
    def _get_queryset(self):
        return self._queryset
    def _set_queryset(self, queryset):
        self._queryset = queryset
        # Refresh the widget's choices whenever the queryset changes.
        self.widget.choices = self.choices
    queryset = property(_get_queryset, _set_queryset)
    # this method will be used to create object labels by the QuerySetIterator.
    # Override it to customize the label.
    def label_from_instance(self, obj):
        """
        This method is used to convert objects into strings; it's used to
        generate the labels for the choices presented by this object. Subclasses
        can override this method to customize the display of the choices.
        """
        return smart_unicode(obj)
    def _get_choices(self):
        # If self._choices is set, then somebody must have manually set
        # the property self.choices. In this case, just return self._choices.
        if hasattr(self, '_choices'):
            return self._choices
        # Otherwise, execute the QuerySet in self.queryset to determine the
        # choices dynamically. Return a fresh ModelChoiceIterator that has not been
        # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
        # time _get_choices() is called (and, thus, each time self.choices is
        # accessed) so that we can ensure the QuerySet has not been consumed. This
        # construct might look complicated but it allows for lazy evaluation of
        # the queryset.
        return ModelChoiceIterator(self)
    choices = property(_get_choices, ChoiceField._set_choices)
    def prepare_value(self, value):
        # Model instances are represented by their pk (or to_field value).
        if hasattr(value, '_meta'):
            if self.to_field_name:
                return value.serializable_value(self.to_field_name)
            else:
                return value.pk
        return super(ModelChoiceField, self).prepare_value(value)
    def to_python(self, value):
        """Resolve a submitted value to a model instance (None when empty);
        raises the 'invalid_choice' error when no matching row exists."""
        if value in EMPTY_VALUES:
            return None
        try:
            key = self.to_field_name or 'pk'
            value = self.queryset.get(**{key: value})
        except (ValueError, self.queryset.model.DoesNotExist):
            raise ValidationError(self.error_messages['invalid_choice'])
        return value
    def validate(self, value):
        # Skip ChoiceField.validate(): membership in the queryset was
        # already established by to_python()'s get() lookup.
        return Field.validate(self, value)
class ModelMultipleChoiceField(ModelChoiceField):
    """A MultipleChoiceField whose choices are a model QuerySet."""
    widget = SelectMultiple
    hidden_widget = MultipleHiddenInput
    default_error_messages = {
        'list': _(u'Enter a list of values.'),
        'invalid_choice': _(u'Select a valid choice. %s is not one of the'
            u' available choices.'),
        'invalid_pk_value': _(u'"%s" is not a valid value for a primary key.')
    }
    def __init__(self, queryset, cache_choices=False, required=True,
                 widget=None, label=None, initial=None,
                 help_text=None, *args, **kwargs):
        # empty_label is forced to None: a multi-select offers no single
        # "no selection" option.
        super(ModelMultipleChoiceField, self).__init__(queryset, None,
            cache_choices, required, widget, label, initial, help_text,
            *args, **kwargs)
    def clean(self, value):
        """Validate a list/tuple of pks (or to_field values) and return the
        matching queryset; raises 'list', 'invalid_pk_value' or
        'invalid_choice' errors as appropriate."""
        if self.required and not value:
            raise ValidationError(self.error_messages['required'])
        elif not self.required and not value:
            return []
        if not isinstance(value, (list, tuple)):
            raise ValidationError(self.error_messages['list'])
        key = self.to_field_name or 'pk'
        for pk in value:
            try:
                # filter() raises ValueError here when pk cannot be coerced
                # to the field's type; the result itself is discarded.
                self.queryset.filter(**{key: pk})
            except ValueError:
                raise ValidationError(self.error_messages['invalid_pk_value'] % pk)
        qs = self.queryset.filter(**{'%s__in' % key: value})
        # Compare as text so ints and strings naming the same pk match.
        pks = set([force_unicode(getattr(o, key)) for o in qs])
        for val in value:
            if force_unicode(val) not in pks:
                raise ValidationError(self.error_messages['invalid_choice'] % val)
        # Since this overrides the inherited ModelChoiceField.clean
        # we run custom validators here
        self.run_validators(value)
        return qs
    def prepare_value(self, value):
        # Convert each element of an iterable via the single-value logic.
        if hasattr(value, '__iter__'):
            return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
        return super(ModelMultipleChoiceField, self).prepare_value(value)
|
gpl-3.0
|
ActiveState/code
|
recipes/Python/456361_Paypal_IPN/recipe-456361.py
|
1
|
3423
|
#!/usr/bin/python24
import cgi
import time
import MySQLdb
from traceback import format_exception
from sys import exc_info
from string import split
from string import strip
from sys import exit
from urllib import urlencode
import urllib2
DATADIR = "/home/user/data/"
PP_URL = "https://www.sandbox.paypal.com/cgi-bin/webscr"
#PP_URL = "https://www.paypal.com/cgi-bin/webscr"
# non testing is www.paypal.com and /cgi-bin/webscr
# note we used the fields custom and option_selection1 and
# option_selection2 to pass item characteristics
def confirm_paypal(f,f1):
# f is the form handle to the cgi form passed by paypal
# f1 is a file handle to a log text file
newparams={}
for key in f.keys():
newparams[key]=f[key].value
newparams["cmd"]="_notify-validate"
params=urlencode(newparams)
f1.write(params + "\n")
f1.write(PP_URL + "\n")
req = urllib2.Request(PP_URL)
req.add_header("Content-type", "application/x-www-form-urlencoded")
fo = urllib2.urlopen(PP_URL, params)
ret = fo.read()
if ret == "VERIFIED":
f1.write(" verified send back ok\n")
print "Status: 200 Ok\n"
else:
f1.write(" ERROR did not verify\n")
exit(0)
return ret
def write_db(f, f1):
    """Insert the buyer/transaction details from IPN form ``f`` into the
    ``names`` table, logging progress and any traceback to ``f1``.

    Any exception is caught and written to the log so the CGI response to
    PayPal is never disturbed.
    """
    f1.write("... updating database\n")
    try:
        invoice = f['invoice'].value
        try:
            street = f['address_street'].value
            city = f['address_city'].value
            zipc = f['address_zip'].value
            country = f["address_country_code"].value
            firstn = f['first_name'].value
            lastn = f['last_name'].value
        except KeyError:
            # Address block is optional in IPN payloads.
            street = ""
            city = ""
            zipc = ""
            country = ""
            firstn = ""
            lastn = ""
        try:
            #some countries don't have states
            state = f['address_state'].value
        except KeyError:
            state = ""
        # Bug fix: "custom" is optional; the original left payer_url unbound
        # (NameError) when it was missing.
        if f.has_key("custom"):
            payer_url = f["custom"].value
        else:
            payer_url = ""
        # Security fix: the IPN payload is untrusted input, so use a
        # parameterized query instead of building SQL by concatenation
        # (SQL injection).
        query = ("INSERT INTO names VALUES "
                 "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        params = (invoice, firstn, lastn, street, city, state, zipc,
                  country, f['payer_email'].value, payer_url,
                  f['option_selection1'].value, f['option_selection2'].value)
        f1.write(query + " " + repr(params) + "\n")
        db = MySQLdb.connect(host="localhost", user="username",
                             passwd="passwd", db="db")
        cursor = db.cursor()
        cursor.execute(query, params)
        # MySQLdb disables autocommit by default; without a commit the row
        # is silently rolled back on transactional tables.
        db.commit()
    except:
        # Log the full traceback; never let an error escape to the CGI layer.
        f1.write(''.join(format_exception(*exc_info())))
if __name__=="__main__":
    # CGI entry point: PayPal POSTs the IPN notification here.  Everything
    # is logged to DATADIR/log1.txt; exceptions are written to the log
    # rather than leaking an error page back to PayPal.
    import cgitb; cgitb.enable()
    #can disable cgitb if not req.
    f1 = open(DATADIR + "log1.txt",'a')
    f1.write("############ " +str(time.ctime(time.time())) + " starting request\n ")
    try:
        f = cgi.FieldStorage()
        f1.write(repr(f) + "\n\n")
        # Verify the notification with PayPal first (exits on failure).
        a = confirm_paypal(f, f1)
        if not f['payment_status'].value == "Completed":
            # We want to respond to anything that isn't a payment - but we won't insert into our database
            f1.write("### Not Completed so going to exit....\n")
            exit(0)
        else:
            f1.write("### Completed so going to write data...\n")
            write_db(f, f1)
    except:
        # Last-resort logging of any unexpected failure.
        f1.write(''.join(format_exception(*exc_info())))
|
mit
|
timmytim/cm_kernel_jflteatt
|
tools/perf/scripts/python/sctop.py
|
11180
|
1924
|
# system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Command-line handling: optional [comm] and/or [interval] arguments.
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
    sys.exit(usage)
if len(sys.argv) > 2:
    # Both given: comm first, then the refresh interval in seconds.
    for_comm = sys.argv[1]
    interval = int(sys.argv[2])
elif len(sys.argv) > 1:
    # A single argument is an interval if numeric, otherwise a comm name.
    try:
        interval = int(sys.argv[1])
    except ValueError:
        for_comm = sys.argv[1]
        interval = default_interval
# syscall id -> hit count; autodict creates entries on first access.
syscalls = autodict()
def trace_begin():
    """perf hook: called once before event processing starts.

    Spawns the background thread that periodically prints and resets the
    system-wide syscall totals.
    """
    thread.start_new_thread(print_syscall_totals, (interval,))
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    """perf hook: called for every raw_syscalls:sys_enter event; tallies the
    syscall ``id`` into the global ``syscalls`` counter, optionally filtered
    to the command named by ``for_comm``."""
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: the autodict entry is not yet a number,
        # so += raises and we seed the count instead.
        syscalls[id] = 1
def print_syscall_totals(interval):
    """Background-thread loop: every ``interval`` seconds, clears the
    terminal, prints per-syscall event counts (highest first), then resets
    the counters."""
    while 1:
        clear_term()
        if for_comm is not None:
            print "\nsyscall events for %s:\n\n" % (for_comm),
        else:
            print "\nsyscall events:\n\n",
        print "%-40s %10s\n" % ("event", "count"),
        print "%-40s %10s\n" % ("----------------------------------------", \
                                 "----------"),
        # Sort by (count, id) descending so the busiest syscalls lead.
        for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                              reverse = True):
            try:
                print "%-40s %10d\n" % (syscall_name(id), val),
            except TypeError:
                # syscall_name() may fail for unknown ids; skip those rows.
                pass
        syscalls.clear()
        time.sleep(interval)
|
gpl-2.0
|
kmatzen/ansible
|
test/units/parsing/test_data_loader.py
|
58
|
3231
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import PY3
from yaml.scanner import ScannerError
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, mock_open
from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.yaml.objects import AnsibleMapping
class TestDataLoader(unittest.TestCase):
    """Tests for DataLoader.load_from_file with the on-disk read mocked out,
    covering JSON input, YAML input, and invalid YAML."""
    def setUp(self):
        self._loader = DataLoader()
    def tearDown(self):
        pass
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_def):
        # JSON is a subset of YAML, so the loader parses it transparently.
        mock_def.return_value = ("""{"a": 1, "b": 2, "c": 3}""", True)
        output = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(output, dict(a=1,b=2,c=3))
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_def):
        mock_def.return_value = ("""
            a: 1
            b: 2
            c: 3
        """, True)
        output = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(output, dict(a=1,b=2,c=3))
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_def):
        # Malformed YAML must surface as AnsibleParserError, not a raw
        # yaml.ScannerError.
        mock_def.return_value = ("""
            TEXT:
                ***
                   NOT VALID
        """, True)
        self.assertRaises(AnsibleParserError, self._loader.load_from_file, 'dummy_yaml_bad.txt')
class TestDataLoaderWithVault(unittest.TestCase):
    """Tests that a vault-encrypted file is decrypted and parsed when the
    loader has the vault password set."""
    def setUp(self):
        self._loader = DataLoader()
        self._loader.set_vault_password('ansible')
    def tearDown(self):
        pass
    @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        # Vault 1.1 payload encrypting {'foo': 'bar'} with password 'ansible'.
        vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
        33343734386261666161626433386662623039356366656637303939306563376130623138626165
        6436333766346533353463636566313332623130383662340a393835656134633665333861393331
        37666233346464636263636530626332623035633135363732623332313534306438393366323966
        3135306561356164310a343937653834643433343734653137383339323330626437313562306630
        3035
        """
        # mock_open patches the right builtin for the running interpreter.
        if PY3:
            builtins_name = 'builtins'
        else:
            builtins_name = '__builtin__'
        with patch(builtins_name + '.open', mock_open(read_data=vaulted_data)):
            output = self._loader.load_from_file('dummy_vault.txt')
            self.assertEqual(output, dict(foo='bar'))
|
gpl-3.0
|
rotula/pdfminer
|
pdfminer/glyphlist.py
|
2
|
121489
|
""" Mappings from Adobe glyph names to Unicode characters.
In some CMap tables, Adobe glyph names are used for specifying
Unicode characters instead of using decimal/hex character code.
The following data was obtained by running
$ wget https://partners.adobe.com/public/developer/en/opentype/glyphlist.txt
$ python tools/conv_glyphlist.py glyphlist.txt > glyphlist.py
"""
# ###################################################################################
# Copyright (c) 1997,1998,2002,2007 Adobe Systems Incorporated
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this documentation file to use, copy, publish, distribute,
# sublicense, and/or sell copies of the documentation, and to permit
# others to do the same, provided that:
# - No modification, editing or other alteration of this document is
# allowed; and
# - The above copyright notice and this permission notice shall be
# included in all copies of the documentation.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this documentation file, to create their own derivative works
# from the content of this document to use, copy, publish, distribute,
# sublicense, and/or sell the derivative works, and to permit others to do
# the same, provided that the derived work is not represented as being a
# copy or version of this document.
#
# Adobe shall not be liable to any party for any loss of revenue or profit
# or for indirect, incidental, special, consequential, or other similar
# damages, whether based on tort (including without limitation negligence
# or strict liability), contract or other legal or equitable grounds even
# if Adobe has been advised or had reason to know of the possibility of
# such damages. The Adobe materials are provided on an "AS IS" basis.
# Adobe specifically disclaims all express, statutory, or implied
# warranties relating to the Adobe materials, including but not limited to
# those concerning merchantability or fitness for a particular purpose or
# non-infringement of any third party rights regarding the Adobe
# materials.
# ###################################################################################
# Name: Adobe Glyph List
# Table version: 2.0
# Date: September 20, 2002
#
# See http://partners.adobe.com/asn/developer/typeforum/unicodegn.html
#
# Format: Semicolon-delimited fields:
# (1) glyph name
# (2) Unicode scalar value
glyphname2unicode = {
'A': u'\u0041',
'AE': u'\u00C6',
'AEacute': u'\u01FC',
'AEmacron': u'\u01E2',
'AEsmall': u'\uF7E6',
'Aacute': u'\u00C1',
'Aacutesmall': u'\uF7E1',
'Abreve': u'\u0102',
'Abreveacute': u'\u1EAE',
'Abrevecyrillic': u'\u04D0',
'Abrevedotbelow': u'\u1EB6',
'Abrevegrave': u'\u1EB0',
'Abrevehookabove': u'\u1EB2',
'Abrevetilde': u'\u1EB4',
'Acaron': u'\u01CD',
'Acircle': u'\u24B6',
'Acircumflex': u'\u00C2',
'Acircumflexacute': u'\u1EA4',
'Acircumflexdotbelow': u'\u1EAC',
'Acircumflexgrave': u'\u1EA6',
'Acircumflexhookabove': u'\u1EA8',
'Acircumflexsmall': u'\uF7E2',
'Acircumflextilde': u'\u1EAA',
'Acute': u'\uF6C9',
'Acutesmall': u'\uF7B4',
'Acyrillic': u'\u0410',
'Adblgrave': u'\u0200',
'Adieresis': u'\u00C4',
'Adieresiscyrillic': u'\u04D2',
'Adieresismacron': u'\u01DE',
'Adieresissmall': u'\uF7E4',
'Adotbelow': u'\u1EA0',
'Adotmacron': u'\u01E0',
'Agrave': u'\u00C0',
'Agravesmall': u'\uF7E0',
'Ahookabove': u'\u1EA2',
'Aiecyrillic': u'\u04D4',
'Ainvertedbreve': u'\u0202',
'Alpha': u'\u0391',
'Alphatonos': u'\u0386',
'Amacron': u'\u0100',
'Amonospace': u'\uFF21',
'Aogonek': u'\u0104',
'Aring': u'\u00C5',
'Aringacute': u'\u01FA',
'Aringbelow': u'\u1E00',
'Aringsmall': u'\uF7E5',
'Asmall': u'\uF761',
'Atilde': u'\u00C3',
'Atildesmall': u'\uF7E3',
'Aybarmenian': u'\u0531',
'B': u'\u0042',
'Bcircle': u'\u24B7',
'Bdotaccent': u'\u1E02',
'Bdotbelow': u'\u1E04',
'Becyrillic': u'\u0411',
'Benarmenian': u'\u0532',
'Beta': u'\u0392',
'Bhook': u'\u0181',
'Blinebelow': u'\u1E06',
'Bmonospace': u'\uFF22',
'Brevesmall': u'\uF6F4',
'Bsmall': u'\uF762',
'Btopbar': u'\u0182',
'C': u'\u0043',
'Caarmenian': u'\u053E',
'Cacute': u'\u0106',
'Caron': u'\uF6CA',
'Caronsmall': u'\uF6F5',
'Ccaron': u'\u010C',
'Ccedilla': u'\u00C7',
'Ccedillaacute': u'\u1E08',
'Ccedillasmall': u'\uF7E7',
'Ccircle': u'\u24B8',
'Ccircumflex': u'\u0108',
'Cdot': u'\u010A',
'Cdotaccent': u'\u010A',
'Cedillasmall': u'\uF7B8',
'Chaarmenian': u'\u0549',
'Cheabkhasiancyrillic': u'\u04BC',
'Checyrillic': u'\u0427',
'Chedescenderabkhasiancyrillic': u'\u04BE',
'Chedescendercyrillic': u'\u04B6',
'Chedieresiscyrillic': u'\u04F4',
'Cheharmenian': u'\u0543',
'Chekhakassiancyrillic': u'\u04CB',
'Cheverticalstrokecyrillic': u'\u04B8',
'Chi': u'\u03A7',
'Chook': u'\u0187',
'Circumflexsmall': u'\uF6F6',
'Cmonospace': u'\uFF23',
'Coarmenian': u'\u0551',
'Csmall': u'\uF763',
'D': u'\u0044',
'DZ': u'\u01F1',
'DZcaron': u'\u01C4',
'Daarmenian': u'\u0534',
'Dafrican': u'\u0189',
'Dcaron': u'\u010E',
'Dcedilla': u'\u1E10',
'Dcircle': u'\u24B9',
'Dcircumflexbelow': u'\u1E12',
'Dcroat': u'\u0110',
'Ddotaccent': u'\u1E0A',
'Ddotbelow': u'\u1E0C',
'Decyrillic': u'\u0414',
'Deicoptic': u'\u03EE',
'Delta': u'\u2206',
'Deltagreek': u'\u0394',
'Dhook': u'\u018A',
'Dieresis': u'\uF6CB',
'DieresisAcute': u'\uF6CC',
'DieresisGrave': u'\uF6CD',
'Dieresissmall': u'\uF7A8',
'Digammagreek': u'\u03DC',
'Djecyrillic': u'\u0402',
'Dlinebelow': u'\u1E0E',
'Dmonospace': u'\uFF24',
'Dotaccentsmall': u'\uF6F7',
'Dslash': u'\u0110',
'Dsmall': u'\uF764',
'Dtopbar': u'\u018B',
'Dz': u'\u01F2',
'Dzcaron': u'\u01C5',
'Dzeabkhasiancyrillic': u'\u04E0',
'Dzecyrillic': u'\u0405',
'Dzhecyrillic': u'\u040F',
'E': u'\u0045',
'Eacute': u'\u00C9',
'Eacutesmall': u'\uF7E9',
'Ebreve': u'\u0114',
'Ecaron': u'\u011A',
'Ecedillabreve': u'\u1E1C',
'Echarmenian': u'\u0535',
'Ecircle': u'\u24BA',
'Ecircumflex': u'\u00CA',
'Ecircumflexacute': u'\u1EBE',
'Ecircumflexbelow': u'\u1E18',
'Ecircumflexdotbelow': u'\u1EC6',
'Ecircumflexgrave': u'\u1EC0',
'Ecircumflexhookabove': u'\u1EC2',
'Ecircumflexsmall': u'\uF7EA',
'Ecircumflextilde': u'\u1EC4',
'Ecyrillic': u'\u0404',
'Edblgrave': u'\u0204',
'Edieresis': u'\u00CB',
'Edieresissmall': u'\uF7EB',
'Edot': u'\u0116',
'Edotaccent': u'\u0116',
'Edotbelow': u'\u1EB8',
'Efcyrillic': u'\u0424',
'Egrave': u'\u00C8',
'Egravesmall': u'\uF7E8',
'Eharmenian': u'\u0537',
'Ehookabove': u'\u1EBA',
'Eightroman': u'\u2167',
'Einvertedbreve': u'\u0206',
'Eiotifiedcyrillic': u'\u0464',
'Elcyrillic': u'\u041B',
'Elevenroman': u'\u216A',
'Emacron': u'\u0112',
'Emacronacute': u'\u1E16',
'Emacrongrave': u'\u1E14',
'Emcyrillic': u'\u041C',
'Emonospace': u'\uFF25',
'Encyrillic': u'\u041D',
'Endescendercyrillic': u'\u04A2',
'Eng': u'\u014A',
'Enghecyrillic': u'\u04A4',
'Enhookcyrillic': u'\u04C7',
'Eogonek': u'\u0118',
'Eopen': u'\u0190',
'Epsilon': u'\u0395',
'Epsilontonos': u'\u0388',
'Ercyrillic': u'\u0420',
'Ereversed': u'\u018E',
'Ereversedcyrillic': u'\u042D',
'Escyrillic': u'\u0421',
'Esdescendercyrillic': u'\u04AA',
'Esh': u'\u01A9',
'Esmall': u'\uF765',
'Eta': u'\u0397',
'Etarmenian': u'\u0538',
'Etatonos': u'\u0389',
'Eth': u'\u00D0',
'Ethsmall': u'\uF7F0',
'Etilde': u'\u1EBC',
'Etildebelow': u'\u1E1A',
'Euro': u'\u20AC',
'Ezh': u'\u01B7',
'Ezhcaron': u'\u01EE',
'Ezhreversed': u'\u01B8',
'F': u'\u0046',
'Fcircle': u'\u24BB',
'Fdotaccent': u'\u1E1E',
'Feharmenian': u'\u0556',
'Feicoptic': u'\u03E4',
'Fhook': u'\u0191',
'Fitacyrillic': u'\u0472',
'Fiveroman': u'\u2164',
'Fmonospace': u'\uFF26',
'Fourroman': u'\u2163',
'Fsmall': u'\uF766',
'G': u'\u0047',
'GBsquare': u'\u3387',
'Gacute': u'\u01F4',
'Gamma': u'\u0393',
'Gammaafrican': u'\u0194',
'Gangiacoptic': u'\u03EA',
'Gbreve': u'\u011E',
'Gcaron': u'\u01E6',
'Gcedilla': u'\u0122',
'Gcircle': u'\u24BC',
'Gcircumflex': u'\u011C',
'Gcommaaccent': u'\u0122',
'Gdot': u'\u0120',
'Gdotaccent': u'\u0120',
'Gecyrillic': u'\u0413',
'Ghadarmenian': u'\u0542',
'Ghemiddlehookcyrillic': u'\u0494',
'Ghestrokecyrillic': u'\u0492',
'Gheupturncyrillic': u'\u0490',
'Ghook': u'\u0193',
'Gimarmenian': u'\u0533',
'Gjecyrillic': u'\u0403',
'Gmacron': u'\u1E20',
'Gmonospace': u'\uFF27',
'Grave': u'\uF6CE',
'Gravesmall': u'\uF760',
'Gsmall': u'\uF767',
'Gsmallhook': u'\u029B',
'Gstroke': u'\u01E4',
'H': u'\u0048',
'H18533': u'\u25CF',
'H18543': u'\u25AA',
'H18551': u'\u25AB',
'H22073': u'\u25A1',
'HPsquare': u'\u33CB',
'Haabkhasiancyrillic': u'\u04A8',
'Hadescendercyrillic': u'\u04B2',
'Hardsigncyrillic': u'\u042A',
'Hbar': u'\u0126',
'Hbrevebelow': u'\u1E2A',
'Hcedilla': u'\u1E28',
'Hcircle': u'\u24BD',
'Hcircumflex': u'\u0124',
'Hdieresis': u'\u1E26',
'Hdotaccent': u'\u1E22',
'Hdotbelow': u'\u1E24',
'Hmonospace': u'\uFF28',
'Hoarmenian': u'\u0540',
'Horicoptic': u'\u03E8',
'Hsmall': u'\uF768',
'Hungarumlaut': u'\uF6CF',
'Hungarumlautsmall': u'\uF6F8',
'Hzsquare': u'\u3390',
'I': u'\u0049',
'IAcyrillic': u'\u042F',
'IJ': u'\u0132',
'IUcyrillic': u'\u042E',
'Iacute': u'\u00CD',
'Iacutesmall': u'\uF7ED',
'Ibreve': u'\u012C',
'Icaron': u'\u01CF',
'Icircle': u'\u24BE',
'Icircumflex': u'\u00CE',
'Icircumflexsmall': u'\uF7EE',
'Icyrillic': u'\u0406',
'Idblgrave': u'\u0208',
'Idieresis': u'\u00CF',
'Idieresisacute': u'\u1E2E',
'Idieresiscyrillic': u'\u04E4',
'Idieresissmall': u'\uF7EF',
'Idot': u'\u0130',
'Idotaccent': u'\u0130',
'Idotbelow': u'\u1ECA',
'Iebrevecyrillic': u'\u04D6',
'Iecyrillic': u'\u0415',
'Ifraktur': u'\u2111',
'Igrave': u'\u00CC',
'Igravesmall': u'\uF7EC',
'Ihookabove': u'\u1EC8',
'Iicyrillic': u'\u0418',
'Iinvertedbreve': u'\u020A',
'Iishortcyrillic': u'\u0419',
'Imacron': u'\u012A',
'Imacroncyrillic': u'\u04E2',
'Imonospace': u'\uFF29',
'Iniarmenian': u'\u053B',
'Iocyrillic': u'\u0401',
'Iogonek': u'\u012E',
'Iota': u'\u0399',
'Iotaafrican': u'\u0196',
'Iotadieresis': u'\u03AA',
'Iotatonos': u'\u038A',
'Ismall': u'\uF769',
'Istroke': u'\u0197',
'Itilde': u'\u0128',
'Itildebelow': u'\u1E2C',
'Izhitsacyrillic': u'\u0474',
'Izhitsadblgravecyrillic': u'\u0476',
'J': u'\u004A',
'Jaarmenian': u'\u0541',
'Jcircle': u'\u24BF',
'Jcircumflex': u'\u0134',
'Jecyrillic': u'\u0408',
'Jheharmenian': u'\u054B',
'Jmonospace': u'\uFF2A',
'Jsmall': u'\uF76A',
'K': u'\u004B',
'KBsquare': u'\u3385',
'KKsquare': u'\u33CD',
'Kabashkircyrillic': u'\u04A0',
'Kacute': u'\u1E30',
'Kacyrillic': u'\u041A',
'Kadescendercyrillic': u'\u049A',
'Kahookcyrillic': u'\u04C3',
'Kappa': u'\u039A',
'Kastrokecyrillic': u'\u049E',
'Kaverticalstrokecyrillic': u'\u049C',
'Kcaron': u'\u01E8',
'Kcedilla': u'\u0136',
'Kcircle': u'\u24C0',
'Kcommaaccent': u'\u0136',
'Kdotbelow': u'\u1E32',
'Keharmenian': u'\u0554',
'Kenarmenian': u'\u053F',
'Khacyrillic': u'\u0425',
'Kheicoptic': u'\u03E6',
'Khook': u'\u0198',
'Kjecyrillic': u'\u040C',
'Klinebelow': u'\u1E34',
'Kmonospace': u'\uFF2B',
'Koppacyrillic': u'\u0480',
'Koppagreek': u'\u03DE',
'Ksicyrillic': u'\u046E',
'Ksmall': u'\uF76B',
'L': u'\u004C',
'LJ': u'\u01C7',
'LL': u'\uF6BF',
'Lacute': u'\u0139',
'Lambda': u'\u039B',
'Lcaron': u'\u013D',
'Lcedilla': u'\u013B',
'Lcircle': u'\u24C1',
'Lcircumflexbelow': u'\u1E3C',
'Lcommaaccent': u'\u013B',
'Ldot': u'\u013F',
'Ldotaccent': u'\u013F',
'Ldotbelow': u'\u1E36',
'Ldotbelowmacron': u'\u1E38',
'Liwnarmenian': u'\u053C',
'Lj': u'\u01C8',
'Ljecyrillic': u'\u0409',
'Llinebelow': u'\u1E3A',
'Lmonospace': u'\uFF2C',
'Lslash': u'\u0141',
'Lslashsmall': u'\uF6F9',
'Lsmall': u'\uF76C',
'M': u'\u004D',
'MBsquare': u'\u3386',
'Macron': u'\uF6D0',
'Macronsmall': u'\uF7AF',
'Macute': u'\u1E3E',
'Mcircle': u'\u24C2',
'Mdotaccent': u'\u1E40',
'Mdotbelow': u'\u1E42',
'Menarmenian': u'\u0544',
'Mmonospace': u'\uFF2D',
'Msmall': u'\uF76D',
'Mturned': u'\u019C',
'Mu': u'\u039C',
'N': u'\u004E',
'NJ': u'\u01CA',
'Nacute': u'\u0143',
'Ncaron': u'\u0147',
'Ncedilla': u'\u0145',
'Ncircle': u'\u24C3',
'Ncircumflexbelow': u'\u1E4A',
'Ncommaaccent': u'\u0145',
'Ndotaccent': u'\u1E44',
'Ndotbelow': u'\u1E46',
'Nhookleft': u'\u019D',
'Nineroman': u'\u2168',
'Nj': u'\u01CB',
'Njecyrillic': u'\u040A',
'Nlinebelow': u'\u1E48',
'Nmonospace': u'\uFF2E',
'Nowarmenian': u'\u0546',
'Nsmall': u'\uF76E',
'Ntilde': u'\u00D1',
'Ntildesmall': u'\uF7F1',
'Nu': u'\u039D',
'O': u'\u004F',
'OE': u'\u0152',
'OEsmall': u'\uF6FA',
'Oacute': u'\u00D3',
'Oacutesmall': u'\uF7F3',
'Obarredcyrillic': u'\u04E8',
'Obarreddieresiscyrillic': u'\u04EA',
'Obreve': u'\u014E',
'Ocaron': u'\u01D1',
'Ocenteredtilde': u'\u019F',
'Ocircle': u'\u24C4',
'Ocircumflex': u'\u00D4',
'Ocircumflexacute': u'\u1ED0',
'Ocircumflexdotbelow': u'\u1ED8',
'Ocircumflexgrave': u'\u1ED2',
'Ocircumflexhookabove': u'\u1ED4',
'Ocircumflexsmall': u'\uF7F4',
'Ocircumflextilde': u'\u1ED6',
'Ocyrillic': u'\u041E',
'Odblacute': u'\u0150',
'Odblgrave': u'\u020C',
'Odieresis': u'\u00D6',
'Odieresiscyrillic': u'\u04E6',
'Odieresissmall': u'\uF7F6',
'Odotbelow': u'\u1ECC',
'Ogoneksmall': u'\uF6FB',
'Ograve': u'\u00D2',
'Ogravesmall': u'\uF7F2',
'Oharmenian': u'\u0555',
'Ohm': u'\u2126',
'Ohookabove': u'\u1ECE',
'Ohorn': u'\u01A0',
'Ohornacute': u'\u1EDA',
'Ohorndotbelow': u'\u1EE2',
'Ohorngrave': u'\u1EDC',
'Ohornhookabove': u'\u1EDE',
'Ohorntilde': u'\u1EE0',
'Ohungarumlaut': u'\u0150',
'Oi': u'\u01A2',
'Oinvertedbreve': u'\u020E',
'Omacron': u'\u014C',
'Omacronacute': u'\u1E52',
'Omacrongrave': u'\u1E50',
'Omega': u'\u2126',
'Omegacyrillic': u'\u0460',
'Omegagreek': u'\u03A9',
'Omegaroundcyrillic': u'\u047A',
'Omegatitlocyrillic': u'\u047C',
'Omegatonos': u'\u038F',
'Omicron': u'\u039F',
'Omicrontonos': u'\u038C',
'Omonospace': u'\uFF2F',
'Oneroman': u'\u2160',
'Oogonek': u'\u01EA',
'Oogonekmacron': u'\u01EC',
'Oopen': u'\u0186',
'Oslash': u'\u00D8',
'Oslashacute': u'\u01FE',
'Oslashsmall': u'\uF7F8',
'Osmall': u'\uF76F',
'Ostrokeacute': u'\u01FE',
'Otcyrillic': u'\u047E',
'Otilde': u'\u00D5',
'Otildeacute': u'\u1E4C',
'Otildedieresis': u'\u1E4E',
'Otildesmall': u'\uF7F5',
'P': u'\u0050',
'Pacute': u'\u1E54',
'Pcircle': u'\u24C5',
'Pdotaccent': u'\u1E56',
'Pecyrillic': u'\u041F',
'Peharmenian': u'\u054A',
'Pemiddlehookcyrillic': u'\u04A6',
'Phi': u'\u03A6',
'Phook': u'\u01A4',
'Pi': u'\u03A0',
'Piwrarmenian': u'\u0553',
'Pmonospace': u'\uFF30',
'Psi': u'\u03A8',
'Psicyrillic': u'\u0470',
'Psmall': u'\uF770',
'Q': u'\u0051',
'Qcircle': u'\u24C6',
'Qmonospace': u'\uFF31',
'Qsmall': u'\uF771',
'R': u'\u0052',
'Raarmenian': u'\u054C',
'Racute': u'\u0154',
'Rcaron': u'\u0158',
'Rcedilla': u'\u0156',
'Rcircle': u'\u24C7',
'Rcommaaccent': u'\u0156',
'Rdblgrave': u'\u0210',
'Rdotaccent': u'\u1E58',
'Rdotbelow': u'\u1E5A',
'Rdotbelowmacron': u'\u1E5C',
'Reharmenian': u'\u0550',
'Rfraktur': u'\u211C',
'Rho': u'\u03A1',
'Ringsmall': u'\uF6FC',
'Rinvertedbreve': u'\u0212',
'Rlinebelow': u'\u1E5E',
'Rmonospace': u'\uFF32',
'Rsmall': u'\uF772',
'Rsmallinverted': u'\u0281',
'Rsmallinvertedsuperior': u'\u02B6',
'S': u'\u0053',
'SF010000': u'\u250C',
'SF020000': u'\u2514',
'SF030000': u'\u2510',
'SF040000': u'\u2518',
'SF050000': u'\u253C',
'SF060000': u'\u252C',
'SF070000': u'\u2534',
'SF080000': u'\u251C',
'SF090000': u'\u2524',
'SF100000': u'\u2500',
'SF110000': u'\u2502',
'SF190000': u'\u2561',
'SF200000': u'\u2562',
'SF210000': u'\u2556',
'SF220000': u'\u2555',
'SF230000': u'\u2563',
'SF240000': u'\u2551',
'SF250000': u'\u2557',
'SF260000': u'\u255D',
'SF270000': u'\u255C',
'SF280000': u'\u255B',
'SF360000': u'\u255E',
'SF370000': u'\u255F',
'SF380000': u'\u255A',
'SF390000': u'\u2554',
'SF400000': u'\u2569',
'SF410000': u'\u2566',
'SF420000': u'\u2560',
'SF430000': u'\u2550',
'SF440000': u'\u256C',
'SF450000': u'\u2567',
'SF460000': u'\u2568',
'SF470000': u'\u2564',
'SF480000': u'\u2565',
'SF490000': u'\u2559',
'SF500000': u'\u2558',
'SF510000': u'\u2552',
'SF520000': u'\u2553',
'SF530000': u'\u256B',
'SF540000': u'\u256A',
'Sacute': u'\u015A',
'Sacutedotaccent': u'\u1E64',
'Sampigreek': u'\u03E0',
'Scaron': u'\u0160',
'Scarondotaccent': u'\u1E66',
'Scaronsmall': u'\uF6FD',
'Scedilla': u'\u015E',
'Schwa': u'\u018F',
'Schwacyrillic': u'\u04D8',
'Schwadieresiscyrillic': u'\u04DA',
'Scircle': u'\u24C8',
'Scircumflex': u'\u015C',
'Scommaaccent': u'\u0218',
'Sdotaccent': u'\u1E60',
'Sdotbelow': u'\u1E62',
'Sdotbelowdotaccent': u'\u1E68',
'Seharmenian': u'\u054D',
'Sevenroman': u'\u2166',
'Shaarmenian': u'\u0547',
'Shacyrillic': u'\u0428',
'Shchacyrillic': u'\u0429',
'Sheicoptic': u'\u03E2',
'Shhacyrillic': u'\u04BA',
'Shimacoptic': u'\u03EC',
'Sigma': u'\u03A3',
'Sixroman': u'\u2165',
'Smonospace': u'\uFF33',
'Softsigncyrillic': u'\u042C',
'Ssmall': u'\uF773',
'Stigmagreek': u'\u03DA',
'T': u'\u0054',
'Tau': u'\u03A4',
'Tbar': u'\u0166',
'Tcaron': u'\u0164',
'Tcedilla': u'\u0162',
'Tcircle': u'\u24C9',
'Tcircumflexbelow': u'\u1E70',
'Tcommaaccent': u'\u0162',
'Tdotaccent': u'\u1E6A',
'Tdotbelow': u'\u1E6C',
'Tecyrillic': u'\u0422',
'Tedescendercyrillic': u'\u04AC',
'Tenroman': u'\u2169',
'Tetsecyrillic': u'\u04B4',
'Theta': u'\u0398',
'Thook': u'\u01AC',
'Thorn': u'\u00DE',
'Thornsmall': u'\uF7FE',
'Threeroman': u'\u2162',
'Tildesmall': u'\uF6FE',
'Tiwnarmenian': u'\u054F',
'Tlinebelow': u'\u1E6E',
'Tmonospace': u'\uFF34',
'Toarmenian': u'\u0539',
'Tonefive': u'\u01BC',
'Tonesix': u'\u0184',
'Tonetwo': u'\u01A7',
'Tretroflexhook': u'\u01AE',
'Tsecyrillic': u'\u0426',
'Tshecyrillic': u'\u040B',
'Tsmall': u'\uF774',
'Twelveroman': u'\u216B',
'Tworoman': u'\u2161',
'U': u'\u0055',
'Uacute': u'\u00DA',
'Uacutesmall': u'\uF7FA',
'Ubreve': u'\u016C',
'Ucaron': u'\u01D3',
'Ucircle': u'\u24CA',
'Ucircumflex': u'\u00DB',
'Ucircumflexbelow': u'\u1E76',
'Ucircumflexsmall': u'\uF7FB',
'Ucyrillic': u'\u0423',
'Udblacute': u'\u0170',
'Udblgrave': u'\u0214',
'Udieresis': u'\u00DC',
'Udieresisacute': u'\u01D7',
'Udieresisbelow': u'\u1E72',
'Udieresiscaron': u'\u01D9',
'Udieresiscyrillic': u'\u04F0',
'Udieresisgrave': u'\u01DB',
'Udieresismacron': u'\u01D5',
'Udieresissmall': u'\uF7FC',
'Udotbelow': u'\u1EE4',
'Ugrave': u'\u00D9',
'Ugravesmall': u'\uF7F9',
'Uhookabove': u'\u1EE6',
'Uhorn': u'\u01AF',
'Uhornacute': u'\u1EE8',
'Uhorndotbelow': u'\u1EF0',
'Uhorngrave': u'\u1EEA',
'Uhornhookabove': u'\u1EEC',
'Uhorntilde': u'\u1EEE',
'Uhungarumlaut': u'\u0170',
'Uhungarumlautcyrillic': u'\u04F2',
'Uinvertedbreve': u'\u0216',
'Ukcyrillic': u'\u0478',
'Umacron': u'\u016A',
'Umacroncyrillic': u'\u04EE',
'Umacrondieresis': u'\u1E7A',
'Umonospace': u'\uFF35',
'Uogonek': u'\u0172',
'Upsilon': u'\u03A5',
'Upsilon1': u'\u03D2',
'Upsilonacutehooksymbolgreek': u'\u03D3',
'Upsilonafrican': u'\u01B1',
'Upsilondieresis': u'\u03AB',
'Upsilondieresishooksymbolgreek': u'\u03D4',
'Upsilonhooksymbol': u'\u03D2',
'Upsilontonos': u'\u038E',
'Uring': u'\u016E',
'Ushortcyrillic': u'\u040E',
'Usmall': u'\uF775',
'Ustraightcyrillic': u'\u04AE',
'Ustraightstrokecyrillic': u'\u04B0',
'Utilde': u'\u0168',
'Utildeacute': u'\u1E78',
'Utildebelow': u'\u1E74',
'V': u'\u0056',
'Vcircle': u'\u24CB',
'Vdotbelow': u'\u1E7E',
'Vecyrillic': u'\u0412',
'Vewarmenian': u'\u054E',
'Vhook': u'\u01B2',
'Vmonospace': u'\uFF36',
'Voarmenian': u'\u0548',
'Vsmall': u'\uF776',
'Vtilde': u'\u1E7C',
'W': u'\u0057',
'Wacute': u'\u1E82',
'Wcircle': u'\u24CC',
'Wcircumflex': u'\u0174',
'Wdieresis': u'\u1E84',
'Wdotaccent': u'\u1E86',
'Wdotbelow': u'\u1E88',
'Wgrave': u'\u1E80',
'Wmonospace': u'\uFF37',
'Wsmall': u'\uF777',
'X': u'\u0058',
'Xcircle': u'\u24CD',
'Xdieresis': u'\u1E8C',
'Xdotaccent': u'\u1E8A',
'Xeharmenian': u'\u053D',
'Xi': u'\u039E',
'Xmonospace': u'\uFF38',
'Xsmall': u'\uF778',
'Y': u'\u0059',
'Yacute': u'\u00DD',
'Yacutesmall': u'\uF7FD',
'Yatcyrillic': u'\u0462',
'Ycircle': u'\u24CE',
'Ycircumflex': u'\u0176',
'Ydieresis': u'\u0178',
'Ydieresissmall': u'\uF7FF',
'Ydotaccent': u'\u1E8E',
'Ydotbelow': u'\u1EF4',
'Yericyrillic': u'\u042B',
'Yerudieresiscyrillic': u'\u04F8',
'Ygrave': u'\u1EF2',
'Yhook': u'\u01B3',
'Yhookabove': u'\u1EF6',
'Yiarmenian': u'\u0545',
'Yicyrillic': u'\u0407',
'Yiwnarmenian': u'\u0552',
'Ymonospace': u'\uFF39',
'Ysmall': u'\uF779',
'Ytilde': u'\u1EF8',
'Yusbigcyrillic': u'\u046A',
'Yusbigiotifiedcyrillic': u'\u046C',
'Yuslittlecyrillic': u'\u0466',
'Yuslittleiotifiedcyrillic': u'\u0468',
'Z': u'\u005A',
'Zaarmenian': u'\u0536',
'Zacute': u'\u0179',
'Zcaron': u'\u017D',
'Zcaronsmall': u'\uF6FF',
'Zcircle': u'\u24CF',
'Zcircumflex': u'\u1E90',
'Zdot': u'\u017B',
'Zdotaccent': u'\u017B',
'Zdotbelow': u'\u1E92',
'Zecyrillic': u'\u0417',
'Zedescendercyrillic': u'\u0498',
'Zedieresiscyrillic': u'\u04DE',
'Zeta': u'\u0396',
'Zhearmenian': u'\u053A',
'Zhebrevecyrillic': u'\u04C1',
'Zhecyrillic': u'\u0416',
'Zhedescendercyrillic': u'\u0496',
'Zhedieresiscyrillic': u'\u04DC',
'Zlinebelow': u'\u1E94',
'Zmonospace': u'\uFF3A',
'Zsmall': u'\uF77A',
'Zstroke': u'\u01B5',
'a': u'\u0061',
'aabengali': u'\u0986',
'aacute': u'\u00E1',
'aadeva': u'\u0906',
'aagujarati': u'\u0A86',
'aagurmukhi': u'\u0A06',
'aamatragurmukhi': u'\u0A3E',
'aarusquare': u'\u3303',
'aavowelsignbengali': u'\u09BE',
'aavowelsigndeva': u'\u093E',
'aavowelsigngujarati': u'\u0ABE',
'abbreviationmarkarmenian': u'\u055F',
'abbreviationsigndeva': u'\u0970',
'abengali': u'\u0985',
'abopomofo': u'\u311A',
'abreve': u'\u0103',
'abreveacute': u'\u1EAF',
'abrevecyrillic': u'\u04D1',
'abrevedotbelow': u'\u1EB7',
'abrevegrave': u'\u1EB1',
'abrevehookabove': u'\u1EB3',
'abrevetilde': u'\u1EB5',
'acaron': u'\u01CE',
'acircle': u'\u24D0',
'acircumflex': u'\u00E2',
'acircumflexacute': u'\u1EA5',
'acircumflexdotbelow': u'\u1EAD',
'acircumflexgrave': u'\u1EA7',
'acircumflexhookabove': u'\u1EA9',
'acircumflextilde': u'\u1EAB',
'acute': u'\u00B4',
'acutebelowcmb': u'\u0317',
'acutecmb': u'\u0301',
'acutecomb': u'\u0301',
'acutedeva': u'\u0954',
'acutelowmod': u'\u02CF',
'acutetonecmb': u'\u0341',
'acyrillic': u'\u0430',
'adblgrave': u'\u0201',
'addakgurmukhi': u'\u0A71',
'adeva': u'\u0905',
'adieresis': u'\u00E4',
'adieresiscyrillic': u'\u04D3',
'adieresismacron': u'\u01DF',
'adotbelow': u'\u1EA1',
'adotmacron': u'\u01E1',
'ae': u'\u00E6',
'aeacute': u'\u01FD',
'aekorean': u'\u3150',
'aemacron': u'\u01E3',
'afii00208': u'\u2015',
'afii08941': u'\u20A4',
'afii10017': u'\u0410',
'afii10018': u'\u0411',
'afii10019': u'\u0412',
'afii10020': u'\u0413',
'afii10021': u'\u0414',
'afii10022': u'\u0415',
'afii10023': u'\u0401',
'afii10024': u'\u0416',
'afii10025': u'\u0417',
'afii10026': u'\u0418',
'afii10027': u'\u0419',
'afii10028': u'\u041A',
'afii10029': u'\u041B',
'afii10030': u'\u041C',
'afii10031': u'\u041D',
'afii10032': u'\u041E',
'afii10033': u'\u041F',
'afii10034': u'\u0420',
'afii10035': u'\u0421',
'afii10036': u'\u0422',
'afii10037': u'\u0423',
'afii10038': u'\u0424',
'afii10039': u'\u0425',
'afii10040': u'\u0426',
'afii10041': u'\u0427',
'afii10042': u'\u0428',
'afii10043': u'\u0429',
'afii10044': u'\u042A',
'afii10045': u'\u042B',
'afii10046': u'\u042C',
'afii10047': u'\u042D',
'afii10048': u'\u042E',
'afii10049': u'\u042F',
'afii10050': u'\u0490',
'afii10051': u'\u0402',
'afii10052': u'\u0403',
'afii10053': u'\u0404',
'afii10054': u'\u0405',
'afii10055': u'\u0406',
'afii10056': u'\u0407',
'afii10057': u'\u0408',
'afii10058': u'\u0409',
'afii10059': u'\u040A',
'afii10060': u'\u040B',
'afii10061': u'\u040C',
'afii10062': u'\u040E',
'afii10063': u'\uF6C4',
'afii10064': u'\uF6C5',
'afii10065': u'\u0430',
'afii10066': u'\u0431',
'afii10067': u'\u0432',
'afii10068': u'\u0433',
'afii10069': u'\u0434',
'afii10070': u'\u0435',
'afii10071': u'\u0451',
'afii10072': u'\u0436',
'afii10073': u'\u0437',
'afii10074': u'\u0438',
'afii10075': u'\u0439',
'afii10076': u'\u043A',
'afii10077': u'\u043B',
'afii10078': u'\u043C',
'afii10079': u'\u043D',
'afii10080': u'\u043E',
'afii10081': u'\u043F',
'afii10082': u'\u0440',
'afii10083': u'\u0441',
'afii10084': u'\u0442',
'afii10085': u'\u0443',
'afii10086': u'\u0444',
'afii10087': u'\u0445',
'afii10088': u'\u0446',
'afii10089': u'\u0447',
'afii10090': u'\u0448',
'afii10091': u'\u0449',
'afii10092': u'\u044A',
'afii10093': u'\u044B',
'afii10094': u'\u044C',
'afii10095': u'\u044D',
'afii10096': u'\u044E',
'afii10097': u'\u044F',
'afii10098': u'\u0491',
'afii10099': u'\u0452',
'afii10100': u'\u0453',
'afii10101': u'\u0454',
'afii10102': u'\u0455',
'afii10103': u'\u0456',
'afii10104': u'\u0457',
'afii10105': u'\u0458',
'afii10106': u'\u0459',
'afii10107': u'\u045A',
'afii10108': u'\u045B',
'afii10109': u'\u045C',
'afii10110': u'\u045E',
'afii10145': u'\u040F',
'afii10146': u'\u0462',
'afii10147': u'\u0472',
'afii10148': u'\u0474',
'afii10192': u'\uF6C6',
'afii10193': u'\u045F',
'afii10194': u'\u0463',
'afii10195': u'\u0473',
'afii10196': u'\u0475',
'afii10831': u'\uF6C7',
'afii10832': u'\uF6C8',
'afii10846': u'\u04D9',
'afii299': u'\u200E',
'afii300': u'\u200F',
'afii301': u'\u200D',
'afii57381': u'\u066A',
'afii57388': u'\u060C',
'afii57392': u'\u0660',
'afii57393': u'\u0661',
'afii57394': u'\u0662',
'afii57395': u'\u0663',
'afii57396': u'\u0664',
'afii57397': u'\u0665',
'afii57398': u'\u0666',
'afii57399': u'\u0667',
'afii57400': u'\u0668',
'afii57401': u'\u0669',
'afii57403': u'\u061B',
'afii57407': u'\u061F',
'afii57409': u'\u0621',
'afii57410': u'\u0622',
'afii57411': u'\u0623',
'afii57412': u'\u0624',
'afii57413': u'\u0625',
'afii57414': u'\u0626',
'afii57415': u'\u0627',
'afii57416': u'\u0628',
'afii57417': u'\u0629',
'afii57418': u'\u062A',
'afii57419': u'\u062B',
'afii57420': u'\u062C',
'afii57421': u'\u062D',
'afii57422': u'\u062E',
'afii57423': u'\u062F',
'afii57424': u'\u0630',
'afii57425': u'\u0631',
'afii57426': u'\u0632',
'afii57427': u'\u0633',
'afii57428': u'\u0634',
'afii57429': u'\u0635',
'afii57430': u'\u0636',
'afii57431': u'\u0637',
'afii57432': u'\u0638',
'afii57433': u'\u0639',
'afii57434': u'\u063A',
'afii57440': u'\u0640',
'afii57441': u'\u0641',
'afii57442': u'\u0642',
'afii57443': u'\u0643',
'afii57444': u'\u0644',
'afii57445': u'\u0645',
'afii57446': u'\u0646',
'afii57448': u'\u0648',
'afii57449': u'\u0649',
'afii57450': u'\u064A',
'afii57451': u'\u064B',
'afii57452': u'\u064C',
'afii57453': u'\u064D',
'afii57454': u'\u064E',
'afii57455': u'\u064F',
'afii57456': u'\u0650',
'afii57457': u'\u0651',
'afii57458': u'\u0652',
'afii57470': u'\u0647',
'afii57505': u'\u06A4',
'afii57506': u'\u067E',
'afii57507': u'\u0686',
'afii57508': u'\u0698',
'afii57509': u'\u06AF',
'afii57511': u'\u0679',
'afii57512': u'\u0688',
'afii57513': u'\u0691',
'afii57514': u'\u06BA',
'afii57519': u'\u06D2',
'afii57534': u'\u06D5',
'afii57636': u'\u20AA',
'afii57645': u'\u05BE',
'afii57658': u'\u05C3',
'afii57664': u'\u05D0',
'afii57665': u'\u05D1',
'afii57666': u'\u05D2',
'afii57667': u'\u05D3',
'afii57668': u'\u05D4',
'afii57669': u'\u05D5',
'afii57670': u'\u05D6',
'afii57671': u'\u05D7',
'afii57672': u'\u05D8',
'afii57673': u'\u05D9',
'afii57674': u'\u05DA',
'afii57675': u'\u05DB',
'afii57676': u'\u05DC',
'afii57677': u'\u05DD',
'afii57678': u'\u05DE',
'afii57679': u'\u05DF',
'afii57680': u'\u05E0',
'afii57681': u'\u05E1',
'afii57682': u'\u05E2',
'afii57683': u'\u05E3',
'afii57684': u'\u05E4',
'afii57685': u'\u05E5',
'afii57686': u'\u05E6',
'afii57687': u'\u05E7',
'afii57688': u'\u05E8',
'afii57689': u'\u05E9',
'afii57690': u'\u05EA',
'afii57694': u'\uFB2A',
'afii57695': u'\uFB2B',
'afii57700': u'\uFB4B',
'afii57705': u'\uFB1F',
'afii57716': u'\u05F0',
'afii57717': u'\u05F1',
'afii57718': u'\u05F2',
'afii57723': u'\uFB35',
'afii57793': u'\u05B4',
'afii57794': u'\u05B5',
'afii57795': u'\u05B6',
'afii57796': u'\u05BB',
'afii57797': u'\u05B8',
'afii57798': u'\u05B7',
'afii57799': u'\u05B0',
'afii57800': u'\u05B2',
'afii57801': u'\u05B1',
'afii57802': u'\u05B3',
'afii57803': u'\u05C2',
'afii57804': u'\u05C1',
'afii57806': u'\u05B9',
'afii57807': u'\u05BC',
'afii57839': u'\u05BD',
'afii57841': u'\u05BF',
'afii57842': u'\u05C0',
'afii57929': u'\u02BC',
'afii61248': u'\u2105',
'afii61289': u'\u2113',
'afii61352': u'\u2116',
'afii61573': u'\u202C',
'afii61574': u'\u202D',
'afii61575': u'\u202E',
'afii61664': u'\u200C',
'afii63167': u'\u066D',
'afii64937': u'\u02BD',
'agrave': u'\u00E0',
'agujarati': u'\u0A85',
'agurmukhi': u'\u0A05',
'ahiragana': u'\u3042',
'ahookabove': u'\u1EA3',
'aibengali': u'\u0990',
'aibopomofo': u'\u311E',
'aideva': u'\u0910',
'aiecyrillic': u'\u04D5',
'aigujarati': u'\u0A90',
'aigurmukhi': u'\u0A10',
'aimatragurmukhi': u'\u0A48',
'ainarabic': u'\u0639',
'ainfinalarabic': u'\uFECA',
'aininitialarabic': u'\uFECB',
'ainmedialarabic': u'\uFECC',
'ainvertedbreve': u'\u0203',
'aivowelsignbengali': u'\u09C8',
'aivowelsigndeva': u'\u0948',
'aivowelsigngujarati': u'\u0AC8',
'akatakana': u'\u30A2',
'akatakanahalfwidth': u'\uFF71',
'akorean': u'\u314F',
'alef': u'\u05D0',
'alefarabic': u'\u0627',
'alefdageshhebrew': u'\uFB30',
'aleffinalarabic': u'\uFE8E',
'alefhamzaabovearabic': u'\u0623',
'alefhamzaabovefinalarabic': u'\uFE84',
'alefhamzabelowarabic': u'\u0625',
'alefhamzabelowfinalarabic': u'\uFE88',
'alefhebrew': u'\u05D0',
'aleflamedhebrew': u'\uFB4F',
'alefmaddaabovearabic': u'\u0622',
'alefmaddaabovefinalarabic': u'\uFE82',
'alefmaksuraarabic': u'\u0649',
'alefmaksurafinalarabic': u'\uFEF0',
'alefmaksurainitialarabic': u'\uFEF3',
'alefmaksuramedialarabic': u'\uFEF4',
'alefpatahhebrew': u'\uFB2E',
'alefqamatshebrew': u'\uFB2F',
'aleph': u'\u2135',
'allequal': u'\u224C',
'alpha': u'\u03B1',
'alphatonos': u'\u03AC',
'amacron': u'\u0101',
'amonospace': u'\uFF41',
'ampersand': u'\u0026',
'ampersandmonospace': u'\uFF06',
'ampersandsmall': u'\uF726',
'amsquare': u'\u33C2',
'anbopomofo': u'\u3122',
'angbopomofo': u'\u3124',
'angkhankhuthai': u'\u0E5A',
'angle': u'\u2220',
'anglebracketleft': u'\u3008',
'anglebracketleftvertical': u'\uFE3F',
'anglebracketright': u'\u3009',
'anglebracketrightvertical': u'\uFE40',
'angleleft': u'\u2329',
'angleright': u'\u232A',
'angstrom': u'\u212B',
'anoteleia': u'\u0387',
'anudattadeva': u'\u0952',
'anusvarabengali': u'\u0982',
'anusvaradeva': u'\u0902',
'anusvaragujarati': u'\u0A82',
'aogonek': u'\u0105',
'apaatosquare': u'\u3300',
'aparen': u'\u249C',
'apostrophearmenian': u'\u055A',
'apostrophemod': u'\u02BC',
'apple': u'\uF8FF',
'approaches': u'\u2250',
'approxequal': u'\u2248',
'approxequalorimage': u'\u2252',
'approximatelyequal': u'\u2245',
'araeaekorean': u'\u318E',
'araeakorean': u'\u318D',
'arc': u'\u2312',
'arighthalfring': u'\u1E9A',
'aring': u'\u00E5',
'aringacute': u'\u01FB',
'aringbelow': u'\u1E01',
'arrowboth': u'\u2194',
'arrowdashdown': u'\u21E3',
'arrowdashleft': u'\u21E0',
'arrowdashright': u'\u21E2',
'arrowdashup': u'\u21E1',
'arrowdblboth': u'\u21D4',
'arrowdbldown': u'\u21D3',
'arrowdblleft': u'\u21D0',
'arrowdblright': u'\u21D2',
'arrowdblup': u'\u21D1',
'arrowdown': u'\u2193',
'arrowdownleft': u'\u2199',
'arrowdownright': u'\u2198',
'arrowdownwhite': u'\u21E9',
'arrowheaddownmod': u'\u02C5',
'arrowheadleftmod': u'\u02C2',
'arrowheadrightmod': u'\u02C3',
'arrowheadupmod': u'\u02C4',
'arrowhorizex': u'\uF8E7',
'arrowleft': u'\u2190',
'arrowleftdbl': u'\u21D0',
'arrowleftdblstroke': u'\u21CD',
'arrowleftoverright': u'\u21C6',
'arrowleftwhite': u'\u21E6',
'arrowright': u'\u2192',
'arrowrightdblstroke': u'\u21CF',
'arrowrightheavy': u'\u279E',
'arrowrightoverleft': u'\u21C4',
'arrowrightwhite': u'\u21E8',
'arrowtableft': u'\u21E4',
'arrowtabright': u'\u21E5',
'arrowup': u'\u2191',
'arrowupdn': u'\u2195',
'arrowupdnbse': u'\u21A8',
'arrowupdownbase': u'\u21A8',
'arrowupleft': u'\u2196',
'arrowupleftofdown': u'\u21C5',
'arrowupright': u'\u2197',
'arrowupwhite': u'\u21E7',
'arrowvertex': u'\uF8E6',
'asciicircum': u'\u005E',
'asciicircummonospace': u'\uFF3E',
'asciitilde': u'\u007E',
'asciitildemonospace': u'\uFF5E',
'ascript': u'\u0251',
'ascriptturned': u'\u0252',
'asmallhiragana': u'\u3041',
'asmallkatakana': u'\u30A1',
'asmallkatakanahalfwidth': u'\uFF67',
'asterisk': u'\u002A',
'asteriskaltonearabic': u'\u066D',
'asteriskarabic': u'\u066D',
'asteriskmath': u'\u2217',
'asteriskmonospace': u'\uFF0A',
'asterisksmall': u'\uFE61',
'asterism': u'\u2042',
'asuperior': u'\uF6E9',
'asymptoticallyequal': u'\u2243',
'at': u'\u0040',
'atilde': u'\u00E3',
'atmonospace': u'\uFF20',
'atsmall': u'\uFE6B',
'aturned': u'\u0250',
'aubengali': u'\u0994',
'aubopomofo': u'\u3120',
'audeva': u'\u0914',
'augujarati': u'\u0A94',
'augurmukhi': u'\u0A14',
'aulengthmarkbengali': u'\u09D7',
'aumatragurmukhi': u'\u0A4C',
'auvowelsignbengali': u'\u09CC',
'auvowelsigndeva': u'\u094C',
'auvowelsigngujarati': u'\u0ACC',
'avagrahadeva': u'\u093D',
'aybarmenian': u'\u0561',
'ayin': u'\u05E2',
'ayinaltonehebrew': u'\uFB20',
'ayinhebrew': u'\u05E2',
'b': u'\u0062',
'babengali': u'\u09AC',
'backslash': u'\u005C',
'backslashmonospace': u'\uFF3C',
'badeva': u'\u092C',
'bagujarati': u'\u0AAC',
'bagurmukhi': u'\u0A2C',
'bahiragana': u'\u3070',
'bahtthai': u'\u0E3F',
'bakatakana': u'\u30D0',
'bar': u'\u007C',
'barmonospace': u'\uFF5C',
'bbopomofo': u'\u3105',
'bcircle': u'\u24D1',
'bdotaccent': u'\u1E03',
'bdotbelow': u'\u1E05',
'beamedsixteenthnotes': u'\u266C',
'because': u'\u2235',
'becyrillic': u'\u0431',
'beharabic': u'\u0628',
'behfinalarabic': u'\uFE90',
'behinitialarabic': u'\uFE91',
'behiragana': u'\u3079',
'behmedialarabic': u'\uFE92',
'behmeeminitialarabic': u'\uFC9F',
'behmeemisolatedarabic': u'\uFC08',
'behnoonfinalarabic': u'\uFC6D',
'bekatakana': u'\u30D9',
'benarmenian': u'\u0562',
'bet': u'\u05D1',
'beta': u'\u03B2',
'betasymbolgreek': u'\u03D0',
'betdagesh': u'\uFB31',
'betdageshhebrew': u'\uFB31',
'bethebrew': u'\u05D1',
'betrafehebrew': u'\uFB4C',
'bhabengali': u'\u09AD',
'bhadeva': u'\u092D',
'bhagujarati': u'\u0AAD',
'bhagurmukhi': u'\u0A2D',
'bhook': u'\u0253',
'bihiragana': u'\u3073',
'bikatakana': u'\u30D3',
'bilabialclick': u'\u0298',
'bindigurmukhi': u'\u0A02',
'birusquare': u'\u3331',
'blackcircle': u'\u25CF',
'blackdiamond': u'\u25C6',
'blackdownpointingtriangle': u'\u25BC',
'blackleftpointingpointer': u'\u25C4',
'blackleftpointingtriangle': u'\u25C0',
'blacklenticularbracketleft': u'\u3010',
'blacklenticularbracketleftvertical': u'\uFE3B',
'blacklenticularbracketright': u'\u3011',
'blacklenticularbracketrightvertical': u'\uFE3C',
'blacklowerlefttriangle': u'\u25E3',
'blacklowerrighttriangle': u'\u25E2',
'blackrectangle': u'\u25AC',
'blackrightpointingpointer': u'\u25BA',
'blackrightpointingtriangle': u'\u25B6',
'blacksmallsquare': u'\u25AA',
'blacksmilingface': u'\u263B',
'blacksquare': u'\u25A0',
'blackstar': u'\u2605',
'blackupperlefttriangle': u'\u25E4',
'blackupperrighttriangle': u'\u25E5',
'blackuppointingsmalltriangle': u'\u25B4',
'blackuppointingtriangle': u'\u25B2',
'blank': u'\u2423',
'blinebelow': u'\u1E07',
'block': u'\u2588',
'bmonospace': u'\uFF42',
'bobaimaithai': u'\u0E1A',
'bohiragana': u'\u307C',
'bokatakana': u'\u30DC',
'bparen': u'\u249D',
'bqsquare': u'\u33C3',
'braceex': u'\uF8F4',
'braceleft': u'\u007B',
'braceleftbt': u'\uF8F3',
'braceleftmid': u'\uF8F2',
'braceleftmonospace': u'\uFF5B',
'braceleftsmall': u'\uFE5B',
'bracelefttp': u'\uF8F1',
'braceleftvertical': u'\uFE37',
'braceright': u'\u007D',
'bracerightbt': u'\uF8FE',
'bracerightmid': u'\uF8FD',
'bracerightmonospace': u'\uFF5D',
'bracerightsmall': u'\uFE5C',
'bracerighttp': u'\uF8FC',
'bracerightvertical': u'\uFE38',
'bracketleft': u'\u005B',
'bracketleftbt': u'\uF8F0',
'bracketleftex': u'\uF8EF',
'bracketleftmonospace': u'\uFF3B',
'bracketlefttp': u'\uF8EE',
'bracketright': u'\u005D',
'bracketrightbt': u'\uF8FB',
'bracketrightex': u'\uF8FA',
'bracketrightmonospace': u'\uFF3D',
'bracketrighttp': u'\uF8F9',
'breve': u'\u02D8',
'brevebelowcmb': u'\u032E',
'brevecmb': u'\u0306',
'breveinvertedbelowcmb': u'\u032F',
'breveinvertedcmb': u'\u0311',
'breveinverteddoublecmb': u'\u0361',
'bridgebelowcmb': u'\u032A',
'bridgeinvertedbelowcmb': u'\u033A',
'brokenbar': u'\u00A6',
'bstroke': u'\u0180',
'bsuperior': u'\uF6EA',
'btopbar': u'\u0183',
'buhiragana': u'\u3076',
'bukatakana': u'\u30D6',
'bullet': u'\u2022',
'bulletinverse': u'\u25D8',
'bulletoperator': u'\u2219',
'bullseye': u'\u25CE',
'c': u'\u0063',
'caarmenian': u'\u056E',
'cabengali': u'\u099A',
'cacute': u'\u0107',
'cadeva': u'\u091A',
'cagujarati': u'\u0A9A',
'cagurmukhi': u'\u0A1A',
'calsquare': u'\u3388',
'candrabindubengali': u'\u0981',
'candrabinducmb': u'\u0310',
'candrabindudeva': u'\u0901',
'candrabindugujarati': u'\u0A81',
'capslock': u'\u21EA',
'careof': u'\u2105',
'caron': u'\u02C7',
'caronbelowcmb': u'\u032C',
'caroncmb': u'\u030C',
'carriagereturn': u'\u21B5',
'cbopomofo': u'\u3118',
'ccaron': u'\u010D',
'ccedilla': u'\u00E7',
'ccedillaacute': u'\u1E09',
'ccircle': u'\u24D2',
'ccircumflex': u'\u0109',
'ccurl': u'\u0255',
'cdot': u'\u010B',
'cdotaccent': u'\u010B',
'cdsquare': u'\u33C5',
'cedilla': u'\u00B8',
'cedillacmb': u'\u0327',
'cent': u'\u00A2',
'centigrade': u'\u2103',
'centinferior': u'\uF6DF',
'centmonospace': u'\uFFE0',
'centoldstyle': u'\uF7A2',
'centsuperior': u'\uF6E0',
'chaarmenian': u'\u0579',
'chabengali': u'\u099B',
'chadeva': u'\u091B',
'chagujarati': u'\u0A9B',
'chagurmukhi': u'\u0A1B',
'chbopomofo': u'\u3114',
'cheabkhasiancyrillic': u'\u04BD',
'checkmark': u'\u2713',
'checyrillic': u'\u0447',
'chedescenderabkhasiancyrillic': u'\u04BF',
'chedescendercyrillic': u'\u04B7',
'chedieresiscyrillic': u'\u04F5',
'cheharmenian': u'\u0573',
'chekhakassiancyrillic': u'\u04CC',
'cheverticalstrokecyrillic': u'\u04B9',
'chi': u'\u03C7',
'chieuchacirclekorean': u'\u3277',
'chieuchaparenkorean': u'\u3217',
'chieuchcirclekorean': u'\u3269',
'chieuchkorean': u'\u314A',
'chieuchparenkorean': u'\u3209',
'chochangthai': u'\u0E0A',
'chochanthai': u'\u0E08',
'chochingthai': u'\u0E09',
'chochoethai': u'\u0E0C',
'chook': u'\u0188',
'cieucacirclekorean': u'\u3276',
'cieucaparenkorean': u'\u3216',
'cieuccirclekorean': u'\u3268',
'cieuckorean': u'\u3148',
'cieucparenkorean': u'\u3208',
'cieucuparenkorean': u'\u321C',
'circle': u'\u25CB',
'circlemultiply': u'\u2297',
'circleot': u'\u2299',
'circleplus': u'\u2295',
'circlepostalmark': u'\u3036',
'circlewithlefthalfblack': u'\u25D0',
'circlewithrighthalfblack': u'\u25D1',
'circumflex': u'\u02C6',
'circumflexbelowcmb': u'\u032D',
'circumflexcmb': u'\u0302',
'clear': u'\u2327',
'clickalveolar': u'\u01C2',
'clickdental': u'\u01C0',
'clicklateral': u'\u01C1',
'clickretroflex': u'\u01C3',
'club': u'\u2663',
'clubsuitblack': u'\u2663',
'clubsuitwhite': u'\u2667',
'cmcubedsquare': u'\u33A4',
'cmonospace': u'\uFF43',
'cmsquaredsquare': u'\u33A0',
'coarmenian': u'\u0581',
'colon': u'\u003A',
'colonmonetary': u'\u20A1',
'colonmonospace': u'\uFF1A',
'colonsign': u'\u20A1',
'colonsmall': u'\uFE55',
'colontriangularhalfmod': u'\u02D1',
'colontriangularmod': u'\u02D0',
'comma': u'\u002C',
'commaabovecmb': u'\u0313',
'commaaboverightcmb': u'\u0315',
'commaaccent': u'\uF6C3',
'commaarabic': u'\u060C',
'commaarmenian': u'\u055D',
'commainferior': u'\uF6E1',
'commamonospace': u'\uFF0C',
'commareversedabovecmb': u'\u0314',
'commareversedmod': u'\u02BD',
'commasmall': u'\uFE50',
'commasuperior': u'\uF6E2',
'commaturnedabovecmb': u'\u0312',
'commaturnedmod': u'\u02BB',
'compass': u'\u263C',
'congruent': u'\u2245',
'contourintegral': u'\u222E',
'control': u'\u2303',
'controlACK': u'\u0006',
'controlBEL': u'\u0007',
'controlBS': u'\u0008',
'controlCAN': u'\u0018',
'controlCR': u'\u000D',
'controlDC1': u'\u0011',
'controlDC2': u'\u0012',
'controlDC3': u'\u0013',
'controlDC4': u'\u0014',
'controlDEL': u'\u007F',
'controlDLE': u'\u0010',
'controlEM': u'\u0019',
'controlENQ': u'\u0005',
'controlEOT': u'\u0004',
'controlESC': u'\u001B',
'controlETB': u'\u0017',
'controlETX': u'\u0003',
'controlFF': u'\u000C',
'controlFS': u'\u001C',
'controlGS': u'\u001D',
'controlHT': u'\u0009',
'controlLF': u'\u000A',
'controlNAK': u'\u0015',
'controlRS': u'\u001E',
'controlSI': u'\u000F',
'controlSO': u'\u000E',
'controlSOT': u'\u0002',
'controlSTX': u'\u0001',
'controlSUB': u'\u001A',
'controlSYN': u'\u0016',
'controlUS': u'\u001F',
'controlVT': u'\u000B',
'copyright': u'\u00A9',
'copyrightsans': u'\uF8E9',
'copyrightserif': u'\uF6D9',
'cornerbracketleft': u'\u300C',
'cornerbracketlefthalfwidth': u'\uFF62',
'cornerbracketleftvertical': u'\uFE41',
'cornerbracketright': u'\u300D',
'cornerbracketrighthalfwidth': u'\uFF63',
'cornerbracketrightvertical': u'\uFE42',
'corporationsquare': u'\u337F',
'cosquare': u'\u33C7',
'coverkgsquare': u'\u33C6',
'cparen': u'\u249E',
'cruzeiro': u'\u20A2',
'cstretched': u'\u0297',
'curlyand': u'\u22CF',
'curlyor': u'\u22CE',
'currency': u'\u00A4',
'cyrBreve': u'\uF6D1',
'cyrFlex': u'\uF6D2',
'cyrbreve': u'\uF6D4',
'cyrflex': u'\uF6D5',
'd': u'\u0064',
'daarmenian': u'\u0564',
'dabengali': u'\u09A6',
'dadarabic': u'\u0636',
'dadeva': u'\u0926',
'dadfinalarabic': u'\uFEBE',
'dadinitialarabic': u'\uFEBF',
'dadmedialarabic': u'\uFEC0',
'dagesh': u'\u05BC',
'dageshhebrew': u'\u05BC',
'dagger': u'\u2020',
'daggerdbl': u'\u2021',
'dagujarati': u'\u0AA6',
'dagurmukhi': u'\u0A26',
'dahiragana': u'\u3060',
'dakatakana': u'\u30C0',
'dalarabic': u'\u062F',
'dalet': u'\u05D3',
'daletdagesh': u'\uFB33',
'daletdageshhebrew': u'\uFB33',
'dalethatafpatah': u'\u05D3\u05B2',
'dalethatafpatahhebrew': u'\u05D3\u05B2',
'dalethatafsegol': u'\u05D3\u05B1',
'dalethatafsegolhebrew': u'\u05D3\u05B1',
'dalethebrew': u'\u05D3',
'dalethiriq': u'\u05D3\u05B4',
'dalethiriqhebrew': u'\u05D3\u05B4',
'daletholam': u'\u05D3\u05B9',
'daletholamhebrew': u'\u05D3\u05B9',
'daletpatah': u'\u05D3\u05B7',
'daletpatahhebrew': u'\u05D3\u05B7',
'daletqamats': u'\u05D3\u05B8',
'daletqamatshebrew': u'\u05D3\u05B8',
'daletqubuts': u'\u05D3\u05BB',
'daletqubutshebrew': u'\u05D3\u05BB',
'daletsegol': u'\u05D3\u05B6',
'daletsegolhebrew': u'\u05D3\u05B6',
'daletsheva': u'\u05D3\u05B0',
'daletshevahebrew': u'\u05D3\u05B0',
'dalettsere': u'\u05D3\u05B5',
'dalettserehebrew': u'\u05D3\u05B5',
'dalfinalarabic': u'\uFEAA',
'dammaarabic': u'\u064F',
'dammalowarabic': u'\u064F',
'dammatanaltonearabic': u'\u064C',
'dammatanarabic': u'\u064C',
'danda': u'\u0964',
'dargahebrew': u'\u05A7',
'dargalefthebrew': u'\u05A7',
'dasiapneumatacyrilliccmb': u'\u0485',
'dblGrave': u'\uF6D3',
'dblanglebracketleft': u'\u300A',
'dblanglebracketleftvertical': u'\uFE3D',
'dblanglebracketright': u'\u300B',
'dblanglebracketrightvertical': u'\uFE3E',
'dblarchinvertedbelowcmb': u'\u032B',
'dblarrowleft': u'\u21D4',
'dblarrowright': u'\u21D2',
'dbldanda': u'\u0965',
'dblgrave': u'\uF6D6',
'dblgravecmb': u'\u030F',
'dblintegral': u'\u222C',
'dbllowline': u'\u2017',
'dbllowlinecmb': u'\u0333',
'dbloverlinecmb': u'\u033F',
'dblprimemod': u'\u02BA',
'dblverticalbar': u'\u2016',
'dblverticallineabovecmb': u'\u030E',
'dbopomofo': u'\u3109',
'dbsquare': u'\u33C8',
'dcaron': u'\u010F',
'dcedilla': u'\u1E11',
'dcircle': u'\u24D3',
'dcircumflexbelow': u'\u1E13',
'dcroat': u'\u0111',
'ddabengali': u'\u09A1',
'ddadeva': u'\u0921',
'ddagujarati': u'\u0AA1',
'ddagurmukhi': u'\u0A21',
'ddalarabic': u'\u0688',
'ddalfinalarabic': u'\uFB89',
'dddhadeva': u'\u095C',
'ddhabengali': u'\u09A2',
'ddhadeva': u'\u0922',
'ddhagujarati': u'\u0AA2',
'ddhagurmukhi': u'\u0A22',
'ddotaccent': u'\u1E0B',
'ddotbelow': u'\u1E0D',
'decimalseparatorarabic': u'\u066B',
'decimalseparatorpersian': u'\u066B',
'decyrillic': u'\u0434',
'degree': u'\u00B0',
'dehihebrew': u'\u05AD',
'dehiragana': u'\u3067',
'deicoptic': u'\u03EF',
'dekatakana': u'\u30C7',
'deleteleft': u'\u232B',
'deleteright': u'\u2326',
'delta': u'\u03B4',
'deltaturned': u'\u018D',
'denominatorminusonenumeratorbengali': u'\u09F8',
'dezh': u'\u02A4',
'dhabengali': u'\u09A7',
'dhadeva': u'\u0927',
'dhagujarati': u'\u0AA7',
'dhagurmukhi': u'\u0A27',
'dhook': u'\u0257',
'dialytikatonos': u'\u0385',
'dialytikatonoscmb': u'\u0344',
'diamond': u'\u2666',
'diamondsuitwhite': u'\u2662',
'dieresis': u'\u00A8',
'dieresisacute': u'\uF6D7',
'dieresisbelowcmb': u'\u0324',
'dieresiscmb': u'\u0308',
'dieresisgrave': u'\uF6D8',
'dieresistonos': u'\u0385',
'dihiragana': u'\u3062',
'dikatakana': u'\u30C2',
'dittomark': u'\u3003',
'divide': u'\u00F7',
'divides': u'\u2223',
'divisionslash': u'\u2215',
'djecyrillic': u'\u0452',
'dkshade': u'\u2593',
'dlinebelow': u'\u1E0F',
'dlsquare': u'\u3397',
'dmacron': u'\u0111',
'dmonospace': u'\uFF44',
'dnblock': u'\u2584',
'dochadathai': u'\u0E0E',
'dodekthai': u'\u0E14',
'dohiragana': u'\u3069',
'dokatakana': u'\u30C9',
'dollar': u'\u0024',
'dollarinferior': u'\uF6E3',
'dollarmonospace': u'\uFF04',
'dollaroldstyle': u'\uF724',
'dollarsmall': u'\uFE69',
'dollarsuperior': u'\uF6E4',
'dong': u'\u20AB',
'dorusquare': u'\u3326',
'dotaccent': u'\u02D9',
'dotaccentcmb': u'\u0307',
'dotbelowcmb': u'\u0323',
'dotbelowcomb': u'\u0323',
'dotkatakana': u'\u30FB',
'dotlessi': u'\u0131',
'dotlessj': u'\uF6BE',
'dotlessjstrokehook': u'\u0284',
'dotmath': u'\u22C5',
'dottedcircle': u'\u25CC',
'doubleyodpatah': u'\uFB1F',
'doubleyodpatahhebrew': u'\uFB1F',
'downtackbelowcmb': u'\u031E',
'downtackmod': u'\u02D5',
'dparen': u'\u249F',
'dsuperior': u'\uF6EB',
'dtail': u'\u0256',
'dtopbar': u'\u018C',
'duhiragana': u'\u3065',
'dukatakana': u'\u30C5',
'dz': u'\u01F3',
'dzaltone': u'\u02A3',
'dzcaron': u'\u01C6',
'dzcurl': u'\u02A5',
'dzeabkhasiancyrillic': u'\u04E1',
'dzecyrillic': u'\u0455',
'dzhecyrillic': u'\u045F',
'e': u'\u0065',
'eacute': u'\u00E9',
'earth': u'\u2641',
'ebengali': u'\u098F',
'ebopomofo': u'\u311C',
'ebreve': u'\u0115',
'ecandradeva': u'\u090D',
'ecandragujarati': u'\u0A8D',
'ecandravowelsigndeva': u'\u0945',
'ecandravowelsigngujarati': u'\u0AC5',
'ecaron': u'\u011B',
'ecedillabreve': u'\u1E1D',
'echarmenian': u'\u0565',
'echyiwnarmenian': u'\u0587',
'ecircle': u'\u24D4',
'ecircumflex': u'\u00EA',
'ecircumflexacute': u'\u1EBF',
'ecircumflexbelow': u'\u1E19',
'ecircumflexdotbelow': u'\u1EC7',
'ecircumflexgrave': u'\u1EC1',
'ecircumflexhookabove': u'\u1EC3',
'ecircumflextilde': u'\u1EC5',
'ecyrillic': u'\u0454',
'edblgrave': u'\u0205',
'edeva': u'\u090F',
'edieresis': u'\u00EB',
'edot': u'\u0117',
'edotaccent': u'\u0117',
'edotbelow': u'\u1EB9',
'eegurmukhi': u'\u0A0F',
'eematragurmukhi': u'\u0A47',
'efcyrillic': u'\u0444',
'egrave': u'\u00E8',
'egujarati': u'\u0A8F',
'eharmenian': u'\u0567',
'ehbopomofo': u'\u311D',
'ehiragana': u'\u3048',
'ehookabove': u'\u1EBB',
'eibopomofo': u'\u311F',
'eight': u'\u0038',
'eightarabic': u'\u0668',
'eightbengali': u'\u09EE',
'eightcircle': u'\u2467',
'eightcircleinversesansserif': u'\u2791',
'eightdeva': u'\u096E',
'eighteencircle': u'\u2471',
'eighteenparen': u'\u2485',
'eighteenperiod': u'\u2499',
'eightgujarati': u'\u0AEE',
'eightgurmukhi': u'\u0A6E',
'eighthackarabic': u'\u0668',
'eighthangzhou': u'\u3028',
'eighthnotebeamed': u'\u266B',
'eightideographicparen': u'\u3227',
'eightinferior': u'\u2088',
'eightmonospace': u'\uFF18',
'eightoldstyle': u'\uF738',
'eightparen': u'\u247B',
'eightperiod': u'\u248F',
'eightpersian': u'\u06F8',
'eightroman': u'\u2177',
'eightsuperior': u'\u2078',
'eightthai': u'\u0E58',
'einvertedbreve': u'\u0207',
'eiotifiedcyrillic': u'\u0465',
'ekatakana': u'\u30A8',
'ekatakanahalfwidth': u'\uFF74',
'ekonkargurmukhi': u'\u0A74',
'ekorean': u'\u3154',
'elcyrillic': u'\u043B',
'element': u'\u2208',
'elevencircle': u'\u246A',
'elevenparen': u'\u247E',
'elevenperiod': u'\u2492',
'elevenroman': u'\u217A',
'ellipsis': u'\u2026',
'ellipsisvertical': u'\u22EE',
'emacron': u'\u0113',
'emacronacute': u'\u1E17',
'emacrongrave': u'\u1E15',
'emcyrillic': u'\u043C',
'emdash': u'\u2014',
'emdashvertical': u'\uFE31',
'emonospace': u'\uFF45',
'emphasismarkarmenian': u'\u055B',
'emptyset': u'\u2205',
'enbopomofo': u'\u3123',
'encyrillic': u'\u043D',
'endash': u'\u2013',
'endashvertical': u'\uFE32',
'endescendercyrillic': u'\u04A3',
'eng': u'\u014B',
'engbopomofo': u'\u3125',
'enghecyrillic': u'\u04A5',
'enhookcyrillic': u'\u04C8',
'enspace': u'\u2002',
'eogonek': u'\u0119',
'eokorean': u'\u3153',
'eopen': u'\u025B',
'eopenclosed': u'\u029A',
'eopenreversed': u'\u025C',
'eopenreversedclosed': u'\u025E',
'eopenreversedhook': u'\u025D',
'eparen': u'\u24A0',
'epsilon': u'\u03B5',
'epsilontonos': u'\u03AD',
'equal': u'\u003D',
'equalmonospace': u'\uFF1D',
'equalsmall': u'\uFE66',
'equalsuperior': u'\u207C',
'equivalence': u'\u2261',
'erbopomofo': u'\u3126',
'ercyrillic': u'\u0440',
'ereversed': u'\u0258',
'ereversedcyrillic': u'\u044D',
'escyrillic': u'\u0441',
'esdescendercyrillic': u'\u04AB',
'esh': u'\u0283',
'eshcurl': u'\u0286',
'eshortdeva': u'\u090E',
'eshortvowelsigndeva': u'\u0946',
'eshreversedloop': u'\u01AA',
'eshsquatreversed': u'\u0285',
'esmallhiragana': u'\u3047',
'esmallkatakana': u'\u30A7',
'esmallkatakanahalfwidth': u'\uFF6A',
'estimated': u'\u212E',
'esuperior': u'\uF6EC',
'eta': u'\u03B7',
'etarmenian': u'\u0568',
'etatonos': u'\u03AE',
'eth': u'\u00F0',
'etilde': u'\u1EBD',
'etildebelow': u'\u1E1B',
'etnahtafoukhhebrew': u'\u0591',
'etnahtafoukhlefthebrew': u'\u0591',
'etnahtahebrew': u'\u0591',
'etnahtalefthebrew': u'\u0591',
'eturned': u'\u01DD',
'eukorean': u'\u3161',
'euro': u'\u20AC',
'evowelsignbengali': u'\u09C7',
'evowelsigndeva': u'\u0947',
'evowelsigngujarati': u'\u0AC7',
'exclam': u'\u0021',
'exclamarmenian': u'\u055C',
'exclamdbl': u'\u203C',
'exclamdown': u'\u00A1',
'exclamdownsmall': u'\uF7A1',
'exclammonospace': u'\uFF01',
'exclamsmall': u'\uF721',
'existential': u'\u2203',
'ezh': u'\u0292',
'ezhcaron': u'\u01EF',
'ezhcurl': u'\u0293',
'ezhreversed': u'\u01B9',
'ezhtail': u'\u01BA',
'f': u'\u0066',
'fadeva': u'\u095E',
'fagurmukhi': u'\u0A5E',
'fahrenheit': u'\u2109',
'fathaarabic': u'\u064E',
'fathalowarabic': u'\u064E',
'fathatanarabic': u'\u064B',
'fbopomofo': u'\u3108',
'fcircle': u'\u24D5',
'fdotaccent': u'\u1E1F',
'feharabic': u'\u0641',
'feharmenian': u'\u0586',
'fehfinalarabic': u'\uFED2',
'fehinitialarabic': u'\uFED3',
'fehmedialarabic': u'\uFED4',
'feicoptic': u'\u03E5',
'female': u'\u2640',
'ff': u'\uFB00',
'ffi': u'\uFB03',
'ffl': u'\uFB04',
'fi': u'\uFB01',
'fifteencircle': u'\u246E',
'fifteenparen': u'\u2482',
'fifteenperiod': u'\u2496',
'figuredash': u'\u2012',
'filledbox': u'\u25A0',
'filledrect': u'\u25AC',
'finalkaf': u'\u05DA',
'finalkafdagesh': u'\uFB3A',
'finalkafdageshhebrew': u'\uFB3A',
'finalkafhebrew': u'\u05DA',
'finalkafqamats': u'\u05DA\u05B8',
'finalkafqamatshebrew': u'\u05DA\u05B8',
'finalkafsheva': u'\u05DA\u05B0',
'finalkafshevahebrew': u'\u05DA\u05B0',
'finalmem': u'\u05DD',
'finalmemhebrew': u'\u05DD',
'finalnun': u'\u05DF',
'finalnunhebrew': u'\u05DF',
'finalpe': u'\u05E3',
'finalpehebrew': u'\u05E3',
'finaltsadi': u'\u05E5',
'finaltsadihebrew': u'\u05E5',
'firsttonechinese': u'\u02C9',
'fisheye': u'\u25C9',
'fitacyrillic': u'\u0473',
'five': u'\u0035',
'fivearabic': u'\u0665',
'fivebengali': u'\u09EB',
'fivecircle': u'\u2464',
'fivecircleinversesansserif': u'\u278E',
'fivedeva': u'\u096B',
'fiveeighths': u'\u215D',
'fivegujarati': u'\u0AEB',
'fivegurmukhi': u'\u0A6B',
'fivehackarabic': u'\u0665',
'fivehangzhou': u'\u3025',
'fiveideographicparen': u'\u3224',
'fiveinferior': u'\u2085',
'fivemonospace': u'\uFF15',
'fiveoldstyle': u'\uF735',
'fiveparen': u'\u2478',
'fiveperiod': u'\u248C',
'fivepersian': u'\u06F5',
'fiveroman': u'\u2174',
'fivesuperior': u'\u2075',
'fivethai': u'\u0E55',
'fl': u'\uFB02',
'florin': u'\u0192',
'fmonospace': u'\uFF46',
'fmsquare': u'\u3399',
'fofanthai': u'\u0E1F',
'fofathai': u'\u0E1D',
'fongmanthai': u'\u0E4F',
'forall': u'\u2200',
'four': u'\u0034',
'fourarabic': u'\u0664',
'fourbengali': u'\u09EA',
'fourcircle': u'\u2463',
'fourcircleinversesansserif': u'\u278D',
'fourdeva': u'\u096A',
'fourgujarati': u'\u0AEA',
'fourgurmukhi': u'\u0A6A',
'fourhackarabic': u'\u0664',
'fourhangzhou': u'\u3024',
'fourideographicparen': u'\u3223',
'fourinferior': u'\u2084',
'fourmonospace': u'\uFF14',
'fournumeratorbengali': u'\u09F7',
'fouroldstyle': u'\uF734',
'fourparen': u'\u2477',
'fourperiod': u'\u248B',
'fourpersian': u'\u06F4',
'fourroman': u'\u2173',
'foursuperior': u'\u2074',
'fourteencircle': u'\u246D',
'fourteenparen': u'\u2481',
'fourteenperiod': u'\u2495',
'fourthai': u'\u0E54',
'fourthtonechinese': u'\u02CB',
'fparen': u'\u24A1',
'fraction': u'\u2044',
'franc': u'\u20A3',
'g': u'\u0067',
'gabengali': u'\u0997',
'gacute': u'\u01F5',
'gadeva': u'\u0917',
'gafarabic': u'\u06AF',
'gaffinalarabic': u'\uFB93',
'gafinitialarabic': u'\uFB94',
'gafmedialarabic': u'\uFB95',
'gagujarati': u'\u0A97',
'gagurmukhi': u'\u0A17',
'gahiragana': u'\u304C',
'gakatakana': u'\u30AC',
'gamma': u'\u03B3',
'gammalatinsmall': u'\u0263',
'gammasuperior': u'\u02E0',
'gangiacoptic': u'\u03EB',
'gbopomofo': u'\u310D',
'gbreve': u'\u011F',
'gcaron': u'\u01E7',
'gcedilla': u'\u0123',
'gcircle': u'\u24D6',
'gcircumflex': u'\u011D',
'gcommaaccent': u'\u0123',
'gdot': u'\u0121',
'gdotaccent': u'\u0121',
'gecyrillic': u'\u0433',
'gehiragana': u'\u3052',
'gekatakana': u'\u30B2',
'geometricallyequal': u'\u2251',
'gereshaccenthebrew': u'\u059C',
'gereshhebrew': u'\u05F3',
'gereshmuqdamhebrew': u'\u059D',
'germandbls': u'\u00DF',
'gershayimaccenthebrew': u'\u059E',
'gershayimhebrew': u'\u05F4',
'getamark': u'\u3013',
'ghabengali': u'\u0998',
'ghadarmenian': u'\u0572',
'ghadeva': u'\u0918',
'ghagujarati': u'\u0A98',
'ghagurmukhi': u'\u0A18',
'ghainarabic': u'\u063A',
'ghainfinalarabic': u'\uFECE',
'ghaininitialarabic': u'\uFECF',
'ghainmedialarabic': u'\uFED0',
'ghemiddlehookcyrillic': u'\u0495',
'ghestrokecyrillic': u'\u0493',
'gheupturncyrillic': u'\u0491',
'ghhadeva': u'\u095A',
'ghhagurmukhi': u'\u0A5A',
'ghook': u'\u0260',
'ghzsquare': u'\u3393',
'gihiragana': u'\u304E',
'gikatakana': u'\u30AE',
'gimarmenian': u'\u0563',
'gimel': u'\u05D2',
'gimeldagesh': u'\uFB32',
'gimeldageshhebrew': u'\uFB32',
'gimelhebrew': u'\u05D2',
'gjecyrillic': u'\u0453',
'glottalinvertedstroke': u'\u01BE',
'glottalstop': u'\u0294',
'glottalstopinverted': u'\u0296',
'glottalstopmod': u'\u02C0',
'glottalstopreversed': u'\u0295',
'glottalstopreversedmod': u'\u02C1',
'glottalstopreversedsuperior': u'\u02E4',
'glottalstopstroke': u'\u02A1',
'glottalstopstrokereversed': u'\u02A2',
'gmacron': u'\u1E21',
'gmonospace': u'\uFF47',
'gohiragana': u'\u3054',
'gokatakana': u'\u30B4',
'gparen': u'\u24A2',
'gpasquare': u'\u33AC',
'gradient': u'\u2207',
'grave': u'\u0060',
'gravebelowcmb': u'\u0316',
'gravecmb': u'\u0300',
'gravecomb': u'\u0300',
'gravedeva': u'\u0953',
'gravelowmod': u'\u02CE',
'gravemonospace': u'\uFF40',
'gravetonecmb': u'\u0340',
'greater': u'\u003E',
'greaterequal': u'\u2265',
'greaterequalorless': u'\u22DB',
'greatermonospace': u'\uFF1E',
'greaterorequivalent': u'\u2273',
'greaterorless': u'\u2277',
'greateroverequal': u'\u2267',
'greatersmall': u'\uFE65',
'gscript': u'\u0261',
'gstroke': u'\u01E5',
'guhiragana': u'\u3050',
'guillemotleft': u'\u00AB',
'guillemotright': u'\u00BB',
'guilsinglleft': u'\u2039',
'guilsinglright': u'\u203A',
'gukatakana': u'\u30B0',
'guramusquare': u'\u3318',
'gysquare': u'\u33C9',
'h': u'\u0068',
'haabkhasiancyrillic': u'\u04A9',
'haaltonearabic': u'\u06C1',
'habengali': u'\u09B9',
'hadescendercyrillic': u'\u04B3',
'hadeva': u'\u0939',
'hagujarati': u'\u0AB9',
'hagurmukhi': u'\u0A39',
'haharabic': u'\u062D',
'hahfinalarabic': u'\uFEA2',
'hahinitialarabic': u'\uFEA3',
'hahiragana': u'\u306F',
'hahmedialarabic': u'\uFEA4',
'haitusquare': u'\u332A',
'hakatakana': u'\u30CF',
'hakatakanahalfwidth': u'\uFF8A',
'halantgurmukhi': u'\u0A4D',
'hamzaarabic': u'\u0621',
'hamzadammaarabic': u'\u0621\u064F',
'hamzadammatanarabic': u'\u0621\u064C',
'hamzafathaarabic': u'\u0621\u064E',
'hamzafathatanarabic': u'\u0621\u064B',
'hamzalowarabic': u'\u0621',
'hamzalowkasraarabic': u'\u0621\u0650',
'hamzalowkasratanarabic': u'\u0621\u064D',
'hamzasukunarabic': u'\u0621\u0652',
'hangulfiller': u'\u3164',
'hardsigncyrillic': u'\u044A',
'harpoonleftbarbup': u'\u21BC',
'harpoonrightbarbup': u'\u21C0',
'hasquare': u'\u33CA',
'hatafpatah': u'\u05B2',
'hatafpatah16': u'\u05B2',
'hatafpatah23': u'\u05B2',
'hatafpatah2f': u'\u05B2',
'hatafpatahhebrew': u'\u05B2',
'hatafpatahnarrowhebrew': u'\u05B2',
'hatafpatahquarterhebrew': u'\u05B2',
'hatafpatahwidehebrew': u'\u05B2',
'hatafqamats': u'\u05B3',
'hatafqamats1b': u'\u05B3',
'hatafqamats28': u'\u05B3',
'hatafqamats34': u'\u05B3',
'hatafqamatshebrew': u'\u05B3',
'hatafqamatsnarrowhebrew': u'\u05B3',
'hatafqamatsquarterhebrew': u'\u05B3',
'hatafqamatswidehebrew': u'\u05B3',
'hatafsegol': u'\u05B1',
'hatafsegol17': u'\u05B1',
'hatafsegol24': u'\u05B1',
'hatafsegol30': u'\u05B1',
'hatafsegolhebrew': u'\u05B1',
'hatafsegolnarrowhebrew': u'\u05B1',
'hatafsegolquarterhebrew': u'\u05B1',
'hatafsegolwidehebrew': u'\u05B1',
'hbar': u'\u0127',
'hbopomofo': u'\u310F',
'hbrevebelow': u'\u1E2B',
'hcedilla': u'\u1E29',
'hcircle': u'\u24D7',
'hcircumflex': u'\u0125',
'hdieresis': u'\u1E27',
'hdotaccent': u'\u1E23',
'hdotbelow': u'\u1E25',
'he': u'\u05D4',
'heart': u'\u2665',
'heartsuitblack': u'\u2665',
'heartsuitwhite': u'\u2661',
'hedagesh': u'\uFB34',
'hedageshhebrew': u'\uFB34',
'hehaltonearabic': u'\u06C1',
'heharabic': u'\u0647',
'hehebrew': u'\u05D4',
'hehfinalaltonearabic': u'\uFBA7',
'hehfinalalttwoarabic': u'\uFEEA',
'hehfinalarabic': u'\uFEEA',
'hehhamzaabovefinalarabic': u'\uFBA5',
'hehhamzaaboveisolatedarabic': u'\uFBA4',
'hehinitialaltonearabic': u'\uFBA8',
'hehinitialarabic': u'\uFEEB',
'hehiragana': u'\u3078',
'hehmedialaltonearabic': u'\uFBA9',
'hehmedialarabic': u'\uFEEC',
'heiseierasquare': u'\u337B',
'hekatakana': u'\u30D8',
'hekatakanahalfwidth': u'\uFF8D',
'hekutaarusquare': u'\u3336',
'henghook': u'\u0267',
'herutusquare': u'\u3339',
'het': u'\u05D7',
'hethebrew': u'\u05D7',
'hhook': u'\u0266',
'hhooksuperior': u'\u02B1',
'hieuhacirclekorean': u'\u327B',
'hieuhaparenkorean': u'\u321B',
'hieuhcirclekorean': u'\u326D',
'hieuhkorean': u'\u314E',
'hieuhparenkorean': u'\u320D',
'hihiragana': u'\u3072',
'hikatakana': u'\u30D2',
'hikatakanahalfwidth': u'\uFF8B',
'hiriq': u'\u05B4',
'hiriq14': u'\u05B4',
'hiriq21': u'\u05B4',
'hiriq2d': u'\u05B4',
'hiriqhebrew': u'\u05B4',
'hiriqnarrowhebrew': u'\u05B4',
'hiriqquarterhebrew': u'\u05B4',
'hiriqwidehebrew': u'\u05B4',
'hlinebelow': u'\u1E96',
'hmonospace': u'\uFF48',
'hoarmenian': u'\u0570',
'hohipthai': u'\u0E2B',
'hohiragana': u'\u307B',
'hokatakana': u'\u30DB',
'hokatakanahalfwidth': u'\uFF8E',
'holam': u'\u05B9',
'holam19': u'\u05B9',
'holam26': u'\u05B9',
'holam32': u'\u05B9',
'holamhebrew': u'\u05B9',
'holamnarrowhebrew': u'\u05B9',
'holamquarterhebrew': u'\u05B9',
'holamwidehebrew': u'\u05B9',
'honokhukthai': u'\u0E2E',
'hookabovecomb': u'\u0309',
'hookcmb': u'\u0309',
'hookpalatalizedbelowcmb': u'\u0321',
'hookretroflexbelowcmb': u'\u0322',
'hoonsquare': u'\u3342',
'horicoptic': u'\u03E9',
'horizontalbar': u'\u2015',
'horncmb': u'\u031B',
'hotsprings': u'\u2668',
'house': u'\u2302',
'hparen': u'\u24A3',
'hsuperior': u'\u02B0',
'hturned': u'\u0265',
'huhiragana': u'\u3075',
'huiitosquare': u'\u3333',
'hukatakana': u'\u30D5',
'hukatakanahalfwidth': u'\uFF8C',
'hungarumlaut': u'\u02DD',
'hungarumlautcmb': u'\u030B',
'hv': u'\u0195',
'hyphen': u'\u002D',
'hypheninferior': u'\uF6E5',
'hyphenmonospace': u'\uFF0D',
'hyphensmall': u'\uFE63',
'hyphensuperior': u'\uF6E6',
'hyphentwo': u'\u2010',
'i': u'\u0069',
'iacute': u'\u00ED',
'iacyrillic': u'\u044F',
'ibengali': u'\u0987',
'ibopomofo': u'\u3127',
'ibreve': u'\u012D',
'icaron': u'\u01D0',
'icircle': u'\u24D8',
'icircumflex': u'\u00EE',
'icyrillic': u'\u0456',
'idblgrave': u'\u0209',
'ideographearthcircle': u'\u328F',
'ideographfirecircle': u'\u328B',
'ideographicallianceparen': u'\u323F',
'ideographiccallparen': u'\u323A',
'ideographiccentrecircle': u'\u32A5',
'ideographicclose': u'\u3006',
'ideographiccomma': u'\u3001',
'ideographiccommaleft': u'\uFF64',
'ideographiccongratulationparen': u'\u3237',
'ideographiccorrectcircle': u'\u32A3',
'ideographicearthparen': u'\u322F',
'ideographicenterpriseparen': u'\u323D',
'ideographicexcellentcircle': u'\u329D',
'ideographicfestivalparen': u'\u3240',
'ideographicfinancialcircle': u'\u3296',
'ideographicfinancialparen': u'\u3236',
'ideographicfireparen': u'\u322B',
'ideographichaveparen': u'\u3232',
'ideographichighcircle': u'\u32A4',
'ideographiciterationmark': u'\u3005',
'ideographiclaborcircle': u'\u3298',
'ideographiclaborparen': u'\u3238',
'ideographicleftcircle': u'\u32A7',
'ideographiclowcircle': u'\u32A6',
'ideographicmedicinecircle': u'\u32A9',
'ideographicmetalparen': u'\u322E',
'ideographicmoonparen': u'\u322A',
'ideographicnameparen': u'\u3234',
'ideographicperiod': u'\u3002',
'ideographicprintcircle': u'\u329E',
'ideographicreachparen': u'\u3243',
'ideographicrepresentparen': u'\u3239',
'ideographicresourceparen': u'\u323E',
'ideographicrightcircle': u'\u32A8',
'ideographicsecretcircle': u'\u3299',
'ideographicselfparen': u'\u3242',
'ideographicsocietyparen': u'\u3233',
'ideographicspace': u'\u3000',
'ideographicspecialparen': u'\u3235',
'ideographicstockparen': u'\u3231',
'ideographicstudyparen': u'\u323B',
'ideographicsunparen': u'\u3230',
'ideographicsuperviseparen': u'\u323C',
'ideographicwaterparen': u'\u322C',
'ideographicwoodparen': u'\u322D',
'ideographiczero': u'\u3007',
'ideographmetalcircle': u'\u328E',
'ideographmooncircle': u'\u328A',
'ideographnamecircle': u'\u3294',
'ideographsuncircle': u'\u3290',
'ideographwatercircle': u'\u328C',
'ideographwoodcircle': u'\u328D',
'ideva': u'\u0907',
'idieresis': u'\u00EF',
'idieresisacute': u'\u1E2F',
'idieresiscyrillic': u'\u04E5',
'idotbelow': u'\u1ECB',
'iebrevecyrillic': u'\u04D7',
'iecyrillic': u'\u0435',
'ieungacirclekorean': u'\u3275',
'ieungaparenkorean': u'\u3215',
'ieungcirclekorean': u'\u3267',
'ieungkorean': u'\u3147',
'ieungparenkorean': u'\u3207',
'igrave': u'\u00EC',
'igujarati': u'\u0A87',
'igurmukhi': u'\u0A07',
'ihiragana': u'\u3044',
'ihookabove': u'\u1EC9',
'iibengali': u'\u0988',
'iicyrillic': u'\u0438',
'iideva': u'\u0908',
'iigujarati': u'\u0A88',
'iigurmukhi': u'\u0A08',
'iimatragurmukhi': u'\u0A40',
'iinvertedbreve': u'\u020B',
'iishortcyrillic': u'\u0439',
'iivowelsignbengali': u'\u09C0',
'iivowelsigndeva': u'\u0940',
'iivowelsigngujarati': u'\u0AC0',
'ij': u'\u0133',
'ikatakana': u'\u30A4',
'ikatakanahalfwidth': u'\uFF72',
'ikorean': u'\u3163',
'ilde': u'\u02DC',
'iluyhebrew': u'\u05AC',
'imacron': u'\u012B',
'imacroncyrillic': u'\u04E3',
'imageorapproximatelyequal': u'\u2253',
'imatragurmukhi': u'\u0A3F',
'imonospace': u'\uFF49',
'increment': u'\u2206',
'infinity': u'\u221E',
'iniarmenian': u'\u056B',
'integral': u'\u222B',
'integralbottom': u'\u2321',
'integralbt': u'\u2321',
'integralex': u'\uF8F5',
'integraltop': u'\u2320',
'integraltp': u'\u2320',
'intersection': u'\u2229',
'intisquare': u'\u3305',
'invbullet': u'\u25D8',
'invcircle': u'\u25D9',
'invsmileface': u'\u263B',
'iocyrillic': u'\u0451',
'iogonek': u'\u012F',
'iota': u'\u03B9',
'iotadieresis': u'\u03CA',
'iotadieresistonos': u'\u0390',
'iotalatin': u'\u0269',
'iotatonos': u'\u03AF',
'iparen': u'\u24A4',
'irigurmukhi': u'\u0A72',
'ismallhiragana': u'\u3043',
'ismallkatakana': u'\u30A3',
'ismallkatakanahalfwidth': u'\uFF68',
'issharbengali': u'\u09FA',
'istroke': u'\u0268',
'isuperior': u'\uF6ED',
'iterationhiragana': u'\u309D',
'iterationkatakana': u'\u30FD',
'itilde': u'\u0129',
'itildebelow': u'\u1E2D',
'iubopomofo': u'\u3129',
'iucyrillic': u'\u044E',
'ivowelsignbengali': u'\u09BF',
'ivowelsigndeva': u'\u093F',
'ivowelsigngujarati': u'\u0ABF',
'izhitsacyrillic': u'\u0475',
'izhitsadblgravecyrillic': u'\u0477',
'j': u'\u006A',
'jaarmenian': u'\u0571',
'jabengali': u'\u099C',
'jadeva': u'\u091C',
'jagujarati': u'\u0A9C',
'jagurmukhi': u'\u0A1C',
'jbopomofo': u'\u3110',
'jcaron': u'\u01F0',
'jcircle': u'\u24D9',
'jcircumflex': u'\u0135',
'jcrossedtail': u'\u029D',
'jdotlessstroke': u'\u025F',
'jecyrillic': u'\u0458',
'jeemarabic': u'\u062C',
'jeemfinalarabic': u'\uFE9E',
'jeeminitialarabic': u'\uFE9F',
'jeemmedialarabic': u'\uFEA0',
'jeharabic': u'\u0698',
'jehfinalarabic': u'\uFB8B',
'jhabengali': u'\u099D',
'jhadeva': u'\u091D',
'jhagujarati': u'\u0A9D',
'jhagurmukhi': u'\u0A1D',
'jheharmenian': u'\u057B',
'jis': u'\u3004',
'jmonospace': u'\uFF4A',
'jparen': u'\u24A5',
'jsuperior': u'\u02B2',
'k': u'\u006B',
'kabashkircyrillic': u'\u04A1',
'kabengali': u'\u0995',
'kacute': u'\u1E31',
'kacyrillic': u'\u043A',
'kadescendercyrillic': u'\u049B',
'kadeva': u'\u0915',
'kaf': u'\u05DB',
'kafarabic': u'\u0643',
'kafdagesh': u'\uFB3B',
'kafdageshhebrew': u'\uFB3B',
'kaffinalarabic': u'\uFEDA',
'kafhebrew': u'\u05DB',
'kafinitialarabic': u'\uFEDB',
'kafmedialarabic': u'\uFEDC',
'kafrafehebrew': u'\uFB4D',
'kagujarati': u'\u0A95',
'kagurmukhi': u'\u0A15',
'kahiragana': u'\u304B',
'kahookcyrillic': u'\u04C4',
'kakatakana': u'\u30AB',
'kakatakanahalfwidth': u'\uFF76',
'kappa': u'\u03BA',
'kappasymbolgreek': u'\u03F0',
'kapyeounmieumkorean': u'\u3171',
'kapyeounphieuphkorean': u'\u3184',
'kapyeounpieupkorean': u'\u3178',
'kapyeounssangpieupkorean': u'\u3179',
'karoriisquare': u'\u330D',
'kashidaautoarabic': u'\u0640',
'kashidaautonosidebearingarabic': u'\u0640',
'kasmallkatakana': u'\u30F5',
'kasquare': u'\u3384',
'kasraarabic': u'\u0650',
'kasratanarabic': u'\u064D',
'kastrokecyrillic': u'\u049F',
'katahiraprolongmarkhalfwidth': u'\uFF70',
'kaverticalstrokecyrillic': u'\u049D',
'kbopomofo': u'\u310E',
'kcalsquare': u'\u3389',
'kcaron': u'\u01E9',
'kcedilla': u'\u0137',
'kcircle': u'\u24DA',
'kcommaaccent': u'\u0137',
'kdotbelow': u'\u1E33',
'keharmenian': u'\u0584',
'kehiragana': u'\u3051',
'kekatakana': u'\u30B1',
'kekatakanahalfwidth': u'\uFF79',
'kenarmenian': u'\u056F',
'kesmallkatakana': u'\u30F6',
'kgreenlandic': u'\u0138',
'khabengali': u'\u0996',
'khacyrillic': u'\u0445',
'khadeva': u'\u0916',
'khagujarati': u'\u0A96',
'khagurmukhi': u'\u0A16',
'khaharabic': u'\u062E',
'khahfinalarabic': u'\uFEA6',
'khahinitialarabic': u'\uFEA7',
'khahmedialarabic': u'\uFEA8',
'kheicoptic': u'\u03E7',
'khhadeva': u'\u0959',
'khhagurmukhi': u'\u0A59',
'khieukhacirclekorean': u'\u3278',
'khieukhaparenkorean': u'\u3218',
'khieukhcirclekorean': u'\u326A',
'khieukhkorean': u'\u314B',
'khieukhparenkorean': u'\u320A',
'khokhaithai': u'\u0E02',
'khokhonthai': u'\u0E05',
'khokhuatthai': u'\u0E03',
'khokhwaithai': u'\u0E04',
'khomutthai': u'\u0E5B',
'khook': u'\u0199',
'khorakhangthai': u'\u0E06',
'khzsquare': u'\u3391',
'kihiragana': u'\u304D',
'kikatakana': u'\u30AD',
'kikatakanahalfwidth': u'\uFF77',
'kiroguramusquare': u'\u3315',
'kiromeetorusquare': u'\u3316',
'kirosquare': u'\u3314',
'kiyeokacirclekorean': u'\u326E',
'kiyeokaparenkorean': u'\u320E',
'kiyeokcirclekorean': u'\u3260',
'kiyeokkorean': u'\u3131',
'kiyeokparenkorean': u'\u3200',
'kiyeoksioskorean': u'\u3133',
'kjecyrillic': u'\u045C',
'klinebelow': u'\u1E35',
'klsquare': u'\u3398',
'kmcubedsquare': u'\u33A6',
'kmonospace': u'\uFF4B',
'kmsquaredsquare': u'\u33A2',
'kohiragana': u'\u3053',
'kohmsquare': u'\u33C0',
'kokaithai': u'\u0E01',
'kokatakana': u'\u30B3',
'kokatakanahalfwidth': u'\uFF7A',
'kooposquare': u'\u331E',
'koppacyrillic': u'\u0481',
'koreanstandardsymbol': u'\u327F',
'koroniscmb': u'\u0343',
'kparen': u'\u24A6',
'kpasquare': u'\u33AA',
'ksicyrillic': u'\u046F',
'ktsquare': u'\u33CF',
'kturned': u'\u029E',
'kuhiragana': u'\u304F',
'kukatakana': u'\u30AF',
'kukatakanahalfwidth': u'\uFF78',
'kvsquare': u'\u33B8',
'kwsquare': u'\u33BE',
'l': u'\u006C',
'labengali': u'\u09B2',
'lacute': u'\u013A',
'ladeva': u'\u0932',
'lagujarati': u'\u0AB2',
'lagurmukhi': u'\u0A32',
'lakkhangyaothai': u'\u0E45',
'lamaleffinalarabic': u'\uFEFC',
'lamalefhamzaabovefinalarabic': u'\uFEF8',
'lamalefhamzaaboveisolatedarabic': u'\uFEF7',
'lamalefhamzabelowfinalarabic': u'\uFEFA',
'lamalefhamzabelowisolatedarabic': u'\uFEF9',
'lamalefisolatedarabic': u'\uFEFB',
'lamalefmaddaabovefinalarabic': u'\uFEF6',
'lamalefmaddaaboveisolatedarabic': u'\uFEF5',
'lamarabic': u'\u0644',
'lambda': u'\u03BB',
'lambdastroke': u'\u019B',
'lamed': u'\u05DC',
'lameddagesh': u'\uFB3C',
'lameddageshhebrew': u'\uFB3C',
'lamedhebrew': u'\u05DC',
'lamedholam': u'\u05DC\u05B9',
'lamedholamdagesh': u'\u05DC\u05B9\u05BC',
'lamedholamdageshhebrew': u'\u05DC\u05B9\u05BC',
'lamedholamhebrew': u'\u05DC\u05B9',
'lamfinalarabic': u'\uFEDE',
'lamhahinitialarabic': u'\uFCCA',
'laminitialarabic': u'\uFEDF',
'lamjeeminitialarabic': u'\uFCC9',
'lamkhahinitialarabic': u'\uFCCB',
'lamlamhehisolatedarabic': u'\uFDF2',
'lammedialarabic': u'\uFEE0',
'lammeemhahinitialarabic': u'\uFD88',
'lammeeminitialarabic': u'\uFCCC',
'lammeemjeeminitialarabic': u'\uFEDF\uFEE4\uFEA0',
'lammeemkhahinitialarabic': u'\uFEDF\uFEE4\uFEA8',
'largecircle': u'\u25EF',
'lbar': u'\u019A',
'lbelt': u'\u026C',
'lbopomofo': u'\u310C',
'lcaron': u'\u013E',
'lcedilla': u'\u013C',
'lcircle': u'\u24DB',
'lcircumflexbelow': u'\u1E3D',
'lcommaaccent': u'\u013C',
'ldot': u'\u0140',
'ldotaccent': u'\u0140',
'ldotbelow': u'\u1E37',
'ldotbelowmacron': u'\u1E39',
'leftangleabovecmb': u'\u031A',
'lefttackbelowcmb': u'\u0318',
'less': u'\u003C',
'lessequal': u'\u2264',
'lessequalorgreater': u'\u22DA',
'lessmonospace': u'\uFF1C',
'lessorequivalent': u'\u2272',
'lessorgreater': u'\u2276',
'lessoverequal': u'\u2266',
'lesssmall': u'\uFE64',
'lezh': u'\u026E',
'lfblock': u'\u258C',
'lhookretroflex': u'\u026D',
'lira': u'\u20A4',
'liwnarmenian': u'\u056C',
'lj': u'\u01C9',
'ljecyrillic': u'\u0459',
'll': u'\uF6C0',
'lladeva': u'\u0933',
'llagujarati': u'\u0AB3',
'llinebelow': u'\u1E3B',
'llladeva': u'\u0934',
'llvocalicbengali': u'\u09E1',
'llvocalicdeva': u'\u0961',
'llvocalicvowelsignbengali': u'\u09E3',
'llvocalicvowelsigndeva': u'\u0963',
'lmiddletilde': u'\u026B',
'lmonospace': u'\uFF4C',
'lmsquare': u'\u33D0',
'lochulathai': u'\u0E2C',
'logicaland': u'\u2227',
'logicalnot': u'\u00AC',
'logicalnotreversed': u'\u2310',
'logicalor': u'\u2228',
'lolingthai': u'\u0E25',
'longs': u'\u017F',
'lowlinecenterline': u'\uFE4E',
'lowlinecmb': u'\u0332',
'lowlinedashed': u'\uFE4D',
'lozenge': u'\u25CA',
'lparen': u'\u24A7',
'lslash': u'\u0142',
'lsquare': u'\u2113',
'lsuperior': u'\uF6EE',
'ltshade': u'\u2591',
'luthai': u'\u0E26',
'lvocalicbengali': u'\u098C',
'lvocalicdeva': u'\u090C',
'lvocalicvowelsignbengali': u'\u09E2',
'lvocalicvowelsigndeva': u'\u0962',
'lxsquare': u'\u33D3',
'm': u'\u006D',
'mabengali': u'\u09AE',
'macron': u'\u00AF',
'macronbelowcmb': u'\u0331',
'macroncmb': u'\u0304',
'macronlowmod': u'\u02CD',
'macronmonospace': u'\uFFE3',
'macute': u'\u1E3F',
'madeva': u'\u092E',
'magujarati': u'\u0AAE',
'magurmukhi': u'\u0A2E',
'mahapakhhebrew': u'\u05A4',
'mahapakhlefthebrew': u'\u05A4',
'mahiragana': u'\u307E',
'maichattawalowleftthai': u'\uF895',
'maichattawalowrightthai': u'\uF894',
'maichattawathai': u'\u0E4B',
'maichattawaupperleftthai': u'\uF893',
'maieklowleftthai': u'\uF88C',
'maieklowrightthai': u'\uF88B',
'maiekthai': u'\u0E48',
'maiekupperleftthai': u'\uF88A',
'maihanakatleftthai': u'\uF884',
'maihanakatthai': u'\u0E31',
'maitaikhuleftthai': u'\uF889',
'maitaikhuthai': u'\u0E47',
'maitholowleftthai': u'\uF88F',
'maitholowrightthai': u'\uF88E',
'maithothai': u'\u0E49',
'maithoupperleftthai': u'\uF88D',
'maitrilowleftthai': u'\uF892',
'maitrilowrightthai': u'\uF891',
'maitrithai': u'\u0E4A',
'maitriupperleftthai': u'\uF890',
'maiyamokthai': u'\u0E46',
'makatakana': u'\u30DE',
'makatakanahalfwidth': u'\uFF8F',
'male': u'\u2642',
'mansyonsquare': u'\u3347',
'maqafhebrew': u'\u05BE',
'mars': u'\u2642',
'masoracirclehebrew': u'\u05AF',
'masquare': u'\u3383',
'mbopomofo': u'\u3107',
'mbsquare': u'\u33D4',
'mcircle': u'\u24DC',
'mcubedsquare': u'\u33A5',
'mdotaccent': u'\u1E41',
'mdotbelow': u'\u1E43',
'meemarabic': u'\u0645',
'meemfinalarabic': u'\uFEE2',
'meeminitialarabic': u'\uFEE3',
'meemmedialarabic': u'\uFEE4',
'meemmeeminitialarabic': u'\uFCD1',
'meemmeemisolatedarabic': u'\uFC48',
'meetorusquare': u'\u334D',
'mehiragana': u'\u3081',
'meizierasquare': u'\u337E',
'mekatakana': u'\u30E1',
'mekatakanahalfwidth': u'\uFF92',
'mem': u'\u05DE',
'memdagesh': u'\uFB3E',
'memdageshhebrew': u'\uFB3E',
'memhebrew': u'\u05DE',
'menarmenian': u'\u0574',
'merkhahebrew': u'\u05A5',
'merkhakefulahebrew': u'\u05A6',
'merkhakefulalefthebrew': u'\u05A6',
'merkhalefthebrew': u'\u05A5',
'mhook': u'\u0271',
'mhzsquare': u'\u3392',
'middledotkatakanahalfwidth': u'\uFF65',
'middot': u'\u00B7',
'mieumacirclekorean': u'\u3272',
'mieumaparenkorean': u'\u3212',
'mieumcirclekorean': u'\u3264',
'mieumkorean': u'\u3141',
'mieumpansioskorean': u'\u3170',
'mieumparenkorean': u'\u3204',
'mieumpieupkorean': u'\u316E',
'mieumsioskorean': u'\u316F',
'mihiragana': u'\u307F',
'mikatakana': u'\u30DF',
'mikatakanahalfwidth': u'\uFF90',
'minus': u'\u2212',
'minusbelowcmb': u'\u0320',
'minuscircle': u'\u2296',
'minusmod': u'\u02D7',
'minusplus': u'\u2213',
'minute': u'\u2032',
'miribaarusquare': u'\u334A',
'mirisquare': u'\u3349',
'mlonglegturned': u'\u0270',
'mlsquare': u'\u3396',
'mmcubedsquare': u'\u33A3',
'mmonospace': u'\uFF4D',
'mmsquaredsquare': u'\u339F',
'mohiragana': u'\u3082',
'mohmsquare': u'\u33C1',
'mokatakana': u'\u30E2',
'mokatakanahalfwidth': u'\uFF93',
'molsquare': u'\u33D6',
'momathai': u'\u0E21',
'moverssquare': u'\u33A7',
'moverssquaredsquare': u'\u33A8',
'mparen': u'\u24A8',
'mpasquare': u'\u33AB',
'mssquare': u'\u33B3',
'msuperior': u'\uF6EF',
'mturned': u'\u026F',
'mu': u'\u00B5',
'mu1': u'\u00B5',
'muasquare': u'\u3382',
'muchgreater': u'\u226B',
'muchless': u'\u226A',
'mufsquare': u'\u338C',
'mugreek': u'\u03BC',
'mugsquare': u'\u338D',
'muhiragana': u'\u3080',
'mukatakana': u'\u30E0',
'mukatakanahalfwidth': u'\uFF91',
'mulsquare': u'\u3395',
'multiply': u'\u00D7',
'mumsquare': u'\u339B',
'munahhebrew': u'\u05A3',
'munahlefthebrew': u'\u05A3',
'musicalnote': u'\u266A',
'musicalnotedbl': u'\u266B',
'musicflatsign': u'\u266D',
'musicsharpsign': u'\u266F',
'mussquare': u'\u33B2',
'muvsquare': u'\u33B6',
'muwsquare': u'\u33BC',
'mvmegasquare': u'\u33B9',
'mvsquare': u'\u33B7',
'mwmegasquare': u'\u33BF',
'mwsquare': u'\u33BD',
'n': u'\u006E',
'nabengali': u'\u09A8',
'nabla': u'\u2207',
'nacute': u'\u0144',
'nadeva': u'\u0928',
'nagujarati': u'\u0AA8',
'nagurmukhi': u'\u0A28',
'nahiragana': u'\u306A',
'nakatakana': u'\u30CA',
'nakatakanahalfwidth': u'\uFF85',
'napostrophe': u'\u0149',
'nasquare': u'\u3381',
'nbopomofo': u'\u310B',
'nbspace': u'\u00A0',
'ncaron': u'\u0148',
'ncedilla': u'\u0146',
'ncircle': u'\u24DD',
'ncircumflexbelow': u'\u1E4B',
'ncommaaccent': u'\u0146',
'ndotaccent': u'\u1E45',
'ndotbelow': u'\u1E47',
'nehiragana': u'\u306D',
'nekatakana': u'\u30CD',
'nekatakanahalfwidth': u'\uFF88',
'newsheqelsign': u'\u20AA',
'nfsquare': u'\u338B',
'ngabengali': u'\u0999',
'ngadeva': u'\u0919',
'ngagujarati': u'\u0A99',
'ngagurmukhi': u'\u0A19',
'ngonguthai': u'\u0E07',
'nhiragana': u'\u3093',
'nhookleft': u'\u0272',
'nhookretroflex': u'\u0273',
'nieunacirclekorean': u'\u326F',
'nieunaparenkorean': u'\u320F',
'nieuncieuckorean': u'\u3135',
'nieuncirclekorean': u'\u3261',
'nieunhieuhkorean': u'\u3136',
'nieunkorean': u'\u3134',
'nieunpansioskorean': u'\u3168',
'nieunparenkorean': u'\u3201',
'nieunsioskorean': u'\u3167',
'nieuntikeutkorean': u'\u3166',
'nihiragana': u'\u306B',
'nikatakana': u'\u30CB',
'nikatakanahalfwidth': u'\uFF86',
'nikhahitleftthai': u'\uF899',
'nikhahitthai': u'\u0E4D',
'nine': u'\u0039',
'ninearabic': u'\u0669',
'ninebengali': u'\u09EF',
'ninecircle': u'\u2468',
'ninecircleinversesansserif': u'\u2792',
'ninedeva': u'\u096F',
'ninegujarati': u'\u0AEF',
'ninegurmukhi': u'\u0A6F',
'ninehackarabic': u'\u0669',
'ninehangzhou': u'\u3029',
'nineideographicparen': u'\u3228',
'nineinferior': u'\u2089',
'ninemonospace': u'\uFF19',
'nineoldstyle': u'\uF739',
'nineparen': u'\u247C',
'nineperiod': u'\u2490',
'ninepersian': u'\u06F9',
'nineroman': u'\u2178',
'ninesuperior': u'\u2079',
'nineteencircle': u'\u2472',
'nineteenparen': u'\u2486',
'nineteenperiod': u'\u249A',
'ninethai': u'\u0E59',
'nj': u'\u01CC',
'njecyrillic': u'\u045A',
'nkatakana': u'\u30F3',
'nkatakanahalfwidth': u'\uFF9D',
'nlegrightlong': u'\u019E',
'nlinebelow': u'\u1E49',
'nmonospace': u'\uFF4E',
'nmsquare': u'\u339A',
'nnabengali': u'\u09A3',
'nnadeva': u'\u0923',
'nnagujarati': u'\u0AA3',
'nnagurmukhi': u'\u0A23',
'nnnadeva': u'\u0929',
'nohiragana': u'\u306E',
'nokatakana': u'\u30CE',
'nokatakanahalfwidth': u'\uFF89',
'nonbreakingspace': u'\u00A0',
'nonenthai': u'\u0E13',
'nonuthai': u'\u0E19',
'noonarabic': u'\u0646',
'noonfinalarabic': u'\uFEE6',
'noonghunnaarabic': u'\u06BA',
'noonghunnafinalarabic': u'\uFB9F',
'noonhehinitialarabic': u'\uFEE7\uFEEC',
'nooninitialarabic': u'\uFEE7',
'noonjeeminitialarabic': u'\uFCD2',
'noonjeemisolatedarabic': u'\uFC4B',
'noonmedialarabic': u'\uFEE8',
'noonmeeminitialarabic': u'\uFCD5',
'noonmeemisolatedarabic': u'\uFC4E',
'noonnoonfinalarabic': u'\uFC8D',
'notcontains': u'\u220C',
'notelement': u'\u2209',
'notelementof': u'\u2209',
'notequal': u'\u2260',
'notgreater': u'\u226F',
'notgreaternorequal': u'\u2271',
'notgreaternorless': u'\u2279',
'notidentical': u'\u2262',
'notless': u'\u226E',
'notlessnorequal': u'\u2270',
'notparallel': u'\u2226',
'notprecedes': u'\u2280',
'notsubset': u'\u2284',
'notsucceeds': u'\u2281',
'notsuperset': u'\u2285',
'nowarmenian': u'\u0576',
'nparen': u'\u24A9',
'nssquare': u'\u33B1',
'nsuperior': u'\u207F',
'ntilde': u'\u00F1',
'nu': u'\u03BD',
'nuhiragana': u'\u306C',
'nukatakana': u'\u30CC',
'nukatakanahalfwidth': u'\uFF87',
'nuktabengali': u'\u09BC',
'nuktadeva': u'\u093C',
'nuktagujarati': u'\u0ABC',
'nuktagurmukhi': u'\u0A3C',
'numbersign': u'\u0023',
'numbersignmonospace': u'\uFF03',
'numbersignsmall': u'\uFE5F',
'numeralsigngreek': u'\u0374',
'numeralsignlowergreek': u'\u0375',
'numero': u'\u2116',
'nun': u'\u05E0',
'nundagesh': u'\uFB40',
'nundageshhebrew': u'\uFB40',
'nunhebrew': u'\u05E0',
'nvsquare': u'\u33B5',
'nwsquare': u'\u33BB',
'nyabengali': u'\u099E',
'nyadeva': u'\u091E',
'nyagujarati': u'\u0A9E',
'nyagurmukhi': u'\u0A1E',
'o': u'\u006F',
'oacute': u'\u00F3',
'oangthai': u'\u0E2D',
'obarred': u'\u0275',
'obarredcyrillic': u'\u04E9',
'obarreddieresiscyrillic': u'\u04EB',
'obengali': u'\u0993',
'obopomofo': u'\u311B',
'obreve': u'\u014F',
'ocandradeva': u'\u0911',
'ocandragujarati': u'\u0A91',
'ocandravowelsigndeva': u'\u0949',
'ocandravowelsigngujarati': u'\u0AC9',
'ocaron': u'\u01D2',
'ocircle': u'\u24DE',
'ocircumflex': u'\u00F4',
'ocircumflexacute': u'\u1ED1',
'ocircumflexdotbelow': u'\u1ED9',
'ocircumflexgrave': u'\u1ED3',
'ocircumflexhookabove': u'\u1ED5',
'ocircumflextilde': u'\u1ED7',
'ocyrillic': u'\u043E',
'odblacute': u'\u0151',
'odblgrave': u'\u020D',
'odeva': u'\u0913',
'odieresis': u'\u00F6',
'odieresiscyrillic': u'\u04E7',
'odotbelow': u'\u1ECD',
'oe': u'\u0153',
'oekorean': u'\u315A',
'ogonek': u'\u02DB',
'ogonekcmb': u'\u0328',
'ograve': u'\u00F2',
'ogujarati': u'\u0A93',
'oharmenian': u'\u0585',
'ohiragana': u'\u304A',
'ohookabove': u'\u1ECF',
'ohorn': u'\u01A1',
'ohornacute': u'\u1EDB',
'ohorndotbelow': u'\u1EE3',
'ohorngrave': u'\u1EDD',
'ohornhookabove': u'\u1EDF',
'ohorntilde': u'\u1EE1',
'ohungarumlaut': u'\u0151',
'oi': u'\u01A3',
'oinvertedbreve': u'\u020F',
'okatakana': u'\u30AA',
'okatakanahalfwidth': u'\uFF75',
'okorean': u'\u3157',
'olehebrew': u'\u05AB',
'omacron': u'\u014D',
'omacronacute': u'\u1E53',
'omacrongrave': u'\u1E51',
'omdeva': u'\u0950',
'omega': u'\u03C9',
'omega1': u'\u03D6',
'omegacyrillic': u'\u0461',
'omegalatinclosed': u'\u0277',
'omegaroundcyrillic': u'\u047B',
'omegatitlocyrillic': u'\u047D',
'omegatonos': u'\u03CE',
'omgujarati': u'\u0AD0',
'omicron': u'\u03BF',
'omicrontonos': u'\u03CC',
'omonospace': u'\uFF4F',
'one': u'\u0031',
'onearabic': u'\u0661',
'onebengali': u'\u09E7',
'onecircle': u'\u2460',
'onecircleinversesansserif': u'\u278A',
'onedeva': u'\u0967',
'onedotenleader': u'\u2024',
'oneeighth': u'\u215B',
'onefitted': u'\uF6DC',
'onegujarati': u'\u0AE7',
'onegurmukhi': u'\u0A67',
'onehackarabic': u'\u0661',
'onehalf': u'\u00BD',
'onehangzhou': u'\u3021',
'oneideographicparen': u'\u3220',
'oneinferior': u'\u2081',
'onemonospace': u'\uFF11',
'onenumeratorbengali': u'\u09F4',
'oneoldstyle': u'\uF731',
'oneparen': u'\u2474',
'oneperiod': u'\u2488',
'onepersian': u'\u06F1',
'onequarter': u'\u00BC',
'oneroman': u'\u2170',
'onesuperior': u'\u00B9',
'onethai': u'\u0E51',
'onethird': u'\u2153',
'oogonek': u'\u01EB',
'oogonekmacron': u'\u01ED',
'oogurmukhi': u'\u0A13',
'oomatragurmukhi': u'\u0A4B',
'oopen': u'\u0254',
'oparen': u'\u24AA',
'openbullet': u'\u25E6',
'option': u'\u2325',
'ordfeminine': u'\u00AA',
'ordmasculine': u'\u00BA',
'orthogonal': u'\u221F',
'oshortdeva': u'\u0912',
'oshortvowelsigndeva': u'\u094A',
'oslash': u'\u00F8',
'oslashacute': u'\u01FF',
'osmallhiragana': u'\u3049',
'osmallkatakana': u'\u30A9',
'osmallkatakanahalfwidth': u'\uFF6B',
'ostrokeacute': u'\u01FF',
'osuperior': u'\uF6F0',
'otcyrillic': u'\u047F',
'otilde': u'\u00F5',
'otildeacute': u'\u1E4D',
'otildedieresis': u'\u1E4F',
'oubopomofo': u'\u3121',
'overline': u'\u203E',
'overlinecenterline': u'\uFE4A',
'overlinecmb': u'\u0305',
'overlinedashed': u'\uFE49',
'overlinedblwavy': u'\uFE4C',
'overlinewavy': u'\uFE4B',
'overscore': u'\u00AF',
'ovowelsignbengali': u'\u09CB',
'ovowelsigndeva': u'\u094B',
'ovowelsigngujarati': u'\u0ACB',
'p': u'\u0070',
'paampssquare': u'\u3380',
'paasentosquare': u'\u332B',
'pabengali': u'\u09AA',
'pacute': u'\u1E55',
'padeva': u'\u092A',
'pagedown': u'\u21DF',
'pageup': u'\u21DE',
'pagujarati': u'\u0AAA',
'pagurmukhi': u'\u0A2A',
'pahiragana': u'\u3071',
'paiyannoithai': u'\u0E2F',
'pakatakana': u'\u30D1',
'palatalizationcyrilliccmb': u'\u0484',
'palochkacyrillic': u'\u04C0',
'pansioskorean': u'\u317F',
'paragraph': u'\u00B6',
'parallel': u'\u2225',
'parenleft': u'\u0028',
'parenleftaltonearabic': u'\uFD3E',
'parenleftbt': u'\uF8ED',
'parenleftex': u'\uF8EC',
'parenleftinferior': u'\u208D',
'parenleftmonospace': u'\uFF08',
'parenleftsmall': u'\uFE59',
'parenleftsuperior': u'\u207D',
'parenlefttp': u'\uF8EB',
'parenleftvertical': u'\uFE35',
'parenright': u'\u0029',
'parenrightaltonearabic': u'\uFD3F',
'parenrightbt': u'\uF8F8',
'parenrightex': u'\uF8F7',
'parenrightinferior': u'\u208E',
'parenrightmonospace': u'\uFF09',
'parenrightsmall': u'\uFE5A',
'parenrightsuperior': u'\u207E',
'parenrighttp': u'\uF8F6',
'parenrightvertical': u'\uFE36',
'partialdiff': u'\u2202',
'paseqhebrew': u'\u05C0',
'pashtahebrew': u'\u0599',
'pasquare': u'\u33A9',
'patah': u'\u05B7',
'patah11': u'\u05B7',
'patah1d': u'\u05B7',
'patah2a': u'\u05B7',
'patahhebrew': u'\u05B7',
'patahnarrowhebrew': u'\u05B7',
'patahquarterhebrew': u'\u05B7',
'patahwidehebrew': u'\u05B7',
'pazerhebrew': u'\u05A1',
'pbopomofo': u'\u3106',
'pcircle': u'\u24DF',
'pdotaccent': u'\u1E57',
'pe': u'\u05E4',
'pecyrillic': u'\u043F',
'pedagesh': u'\uFB44',
'pedageshhebrew': u'\uFB44',
'peezisquare': u'\u333B',
'pefinaldageshhebrew': u'\uFB43',
'peharabic': u'\u067E',
'peharmenian': u'\u057A',
'pehebrew': u'\u05E4',
'pehfinalarabic': u'\uFB57',
'pehinitialarabic': u'\uFB58',
'pehiragana': u'\u307A',
'pehmedialarabic': u'\uFB59',
'pekatakana': u'\u30DA',
'pemiddlehookcyrillic': u'\u04A7',
'perafehebrew': u'\uFB4E',
'percent': u'\u0025',
'percentarabic': u'\u066A',
'percentmonospace': u'\uFF05',
'percentsmall': u'\uFE6A',
'period': u'\u002E',
'periodarmenian': u'\u0589',
'periodcentered': u'\u00B7',
'periodhalfwidth': u'\uFF61',
'periodinferior': u'\uF6E7',
'periodmonospace': u'\uFF0E',
'periodsmall': u'\uFE52',
'periodsuperior': u'\uF6E8',
'perispomenigreekcmb': u'\u0342',
'perpendicular': u'\u22A5',
'perthousand': u'\u2030',
'peseta': u'\u20A7',
'pfsquare': u'\u338A',
'phabengali': u'\u09AB',
'phadeva': u'\u092B',
'phagujarati': u'\u0AAB',
'phagurmukhi': u'\u0A2B',
'phi': u'\u03C6',
'phi1': u'\u03D5',
'phieuphacirclekorean': u'\u327A',
'phieuphaparenkorean': u'\u321A',
'phieuphcirclekorean': u'\u326C',
'phieuphkorean': u'\u314D',
'phieuphparenkorean': u'\u320C',
'philatin': u'\u0278',
'phinthuthai': u'\u0E3A',
'phisymbolgreek': u'\u03D5',
'phook': u'\u01A5',
'phophanthai': u'\u0E1E',
'phophungthai': u'\u0E1C',
'phosamphaothai': u'\u0E20',
'pi': u'\u03C0',
'pieupacirclekorean': u'\u3273',
'pieupaparenkorean': u'\u3213',
'pieupcieuckorean': u'\u3176',
'pieupcirclekorean': u'\u3265',
'pieupkiyeokkorean': u'\u3172',
'pieupkorean': u'\u3142',
'pieupparenkorean': u'\u3205',
'pieupsioskiyeokkorean': u'\u3174',
'pieupsioskorean': u'\u3144',
'pieupsiostikeutkorean': u'\u3175',
'pieupthieuthkorean': u'\u3177',
'pieuptikeutkorean': u'\u3173',
'pihiragana': u'\u3074',
'pikatakana': u'\u30D4',
'pisymbolgreek': u'\u03D6',
'piwrarmenian': u'\u0583',
'plus': u'\u002B',
'plusbelowcmb': u'\u031F',
'pluscircle': u'\u2295',
'plusminus': u'\u00B1',
'plusmod': u'\u02D6',
'plusmonospace': u'\uFF0B',
'plussmall': u'\uFE62',
'plussuperior': u'\u207A',
'pmonospace': u'\uFF50',
'pmsquare': u'\u33D8',
'pohiragana': u'\u307D',
'pointingindexdownwhite': u'\u261F',
'pointingindexleftwhite': u'\u261C',
'pointingindexrightwhite': u'\u261E',
'pointingindexupwhite': u'\u261D',
'pokatakana': u'\u30DD',
'poplathai': u'\u0E1B',
'postalmark': u'\u3012',
'postalmarkface': u'\u3020',
'pparen': u'\u24AB',
'precedes': u'\u227A',
'prescription': u'\u211E',
'primemod': u'\u02B9',
'primereversed': u'\u2035',
'product': u'\u220F',
'projective': u'\u2305',
'prolongedkana': u'\u30FC',
'propellor': u'\u2318',
'propersubset': u'\u2282',
'propersuperset': u'\u2283',
'proportion': u'\u2237',
'proportional': u'\u221D',
'psi': u'\u03C8',
'psicyrillic': u'\u0471',
'psilipneumatacyrilliccmb': u'\u0486',
'pssquare': u'\u33B0',
'puhiragana': u'\u3077',
'pukatakana': u'\u30D7',
'pvsquare': u'\u33B4',
'pwsquare': u'\u33BA',
'q': u'\u0071',
'qadeva': u'\u0958',
'qadmahebrew': u'\u05A8',
'qafarabic': u'\u0642',
'qaffinalarabic': u'\uFED6',
'qafinitialarabic': u'\uFED7',
'qafmedialarabic': u'\uFED8',
'qamats': u'\u05B8',
'qamats10': u'\u05B8',
'qamats1a': u'\u05B8',
'qamats1c': u'\u05B8',
'qamats27': u'\u05B8',
'qamats29': u'\u05B8',
'qamats33': u'\u05B8',
'qamatsde': u'\u05B8',
'qamatshebrew': u'\u05B8',
'qamatsnarrowhebrew': u'\u05B8',
'qamatsqatanhebrew': u'\u05B8',
'qamatsqatannarrowhebrew': u'\u05B8',
'qamatsqatanquarterhebrew': u'\u05B8',
'qamatsqatanwidehebrew': u'\u05B8',
'qamatsquarterhebrew': u'\u05B8',
'qamatswidehebrew': u'\u05B8',
'qarneyparahebrew': u'\u059F',
'qbopomofo': u'\u3111',
'qcircle': u'\u24E0',
'qhook': u'\u02A0',
'qmonospace': u'\uFF51',
'qof': u'\u05E7',
'qofdagesh': u'\uFB47',
'qofdageshhebrew': u'\uFB47',
'qofhatafpatah': u'\u05E7\u05B2',
'qofhatafpatahhebrew': u'\u05E7\u05B2',
'qofhatafsegol': u'\u05E7\u05B1',
'qofhatafsegolhebrew': u'\u05E7\u05B1',
'qofhebrew': u'\u05E7',
'qofhiriq': u'\u05E7\u05B4',
'qofhiriqhebrew': u'\u05E7\u05B4',
'qofholam': u'\u05E7\u05B9',
'qofholamhebrew': u'\u05E7\u05B9',
'qofpatah': u'\u05E7\u05B7',
'qofpatahhebrew': u'\u05E7\u05B7',
'qofqamats': u'\u05E7\u05B8',
'qofqamatshebrew': u'\u05E7\u05B8',
'qofqubuts': u'\u05E7\u05BB',
'qofqubutshebrew': u'\u05E7\u05BB',
'qofsegol': u'\u05E7\u05B6',
'qofsegolhebrew': u'\u05E7\u05B6',
'qofsheva': u'\u05E7\u05B0',
'qofshevahebrew': u'\u05E7\u05B0',
'qoftsere': u'\u05E7\u05B5',
'qoftserehebrew': u'\u05E7\u05B5',
'qparen': u'\u24AC',
'quarternote': u'\u2669',
'qubuts': u'\u05BB',
'qubuts18': u'\u05BB',
'qubuts25': u'\u05BB',
'qubuts31': u'\u05BB',
'qubutshebrew': u'\u05BB',
'qubutsnarrowhebrew': u'\u05BB',
'qubutsquarterhebrew': u'\u05BB',
'qubutswidehebrew': u'\u05BB',
'question': u'\u003F',
'questionarabic': u'\u061F',
'questionarmenian': u'\u055E',
'questiondown': u'\u00BF',
'questiondownsmall': u'\uF7BF',
'questiongreek': u'\u037E',
'questionmonospace': u'\uFF1F',
'questionsmall': u'\uF73F',
'quotedbl': u'\u0022',
'quotedblbase': u'\u201E',
'quotedblleft': u'\u201C',
'quotedblmonospace': u'\uFF02',
'quotedblprime': u'\u301E',
'quotedblprimereversed': u'\u301D',
'quotedblright': u'\u201D',
'quoteleft': u'\u2018',
'quoteleftreversed': u'\u201B',
'quotereversed': u'\u201B',
'quoteright': u'\u2019',
'quoterightn': u'\u0149',
'quotesinglbase': u'\u201A',
'quotesingle': u'\u0027',
'quotesinglemonospace': u'\uFF07',
'r': u'\u0072',
'raarmenian': u'\u057C',
'rabengali': u'\u09B0',
'racute': u'\u0155',
'radeva': u'\u0930',
'radical': u'\u221A',
'radicalex': u'\uF8E5',
'radoverssquare': u'\u33AE',
'radoverssquaredsquare': u'\u33AF',
'radsquare': u'\u33AD',
'rafe': u'\u05BF',
'rafehebrew': u'\u05BF',
'ragujarati': u'\u0AB0',
'ragurmukhi': u'\u0A30',
'rahiragana': u'\u3089',
'rakatakana': u'\u30E9',
'rakatakanahalfwidth': u'\uFF97',
'ralowerdiagonalbengali': u'\u09F1',
'ramiddlediagonalbengali': u'\u09F0',
'ramshorn': u'\u0264',
'ratio': u'\u2236',
'rbopomofo': u'\u3116',
'rcaron': u'\u0159',
'rcedilla': u'\u0157',
'rcircle': u'\u24E1',
'rcommaaccent': u'\u0157',
'rdblgrave': u'\u0211',
'rdotaccent': u'\u1E59',
'rdotbelow': u'\u1E5B',
'rdotbelowmacron': u'\u1E5D',
'referencemark': u'\u203B',
'reflexsubset': u'\u2286',
'reflexsuperset': u'\u2287',
'registered': u'\u00AE',
'registersans': u'\uF8E8',
'registerserif': u'\uF6DA',
'reharabic': u'\u0631',
'reharmenian': u'\u0580',
'rehfinalarabic': u'\uFEAE',
'rehiragana': u'\u308C',
'rehyehaleflamarabic': u'\u0631\uFEF3\uFE8E\u0644',
'rekatakana': u'\u30EC',
'rekatakanahalfwidth': u'\uFF9A',
'resh': u'\u05E8',
'reshdageshhebrew': u'\uFB48',
'reshhatafpatah': u'\u05E8\u05B2',
'reshhatafpatahhebrew': u'\u05E8\u05B2',
'reshhatafsegol': u'\u05E8\u05B1',
'reshhatafsegolhebrew': u'\u05E8\u05B1',
'reshhebrew': u'\u05E8',
'reshhiriq': u'\u05E8\u05B4',
'reshhiriqhebrew': u'\u05E8\u05B4',
'reshholam': u'\u05E8\u05B9',
'reshholamhebrew': u'\u05E8\u05B9',
'reshpatah': u'\u05E8\u05B7',
'reshpatahhebrew': u'\u05E8\u05B7',
'reshqamats': u'\u05E8\u05B8',
'reshqamatshebrew': u'\u05E8\u05B8',
'reshqubuts': u'\u05E8\u05BB',
'reshqubutshebrew': u'\u05E8\u05BB',
'reshsegol': u'\u05E8\u05B6',
'reshsegolhebrew': u'\u05E8\u05B6',
'reshsheva': u'\u05E8\u05B0',
'reshshevahebrew': u'\u05E8\u05B0',
'reshtsere': u'\u05E8\u05B5',
'reshtserehebrew': u'\u05E8\u05B5',
'reversedtilde': u'\u223D',
'reviahebrew': u'\u0597',
'reviamugrashhebrew': u'\u0597',
'revlogicalnot': u'\u2310',
'rfishhook': u'\u027E',
'rfishhookreversed': u'\u027F',
'rhabengali': u'\u09DD',
'rhadeva': u'\u095D',
'rho': u'\u03C1',
'rhook': u'\u027D',
'rhookturned': u'\u027B',
'rhookturnedsuperior': u'\u02B5',
'rhosymbolgreek': u'\u03F1',
'rhotichookmod': u'\u02DE',
'rieulacirclekorean': u'\u3271',
'rieulaparenkorean': u'\u3211',
'rieulcirclekorean': u'\u3263',
'rieulhieuhkorean': u'\u3140',
'rieulkiyeokkorean': u'\u313A',
'rieulkiyeoksioskorean': u'\u3169',
'rieulkorean': u'\u3139',
'rieulmieumkorean': u'\u313B',
'rieulpansioskorean': u'\u316C',
'rieulparenkorean': u'\u3203',
'rieulphieuphkorean': u'\u313F',
'rieulpieupkorean': u'\u313C',
'rieulpieupsioskorean': u'\u316B',
'rieulsioskorean': u'\u313D',
'rieulthieuthkorean': u'\u313E',
'rieultikeutkorean': u'\u316A',
'rieulyeorinhieuhkorean': u'\u316D',
'rightangle': u'\u221F',
'righttackbelowcmb': u'\u0319',
'righttriangle': u'\u22BF',
'rihiragana': u'\u308A',
'rikatakana': u'\u30EA',
'rikatakanahalfwidth': u'\uFF98',
'ring': u'\u02DA',
'ringbelowcmb': u'\u0325',
'ringcmb': u'\u030A',
'ringhalfleft': u'\u02BF',
'ringhalfleftarmenian': u'\u0559',
'ringhalfleftbelowcmb': u'\u031C',
'ringhalfleftcentered': u'\u02D3',
'ringhalfright': u'\u02BE',
'ringhalfrightbelowcmb': u'\u0339',
'ringhalfrightcentered': u'\u02D2',
'rinvertedbreve': u'\u0213',
'rittorusquare': u'\u3351',
'rlinebelow': u'\u1E5F',
'rlongleg': u'\u027C',
'rlonglegturned': u'\u027A',
'rmonospace': u'\uFF52',
'rohiragana': u'\u308D',
'rokatakana': u'\u30ED',
'rokatakanahalfwidth': u'\uFF9B',
'roruathai': u'\u0E23',
'rparen': u'\u24AD',
'rrabengali': u'\u09DC',
'rradeva': u'\u0931',
'rragurmukhi': u'\u0A5C',
'rreharabic': u'\u0691',
'rrehfinalarabic': u'\uFB8D',
'rrvocalicbengali': u'\u09E0',
'rrvocalicdeva': u'\u0960',
'rrvocalicgujarati': u'\u0AE0',
'rrvocalicvowelsignbengali': u'\u09C4',
'rrvocalicvowelsigndeva': u'\u0944',
'rrvocalicvowelsigngujarati': u'\u0AC4',
'rsuperior': u'\uF6F1',
'rtblock': u'\u2590',
'rturned': u'\u0279',
'rturnedsuperior': u'\u02B4',
'ruhiragana': u'\u308B',
'rukatakana': u'\u30EB',
'rukatakanahalfwidth': u'\uFF99',
'rupeemarkbengali': u'\u09F2',
'rupeesignbengali': u'\u09F3',
'rupiah': u'\uF6DD',
'ruthai': u'\u0E24',
'rvocalicbengali': u'\u098B',
'rvocalicdeva': u'\u090B',
'rvocalicgujarati': u'\u0A8B',
'rvocalicvowelsignbengali': u'\u09C3',
'rvocalicvowelsigndeva': u'\u0943',
'rvocalicvowelsigngujarati': u'\u0AC3',
's': u'\u0073',
'sabengali': u'\u09B8',
'sacute': u'\u015B',
'sacutedotaccent': u'\u1E65',
'sadarabic': u'\u0635',
'sadeva': u'\u0938',
'sadfinalarabic': u'\uFEBA',
'sadinitialarabic': u'\uFEBB',
'sadmedialarabic': u'\uFEBC',
'sagujarati': u'\u0AB8',
'sagurmukhi': u'\u0A38',
'sahiragana': u'\u3055',
'sakatakana': u'\u30B5',
'sakatakanahalfwidth': u'\uFF7B',
'sallallahoualayhewasallamarabic': u'\uFDFA',
'samekh': u'\u05E1',
'samekhdagesh': u'\uFB41',
'samekhdageshhebrew': u'\uFB41',
'samekhhebrew': u'\u05E1',
'saraaathai': u'\u0E32',
'saraaethai': u'\u0E41',
'saraaimaimalaithai': u'\u0E44',
'saraaimaimuanthai': u'\u0E43',
'saraamthai': u'\u0E33',
'saraathai': u'\u0E30',
'saraethai': u'\u0E40',
'saraiileftthai': u'\uF886',
'saraiithai': u'\u0E35',
'saraileftthai': u'\uF885',
'saraithai': u'\u0E34',
'saraothai': u'\u0E42',
'saraueeleftthai': u'\uF888',
'saraueethai': u'\u0E37',
'saraueleftthai': u'\uF887',
'sarauethai': u'\u0E36',
'sarauthai': u'\u0E38',
'sarauuthai': u'\u0E39',
'sbopomofo': u'\u3119',
'scaron': u'\u0161',
'scarondotaccent': u'\u1E67',
'scedilla': u'\u015F',
'schwa': u'\u0259',
'schwacyrillic': u'\u04D9',
'schwadieresiscyrillic': u'\u04DB',
'schwahook': u'\u025A',
'scircle': u'\u24E2',
'scircumflex': u'\u015D',
'scommaaccent': u'\u0219',
'sdotaccent': u'\u1E61',
'sdotbelow': u'\u1E63',
'sdotbelowdotaccent': u'\u1E69',
'seagullbelowcmb': u'\u033C',
'second': u'\u2033',
'secondtonechinese': u'\u02CA',
'section': u'\u00A7',
'seenarabic': u'\u0633',
'seenfinalarabic': u'\uFEB2',
'seeninitialarabic': u'\uFEB3',
'seenmedialarabic': u'\uFEB4',
'segol': u'\u05B6',
'segol13': u'\u05B6',
'segol1f': u'\u05B6',
'segol2c': u'\u05B6',
'segolhebrew': u'\u05B6',
'segolnarrowhebrew': u'\u05B6',
'segolquarterhebrew': u'\u05B6',
'segoltahebrew': u'\u0592',
'segolwidehebrew': u'\u05B6',
'seharmenian': u'\u057D',
'sehiragana': u'\u305B',
'sekatakana': u'\u30BB',
'sekatakanahalfwidth': u'\uFF7E',
'semicolon': u'\u003B',
'semicolonarabic': u'\u061B',
'semicolonmonospace': u'\uFF1B',
'semicolonsmall': u'\uFE54',
'semivoicedmarkkana': u'\u309C',
'semivoicedmarkkanahalfwidth': u'\uFF9F',
'sentisquare': u'\u3322',
'sentosquare': u'\u3323',
'seven': u'\u0037',
'sevenarabic': u'\u0667',
'sevenbengali': u'\u09ED',
'sevencircle': u'\u2466',
'sevencircleinversesansserif': u'\u2790',
'sevendeva': u'\u096D',
'seveneighths': u'\u215E',
'sevengujarati': u'\u0AED',
'sevengurmukhi': u'\u0A6D',
'sevenhackarabic': u'\u0667',
'sevenhangzhou': u'\u3027',
'sevenideographicparen': u'\u3226',
'seveninferior': u'\u2087',
'sevenmonospace': u'\uFF17',
'sevenoldstyle': u'\uF737',
'sevenparen': u'\u247A',
'sevenperiod': u'\u248E',
'sevenpersian': u'\u06F7',
'sevenroman': u'\u2176',
'sevensuperior': u'\u2077',
'seventeencircle': u'\u2470',
'seventeenparen': u'\u2484',
'seventeenperiod': u'\u2498',
'seventhai': u'\u0E57',
'sfthyphen': u'\u00AD',
'shaarmenian': u'\u0577',
'shabengali': u'\u09B6',
'shacyrillic': u'\u0448',
'shaddaarabic': u'\u0651',
'shaddadammaarabic': u'\uFC61',
'shaddadammatanarabic': u'\uFC5E',
'shaddafathaarabic': u'\uFC60',
'shaddafathatanarabic': u'\u0651\u064B',
'shaddakasraarabic': u'\uFC62',
'shaddakasratanarabic': u'\uFC5F',
'shade': u'\u2592',
'shadedark': u'\u2593',
'shadelight': u'\u2591',
'shademedium': u'\u2592',
'shadeva': u'\u0936',
'shagujarati': u'\u0AB6',
'shagurmukhi': u'\u0A36',
'shalshelethebrew': u'\u0593',
'shbopomofo': u'\u3115',
'shchacyrillic': u'\u0449',
'sheenarabic': u'\u0634',
'sheenfinalarabic': u'\uFEB6',
'sheeninitialarabic': u'\uFEB7',
'sheenmedialarabic': u'\uFEB8',
'sheicoptic': u'\u03E3',
'sheqel': u'\u20AA',
'sheqelhebrew': u'\u20AA',
'sheva': u'\u05B0',
'sheva115': u'\u05B0',
'sheva15': u'\u05B0',
'sheva22': u'\u05B0',
'sheva2e': u'\u05B0',
'shevahebrew': u'\u05B0',
'shevanarrowhebrew': u'\u05B0',
'shevaquarterhebrew': u'\u05B0',
'shevawidehebrew': u'\u05B0',
'shhacyrillic': u'\u04BB',
'shimacoptic': u'\u03ED',
'shin': u'\u05E9',
'shindagesh': u'\uFB49',
'shindageshhebrew': u'\uFB49',
'shindageshshindot': u'\uFB2C',
'shindageshshindothebrew': u'\uFB2C',
'shindageshsindot': u'\uFB2D',
'shindageshsindothebrew': u'\uFB2D',
'shindothebrew': u'\u05C1',
'shinhebrew': u'\u05E9',
'shinshindot': u'\uFB2A',
'shinshindothebrew': u'\uFB2A',
'shinsindot': u'\uFB2B',
'shinsindothebrew': u'\uFB2B',
'shook': u'\u0282',
'sigma': u'\u03C3',
'sigma1': u'\u03C2',
'sigmafinal': u'\u03C2',
'sigmalunatesymbolgreek': u'\u03F2',
'sihiragana': u'\u3057',
'sikatakana': u'\u30B7',
'sikatakanahalfwidth': u'\uFF7C',
'siluqhebrew': u'\u05BD',
'siluqlefthebrew': u'\u05BD',
'similar': u'\u223C',
'sindothebrew': u'\u05C2',
'siosacirclekorean': u'\u3274',
'siosaparenkorean': u'\u3214',
'sioscieuckorean': u'\u317E',
'sioscirclekorean': u'\u3266',
'sioskiyeokkorean': u'\u317A',
'sioskorean': u'\u3145',
'siosnieunkorean': u'\u317B',
'siosparenkorean': u'\u3206',
'siospieupkorean': u'\u317D',
'siostikeutkorean': u'\u317C',
'six': u'\u0036',
'sixarabic': u'\u0666',
'sixbengali': u'\u09EC',
'sixcircle': u'\u2465',
'sixcircleinversesansserif': u'\u278F',
'sixdeva': u'\u096C',
'sixgujarati': u'\u0AEC',
'sixgurmukhi': u'\u0A6C',
'sixhackarabic': u'\u0666',
'sixhangzhou': u'\u3026',
'sixideographicparen': u'\u3225',
'sixinferior': u'\u2086',
'sixmonospace': u'\uFF16',
'sixoldstyle': u'\uF736',
'sixparen': u'\u2479',
'sixperiod': u'\u248D',
'sixpersian': u'\u06F6',
'sixroman': u'\u2175',
'sixsuperior': u'\u2076',
'sixteencircle': u'\u246F',
'sixteencurrencydenominatorbengali': u'\u09F9',
'sixteenparen': u'\u2483',
'sixteenperiod': u'\u2497',
'sixthai': u'\u0E56',
'slash': u'\u002F',
'slashmonospace': u'\uFF0F',
'slong': u'\u017F',
'slongdotaccent': u'\u1E9B',
'smileface': u'\u263A',
'smonospace': u'\uFF53',
'sofpasuqhebrew': u'\u05C3',
'softhyphen': u'\u00AD',
'softsigncyrillic': u'\u044C',
'sohiragana': u'\u305D',
'sokatakana': u'\u30BD',
'sokatakanahalfwidth': u'\uFF7F',
'soliduslongoverlaycmb': u'\u0338',
'solidusshortoverlaycmb': u'\u0337',
'sorusithai': u'\u0E29',
'sosalathai': u'\u0E28',
'sosothai': u'\u0E0B',
'sosuathai': u'\u0E2A',
'space': u'\u0020',
'spacehackarabic': u'\u0020',
'spade': u'\u2660',
'spadesuitblack': u'\u2660',
'spadesuitwhite': u'\u2664',
'sparen': u'\u24AE',
'squarebelowcmb': u'\u033B',
'squarecc': u'\u33C4',
'squarecm': u'\u339D',
'squarediagonalcrosshatchfill': u'\u25A9',
'squarehorizontalfill': u'\u25A4',
'squarekg': u'\u338F',
'squarekm': u'\u339E',
'squarekmcapital': u'\u33CE',
'squareln': u'\u33D1',
'squarelog': u'\u33D2',
'squaremg': u'\u338E',
'squaremil': u'\u33D5',
'squaremm': u'\u339C',
'squaremsquared': u'\u33A1',
'squareorthogonalcrosshatchfill': u'\u25A6',
'squareupperlefttolowerrightfill': u'\u25A7',
'squareupperrighttolowerleftfill': u'\u25A8',
'squareverticalfill': u'\u25A5',
'squarewhitewithsmallblack': u'\u25A3',
'srsquare': u'\u33DB',
'ssabengali': u'\u09B7',
'ssadeva': u'\u0937',
'ssagujarati': u'\u0AB7',
'ssangcieuckorean': u'\u3149',
'ssanghieuhkorean': u'\u3185',
'ssangieungkorean': u'\u3180',
'ssangkiyeokkorean': u'\u3132',
'ssangnieunkorean': u'\u3165',
'ssangpieupkorean': u'\u3143',
'ssangsioskorean': u'\u3146',
'ssangtikeutkorean': u'\u3138',
'ssuperior': u'\uF6F2',
'sterling': u'\u00A3',
'sterlingmonospace': u'\uFFE1',
'strokelongoverlaycmb': u'\u0336',
'strokeshortoverlaycmb': u'\u0335',
'subset': u'\u2282',
'subsetnotequal': u'\u228A',
'subsetorequal': u'\u2286',
'succeeds': u'\u227B',
'suchthat': u'\u220B',
'suhiragana': u'\u3059',
'sukatakana': u'\u30B9',
'sukatakanahalfwidth': u'\uFF7D',
'sukunarabic': u'\u0652',
'summation': u'\u2211',
'sun': u'\u263C',
'superset': u'\u2283',
'supersetnotequal': u'\u228B',
'supersetorequal': u'\u2287',
'svsquare': u'\u33DC',
'syouwaerasquare': u'\u337C',
't': u'\u0074',
'tabengali': u'\u09A4',
'tackdown': u'\u22A4',
'tackleft': u'\u22A3',
'tadeva': u'\u0924',
'tagujarati': u'\u0AA4',
'tagurmukhi': u'\u0A24',
'taharabic': u'\u0637',
'tahfinalarabic': u'\uFEC2',
'tahinitialarabic': u'\uFEC3',
'tahiragana': u'\u305F',
'tahmedialarabic': u'\uFEC4',
'taisyouerasquare': u'\u337D',
'takatakana': u'\u30BF',
'takatakanahalfwidth': u'\uFF80',
'tatweelarabic': u'\u0640',
'tau': u'\u03C4',
'tav': u'\u05EA',
'tavdages': u'\uFB4A',
'tavdagesh': u'\uFB4A',
'tavdageshhebrew': u'\uFB4A',
'tavhebrew': u'\u05EA',
'tbar': u'\u0167',
'tbopomofo': u'\u310A',
'tcaron': u'\u0165',
'tccurl': u'\u02A8',
'tcedilla': u'\u0163',
'tcheharabic': u'\u0686',
'tchehfinalarabic': u'\uFB7B',
'tchehinitialarabic': u'\uFB7C',
'tchehmedialarabic': u'\uFB7D',
'tchehmeeminitialarabic': u'\uFB7C\uFEE4',
'tcircle': u'\u24E3',
'tcircumflexbelow': u'\u1E71',
'tcommaaccent': u'\u0163',
'tdieresis': u'\u1E97',
'tdotaccent': u'\u1E6B',
'tdotbelow': u'\u1E6D',
'tecyrillic': u'\u0442',
'tedescendercyrillic': u'\u04AD',
'teharabic': u'\u062A',
'tehfinalarabic': u'\uFE96',
'tehhahinitialarabic': u'\uFCA2',
'tehhahisolatedarabic': u'\uFC0C',
'tehinitialarabic': u'\uFE97',
'tehiragana': u'\u3066',
'tehjeeminitialarabic': u'\uFCA1',
'tehjeemisolatedarabic': u'\uFC0B',
'tehmarbutaarabic': u'\u0629',
'tehmarbutafinalarabic': u'\uFE94',
'tehmedialarabic': u'\uFE98',
'tehmeeminitialarabic': u'\uFCA4',
'tehmeemisolatedarabic': u'\uFC0E',
'tehnoonfinalarabic': u'\uFC73',
'tekatakana': u'\u30C6',
'tekatakanahalfwidth': u'\uFF83',
'telephone': u'\u2121',
'telephoneblack': u'\u260E',
'telishagedolahebrew': u'\u05A0',
'telishaqetanahebrew': u'\u05A9',
'tencircle': u'\u2469',
'tenideographicparen': u'\u3229',
'tenparen': u'\u247D',
'tenperiod': u'\u2491',
'tenroman': u'\u2179',
'tesh': u'\u02A7',
'tet': u'\u05D8',
'tetdagesh': u'\uFB38',
'tetdageshhebrew': u'\uFB38',
'tethebrew': u'\u05D8',
'tetsecyrillic': u'\u04B5',
'tevirhebrew': u'\u059B',
'tevirlefthebrew': u'\u059B',
'thabengali': u'\u09A5',
'thadeva': u'\u0925',
'thagujarati': u'\u0AA5',
'thagurmukhi': u'\u0A25',
'thalarabic': u'\u0630',
'thalfinalarabic': u'\uFEAC',
'thanthakhatlowleftthai': u'\uF898',
'thanthakhatlowrightthai': u'\uF897',
'thanthakhatthai': u'\u0E4C',
'thanthakhatupperleftthai': u'\uF896',
'theharabic': u'\u062B',
'thehfinalarabic': u'\uFE9A',
'thehinitialarabic': u'\uFE9B',
'thehmedialarabic': u'\uFE9C',
'thereexists': u'\u2203',
'therefore': u'\u2234',
'theta': u'\u03B8',
'theta1': u'\u03D1',
'thetasymbolgreek': u'\u03D1',
'thieuthacirclekorean': u'\u3279',
'thieuthaparenkorean': u'\u3219',
'thieuthcirclekorean': u'\u326B',
'thieuthkorean': u'\u314C',
'thieuthparenkorean': u'\u320B',
'thirteencircle': u'\u246C',
'thirteenparen': u'\u2480',
'thirteenperiod': u'\u2494',
'thonangmonthothai': u'\u0E11',
'thook': u'\u01AD',
'thophuthaothai': u'\u0E12',
'thorn': u'\u00FE',
'thothahanthai': u'\u0E17',
'thothanthai': u'\u0E10',
'thothongthai': u'\u0E18',
'thothungthai': u'\u0E16',
'thousandcyrillic': u'\u0482',
'thousandsseparatorarabic': u'\u066C',
'thousandsseparatorpersian': u'\u066C',
'three': u'\u0033',
'threearabic': u'\u0663',
'threebengali': u'\u09E9',
'threecircle': u'\u2462',
'threecircleinversesansserif': u'\u278C',
'threedeva': u'\u0969',
'threeeighths': u'\u215C',
'threegujarati': u'\u0AE9',
'threegurmukhi': u'\u0A69',
'threehackarabic': u'\u0663',
'threehangzhou': u'\u3023',
'threeideographicparen': u'\u3222',
'threeinferior': u'\u2083',
'threemonospace': u'\uFF13',
'threenumeratorbengali': u'\u09F6',
'threeoldstyle': u'\uF733',
'threeparen': u'\u2476',
'threeperiod': u'\u248A',
'threepersian': u'\u06F3',
'threequarters': u'\u00BE',
'threequartersemdash': u'\uF6DE',
'threeroman': u'\u2172',
'threesuperior': u'\u00B3',
'threethai': u'\u0E53',
'thzsquare': u'\u3394',
'tihiragana': u'\u3061',
'tikatakana': u'\u30C1',
'tikatakanahalfwidth': u'\uFF81',
'tikeutacirclekorean': u'\u3270',
'tikeutaparenkorean': u'\u3210',
'tikeutcirclekorean': u'\u3262',
'tikeutkorean': u'\u3137',
'tikeutparenkorean': u'\u3202',
'tilde': u'\u02DC',
'tildebelowcmb': u'\u0330',
'tildecmb': u'\u0303',
'tildecomb': u'\u0303',
'tildedoublecmb': u'\u0360',
'tildeoperator': u'\u223C',
'tildeoverlaycmb': u'\u0334',
'tildeverticalcmb': u'\u033E',
'timescircle': u'\u2297',
'tipehahebrew': u'\u0596',
'tipehalefthebrew': u'\u0596',
'tippigurmukhi': u'\u0A70',
'titlocyrilliccmb': u'\u0483',
'tiwnarmenian': u'\u057F',
'tlinebelow': u'\u1E6F',
'tmonospace': u'\uFF54',
'toarmenian': u'\u0569',
'tohiragana': u'\u3068',
'tokatakana': u'\u30C8',
'tokatakanahalfwidth': u'\uFF84',
'tonebarextrahighmod': u'\u02E5',
'tonebarextralowmod': u'\u02E9',
'tonebarhighmod': u'\u02E6',
'tonebarlowmod': u'\u02E8',
'tonebarmidmod': u'\u02E7',
'tonefive': u'\u01BD',
'tonesix': u'\u0185',
'tonetwo': u'\u01A8',
'tonos': u'\u0384',
'tonsquare': u'\u3327',
'topatakthai': u'\u0E0F',
'tortoiseshellbracketleft': u'\u3014',
'tortoiseshellbracketleftsmall': u'\uFE5D',
'tortoiseshellbracketleftvertical': u'\uFE39',
'tortoiseshellbracketright': u'\u3015',
'tortoiseshellbracketrightsmall': u'\uFE5E',
'tortoiseshellbracketrightvertical': u'\uFE3A',
'totaothai': u'\u0E15',
'tpalatalhook': u'\u01AB',
'tparen': u'\u24AF',
'trademark': u'\u2122',
'trademarksans': u'\uF8EA',
'trademarkserif': u'\uF6DB',
'tretroflexhook': u'\u0288',
'triagdn': u'\u25BC',
'triaglf': u'\u25C4',
'triagrt': u'\u25BA',
'triagup': u'\u25B2',
'ts': u'\u02A6',
'tsadi': u'\u05E6',
'tsadidagesh': u'\uFB46',
'tsadidageshhebrew': u'\uFB46',
'tsadihebrew': u'\u05E6',
'tsecyrillic': u'\u0446',
'tsere': u'\u05B5',
'tsere12': u'\u05B5',
'tsere1e': u'\u05B5',
'tsere2b': u'\u05B5',
'tserehebrew': u'\u05B5',
'tserenarrowhebrew': u'\u05B5',
'tserequarterhebrew': u'\u05B5',
'tserewidehebrew': u'\u05B5',
'tshecyrillic': u'\u045B',
'tsuperior': u'\uF6F3',
'ttabengali': u'\u099F',
'ttadeva': u'\u091F',
'ttagujarati': u'\u0A9F',
'ttagurmukhi': u'\u0A1F',
'tteharabic': u'\u0679',
'ttehfinalarabic': u'\uFB67',
'ttehinitialarabic': u'\uFB68',
'ttehmedialarabic': u'\uFB69',
'tthabengali': u'\u09A0',
'tthadeva': u'\u0920',
'tthagujarati': u'\u0AA0',
'tthagurmukhi': u'\u0A20',
'tturned': u'\u0287',
'tuhiragana': u'\u3064',
'tukatakana': u'\u30C4',
'tukatakanahalfwidth': u'\uFF82',
'tusmallhiragana': u'\u3063',
'tusmallkatakana': u'\u30C3',
'tusmallkatakanahalfwidth': u'\uFF6F',
'twelvecircle': u'\u246B',
'twelveparen': u'\u247F',
'twelveperiod': u'\u2493',
'twelveroman': u'\u217B',
'twentycircle': u'\u2473',
'twentyhangzhou': u'\u5344',
'twentyparen': u'\u2487',
'twentyperiod': u'\u249B',
'two': u'\u0032',
'twoarabic': u'\u0662',
'twobengali': u'\u09E8',
'twocircle': u'\u2461',
'twocircleinversesansserif': u'\u278B',
'twodeva': u'\u0968',
'twodotenleader': u'\u2025',
'twodotleader': u'\u2025',
'twodotleadervertical': u'\uFE30',
'twogujarati': u'\u0AE8',
'twogurmukhi': u'\u0A68',
'twohackarabic': u'\u0662',
'twohangzhou': u'\u3022',
'twoideographicparen': u'\u3221',
'twoinferior': u'\u2082',
'twomonospace': u'\uFF12',
'twonumeratorbengali': u'\u09F5',
'twooldstyle': u'\uF732',
'twoparen': u'\u2475',
'twoperiod': u'\u2489',
'twopersian': u'\u06F2',
'tworoman': u'\u2171',
'twostroke': u'\u01BB',
'twosuperior': u'\u00B2',
'twothai': u'\u0E52',
'twothirds': u'\u2154',
'u': u'\u0075',
'uacute': u'\u00FA',
'ubar': u'\u0289',
'ubengali': u'\u0989',
'ubopomofo': u'\u3128',
'ubreve': u'\u016D',
'ucaron': u'\u01D4',
'ucircle': u'\u24E4',
'ucircumflex': u'\u00FB',
'ucircumflexbelow': u'\u1E77',
'ucyrillic': u'\u0443',
'udattadeva': u'\u0951',
'udblacute': u'\u0171',
'udblgrave': u'\u0215',
'udeva': u'\u0909',
'udieresis': u'\u00FC',
'udieresisacute': u'\u01D8',
'udieresisbelow': u'\u1E73',
'udieresiscaron': u'\u01DA',
'udieresiscyrillic': u'\u04F1',
'udieresisgrave': u'\u01DC',
'udieresismacron': u'\u01D6',
'udotbelow': u'\u1EE5',
'ugrave': u'\u00F9',
'ugujarati': u'\u0A89',
'ugurmukhi': u'\u0A09',
'uhiragana': u'\u3046',
'uhookabove': u'\u1EE7',
'uhorn': u'\u01B0',
'uhornacute': u'\u1EE9',
'uhorndotbelow': u'\u1EF1',
'uhorngrave': u'\u1EEB',
'uhornhookabove': u'\u1EED',
'uhorntilde': u'\u1EEF',
'uhungarumlaut': u'\u0171',
'uhungarumlautcyrillic': u'\u04F3',
'uinvertedbreve': u'\u0217',
'ukatakana': u'\u30A6',
'ukatakanahalfwidth': u'\uFF73',
'ukcyrillic': u'\u0479',
'ukorean': u'\u315C',
'umacron': u'\u016B',
'umacroncyrillic': u'\u04EF',
'umacrondieresis': u'\u1E7B',
'umatragurmukhi': u'\u0A41',
'umonospace': u'\uFF55',
'underscore': u'\u005F',
'underscoredbl': u'\u2017',
'underscoremonospace': u'\uFF3F',
'underscorevertical': u'\uFE33',
'underscorewavy': u'\uFE4F',
'union': u'\u222A',
'universal': u'\u2200',
'uogonek': u'\u0173',
'uparen': u'\u24B0',
'upblock': u'\u2580',
'upperdothebrew': u'\u05C4',
'upsilon': u'\u03C5',
'upsilondieresis': u'\u03CB',
'upsilondieresistonos': u'\u03B0',
'upsilonlatin': u'\u028A',
'upsilontonos': u'\u03CD',
'uptackbelowcmb': u'\u031D',
'uptackmod': u'\u02D4',
'uragurmukhi': u'\u0A73',
'uring': u'\u016F',
'ushortcyrillic': u'\u045E',
'usmallhiragana': u'\u3045',
'usmallkatakana': u'\u30A5',
'usmallkatakanahalfwidth': u'\uFF69',
'ustraightcyrillic': u'\u04AF',
'ustraightstrokecyrillic': u'\u04B1',
'utilde': u'\u0169',
'utildeacute': u'\u1E79',
'utildebelow': u'\u1E75',
'uubengali': u'\u098A',
'uudeva': u'\u090A',
'uugujarati': u'\u0A8A',
'uugurmukhi': u'\u0A0A',
'uumatragurmukhi': u'\u0A42',
'uuvowelsignbengali': u'\u09C2',
'uuvowelsigndeva': u'\u0942',
'uuvowelsigngujarati': u'\u0AC2',
'uvowelsignbengali': u'\u09C1',
'uvowelsigndeva': u'\u0941',
'uvowelsigngujarati': u'\u0AC1',
'v': u'\u0076',
'vadeva': u'\u0935',
'vagujarati': u'\u0AB5',
'vagurmukhi': u'\u0A35',
'vakatakana': u'\u30F7',
'vav': u'\u05D5',
'vavdagesh': u'\uFB35',
'vavdagesh65': u'\uFB35',
'vavdageshhebrew': u'\uFB35',
'vavhebrew': u'\u05D5',
'vavholam': u'\uFB4B',
'vavholamhebrew': u'\uFB4B',
'vavvavhebrew': u'\u05F0',
'vavyodhebrew': u'\u05F1',
'vcircle': u'\u24E5',
'vdotbelow': u'\u1E7F',
'vecyrillic': u'\u0432',
'veharabic': u'\u06A4',
'vehfinalarabic': u'\uFB6B',
'vehinitialarabic': u'\uFB6C',
'vehmedialarabic': u'\uFB6D',
'vekatakana': u'\u30F9',
'venus': u'\u2640',
'verticalbar': u'\u007C',
'verticallineabovecmb': u'\u030D',
'verticallinebelowcmb': u'\u0329',
'verticallinelowmod': u'\u02CC',
'verticallinemod': u'\u02C8',
'vewarmenian': u'\u057E',
'vhook': u'\u028B',
'vikatakana': u'\u30F8',
'viramabengali': u'\u09CD',
'viramadeva': u'\u094D',
'viramagujarati': u'\u0ACD',
'visargabengali': u'\u0983',
'visargadeva': u'\u0903',
'visargagujarati': u'\u0A83',
'vmonospace': u'\uFF56',
'voarmenian': u'\u0578',
'voicediterationhiragana': u'\u309E',
'voicediterationkatakana': u'\u30FE',
'voicedmarkkana': u'\u309B',
'voicedmarkkanahalfwidth': u'\uFF9E',
'vokatakana': u'\u30FA',
'vparen': u'\u24B1',
'vtilde': u'\u1E7D',
'vturned': u'\u028C',
'vuhiragana': u'\u3094',
'vukatakana': u'\u30F4',
'w': u'\u0077',
'wacute': u'\u1E83',
'waekorean': u'\u3159',
'wahiragana': u'\u308F',
'wakatakana': u'\u30EF',
'wakatakanahalfwidth': u'\uFF9C',
'wakorean': u'\u3158',
'wasmallhiragana': u'\u308E',
'wasmallkatakana': u'\u30EE',
'wattosquare': u'\u3357',
'wavedash': u'\u301C',
'wavyunderscorevertical': u'\uFE34',
'wawarabic': u'\u0648',
'wawfinalarabic': u'\uFEEE',
'wawhamzaabovearabic': u'\u0624',
'wawhamzaabovefinalarabic': u'\uFE86',
'wbsquare': u'\u33DD',
'wcircle': u'\u24E6',
'wcircumflex': u'\u0175',
'wdieresis': u'\u1E85',
'wdotaccent': u'\u1E87',
'wdotbelow': u'\u1E89',
'wehiragana': u'\u3091',
'weierstrass': u'\u2118',
'wekatakana': u'\u30F1',
'wekorean': u'\u315E',
'weokorean': u'\u315D',
'wgrave': u'\u1E81',
'whitebullet': u'\u25E6',
'whitecircle': u'\u25CB',
'whitecircleinverse': u'\u25D9',
'whitecornerbracketleft': u'\u300E',
'whitecornerbracketleftvertical': u'\uFE43',
'whitecornerbracketright': u'\u300F',
'whitecornerbracketrightvertical': u'\uFE44',
'whitediamond': u'\u25C7',
'whitediamondcontainingblacksmalldiamond': u'\u25C8',
'whitedownpointingsmalltriangle': u'\u25BF',
'whitedownpointingtriangle': u'\u25BD',
'whiteleftpointingsmalltriangle': u'\u25C3',
'whiteleftpointingtriangle': u'\u25C1',
'whitelenticularbracketleft': u'\u3016',
'whitelenticularbracketright': u'\u3017',
'whiterightpointingsmalltriangle': u'\u25B9',
'whiterightpointingtriangle': u'\u25B7',
'whitesmallsquare': u'\u25AB',
'whitesmilingface': u'\u263A',
'whitesquare': u'\u25A1',
'whitestar': u'\u2606',
'whitetelephone': u'\u260F',
'whitetortoiseshellbracketleft': u'\u3018',
'whitetortoiseshellbracketright': u'\u3019',
'whiteuppointingsmalltriangle': u'\u25B5',
'whiteuppointingtriangle': u'\u25B3',
'wihiragana': u'\u3090',
'wikatakana': u'\u30F0',
'wikorean': u'\u315F',
'wmonospace': u'\uFF57',
'wohiragana': u'\u3092',
'wokatakana': u'\u30F2',
'wokatakanahalfwidth': u'\uFF66',
'won': u'\u20A9',
'wonmonospace': u'\uFFE6',
'wowaenthai': u'\u0E27',
'wparen': u'\u24B2',
'wring': u'\u1E98',
'wsuperior': u'\u02B7',
'wturned': u'\u028D',
'wynn': u'\u01BF',
'x': u'\u0078',
'xabovecmb': u'\u033D',
'xbopomofo': u'\u3112',
'xcircle': u'\u24E7',
'xdieresis': u'\u1E8D',
'xdotaccent': u'\u1E8B',
'xeharmenian': u'\u056D',
'xi': u'\u03BE',
'xmonospace': u'\uFF58',
'xparen': u'\u24B3',
'xsuperior': u'\u02E3',
'y': u'\u0079',
'yaadosquare': u'\u334E',
'yabengali': u'\u09AF',
'yacute': u'\u00FD',
'yadeva': u'\u092F',
'yaekorean': u'\u3152',
'yagujarati': u'\u0AAF',
'yagurmukhi': u'\u0A2F',
'yahiragana': u'\u3084',
'yakatakana': u'\u30E4',
'yakatakanahalfwidth': u'\uFF94',
'yakorean': u'\u3151',
'yamakkanthai': u'\u0E4E',
'yasmallhiragana': u'\u3083',
'yasmallkatakana': u'\u30E3',
'yasmallkatakanahalfwidth': u'\uFF6C',
'yatcyrillic': u'\u0463',
'ycircle': u'\u24E8',
'ycircumflex': u'\u0177',
'ydieresis': u'\u00FF',
'ydotaccent': u'\u1E8F',
'ydotbelow': u'\u1EF5',
'yeharabic': u'\u064A',
'yehbarreearabic': u'\u06D2',
'yehbarreefinalarabic': u'\uFBAF',
'yehfinalarabic': u'\uFEF2',
'yehhamzaabovearabic': u'\u0626',
'yehhamzaabovefinalarabic': u'\uFE8A',
'yehhamzaaboveinitialarabic': u'\uFE8B',
'yehhamzaabovemedialarabic': u'\uFE8C',
'yehinitialarabic': u'\uFEF3',
'yehmedialarabic': u'\uFEF4',
'yehmeeminitialarabic': u'\uFCDD',
'yehmeemisolatedarabic': u'\uFC58',
'yehnoonfinalarabic': u'\uFC94',
'yehthreedotsbelowarabic': u'\u06D1',
'yekorean': u'\u3156',
'yen': u'\u00A5',
'yenmonospace': u'\uFFE5',
'yeokorean': u'\u3155',
'yeorinhieuhkorean': u'\u3186',
'yerahbenyomohebrew': u'\u05AA',
'yerahbenyomolefthebrew': u'\u05AA',
'yericyrillic': u'\u044B',
'yerudieresiscyrillic': u'\u04F9',
'yesieungkorean': u'\u3181',
'yesieungpansioskorean': u'\u3183',
'yesieungsioskorean': u'\u3182',
'yetivhebrew': u'\u059A',
'ygrave': u'\u1EF3',
'yhook': u'\u01B4',
'yhookabove': u'\u1EF7',
'yiarmenian': u'\u0575',
'yicyrillic': u'\u0457',
'yikorean': u'\u3162',
'yinyang': u'\u262F',
'yiwnarmenian': u'\u0582',
'ymonospace': u'\uFF59',
'yod': u'\u05D9',
'yoddagesh': u'\uFB39',
'yoddageshhebrew': u'\uFB39',
'yodhebrew': u'\u05D9',
'yodyodhebrew': u'\u05F2',
'yodyodpatahhebrew': u'\uFB1F',
'yohiragana': u'\u3088',
'yoikorean': u'\u3189',
'yokatakana': u'\u30E8',
'yokatakanahalfwidth': u'\uFF96',
'yokorean': u'\u315B',
'yosmallhiragana': u'\u3087',
'yosmallkatakana': u'\u30E7',
'yosmallkatakanahalfwidth': u'\uFF6E',
'yotgreek': u'\u03F3',
'yoyaekorean': u'\u3188',
'yoyakorean': u'\u3187',
'yoyakthai': u'\u0E22',
'yoyingthai': u'\u0E0D',
'yparen': u'\u24B4',
'ypogegrammeni': u'\u037A',
'ypogegrammenigreekcmb': u'\u0345',
'yr': u'\u01A6',
'yring': u'\u1E99',
'ysuperior': u'\u02B8',
'ytilde': u'\u1EF9',
'yturned': u'\u028E',
'yuhiragana': u'\u3086',
'yuikorean': u'\u318C',
'yukatakana': u'\u30E6',
'yukatakanahalfwidth': u'\uFF95',
'yukorean': u'\u3160',
'yusbigcyrillic': u'\u046B',
'yusbigiotifiedcyrillic': u'\u046D',
'yuslittlecyrillic': u'\u0467',
'yuslittleiotifiedcyrillic': u'\u0469',
'yusmallhiragana': u'\u3085',
'yusmallkatakana': u'\u30E5',
'yusmallkatakanahalfwidth': u'\uFF6D',
'yuyekorean': u'\u318B',
'yuyeokorean': u'\u318A',
'yyabengali': u'\u09DF',
'yyadeva': u'\u095F',
'z': u'\u007A',
'zaarmenian': u'\u0566',
'zacute': u'\u017A',
'zadeva': u'\u095B',
'zagurmukhi': u'\u0A5B',
'zaharabic': u'\u0638',
'zahfinalarabic': u'\uFEC6',
'zahinitialarabic': u'\uFEC7',
'zahiragana': u'\u3056',
'zahmedialarabic': u'\uFEC8',
'zainarabic': u'\u0632',
'zainfinalarabic': u'\uFEB0',
'zakatakana': u'\u30B6',
'zaqefgadolhebrew': u'\u0595',
'zaqefqatanhebrew': u'\u0594',
'zarqahebrew': u'\u0598',
'zayin': u'\u05D6',
'zayindagesh': u'\uFB36',
'zayindageshhebrew': u'\uFB36',
'zayinhebrew': u'\u05D6',
'zbopomofo': u'\u3117',
'zcaron': u'\u017E',
'zcircle': u'\u24E9',
'zcircumflex': u'\u1E91',
'zcurl': u'\u0291',
'zdot': u'\u017C',
'zdotaccent': u'\u017C',
'zdotbelow': u'\u1E93',
'zecyrillic': u'\u0437',
'zedescendercyrillic': u'\u0499',
'zedieresiscyrillic': u'\u04DF',
'zehiragana': u'\u305C',
'zekatakana': u'\u30BC',
'zero': u'\u0030',
'zeroarabic': u'\u0660',
'zerobengali': u'\u09E6',
'zerodeva': u'\u0966',
'zerogujarati': u'\u0AE6',
'zerogurmukhi': u'\u0A66',
'zerohackarabic': u'\u0660',
'zeroinferior': u'\u2080',
'zeromonospace': u'\uFF10',
'zerooldstyle': u'\uF730',
'zeropersian': u'\u06F0',
'zerosuperior': u'\u2070',
'zerothai': u'\u0E50',
'zerowidthjoiner': u'\uFEFF',
'zerowidthnonjoiner': u'\u200C',
'zerowidthspace': u'\u200B',
'zeta': u'\u03B6',
'zhbopomofo': u'\u3113',
'zhearmenian': u'\u056A',
'zhebrevecyrillic': u'\u04C2',
'zhecyrillic': u'\u0436',
'zhedescendercyrillic': u'\u0497',
'zhedieresiscyrillic': u'\u04DD',
'zihiragana': u'\u3058',
'zikatakana': u'\u30B8',
'zinorhebrew': u'\u05AE',
'zlinebelow': u'\u1E95',
'zmonospace': u'\uFF5A',
'zohiragana': u'\u305E',
'zokatakana': u'\u30BE',
'zparen': u'\u24B5',
'zretroflexhook': u'\u0290',
'zstroke': u'\u01B6',
'zuhiragana': u'\u305A',
'zukatakana': u'\u30BA',
}
#--end
|
mit
|
xguse/blacktie
|
src/blacktie/scripts/blacktie_pipeline.py
|
1
|
9732
|
#*****************************************************************************
# blacktie_pipeline.py (part of the blacktie package)
#
# (c) 2013 - Augustine Dunn
# James Laboratory
# Department of Biochemistry and Molecular Biology
# University of California Irvine
# [email protected]
#
# Licenced under the GNU General Public License 3.0 license.
#******************************************************************************
"""
####################
blacktie_pipeline.py
####################
Code defining an object oriented python pipeline script to allow simplified
coordination of data through parts or all of the popular Tophat/Cufflinks
RNA-seq analysis suite.
"""
import os
import sys
import argparse
import base64
import traceback
import re
import time
import socket
import shutil
from collections import defaultdict
import yaml
try:
import pprocess
except ImportError:
pass
import blacktie
from blacktie.utils.misc import Bunch,bunchify
from blacktie.utils.misc import email_notification
from blacktie.utils.misc import get_time
from blacktie.utils.misc import map_condition_groups
from blacktie.utils.externals import runExternalApp
from blacktie.utils.externals import mkdirp
from blacktie.utils import errors
from blacktie.utils.calls import *
def main():
    """
    The main loop. Lets ROCK!

    Parses command-line options, loads the yaml run configuration, then
    dispatches each requested pipeline stage (tophat -> cufflinks ->
    cuffmerge -> cuffdiff -> cummerbund) over the configured conditions.
    Side effects: creates log directories, copies the config file, may
    send email notifications, and prints progress notes to stdout.
    """
    desc = """This script reads options from a yaml formatted file and organizes the execution of tophat/cufflinks runs for multiple condition sets."""
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('--version', action='version', version='%(prog)s ' + blacktie.__version__,
                        help="""Print version number.""")
    parser.add_argument('config_file', type=str,
                        help="""Path to a yaml formatted config file containing setup options for the runs.""")
    parser.add_argument('--prog', type=str, choices=['tophat','cufflinks','cuffmerge','cuffdiff','cummerbund','all'], default='tophat',
                        help="""Which program do you want to run? (default: %(default)s)""")
    parser.add_argument('--hide-logs', action='store_true', default=False,
                        help="""Make your log directories hidden to keep a tidy 'looking' base directory. (default: %(default)s)""")
    parser.add_argument('--no-email', action='store_true', default=False,
                        help="""Don't send email notifications. (default: %(default)s)""")
    parser.add_argument('--mode', type=str, choices=['analyze','dry_run','qsub_script'], default='analyze',
                        help="""1) 'analyze': run the analysis pipeline. 2) 'dry_run': walk through all steps that
                        would be run and print out the command lines; however, do not send the commands to the
                        system to be run. 3) 'qsub_script': generate bash scripts suitable to be sent to a compute cluster's
                        SGE through the qsub command. (default: %(default)s)""")
    # No arguments at all: show usage instead of failing on the missing
    # positional config_file.
    if len(sys.argv) == 1:
        parser.print_help()
        exit(0)
    args = parser.parse_args()
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted config files (arbitrary object construction) — confirm the
    # config source is trusted or migrate to yaml.safe_load.
    yargs = bunchify(yaml.load(open(args.config_file,'rU')))
    # set up run_id, log files, and email info
    if yargs.run_options.run_id:
        run_id = yargs.run_options.run_id
    else:
        # No explicit id configured: fall back to a timestamp-based id.
        run_id = get_time()
    base_dir = yargs.run_options.base_dir.rstrip('/')
    # A leading dot hides the log directory on unix-like filesystems.
    if args.hide_logs:
        run_logs = '%s/.%s.logs' % (base_dir,run_id)
    else:
        run_logs = '%s/%s.logs' % (base_dir,run_id)
    # dry_run must not touch the filesystem.
    if not args.mode == 'dry_run':
        mkdirp(run_logs)
    else:
        pass
    yaml_out = '%s/%s.yaml' % (run_logs,run_id)
    # copy yaml config file with run_id as name for records
    if not args.mode == 'dry_run':
        shutil.copyfile(args.config_file,yaml_out)
    else:
        pass
    # Build the notification settings; falsey from/to addresses signal the
    # downstream call objects not to send mail.
    if not args.no_email:
        email_info = Bunch({'email_from' : yargs.run_options.email_info.sender,
                            'email_to' : yargs.run_options.email_info.to,
                            'email_li' : open(yargs.run_options.email_info.li,'rU').readline().rstrip('\n')})
    else:
        email_info = Bunch({'email_from' : False,
                            'email_to' : False,
                            'email_li' : ''})
    # Regex used elsewhere to strip tool progress-bar lines from captured
    # output (presumably by the *Call classes — defined outside this file).
    yargs.prgbar_regex = re.compile('>.+Processing.+\[.+\].+%\w*$')
    yargs.groups = map_condition_groups(yargs)
    # call_records accumulates every executed *Call object, keyed by call_id.
    yargs.call_records = {}
    # loop through the queued conditions and send reports for tophat
    if args.prog in ['tophat','all']:
        print '[Note] Starting tophat step.\n'
        for condition in yargs.condition_queue:
            # Prep Tophat Call
            tophat_call = TophatCall(yargs,email_info,run_id,run_logs,conditions=condition,mode=args.mode)
            tophat_call.execute()
            # record the tophat_call object
            yargs.call_records[tophat_call.call_id] = tophat_call
    else:
        print "[Note] Skipping tophat step.\n"
    if args.prog in ['cufflinks','all']:
        # attempt to run more than one cufflinks call in parallel since cufflinks
        # seems to use only one processor no matter the value of -p you give it and
        # doesn't seem to consume massive amounts of memory
        print "[Note] Starting cufflinks step.\n"
        try:
            # dry_run deliberately raises so we fall through to the serial
            # path below, which only prints the command lines.
            if args.mode == 'dry_run':
                raise errors.BlacktieError("dry run")
            #TODO: on mac pprocess raised AttributeError "module" has no attrb "poll" or some crap
            try:
                queue = pprocess.Queue(limit=yargs.cufflinks_options.p)
            except AttributeError as exc:
                # Convert the known mac-specific pprocess failure into our
                # own error type so the fallback handler below catches it.
                if 'poll' in str(exc):
                    raise(errors.BlacktieError('no poll'))
                else:
                    raise
            def run_cufflinks_call(cufflinks_call):
                """
                function to start each parallel cufflinks_call inside the parallel job server.
                """
                cufflinks_call.execute()
                return cufflinks_call
            def change_processor_count(cufflinks_call):
                """
                Since we will run multiple instances of CufflinksCall at once, reduce
                the number of processors any one system call thinks it can use.
                """
                cufflinks_call.opt_dict['p'] = 2
                # Rebuild the argument string after mutating the options.
                cufflinks_call.construct_options_list()
                cufflinks_call.options_list.extend([cufflinks_call.accepted_hits])
                cufflinks_call.arg_str = ' '.join(cufflinks_call.options_list)
                return cufflinks_call
            execute = queue.manage(pprocess.MakeParallel(run_cufflinks_call))
            jobs = []
            for condition in yargs.condition_queue:
                cufflinks_call = CufflinksCall(yargs,email_info,run_id,run_logs,conditions=condition,mode=args.mode)
                cufflinks_call = change_processor_count(cufflinks_call)
                jobs.append(cufflinks_call)
                execute(cufflinks_call)
            # record the cufflinks_call objects
            # Iterating the pprocess queue blocks until each parallel job
            # completes and yields the returned call objects.
            for call in queue:
                yargs.call_records[call.call_id] = call
        except (NameError, errors.BlacktieError) as exc:
            # Three expected reasons to fall back to serial execution:
            # pprocess failed to import (NameError), dry_run mode, or the
            # mac 'no poll' problem. Anything else is a real error.
            if ("'pprocess' is not defined" in str(exc)) or (str(exc) == "dry run") or (str(exc) == 'no poll'):
                pass
            else:
                raise
            print "Running cufflinks in serial NOT parallel.\n"
            # loop through the queued conditions and send reports for cufflinks
            for condition in yargs.condition_queue:
                # Prep cufflinks_call
                cufflinks_call = CufflinksCall(yargs,email_info,run_id,run_logs,conditions=condition,mode=args.mode)
                cufflinks_call.execute()
                # record the cufflinks_call object
                yargs.call_records[cufflinks_call.call_id] = cufflinks_call
    else:
        print "[Note] Skipping cufflinks step.\n"
    if args.prog in ['cuffmerge','all']:
        print "[Note] Starting cuffmerge step.\n"
        # cuffmerge/cuffdiff/cummerbund operate per experiment group rather
        # than per condition.
        for exp_id in yargs.groups:
            # Prep cuffmerge call
            cuffmerge_call = CuffmergeCall(yargs,email_info,run_id,run_logs,conditions=exp_id,mode=args.mode)
            cuffmerge_call.execute()
            # record the cuffmerge_call object
            yargs.call_records[cuffmerge_call.call_id] = cuffmerge_call
    else:
        print "[Note] Skipping cuffmerge step.\n"
    if args.prog in ['cuffdiff','all']:
        print "[Note] Starting cuffdiff step.\n"
        for exp_id in yargs.groups:
            # Prep cuffmerge call
            cuffdiff_call = CuffdiffCall(yargs,email_info,run_id,run_logs,conditions=exp_id,mode=args.mode)
            cuffdiff_call.execute()
            # record the cuffdiff_call object
            yargs.call_records[cuffdiff_call.call_id] = cuffdiff_call
    else:
        print "[Note] Skipping cuffdiff step.\n"
    if args.prog in ['cummerbund','all']:
        # test to make sure R and cummeRbund libs exist
        from blacktie.scripts import cummerbund
        cummerbund.import_cummeRbund_library()
        print "[Note] Starting cummerbund step.\n"
        for exp_id in yargs.groups:
            # Prep cummerbund call
            cummerbund_call = CummerbundCall(yargs,email_info,run_id,run_logs,conditions=exp_id,mode=args.mode)
            cummerbund_call.execute()
            # record the cummerbund_call object
            yargs.call_records[cummerbund_call.call_id] = cummerbund_call
    else:
        print "[Note] Skipping cummerbund step.\n"
if __name__ == "__main__":
    main()
|
gpl-3.0
|
shawnsi/limbo
|
test/test_plugins/test_gif.py
|
11
|
2185
|
# -*- coding: UTF-8 -*-
import os
import sys
from nose.tools import eq_
import vcr
DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(DIR, '../../limbo/plugins'))
from gif import on_message
# The set of valid gifs given the bananas fixture
bananas_gifs = [u'http://fc05.deviantart.net/fs71/f/2012/269/a/b/i_heart_banana_by_mnrart-d5fyx04.gif', u'http://www.angelfire.com/pa5/anastasiaandbob4/banana.gif', u'http://newsimg.ngfiles.com/170000/170422_dancing_banana.gif', u'http://fc05.deviantart.net/fs71/f/2013/176/3/e/banana_jam_by_hat_kid-d6anoqh.gif', u'http://www.webweaver.nu/clipart/img/misc/food/banana.gif', u'http://www.sherv.net/cm/emo/funny/2/big-dancing-banana-smiley-emoticon.gif', u'http://4.bp.blogspot.com/-pPLoyosI_Zo/VJ12aP5QpyI/AAAAAAAADCY/XZvWpC0xc4E/s1600/banana-gif.gif', u'http://3.bp.blogspot.com/-71EXQ4bvCeA/U17dnGe4kzI/AAAAAAAABmk/YDBtHjZOBjQ/s1600/gif-banana.gif', u'http://byt.wpengine.netdna-cdn.com/wp-content/uploads/2014/09/banana-dolphin-and-boy.gif', u'http://www.animatedimages.org/data/media/330/animated-banana-image-0031.gif', u'http://ww2.valdosta.edu/~kabehland/gobananas.gif', u'http://volweb.utk.edu/SCHOOL/sweetwjh/dancing%20banana.gif', u'http://www.webweaver.nu/clipart/img/misc/food/fruit/bunch-of-bananas.gif', u'http://www.thedailyquarterly.com/articles/wp-content/uploads/2013/11/DancingBanana.gif', u'http://sagworks.files.wordpress.com/2011/07/071011-banana-animation.gif', u'http://joedale.typepad.com/photos/uncategorized/2008/05/29/bananas.gif', u'http://cdn2.scratch.mit.edu/get_image/gallery/217706_170x100.png?v=1371138299.28', u'http://runeatrepeat.com/wp-content/uploads/2015/01/banana-time-running-blog.gif', u'http://www.comevisit.com/chuckali/bananas.gif', u'http://www.sweetcomments.net/images/random/go-bananas.gif']
def test_gif():
    """Replaying the recorded 'bananas' search must return one of the known gif URLs."""
    cassette_path = 'test/fixtures/gif_bananas.yaml'
    with vcr.use_cassette(cassette_path):
        reply = on_message({"text": u"!gif bananas"}, None)
        assert reply in bananas_gifs
def test_unicode():
    """A non-ASCII search term must not crash the plugin."""
    with vcr.use_cassette('test/fixtures/gif_unicode.yaml'):
        # not blowing up == success, for our purposes
        on_message({"text": u"!gif Mötörhead"}, None)
|
mit
|
jwhui/openthread
|
tests/scripts/thread-cert/Cert_9_2_15_PendingPartition.py
|
3
|
8095
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import thread_cert
from pktverify.consts import MLE_ADVERTISEMENT, MLE_PARENT_REQUEST, MLE_CHILD_ID_RESPONSE, MLE_CHILD_ID_REQUEST, MGMT_ACTIVE_SET_URI, MGMT_ACTIVE_GET_URI, RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, MODE_TLV, TIMEOUT_TLV, VERSION_TLV, TLV_REQUEST_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, ADDRESS_REGISTRATION_TLV
from pktverify.packet_verifier import PacketVerifier
# Radio channel and PAN id used while forming the initial network.
CHANNEL_INIT = 19
PANID_INIT = 0xface
# PAN id the whole network must migrate to via the second pending dataset.
PANID_FINAL = 0xabcd
# Node ids for the four-device line topology.
COMMISSIONER = 1
LEADER = 2
ROUTER1 = 3
ROUTER2 = 4
class Cert_9_2_15_PendingPartition(thread_cert.TestCase):
    """Thread certification test 9.2.15 (Pending Partition).

    Verifies that a router which was offline while a second
    MGMT_PENDING_SET was distributed still adopts the new pending
    dataset (and its PAN id change) after it reattaches.
    """
    SUPPORT_NCP = False
    # Line topology: COMMISSIONER - LEADER - ROUTER1 - ROUTER2,
    # enforced by per-node allowlists.
    TOPOLOGY = {
        COMMISSIONER: {
            'name': 'COMMISSIONER',
            'active_dataset': {
                'timestamp': 15,
                'panid': PANID_INIT,
                'channel': CHANNEL_INIT
            },
            'mode': 'rdn',
            'allowlist': [LEADER]
        },
        LEADER: {
            'name': 'LEADER',
            'active_dataset': {
                'timestamp': 15,
                'panid': PANID_INIT,
                'channel': CHANNEL_INIT
            },
            'mode': 'rdn',
            'partition_id': 0xffffffff,
            'allowlist': [COMMISSIONER, ROUTER1]
        },
        ROUTER1: {
            'name': 'ROUTER_1',
            'active_dataset': {
                'timestamp': 15,
                'panid': PANID_INIT,
                'channel': CHANNEL_INIT
            },
            'mode': 'rdn',
            'allowlist': [LEADER, ROUTER2]
        },
        ROUTER2: {
            'name': 'ROUTER_2',
            'active_dataset': {
                'timestamp': 15,
                'panid': PANID_INIT,
                'channel': CHANNEL_INIT
            },
            'mode': 'rdn',
            'allowlist': [ROUTER1]
        },
    }
    def _setUpRouter2(self):
        # Re-apply Router_2's volatile configuration (allowlist and router
        # selection jitter) after the node has been reset.
        self.nodes[ROUTER2].add_allowlist(self.nodes[ROUTER1].get_addr64())
        self.nodes[ROUTER2].enable_allowlist()
        self.nodes[ROUTER2].set_router_selection_jitter(1)
    def test(self):
        """Drive the topology through two pending-set operations."""
        # Bring up the leader first and confirm it takes the leader role.
        self.nodes[LEADER].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
        self.nodes[COMMISSIONER].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[COMMISSIONER].get_state(), 'router')
        self.nodes[COMMISSIONER].commissioner_start()
        self.simulator.go(3)
        self.nodes[ROUTER1].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
        # First pending set: long delay timer, new mesh-local prefix.
        self.nodes[COMMISSIONER].send_mgmt_pending_set(
            pending_timestamp=10,
            active_timestamp=70,
            delay_timer=600000,
            mesh_local='fd00:0db9::',
        )
        self.simulator.go(5)
        self.nodes[ROUTER2].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
        # Take Router_2 offline while the second pending set is distributed.
        self.nodes[ROUTER2].reset()
        self._setUpRouter2()
        self.simulator.go(100)
        # Second pending set: newer timestamps, and it changes the PAN id.
        self.nodes[COMMISSIONER].send_mgmt_pending_set(
            pending_timestamp=20,
            active_timestamp=80,
            delay_timer=200000,
            mesh_local='fd00:0db7::',
            panid=PANID_FINAL,
        )
        self.simulator.go(100)
        # Router_2 reattaches and must pick up the pending dataset.
        self.nodes[ROUTER2].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
        self.simulator.go(100)
        # After the delay timer fires, every node must be on the final PAN id.
        self.assertEqual(self.nodes[COMMISSIONER].get_panid(), PANID_FINAL)
        self.assertEqual(self.nodes[LEADER].get_panid(), PANID_FINAL)
        self.assertEqual(self.nodes[ROUTER1].get_panid(), PANID_FINAL)
        self.assertEqual(self.nodes[ROUTER2].get_panid(), PANID_FINAL)
        # Ping Router_2 on its first non-link-local address to confirm
        # end-to-end connectivity on the new network.
        ipaddrs = self.nodes[ROUTER2].get_addrs()
        for ipaddr in ipaddrs:
            if ipaddr[0:4] != 'fe80':
                break
        self.assertTrue(self.nodes[LEADER].ping(ipaddr))
    def verify(self, pv):
        """Check the captured packets against the certification test steps."""
        pkts = pv.pkts
        pv.summary.show()
        LEADER = pv.vars['LEADER']
        COMMISSIONER = pv.vars['COMMISSIONER']
        ROUTER_1 = pv.vars['ROUTER_1']
        ROUTER_2 = pv.vars['ROUTER_2']
        _router2_pkts = pkts.filter_wpan_src64(ROUTER_2)
        # Step 1: Ensure the topology is formed correctly
        # Verify Commissioner, Leader and Router_1 are sending MLE advertisements
        pkts.copy().filter_wpan_src64(LEADER).filter_mle_cmd(MLE_ADVERTISEMENT).must_next()
        pkts.filter_wpan_dst64(COMMISSIONER).filter_mle_cmd(MLE_CHILD_ID_RESPONSE).must_next()
        pkts.copy().filter_wpan_src64(COMMISSIONER).filter_mle_cmd(MLE_ADVERTISEMENT).must_next()
        pkts.filter_wpan_dst64(ROUTER_1).filter_mle_cmd(MLE_CHILD_ID_RESPONSE).must_next()
        pkts.copy().filter_wpan_src64(ROUTER_1).filter_mle_cmd(MLE_ADVERTISEMENT).must_next()
        # Step 5: Router_2 begins attach process by sending a multicast MLE Parent Request
        # The first MLE Parent Request sent MUST NOT be sent to all routers and REEDS
        _router2_pkts.range(pkts.index).filter_mle_cmd(MLE_PARENT_REQUEST).must_next().must_verify(
            lambda p: {MODE_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, VERSION_TLV} == set(
                p.mle.tlv.type) and p.mle.tlv.scan_mask.r == 1 and p.mle.tlv.scan_mask.e == 0)
        # Step 7: Router_2 MUST send a MLE Child ID Request to Router_1
        _router2_pkts.filter_mle_cmd(MLE_CHILD_ID_REQUEST).must_next().must_verify(lambda p: {
            RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, MODE_TLV, TIMEOUT_TLV, VERSION_TLV, TLV_REQUEST_TLV
        } < set(p.mle.tlv.type) and ADDRESS_REGISTRATION_TLV not in p.mle.tlv.type)
        # Step 14: Router_2 begins attach process by sending a multicast MLE Parent Request
        # The first MLE Parent Request sent MUST NOT be sent to all routers and REEDS
        _router2_pkts.filter_mle_cmd(MLE_PARENT_REQUEST).must_next().must_verify(
            lambda p: {MODE_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, VERSION_TLV} == set(
                p.mle.tlv.type) and p.mle.tlv.scan_mask.r == 1 and p.mle.tlv.scan_mask.e == 0)
        # Step 16: Router_2 MUST send a MLE Child ID Request to Router_1
        _router2_pkts.filter_mle_cmd(MLE_CHILD_ID_REQUEST).must_next().must_verify(lambda p: {
            RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, MODE_TLV, TIMEOUT_TLV, VERSION_TLV, TLV_REQUEST_TLV
        } < set(p.mle.tlv.type) and ADDRESS_REGISTRATION_TLV not in p.mle.tlv.type)
# Allow running this certification test directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
bsd-3-clause
|
PriceChild/ansible
|
lib/ansible/modules/cloud/univention/udm_user.py
|
69
|
21748
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Metadata consumed by Ansible tooling: module maturity and support level.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage posix users on a univention corporate server
description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
required: false
description:
- First name. Required if C(state=present).
lastname:
required: false
description:
- Last name. Required if C(state=present).
password:
required: false
default: None
description:
- Password. Required if C(state=present).
birthday:
required: false
default: None
description:
- Birthday
city:
required: false
default: None
description:
- City of users business address.
country:
required: false
default: None
description:
- Country of users business address.
department_number:
required: false
default: None
description:
- Department number of users business address.
aliases: [ departmentNumber ]
description:
required: false
default: None
description:
- Description (not gecos)
display_name:
required: false
default: None
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
required: false
default: ['']
description:
- A list of e-mail addresses.
employee_number:
required: false
default: None
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
required: false
default: None
description:
- Employee type
aliases: [ employeeType ]
gecos:
required: false
default: None
description:
- GECOS
groups:
required: false
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
required: false
default: None
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
required: false
default: None
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
required: false
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
required: false
default: None
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
required: false
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
required: false
default: None
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
required: false
default: None
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
required: false
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
required: false
default: None
description:
- Organisation
override_pw_history:
required: false
default: False
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
required: false
default: False
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
required: false
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
required: false
default: []
description:
- List of telephone numbers.
postcode:
required: false
default: None
description:
- Postal code of users business address.
primary_group:
required: false
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
required: false
default: None
description:
- Windows profile directory
pwd_change_next_login:
required: false
default: None
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
required: false
default: None
description:
- Room number of users business address.
aliases: [ roomNumber ]
samba_privileges:
required: false
default: []
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
required: false
default: []
description:
- Allow the authentication only on this Microsoft Windows host.
aliases: [ sambaUserWorkstations ]
sambahome:
required: false
default: None
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
required: false
default: None
description:
- Windows logon script.
secretary:
required: false
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
required: false
default: ['']
description:
- Enable user for the following service providers.
shell:
required: false
default: '/bin/bash'
description:
- Login shell
street:
required: false
default: None
description:
- Street of users business address.
title:
required: false
default: None
description:
- Title, e.g. C(Prof.).
unixhome:
required: false
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
required: false
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
required: false
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
required: false
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
required: false
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
required: false
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
'''
RETURN = '''# '''
from datetime import date
import crypt
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
from dateutil.relativedelta import relativedelta
def main():
    """Create, update or remove a posix user on a Univention Corporate Server.

    Reads the module parameters, looks the user up via LDAP, and either
    creates/edits the UDM object (state=present) or removes it
    (state=absent).  Always exits through AnsibleModule.exit_json (with
    changed/username/diff/container) or fail_json on error.
    """
    # Default account expiry: one year from today, as YYYY-MM-DD.
    expiry = date.strftime(date.today() + relativedelta(years=1), "%Y-%m-%d")
    module = AnsibleModule(
        argument_spec=dict(
            birthday=dict(default=None, type='str'),
            city=dict(default=None, type='str'),
            country=dict(default=None, type='str'),
            department_number=dict(default=None, type='str', aliases=['departmentNumber']),
            description=dict(default=None, type='str'),
            display_name=dict(default=None, type='str', aliases=['displayName']),
            email=dict(default=[''], type='list'),
            employee_number=dict(default=None, type='str', aliases=['employeeNumber']),
            employee_type=dict(default=None, type='str', aliases=['employeeType']),
            firstname=dict(default=None, type='str'),
            gecos=dict(default=None, type='str'),
            groups=dict(default=[], type='list'),
            home_share=dict(default=None, type='str', aliases=['homeShare']),
            home_share_path=dict(default=None, type='str', aliases=['homeSharePath']),
            home_telephone_number=dict(default=[], type='list', aliases=['homeTelephoneNumber']),
            homedrive=dict(default=None, type='str'),
            lastname=dict(default=None, type='str'),
            mail_alternative_address=dict(default=[], type='list', aliases=['mailAlternativeAddress']),
            mail_home_server=dict(default=None, type='str', aliases=['mailHomeServer']),
            mail_primary_address=dict(default=None, type='str', aliases=['mailPrimaryAddress']),
            mobile_telephone_number=dict(default=[], type='list', aliases=['mobileTelephoneNumber']),
            organisation=dict(default=None, type='str'),
            overridePWHistory=dict(default=False, type='bool', aliases=['override_pw_history']),
            overridePWLength=dict(default=False, type='bool', aliases=['override_pw_length']),
            pager_telephonenumber=dict(default=[], type='list', aliases=['pagerTelephonenumber']),
            password=dict(default=None, type='str', no_log=True),
            phone=dict(default=[], type='list'),
            postcode=dict(default=None, type='str'),
            primary_group=dict(default=None, type='str', aliases=['primaryGroup']),
            profilepath=dict(default=None, type='str'),
            pwd_change_next_login=dict(default=None, type='str', choices=['0', '1'], aliases=['pwdChangeNextLogin']),
            room_number=dict(default=None, type='str', aliases=['roomNumber']),
            samba_privileges=dict(default=[], type='list', aliases=['sambaPrivileges']),
            samba_user_workstations=dict(default=[], type='list', aliases=['sambaUserWorkstations']),
            sambahome=dict(default=None, type='str'),
            scriptpath=dict(default=None, type='str'),
            secretary=dict(default=[], type='list'),
            serviceprovider=dict(default=[''], type='list'),
            shell=dict(default='/bin/bash', type='str'),
            street=dict(default=None, type='str'),
            title=dict(default=None, type='str'),
            unixhome=dict(default=None, type='str'),
            userexpiry=dict(default=expiry, type='str'),
            username=dict(required=True, aliases=['name'], type='str'),
            position=dict(default='', type='str'),
            update_password=dict(default='always', choices=['always', 'on_create'], type='str'),
            ou=dict(default='', type='str'),
            subpath=dict(default='cn=users', type='str'),
            state=dict(default='present', choices=['present', 'absent'], type='str')
        ),
        supports_check_mode=True,
        required_if=[
            ('state', 'present', ['firstname', 'lastname', 'password'])
        ]
    )
    username = module.params['username']
    position = module.params['position']
    ou = module.params['ou']
    subpath = module.params['subpath']
    state = module.params['state']
    changed = False
    # BUGFIX: 'diff' is reported by exit_json() below but was only assigned
    # inside the state=present branch, so state=absent (or an unchanged run)
    # crashed with NameError.  Initialize it up front.
    diff = None
    # Does a posix account with this uid already exist anywhere in the tree?
    users = list(ldap_search(
        '(&(objectClass=posixAccount)(uid={}))'.format(username),
        attr=['uid']
    ))
    # An explicit 'position' wins over the ou/subpath shorthand.
    if position != '':
        container = position
    else:
        if ou != '':
            ou = 'ou={},'.format(ou)
        if subpath != '':
            subpath = '{},'.format(subpath)
        container = '{}{}{}'.format(subpath, ou, base_dn())
    user_dn = 'uid={},{}'.format(username, container)
    exists = bool(len(users))
    if state == 'present':
        try:
            if not exists:
                obj = umc_module_for_add('users/user', container)
            else:
                obj = umc_module_for_edit('users/user', user_dn)
            # NOTE(review): assumes alias keys ('displayName', 'unixhome')
            # are present in module.params alongside the canonical names --
            # confirm against the AnsibleModule alias handling in use.
            if module.params['displayName'] is None:
                module.params['displayName'] = '{} {}'.format(
                    module.params['firstname'],
                    module.params['lastname']
                )
            if module.params['unixhome'] is None:
                module.params['unixhome'] = '/home/{}'.format(
                    module.params['username']
                )
            # Copy every plain attribute straight from the parameters;
            # password/groups/overridePWHistory get special handling below.
            for k in obj.keys():
                if (k != 'password' and
                        k != 'groups' and
                        k != 'overridePWHistory' and
                        k in module.params and
                        module.params[k] is not None):
                    obj[k] = module.params[k]
            # handle some special values
            obj['e-mail'] = module.params['email']
            password = module.params['password']
            if obj['password'] is None:
                obj['password'] = password
            if module.params['update_password'] == 'always':
                # Stored value looks like '{scheme}hash'; re-hash the supplied
                # password with the stored hash as salt and update only on
                # mismatch, so unchanged passwords don't trigger a modify.
                old_password = obj['password'].split('}', 2)[1]
                if crypt.crypt(password, old_password) != old_password:
                    obj['overridePWHistory'] = module.params['overridePWHistory']
                    obj['overridePWLength'] = module.params['overridePWLength']
                    obj['password'] = password
            diff = obj.diff()
            if exists:
                for k in obj.keys():
                    if obj.hasChanged(k):
                        changed = True
            else:
                changed = True
            if not module.check_mode:
                if not exists:
                    obj.create()
                elif changed:
                    obj.modify()
        # BUGFIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt.
        except Exception:
            module.fail_json(
                msg="Creating/editing user {} in {} failed".format(
                    username,
                    container
                )
            )
        try:
            groups = module.params['groups']
            if groups:
                # One LDAP search matching any of the requested group cns.
                # (renamed from 'filter' to avoid shadowing the builtin)
                ldap_filter = '(&(objectClass=posixGroup)(|(cn={})))'.format(
                    ')(cn='.join(groups)
                )
                group_dns = list(ldap_search(ldap_filter, attr=['dn']))
                for dn in group_dns:
                    grp = umc_module_for_edit('groups/group', dn[0])
                    if user_dn not in grp['users']:
                        grp['users'].append(user_dn)
                        if not module.check_mode:
                            grp.modify()
                        changed = True
        except Exception:
            module.fail_json(
                msg="Adding groups to user {} failed".format(username)
            )
    if state == 'absent' and exists:
        try:
            obj = umc_module_for_edit('users/user', user_dn)
            if not module.check_mode:
                obj.remove()
            changed = True
        except Exception:
            module.fail_json(
                msg="Removing user {} failed".format(username)
            )
    module.exit_json(
        changed=changed,
        username=username,
        diff=diff,
        container=container
    )
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
gpl-3.0
|
davicustodio/geonode
|
geonode/services/management/commands/importservice.py
|
35
|
4156
|
from django.core.management.base import BaseCommand
from optparse import make_option
from geonode.services.models import Service
from geonode.services.views import _register_cascaded_service, _register_indexed_service, \
_register_harvested_service, _register_cascaded_layers, _register_indexed_layers
import json
from geonode.people.utils import get_valid_user
import sys
class Command(BaseCommand):
help = 'Import a remote map service into GeoNode'
option_list = BaseCommand.option_list + (
make_option('-o', '--owner', dest="owner", default=None,
help="Name of the user account which should own the imported layers"),
make_option('-r', '--registerlayers', dest="registerlayers", default=False,
help="Register all layers found in the service"),
make_option('-u', '--username', dest="username", default=None,
help="Username required to login to this service if any"),
make_option('-p', '--password', dest="password", default=None,
help="Username required to login to this service if any"),
make_option('-s', '--security', dest="security", default=None,
help="Security permissions JSON - who can view/edit"),
)
args = 'url name type method'
def handle(self, url, name, type, method, console=sys.stdout, **options):
user = options.get('user')
owner = get_valid_user(user)
register_layers = options.get('registerlayers')
username = options.get('username')
password = options.get('password')
perm_spec = options.get('permspec')
register_service = True
# First Check if this service already exists based on the URL
base_url = url
try:
service = Service.objects.get(base_url=base_url)
except Service.DoesNotExist:
service = None
if service is not None:
print "This is an existing Service"
register_service = False
# Then Check that the name is Unique
try:
service = Service.objects.get(name=name)
except Service.DoesNotExist:
service = None
if service is not None:
print "This is an existing service using this name.\nPlease specify a different name."
if register_service:
if method == 'C':
response = _register_cascaded_service(type, url, name, username, password, owner=owner, verbosity=True)
elif method == 'I':
response = _register_indexed_service(type, url, name, username, password, owner=owner, verbosity=True)
elif method == 'H':
response = _register_harvested_service(url, name, username, password, owner=owner, verbosity=True)
elif method == 'X':
print 'Not Implemented (Yet)'
elif method == 'L':
print 'Local Services not configurable via API'
else:
print 'Invalid method'
json_response = json.loads(response.content)
if "id" in json_response:
print "Service created with id of %d" % json_response["id"]
service = Service.objects.get(id=json_response["id"])
else:
print "Something went wrong: %s" % response.content
return
print service.id
print register_layers
if service and register_layers:
layers = []
for layer in service.layer_set.all():
layers.append(layer.typename)
if service.method == 'C':
response = _register_cascaded_layers(user, service, layers, perm_spec)
elif service.method == 'I':
response = _register_indexed_layers(user, service, layers, perm_spec)
elif service.method == 'X':
print 'Not Implemented (Yet)'
elif service.method == 'L':
print 'Local Services not configurable via API'
else:
print('Invalid Service Type')
print response.content
|
gpl-3.0
|
modulexcite/catapult
|
third_party/mapreduce/mapreduce/kv_pb.py
|
35
|
10910
|
#!/usr/bin/env python
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: third_party/py/appengine_mapreduce/src/kv.proto
from google.net.proto import ProtocolBuffer
import array
import base64
import thread
try:
from google.net.proto import _net_proto___parse__python
except ImportError:
_net_proto___parse__python = None
# Pychecker directives for this generated module.
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
# Newer protocol buffer runtimes support extendable messages; detect that
# capability once and fall back to the plain base class otherwise.
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
  _extension_runtime = True
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _extension_runtime = False
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class KeyValue(ProtocolBuffer.ProtocolMessage):
  """Generated message: one required (key, value) byte-string pair.

  NOTE: emitted by the protocol buffer compiler from kv.proto (see file
  header) -- do not hand-edit the wire-format logic; regenerate instead.
  Comments were added for readability only.
  """
  # Presence flags (proto2 'has_' semantics) and field defaults.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  def __init__(self, contents=None):
    # Optionally initialize from an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    # Copy every set field from another KeyValue into self.
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  # Native C parser hooks, installed only when the optional
  # _net_proto___parse__python extension imported successfully above.
  if _net_proto___parse__python is not None:
    def _CMergeFromString(self, s):
      _net_proto___parse__python.MergeFromString(self, 'KeyValue', s)
  if _net_proto___parse__python is not None:
    def _CEncode(self):
      return _net_proto___parse__python.Encode(self, 'KeyValue')
  if _net_proto___parse__python is not None:
    def _CEncodePartial(self):
      return _net_proto___parse__python.EncodePartial(self, 'KeyValue')
  if _net_proto___parse__python is not None:
    def _CToASCII(self, output_format):
      return _net_proto___parse__python.ToASCII(self, 'KeyValue', output_format)
  if _net_proto___parse__python is not None:
    def ParseASCII(self, s):
      _net_proto___parse__python.ParseASCII(self, 'KeyValue', s)
  if _net_proto___parse__python is not None:
    def ParseASCIIIgnoreUnknown(self, s):
      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'KeyValue', s)
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are 'required' in the proto definition.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # +2 accounts for the two one-byte field tags.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(len(self.key_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(len(self.value_))
    return n
  def Clear(self):
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    # Tag 10 = field 1 (key), wire type 2; tag 18 = field 2 (value).
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    out.putVarInt32(18)
    out.putPrefixedString(self.value_)
  def OutputPartial(self, out):
    if (self.has_key_):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    if (self.has_value_):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_value(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kkey = 1
  kvalue = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "value",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'KeyValue'
  _SERIALIZED_DESCRIPTOR = array.array('B')
  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("Wi90aGlyZF9wYXJ0eS9weS9hcHBlbmdpbmVfbWFwcmVkdWNlL3NyYy9rdi5wcm90bwoIS2V5VmFsdWUTGgNrZXkgASgCMAk4AqMBqgEFY3R5cGWyAQRDb3JkpAEUExoFdmFsdWUgAigCMAk4AqMBqgEFY3R5cGWyAQRDb3JkpAEUugGWAQovdGhpcmRfcGFydHkvcHkvYXBwZW5naW5lX21hcHJlZHVjZS9zcmMva3YucHJvdG8iLgoIS2V5VmFsdWUSDwoDa2V5GAEgAigMQgIIARIRCgV2YWx1ZRgCIAIoDEICCAEiLwoJS2V5VmFsdWVzEg8KA2tleRgBIAIoDEICCAESEQoFdmFsdWUYAiADKAxCAggBQgIgAQ=="))
  if _net_proto___parse__python is not None:
    _net_proto___parse__python.RegisterType(
        _SERIALIZED_DESCRIPTOR.tostring())
class KeyValues(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
def __init__(self, contents=None):
self.value_ = []
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value_size(self): return len(self.value_)
def value_list(self): return self.value_
def value(self, i):
return self.value_[i]
def set_value(self, i, x):
self.value_[i] = x
def add_value(self, x):
self.value_.append(x)
def clear_value(self):
self.value_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
for i in xrange(x.value_size()): self.add_value(x.value(i))
if _net_proto___parse__python is not None:
def _CMergeFromString(self, s):
_net_proto___parse__python.MergeFromString(self, 'KeyValues', s)
if _net_proto___parse__python is not None:
def _CEncode(self):
return _net_proto___parse__python.Encode(self, 'KeyValues')
if _net_proto___parse__python is not None:
def _CEncodePartial(self):
return _net_proto___parse__python.EncodePartial(self, 'KeyValues')
if _net_proto___parse__python is not None:
def _CToASCII(self, output_format):
return _net_proto___parse__python.ToASCII(self, 'KeyValues', output_format)
if _net_proto___parse__python is not None:
def ParseASCII(self, s):
_net_proto___parse__python.ParseASCII(self, 'KeyValues', s)
if _net_proto___parse__python is not None:
def ParseASCIIIgnoreUnknown(self, s):
_net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'KeyValues', s)
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if len(self.value_) != len(x.value_): return 0
for e1, e2 in zip(self.value_, x.value_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += 1 * len(self.value_)
for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
n += 1 * len(self.value_)
for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
return n
def Clear(self):
self.clear_key()
self.clear_value()
  def OutputUnchecked(self, out):
    # Serialize assuming required fields are set; wire tag 10 is the
    # length-prefixed `key`, tag 18 each repeated `value`.
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def OutputPartial(self, out):
    # Serialize only the fields that are present (partial encoding).
    if (self.has_key_):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def TryMerge(self, d):
    # Merge fields from decoder `d` into self, skipping unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_value(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Human-readable debug rendering of the message, one field per line.
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    cnt=0
    for e in self.value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers for this message.
  kkey = 1
  kvalue = 2
  # Tag -> field-name table used by the debug/ASCII formatters.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "value",
  }, 2)
  # Tag -> wire-type table used by the encoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'KeyValues'
  # Base64-encoded serialized descriptor, registered with the native
  # parser when the C++ extension is available.
  _SERIALIZED_DESCRIPTOR = array.array('B')
  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("Wi90aGlyZF9wYXJ0eS9weS9hcHBlbmdpbmVfbWFwcmVkdWNlL3NyYy9rdi5wcm90bwoJS2V5VmFsdWVzExoDa2V5IAEoAjAJOAKjAaoBBWN0eXBlsgEEQ29yZKQBFBMaBXZhbHVlIAIoAjAJOAOjAaoBBWN0eXBlsgEEQ29yZKQBFMIBCEtleVZhbHVl"))
  if _net_proto___parse__python is not None:
    _net_proto___parse__python.RegisterType(
        _SERIALIZED_DESCRIPTOR.tostring())
if _extension_runtime:
  pass
__all__ = ['KeyValue','KeyValues']
|
bsd-3-clause
|
Stavitsky/nova
|
nova/objects/dns_domain.py
|
17
|
2588
|
# Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class DNSDomain(base.NovaPersistentObject, base.NovaObject,
                base.NovaObjectDictCompat):
    """Versioned object wrapping a DNS domain row of the Nova database."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'domain': fields.StringField(),
        'scope': fields.StringField(nullable=True),
        'availability_zone': fields.StringField(nullable=True),
        'project_id': fields.StringField(nullable=True),
    }
    @staticmethod
    def _from_db_object(context, vif, db_vif):
        # Copy every declared field from the DB row onto the object.
        # NOTE(review): parameter names say "vif" but this is a DNSDomain;
        # presumably copied from the virtual-interface object - confirm.
        for field in vif.fields:
            vif[field] = db_vif[field]
        vif._context = context
        vif.obj_reset_changes()
        return vif
    @base.remotable_classmethod
    def get_by_domain(cls, context, domain):
        # Returns a DNSDomain, or None when the domain row is falsy/absent.
        db_dnsd = db.dnsdomain_get(context, domain)
        if db_dnsd:
            return cls._from_db_object(context, cls(), db_dnsd)
    @base.remotable_classmethod
    def register_for_zone(cls, context, domain, zone):
        # Associate *domain* with *zone* in the database.
        db.dnsdomain_register_for_zone(context, domain, zone)
    @base.remotable_classmethod
    def register_for_project(cls, context, domain, project):
        # Associate *domain* with *project* in the database.
        db.dnsdomain_register_for_project(context, domain, project)
    @base.remotable_classmethod
    def delete_by_domain(cls, context, domain):
        # Remove the domain registration from the database.
        db.dnsdomain_unregister(context, domain)
@base.NovaObjectRegistry.register
class DNSDomainList(base.ObjectListBase, base.NovaObject):
    """List object holding DNSDomain entries."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'objects': fields.ListOfObjectsField('DNSDomain'),
    }
    # Maps this list object's version to the contained DNSDomain version.
    child_versions = {
        '1.0': '1.0',
    }
    @base.remotable_classmethod
    def get_all(cls, context):
        # Fetch every DNS domain row and wrap them in a DNSDomainList.
        db_domains = db.dnsdomain_get_all(context)
        return base.obj_make_list(context, cls(context), objects.DNSDomain,
                                  db_domains)
|
apache-2.0
|
m4h7/juriscraper
|
juriscraper/opinions/united_states/federal_appellate/ca7.py
|
2
|
4151
|
# Scraper for the United States Court of Appeals for the Seventh Circuit
# CourtID: ca7
# Court Short Name: 7th Cir.
import time
from datetime import date, timedelta
import urllib
from dateutil.rrule import rrule, DAILY
from lxml import html
from juriscraper.OpinionSite import OpinionSite
class Site(OpinionSite):
    """Scraper for Seventh Circuit opinions via the court's rssExec search.

    Parsing is driven by xpath over the result tables: column 1 is the
    docket number, 2 the case name, 3 the nature of suit, 4 the date,
    5 the document type, 6 the judge.
    """
    def __init__(self, *args, **kwargs):
        super(Site, self).__init__(*args, **kwargs)
        # Default (non-backscrape) query covers the last 60 days.
        self.a_while_ago = date.today() - timedelta(days=60)
        self.url = 'http://media.ca7.uscourts.gov/cgi-bin/rssExec.pl?Time=any&FromMonth={month}&FromDay={day}&FromYear={year}&' \
                   'ToMonth=&ToDay=&ToYear=&Author=any&AuthorName=&Case=any&CaseY1=&CaseY2=&CaseN1=&CaseN2=&CaseN3=&' \
                   'CaseN4=&Submit=Submit&RssJudgeName=Easterbrook&OpsOnly=no'.format(
            month=self.a_while_ago.month,
            day=self.a_while_ago.day,
            year=self.a_while_ago.year,
        )
        self.court_id = self.__module__
        self.interval = 30
        # Backscrape steps through history every `interval` days.
        self.back_scrape_iterable = [i.date() for i in rrule(
            DAILY,
            interval=self.interval,  # Every interval days
            dtstart=date(1999, 10, 1),
            until=date(2015, 1, 1),
        )]
    def _get_case_names(self):
        case_names = []
        for e in self.html.xpath('//table//table/tr[position() >= 3]/td[2]'):
            s = html.tostring(e, method='text', encoding='unicode')
            case_names.append(s)
        return case_names
    def _get_download_urls(self):
        # PDF links live in the anchor of the last cell of each result row.
        return [e for e in self.html.xpath('//table[2]/tr/td/table/tr[position() >=3]/td/a/@href')]
    def _get_case_dates(self):
        # Dates are rendered as mm/dd/YYYY text in column 4.
        return [date.fromtimestamp(time.mktime(time.strptime(date_string.strip(), '%m/%d/%Y')))
                for date_string in self.html.xpath('//table//table/tr[position() >= 3]/td[4]/text()')]
    def _get_docket_numbers(self):
        return [docket_number for docket_number in
                self.html.xpath('//table//table/tr[position() >= 3]/td[1]/text()')]
    def _get_precedential_statuses(self):
        # Column 5's link text distinguishes opinions from nonprecedential
        # dispositions; anything else is reported as Unknown.
        statuses = []
        for e in self.html.xpath('//table//table/tr[position() >= 3]/td[5]/a'):
            s = html.tostring(e, method='text', encoding='unicode')
            if 'Opinion' in s:
                statuses.append('Published')
            elif 'Nonprecedential' in s:
                statuses.append('Unpublished')
            else:
                statuses.append('Unknown')
        return statuses
    def _get_nature_of_suit(self):
        natures = []
        for e in self.html.xpath('//table//table/tr[position() >= 3]/td[3]'):
            natures.append(html.tostring(e, method='text', encoding='unicode'))
        return natures
    def _get_judges(self):
        judges = []
        for e in self.html.xpath('//table//table/tr[position() >= 3]/td[6]'):
            s = html.tostring(e, method='text', encoding='unicode')
            # Normalize the site's run-together "percuriam" marker.
            if s.lower().strip() == 'percuriam':
                s = "Per Curiam"
            judges.append(s)
        return judges
    def _download_backwards(self, d):
        # Query one `interval`-day window starting at date `d`.
        to_date = d + timedelta(self.interval)
        params = urllib.urlencode({
            'Time': 'any',
            'FromMonth': d.month,
            'FromDay': d.day,
            'FromYear': d.year,
            'ToMonth': to_date.month,
            'ToDay': to_date.day,
            'ToYear': to_date.year,
            'Author': 'any',
            'AuthorName': '',
            'Case': 'any',
            'CaseY1': '',
            'CaseY2': '',
            'CaseN1': '',
            'CaseN2': '',
            'CaseN3': '',
            'CaseN4': '',
            'Submit': 'Submit',
            'RssJudgeName': 'Easterbrook',
            'OpsOnly': 'no'
        })
        self.base_url = 'http://media.ca7.uscourts.gov/cgi-bin/rssExec.pl'
        self.url = "{}?{}".format(self.base_url, params)
        self.html = self._download()
        if self.html is not None:
            # Setting status is important because it prevents the download
            # function from being run a second time by the parse method.
            self.status = 200
|
bsd-2-clause
|
glewarne/S6-UniKernel
|
scripts/exynos_checkpatch_helper.py
|
169
|
2237
|
"""
exynos_checkpatch_helper.py - a helper script for exynos_checkpatch.sh
Dept : S/W Solution Dev Team
Author : Solution3 Power Part
Update : 2014.12.08
"""
import subprocess as sp
import sys
def print_log(color, log):
    """Print *log* with ANSI coloring: 'r' -> red, anything else -> green."""
    colored_log = ''
    if color == 'r':
        colored_log = "\033[31m" + log + "\033[0m"
    else:
        colored_log = "\033[32m" + log + "\033[0m"
    print colored_log
def decide(result):
    """Classify a tokenized checkpatch summary.

    Token 1 is the error count and token 3 is the warning count; the run
    succeeds only when both are zero.
    """
    errors = int(result[1])
    warnings = int(result[3])
    if errors == 0 and warnings == 0:
        return 'SUCCESS'
    return 'FAIL'
def print_build_result():
    """Print the build outcome for one defconfig.

    argv: [2]=defconfig name, [3]=build log path ('0xefefefef' marks a
    missing defconfig), [4]=result flag (1 means success).
    """
    defconfig, build_log, result = sys.argv[2], sys.argv[3], int(sys.argv[4])
    if build_log == '0xefefefef':
        print_log('r', '[ BUILD ] {} <- NOT EXIST'.format(defconfig))
        return
    if result == 1:
        print_log('g', '[ BUILD ] {} <- SUCCESS'.format(defconfig))
    else:
        print_log('r', '[ BUILD ] {} <- FAIL (refer to {})'.format(defconfig, build_log))
def print_defconfig_result():
    """Print the defconfig-generation outcome for one defconfig.

    argv: [2]=defconfig name, [3]=log path ('0xfefefefe' marks a missing
    defconfig), [4]=result flag (1 means success).
    """
    defconfig, def_log, result = sys.argv[2], sys.argv[3], int(sys.argv[4])
    if def_log == '0xfefefefe':
        print_log('r', '[ DEFCONFIG ] {} <- NOT EXIST'.format(defconfig))
        return
    if result == 1:
        print_log('g', '[ DEFCONFIG ] {} <- SUCCESS'.format(defconfig))
    else:
        print_log('r', '[ DEFCONFIG ] {} <- FAIL (refer to {})'.format(defconfig, def_log))
def run_checkpatch_test():
    """Run scripts/checkpatch.pl over the last N commits.

    N comes from sys.argv[2] (default 5).  Each patch file produced by
    ``git format-patch`` is checked, its verdict printed, then deleted.
    """
    num_patch = 5 if sys.argv[2] == '' else int(sys.argv[2])
    patches = sp.check_output(['git', 'format-patch', '-'+str(num_patch)]).split()
    for patch in patches:
        try:
            r = sp.check_output(['./scripts/checkpatch.pl', patch]).strip().split()
        except sp.CalledProcessError:
            # checkpatch exits non-zero when it finds problems it can't summarize.
            print_log('r', '[ CHECKPATCH ] {} <- FAIL'.format(patch))
            continue
        color = 'g' if decide(r) == 'SUCCESS' else 'r'
        print_log(color, '[ CHECKPATCH ] {} <- {}'.format(patch, decide(r)))
        sp.check_output(['rm', patch])
def main():
    """Dispatch on the first command-line flag.

    -b  print a build result
    -c  run the checkpatch test
    -d  print a defconfig result
    """
    if sys.argv[1] == '-b':
        print_build_result()
    elif sys.argv[1] == '-c':
        run_checkpatch_test()
    elif sys.argv[1] == '-d':
        print_defconfig_result()
    else:
        # BUG FIX: the original used ``assert(True == False)``, which is
        # silently stripped under ``python -O`` and yields no diagnostic;
        # exit with a clear error message instead.
        sys.exit('unknown option: {!r} (expected -b, -c or -d)'.format(sys.argv[1]))
# BUG FIX: guard the entry point so importing this module (e.g. from a
# test) does not immediately execute the CLI.
if __name__ == '__main__':
    main()
|
gpl-2.0
|
ckoch786/PrayerTimes
|
src/prayertimes.py
|
1
|
4151
|
#!/usr/bin/python
# Copyright 2012 Cory Koch
#
# This file is part of PrayerTimes.
#
# PrayerTimes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PrayerTimes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PrayerTimes. If not, see <http://www.gnu.org/licenses/>.
#
# ********************************************************************
import urllib2
from BeautifulSoup import BeautifulSoup
import os
HOME = os.getenv("HOME")
# Pain to parse but gives more options
islamicFinder="http://www.islamicfinder.org/prayerDetail.php?country=usa&city=Cleveland&state=OH&id=18707&month=&year=&email=&home=2012-7-18&lang=&aversion=&athan=&monthly="
# Easy to parse
ICGC="http://www.iccleveland.org/"
# NOTE(review): the page is fetched at import time; the cache helpers
# below are currently bypassed.
PAGE = urllib2.urlopen(ICGC)
#PAGE = check_cache()
SOUP = BeautifulSoup(PAGE)
# Prayer-name/time table and date header, scraped once at import.
PRAYER_TIMES_TBL = SOUP.find('table', attrs={'class':'stripe leftjustify', 'cellspacing':'0'})
INFO = SOUP.find('div', attrs={'class':'info'})
DATE = INFO.findAll('b')
COLS = PRAYER_TIMES_TBL.findAll('td')
# Check to see if a cache exists
# speed up performance/ reduce load on servers.
def open_cache():
'''open the prayertimes cache'''
try:
f = open('HOME/.prayertime', 'r+')
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
return f
def write_to_cache(x, f):
    '''Write the page content *x* to the open cache file *f*.'''
    # BUG FIX: the original wrote the undefined global name `page`
    # instead of the `x` argument, raising NameError when called.
    f.write(x)
def check_cache():
f = open_cach()
if True:#f.readline() == :
page = f.read()
# cache DNE or is out of date
else:
try:
page = urllib2.urlopen(ICGC)
write_to_cache(page, f)
except:
print "The site must be down..."
return page
def print_date():
    '''Print each bold date element scraped into the module-global DATE.'''
    for i in DATE:
        print i.text
    print '-----------------'
def print_prayer_time():
    '''Print formatted prayer name/time pairs from the module-global COLS.'''
    # COLS alternates cells: odd positions are prayer names, even are times.
    x = 0
    times = []
    prayers = []
    for element in COLS:
        x=x+1
        if (x %2 ) == 0:
            times.append(element.text)
        else:
            prayers.append(element.text)
    for prayer, time in zip(prayers, times):
        prayerp = list(prayer)
        timep = list(time)
        # Replace ':' in the prayer name with '|' for display.
        for i in prayerp:
            if i == ':':
                n = prayerp.index(':')
                prayerp[n] = '|'
        # Append an 'm' for each 'a'/'p' so times read as 'am'/'pm'.
        for j in timep:
            if j == 'a':
                timep.append('m')
            if j == 'p':
                timep.append('m')
        print '%8s %s' % ("".join(prayerp),"".join(timep))
    print '-----------------'
def get_date():
    '''Return the text of the first bold date element in DATE (or None).'''
    for i in DATE:
        return i.text
def get_prayer_times():
    '''Return formatted "name time" strings built from the COLS cells.

    Same transformation as print_prayer_time(), but collected into a
    list instead of printed.
    '''
    # COLS alternates cells: odd positions are prayer names, even are times.
    x = 0
    times = []
    prayers = []
    prayer_time = []
    for element in COLS:
        x=x+1
        if (x %2 ) == 0:
            times.append(element.text)
        else:
            prayers.append(element.text)
    for prayer, time in zip(prayers, times):
        prayerp = list(prayer)
        timep = list(time)
        # Replace ':' in the prayer name with '|' for display.
        for i in prayerp:
            if i == ':':
                n = prayerp.index(':')
                prayerp[n] = '|'
        # Append an 'm' for each 'a'/'p' so times read as 'am'/'pm'.
        for j in timep:
            if j == 'a':
                timep.append('m')
            if j == 'p':
                timep.append('m')
        prayer_time.append('%8s %s' % ("".join(prayerp),"".join(timep)))
    return prayer_time
if __name__=="__main__":
    # Script entry point: print today's date header and the prayer table.
    print_date()
    print_prayer_time()
# Leftover scraping experiments, kept for reference:
#table = soup.find({"table" : True, "width" : "330", "cellspacing" : "1","align" : "center", "bgcolor" : "#a9c6f7"})
#cols = rows.findAll('td')
#print cols
|
gpl-3.0
|
Benrflanders/Pytris
|
pyglet/media/drivers/directsound/lib_dsound.py
|
78
|
12874
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# $Id:$
import ctypes
from pyglet import com
# dsound.dll loaded with OLE conventions: failing HRESULTs raise automatically.
lib = ctypes.oledll.dsound
# Win32 primitive type aliases used throughout these bindings.
DWORD = ctypes.c_uint32
LPDWORD = ctypes.POINTER(DWORD)
LONG = ctypes.c_long
LPLONG = ctypes.POINTER(LONG)
WORD = ctypes.c_uint16
HWND = DWORD
LPUNKNOWN = ctypes.c_void_p
# Direct3D scalar value (float) and pointer alias.
D3DVALUE = ctypes.c_float
PD3DVALUE = ctypes.POINTER(D3DVALUE)
# NOTE: the _fields_ lists below mirror native Win32/DirectSound struct
# layouts; member order and types must not be changed.
class D3DVECTOR(ctypes.Structure):
    # 3-component float vector (positions/velocities/orientations).
    _fields_ = [
        ('x', ctypes.c_float),
        ('y', ctypes.c_float),
        ('z', ctypes.c_float),
    ]
PD3DVECTOR = ctypes.POINTER(D3DVECTOR)
class WAVEFORMATEX(ctypes.Structure):
    # PCM stream format description (WAVEFORMATEX).
    _fields_ = [
        ('wFormatTag', WORD),
        ('nChannels', WORD),
        ('nSamplesPerSec', DWORD),
        ('nAvgBytesPerSec', DWORD),
        ('nBlockAlign', WORD),
        ('wBitsPerSample', WORD),
        ('cbSize', WORD),
    ]
LPWAVEFORMATEX = ctypes.POINTER(WAVEFORMATEX)
WAVE_FORMAT_PCM = 1
class DSCAPS(ctypes.Structure):
    # Device capability report returned by IDirectSound.GetCaps.
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwMinSecondarySampleRate', DWORD),
        ('dwMaxSecondarySampleRate', DWORD),
        ('dwPrimaryBuffers', DWORD),
        ('dwMaxHwMixingAllBuffers', DWORD),
        ('dwMaxHwMixingStaticBuffers', DWORD),
        ('dwMaxHwMixingStreamingBuffers', DWORD),
        ('dwFreeHwMixingAllBuffers', DWORD),
        ('dwFreeHwMixingStaticBuffers', DWORD),
        ('dwFreeHwMixingStreamingBuffers', DWORD),
        ('dwMaxHw3DAllBuffers', DWORD),
        ('dwMaxHw3DStaticBuffers', DWORD),
        ('dwMaxHw3DStreamingBuffers', DWORD),
        ('dwFreeHw3DAllBuffers', DWORD),
        ('dwFreeHw3DStaticBuffers', DWORD),
        ('dwFreeHw3DStreamingBuffers', DWORD),
        ('dwTotalHwMemBytes', DWORD),
        ('dwFreeHwMemBytes', DWORD),
        ('dwMaxContigFreeHwMemBytes', DWORD),
        ('dwUnlockTransferRateHwBuffers', DWORD),
        ('dwPlayCpuOverheadSwBuffers', DWORD),
        ('dwReserved1', DWORD),
        ('dwReserved2', DWORD)
    ]
LPDSCAPS = ctypes.POINTER(DSCAPS)
class DSBCAPS(ctypes.Structure):
    # Per-buffer capability report (IDirectSoundBuffer.GetCaps).
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwBufferBytes', DWORD),
        ('dwUnlockTransferRate', DWORD),
        ('dwPlayCpuOverhead', DWORD),
    ]
LPDSBCAPS = ctypes.POINTER(DSBCAPS)
class DSBUFFERDESC(ctypes.Structure):
    # Description used to create a sound buffer (CreateSoundBuffer).
    _fields_ = [
        ('dwSize', DWORD),
        ('dwFlags', DWORD),
        ('dwBufferBytes', DWORD),
        ('dwReserved', DWORD),
        ('lpwfxFormat', LPWAVEFORMATEX),
    ]
LPDSBUFFERDESC = ctypes.POINTER(DSBUFFERDESC)
class DS3DBUFFER(ctypes.Structure):
    # Full 3D parameter set of a 3D sound buffer.
    _fields_ = [
        ('dwSize', DWORD),
        ('vPosition', D3DVECTOR),
        ('vVelocity', D3DVECTOR),
        ('dwInsideConeAngle', DWORD),
        ('dwOutsideConeAngle', DWORD),
        ('vConeOrientation', D3DVECTOR),
        ('lConeOutsideVolume', LONG),
        ('flMinDistance', D3DVALUE),
        ('flMaxDistance', D3DVALUE),
        ('dwMode', DWORD),
    ]
LPDS3DBUFFER = ctypes.POINTER(DS3DBUFFER)
class DS3DLISTENER(ctypes.Structure):
    # Full 3D parameter set of the listener.
    _fields_ = [
        ('dwSize', DWORD),
        ('vPosition', D3DVECTOR),
        ('vVelocity', D3DVECTOR),
        ('vOrientFront', D3DVECTOR),
        ('vOrientTop', D3DVECTOR),
        ('flDistanceFactor', D3DVALUE),
        ('flRolloffFactor', D3DVALUE),
        ('flDopplerFactor', D3DVALUE),
    ]
LPDS3DLISTENER = ctypes.POINTER(DS3DLISTENER)
class IDirectSoundBuffer(com.IUnknown):
    # COM interface for a (primary or secondary) sound buffer.
    # NOTE: _methods_ order must match the native COM vtable exactly.
    _methods_ = [
        ('GetCaps',
         com.STDMETHOD(LPDSBCAPS)),
        ('GetCurrentPosition',
         com.STDMETHOD(LPDWORD, LPDWORD)),
        ('GetFormat',
         com.STDMETHOD(LPWAVEFORMATEX, DWORD, LPDWORD)),
        ('GetVolume',
         com.STDMETHOD(LPLONG)),
        ('GetPan',
         com.STDMETHOD(LPLONG)),
        ('GetFrequency',
         com.STDMETHOD(LPDWORD)),
        ('GetStatus',
         com.STDMETHOD(LPDWORD)),
        ('Initialize',
         com.STDMETHOD(ctypes.c_void_p, LPDSBUFFERDESC)),
        ('Lock',
         com.STDMETHOD(DWORD, DWORD,
                       ctypes.POINTER(ctypes.c_void_p), LPDWORD,
                       ctypes.POINTER(ctypes.c_void_p), LPDWORD,
                       DWORD)),
        ('Play',
         com.STDMETHOD(DWORD, DWORD, DWORD)),
        ('SetCurrentPosition',
         com.STDMETHOD(DWORD)),
        ('SetFormat',
         com.STDMETHOD(LPWAVEFORMATEX)),
        ('SetVolume',
         com.STDMETHOD(LONG)),
        ('SetPan',
         com.STDMETHOD(LONG)),
        ('SetFrequency',
         com.STDMETHOD(DWORD)),
        ('Stop',
         com.STDMETHOD()),
        ('Unlock',
         com.STDMETHOD(ctypes.c_void_p, DWORD, ctypes.c_void_p, DWORD)),
        ('Restore',
         com.STDMETHOD()),
    ]
# Interface GUID for IDirectSound3DListener.
IID_IDirectSound3DListener = com.GUID(
    0x279AFA84, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60)
class IDirectSound3DListener(com.IUnknown):
    # COM interface controlling the 3D listener (position, orientation,
    # doppler/rolloff factors).
    # NOTE: _methods_ order must match the native COM vtable exactly.
    _methods_ = [
        ('GetAllParameters',
         com.STDMETHOD(LPDS3DLISTENER)),
        ('GetDistanceFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetDopplerFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetOrientation',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetPosition',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetRolloffFactor',
         com.STDMETHOD(PD3DVALUE)),
        ('GetVelocity',
         com.STDMETHOD(PD3DVECTOR)),
        ('SetAllParameters',
         com.STDMETHOD(LPDS3DLISTENER)),
        ('SetDistanceFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetDopplerFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetOrientation',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE,
                       D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetPosition',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetRolloffFactor',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetVelocity',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('CommitDeferredSettings',
         com.STDMETHOD()),
    ]
# Interface GUID for IDirectSound3DBuffer.
IID_IDirectSound3DBuffer = com.GUID(
    0x279AFA86, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60)
class IDirectSound3DBuffer(com.IUnknown):
    # COM interface controlling the 3D parameters of one sound source.
    # NOTE: _methods_ order must match the native COM vtable exactly.
    _methods_ = [
        ('GetAllParameters',
         com.STDMETHOD(LPDS3DBUFFER)),
        ('GetConeAngles',
         com.STDMETHOD(LPDWORD, LPDWORD)),
        ('GetConeOrientation',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetConeOutsideVolume',
         com.STDMETHOD(LPLONG)),
        ('GetMaxDistance',
         com.STDMETHOD(PD3DVALUE)),
        ('GetMinDistance',
         com.STDMETHOD(PD3DVALUE)),
        ('GetMode',
         com.STDMETHOD(LPDWORD)),
        ('GetPosition',
         com.STDMETHOD(PD3DVECTOR)),
        ('GetVelocity',
         com.STDMETHOD(PD3DVECTOR)),
        ('SetAllParameters',
         com.STDMETHOD(LPDS3DBUFFER, DWORD)),
        ('SetConeAngles',
         com.STDMETHOD(DWORD, DWORD, DWORD)),
        ('SetConeOrientation',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetConeOutsideVolume',
         com.STDMETHOD(LONG, DWORD)),
        ('SetMaxDistance',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetMinDistance',
         com.STDMETHOD(D3DVALUE, DWORD)),
        ('SetMode',
         com.STDMETHOD(DWORD, DWORD)),
        ('SetPosition',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
        ('SetVelocity',
         com.STDMETHOD(D3DVALUE, D3DVALUE, D3DVALUE, DWORD)),
    ]
class IDirectSound(com.IUnknown):
    # Root DirectSound device COM interface.
    # NOTE: _methods_ order must match the native COM vtable exactly.
    _methods_ = [
        ('CreateSoundBuffer',
         com.STDMETHOD(LPDSBUFFERDESC,
                       ctypes.POINTER(IDirectSoundBuffer),
                       LPUNKNOWN)),
        ('GetCaps',
         com.STDMETHOD(LPDSCAPS)),
        ('DuplicateSoundBuffer',
         com.STDMETHOD(IDirectSoundBuffer,
                       ctypes.POINTER(IDirectSoundBuffer))),
        ('SetCooperativeLevel',
         com.STDMETHOD(HWND, DWORD)),
        ('Compact',
         com.STDMETHOD()),
        ('GetSpeakerConfig',
         com.STDMETHOD(LPDWORD)),
        ('SetSpeakerConfig',
         com.STDMETHOD(DWORD)),
        ('Initialize',
         com.STDMETHOD(com.LPGUID)),
    ]
    _type_ = com.COMInterface
# Factory entry point: DirectSoundCreate(device_guid, out_interface, outer).
DirectSoundCreate = lib.DirectSoundCreate
DirectSoundCreate.argtypes = \
    [com.LPGUID, ctypes.POINTER(IDirectSound), ctypes.c_void_p]
# DSCAPS.dwFlags device capability bits.
DSCAPS_PRIMARYMONO = 0x00000001
DSCAPS_PRIMARYSTEREO = 0x00000002
DSCAPS_PRIMARY8BIT = 0x00000004
DSCAPS_PRIMARY16BIT = 0x00000008
DSCAPS_CONTINUOUSRATE = 0x00000010
DSCAPS_EMULDRIVER = 0x00000020
DSCAPS_CERTIFIED = 0x00000040
DSCAPS_SECONDARYMONO = 0x00000100
DSCAPS_SECONDARYSTEREO = 0x00000200
DSCAPS_SECONDARY8BIT = 0x00000400
DSCAPS_SECONDARY16BIT = 0x00000800
# SetCooperativeLevel priority levels.
DSSCL_NORMAL = 0x00000001
DSSCL_PRIORITY = 0x00000002
DSSCL_EXCLUSIVE = 0x00000003
DSSCL_WRITEPRIMARY = 0x00000004
# Speaker configuration values (Get/SetSpeakerConfig).
DSSPEAKER_DIRECTOUT = 0x00000000
DSSPEAKER_HEADPHONE = 0x00000001
DSSPEAKER_MONO = 0x00000002
DSSPEAKER_QUAD = 0x00000003
DSSPEAKER_STEREO = 0x00000004
DSSPEAKER_SURROUND = 0x00000005
DSSPEAKER_5POINT1 = 0x00000006
DSSPEAKER_7POINT1 = 0x00000007
DSSPEAKER_GEOMETRY_MIN = 0x00000005 # 5 degrees
DSSPEAKER_GEOMETRY_NARROW = 0x0000000A # 10 degrees
DSSPEAKER_GEOMETRY_WIDE = 0x00000014 # 20 degrees
DSSPEAKER_GEOMETRY_MAX = 0x000000B4 # 180 degrees
# DSBUFFERDESC.dwFlags buffer creation/capability bits.
DSBCAPS_PRIMARYBUFFER = 0x00000001
DSBCAPS_STATIC = 0x00000002
DSBCAPS_LOCHARDWARE = 0x00000004
DSBCAPS_LOCSOFTWARE = 0x00000008
DSBCAPS_CTRL3D = 0x00000010
DSBCAPS_CTRLFREQUENCY = 0x00000020
DSBCAPS_CTRLPAN = 0x00000040
DSBCAPS_CTRLVOLUME = 0x00000080
DSBCAPS_CTRLPOSITIONNOTIFY = 0x00000100
DSBCAPS_CTRLFX = 0x00000200
DSBCAPS_STICKYFOCUS = 0x00004000
DSBCAPS_GLOBALFOCUS = 0x00008000
DSBCAPS_GETCURRENTPOSITION2 = 0x00010000
DSBCAPS_MUTE3DATMAXDISTANCE = 0x00020000
DSBCAPS_LOCDEFER = 0x00040000
# IDirectSoundBuffer.Play flags.
DSBPLAY_LOOPING = 0x00000001
DSBPLAY_LOCHARDWARE = 0x00000002
DSBPLAY_LOCSOFTWARE = 0x00000004
DSBPLAY_TERMINATEBY_TIME = 0x00000008
DSBPLAY_TERMINATEBY_DISTANCE = 0x000000010
DSBPLAY_TERMINATEBY_PRIORITY = 0x000000020
# IDirectSoundBuffer.GetStatus bits.
DSBSTATUS_PLAYING = 0x00000001
DSBSTATUS_BUFFERLOST = 0x00000002
DSBSTATUS_LOOPING = 0x00000004
DSBSTATUS_LOCHARDWARE = 0x00000008
DSBSTATUS_LOCSOFTWARE = 0x00000010
DSBSTATUS_TERMINATED = 0x00000020
# IDirectSoundBuffer.Lock flags.
DSBLOCK_FROMWRITECURSOR = 0x00000001
DSBLOCK_ENTIREBUFFER = 0x00000002
# Frequency / pan / volume / size limits.
DSBFREQUENCY_MIN = 100
DSBFREQUENCY_MAX = 100000
DSBFREQUENCY_ORIGINAL = 0
DSBPAN_LEFT = -10000
DSBPAN_CENTER = 0
DSBPAN_RIGHT = 10000
DSBVOLUME_MIN = -10000
DSBVOLUME_MAX = 0
DSBSIZE_MIN = 4
DSBSIZE_MAX = 0x0FFFFFFF
DSBSIZE_FX_MIN = 150 # NOTE: Milliseconds, not bytes
# 3D processing modes and apply-timing flags.
DS3DMODE_NORMAL = 0x00000000
DS3DMODE_HEADRELATIVE = 0x00000001
DS3DMODE_DISABLE = 0x00000002
DS3D_IMMEDIATE = 0x00000000
DS3D_DEFERRED = 0x00000001
# 3D parameter limits and defaults.
DS3D_MINDISTANCEFACTOR = -1000000.0 # XXX FLT_MIN
DS3D_MAXDISTANCEFACTOR = 1000000.0 # XXX FLT_MAX
DS3D_DEFAULTDISTANCEFACTOR = 1.0
DS3D_MINROLLOFFFACTOR = 0.0
DS3D_MAXROLLOFFFACTOR = 10.0
DS3D_DEFAULTROLLOFFFACTOR = 1.0
DS3D_MINDOPPLERFACTOR = 0.0
DS3D_MAXDOPPLERFACTOR = 10.0
DS3D_DEFAULTDOPPLERFACTOR = 1.0
DS3D_DEFAULTMINDISTANCE = 1.0
DS3D_DEFAULTMAXDISTANCE = 1000000000.0
DS3D_MINCONEANGLE = 0
DS3D_MAXCONEANGLE = 360
DS3D_DEFAULTCONEANGLE = 360
DS3D_DEFAULTCONEOUTSIDEVOLUME = DSBVOLUME_MAX
|
mit
|
drayanaindra/shoop
|
shoop_tests/utils/__init__.py
|
6
|
3979
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import logging
import sys
import string
import uuid
import types
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.core.urlresolvers import set_urlconf, clear_url_caches, get_urlconf
from django.test import override_settings, Client
from django.utils.crypto import get_random_string
from django.utils.module_loading import import_string
from django.utils.timezone import now
def printable_gibberish(length=10):
    # Random lowercase-ASCII string of *length* characters (test-data helper).
    return get_random_string(length, allowed_chars=string.ascii_lowercase)
class SmartClient(Client):
    """Django test client with BeautifulSoup convenience wrappers."""
    def soup(self, path, data=None, method="get"):
        # Issue the request and parse the body; asserts a 2xx status first.
        response = getattr(self, method)(path=path, data=data)
        assert 200 <= response.status_code <= 299, "Valid status"
        return BeautifulSoup(response.content)
    def response_and_soup(self, path, data=None, method="get"):
        # Like soup(), but also return the raw response; no status check.
        response = getattr(self, method)(path=path, data=data)
        return (response, BeautifulSoup(response.content))
def empty_iterable(obj):
    """Return True when iterating *obj* yields no items at all.

    Consumes at most one item from the iterable.
    """
    _sentinel = object()
    return next(iter(obj), _sentinel) is _sentinel
def prepare_logger_for_stdout(logger, level=logging.DEBUG):
    """Reconfigure *logger* to emit to stdout through a single handler.

    Any previously attached handlers are discarded.  The level is applied
    only when *level* is not None.
    """
    stdout_handler = logging.StreamHandler(stream=sys.stdout)
    stdout_handler.setFormatter(logging.Formatter(fmt=logging.BASIC_FORMAT))
    logger.handlers = [stdout_handler]
    if level is not None:
        logger.setLevel(level)
@contextlib.contextmanager
def replace_urls(patterns):
    """
    Context manager to replace the root URLconf with a list of URLpatterns in-memory.
    This is admittedly somewhat black-magicky.
    :param patterns: List of URLpatterns
    :type patterns: list[RegexURLResolver]
    """
    old_urlconf = get_urlconf(default=settings.ROOT_URLCONF)
    # Fabricate a uniquely named in-memory module so concurrent/nested
    # uses do not collide in sys.modules.
    urlconf_module_name = "replace_urls_%s" % uuid.uuid4()
    module = types.ModuleType(urlconf_module_name)
    module.urlpatterns = patterns
    sys.modules[urlconf_module_name] = module
    set_urlconf(urlconf_module_name)
    clear_url_caches()
    with override_settings(ROOT_URLCONF=urlconf_module_name):
        yield
    # Restore the previous URLconf and drop the temporary module.
    # NOTE(review): cleanup is skipped if the body raises - confirm
    # whether a try/finally is wanted here.
    set_urlconf(old_urlconf)
    clear_url_caches()
    sys.modules.pop(urlconf_module_name)
def error_code_test(errors, expect_flag, code):
    """Verify the presence (or absence) of an error *code* in *errors*.

    Returns True when the expectation holds; raises ValueError otherwise.

    :param errors: Iterable of objects with a ``code`` attribute.
    :param expect_flag: True to require the code, False to forbid it.
    :param code: The error code to look for.
    """
    errors = list(errors)
    found = any(error.code == code for error in errors)
    if expect_flag:
        if not found:
            raise ValueError("Code %r not found in %r, did expect it" % (code, errors))
        return True
    if found:
        raise ValueError("Code %r found in %r, did not expect it" % (code, errors))
    return True
def error_exists(errors, code):
    # Convenience wrapper: require that *code* IS present in *errors*.
    return error_code_test(errors, True, code)
def error_does_not_exist(errors, code):
    # Convenience wrapper: require that *code* is NOT present in *errors*.
    return error_code_test(errors, False, code)
def apply_request_middleware(request, **attrs):
    """
    Apply all the `process_request` capable middleware configured
    into the given request.
    :param request: The request to massage.
    :type request: django.http.HttpRequest
    :param attrs: Additional attributes to set after massage.
    :type attrs: dict
    :return: The same request, massaged in-place.
    :rtype: django.http.HttpRequest
    """
    for middleware_path in settings.MIDDLEWARE_CLASSES:
        mw_class = import_string(middleware_path)
        try:
            mw_instance = mw_class()
        except MiddlewareNotUsed:
            # Middleware opted out for this configuration; skip it.
            continue
        if hasattr(mw_instance, 'process_request'):
            mw_instance.process_request(request)
    # Apply caller-supplied attributes after all middleware have run.
    for key, value in attrs.items():
        setattr(request, key, value)
    return request
def very_recently(datetime, how_recently=1):
    # True when *datetime* is within *how_recently* seconds of now().
    # NOTE(review): the parameter shadows the stdlib ``datetime`` name.
    return (abs(datetime - now()).total_seconds() < how_recently)
|
agpl-3.0
|
DBrianKimmel/PyHouse
|
Project/src/Modules/House/rooms.py
|
1
|
12194
|
"""
@name: Modules/House/rooms.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2013-2020 by D. Brian Kimmel
@license: MIT License
@note: Created on Apr 10, 2013
@summary: Handle the rooms information for a house.
"""
__updated__ = '2020-02-17'
__version_info__ = (19, 10, 5)
__version__ = '.'.join(map(str, __version_info__))
# Import system type stuff
import datetime
# Import PyMh files
from Modules.Core.Config import config_tools
from Modules.Core.Config.config_tools import Api as configApi
from Modules.Core.Utilities import extract_tools
from Modules.Core.Utilities.coordinate_tools import Coords
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Rooms ')
CONFIG_NAME = 'rooms'
class RoomInformation:
    """ A room of the house.
    Used to draw pictures of the house
    Used to define the location of switches, lights etc.
    ==> PyHouse.House.Rooms.xxx as in the def below
    """
    def __init__(self):
        self.Name = None
        self.Comment = None
        self.Corner = None  # Coords
        self.Floor = None  # Outside | Basement | 1st | 2nd | 3rd | 4th | Attic | Roof
        self.RoomType = None
        self.Size = None  # Coords
        self.Trigger = None
class MqttActions:
    """ Decode incoming house/room MQTT messages and publish room updates.
    """

    def __init__(self, p_pyhouse_obj):
        # BUG FIX: the original read ``self.m_pyhouse_obj - p_pyhouse_obj``
        # (a subtraction, raising AttributeError on first use); it must
        # be an assignment.
        self.m_pyhouse_obj = p_pyhouse_obj

    def decode(self, p_msg):
        """ Append a description of a house/room/... message to its log line.
        """
        l_topic = p_msg.UnprocessedTopic
        p_msg.UnprocessedTopic = p_msg.UnprocessedTopic[1:]
        p_msg.LogMessage += '\tRooms:\n'
        # BUG FIX: the original tested 'update' in two separate branches;
        # the duplicate was unreachable and has been folded in here.
        if l_topic[0] in ('update', 'delete', 'request'):
            p_msg.LogMessage += '\tName: {}\n'.format(extract_tools.get_mqtt_field(p_msg.Payload, 'Name'))
        else:
            p_msg.LogMessage += '\tUnknown sub-topic {}'.format(PrettyFormatAny.form(p_msg.Payload, 'Rooms msg', 160))

    def send_message(self, p_pyhouse_obj, p_topic, p_room_obj):
        """ Messages are:
            room/add - to add a new room to the database.
            room/delete - to delete a room from all nodes
            room/sync - to keep all nodes in sync periodically.
            room/update - to add or modify a room
        """
        l_topic = 'house/room/' + p_topic
        p_pyhouse_obj.Core._MqttApi.MqttPublish(l_topic, p_room_obj)

    def send_update(self):
        """ Update rooms to keep the house in sync on all nodes.
        """
class Maint:
    """ Maintain the room internal database.
    """

    @staticmethod
    def _json_2_obj(p_json):
        """ Build a RoomInformation instance from browser-supplied JSON. """
        # BUG FIX: the original assigned the class itself
        # (``l_obj = RoomInformation``), so every call mutated shared
        # class attributes; instantiate a fresh object instead.
        l_obj = RoomInformation()
        l_obj.Name = p_json['Name']
        # l_obj.Active = p_json['Active']
        l_obj.Key = 0
        l_obj.UUID = p_json['UUID']
        l_obj.Comment = p_json['Comment']
        l_obj.Corner = Coords._get_coords(p_json['Corner'])
        l_obj.Floor = p_json['Floor']
        l_obj.Size = Coords._get_coords(p_json['Size'])
        l_obj.RoomType = p_json['RoomType']
        l_obj._AddFlag = p_json['Add']
        l_obj._DeleteFlag = p_json['Delete']
        return l_obj

    def from_web(self, p_pyhouse_obj, p_json):
        """ The web browser has sent back an add/change/delete request.
        """
        # LOG.info('Room debug {}'.format(p_json))
        l_obj = Maint._json_2_obj(p_json)
        if l_obj._DeleteFlag:
            l_room = Sync.find_room_uuid(p_pyhouse_obj, l_obj.UUID)
            if l_room is None:
                LOG.error("Trying to delete non existent room {}".format(l_obj.Name))
            else:
                LOG.info('Deleting Room {}'.format(l_obj.Name))
                Maint._delete_room(p_pyhouse_obj, l_obj)
        else:  # Add/Change
            l_rooms = self._add_change_room(p_pyhouse_obj, l_obj)
            p_pyhouse_obj.House.Rooms = l_rooms

    def _add_change_room(self, p_pyhouse_obj, p_room_obj):
        """ Update a room in place, or add it when its UUID is unknown.
        """
        l_rooms = p_pyhouse_obj.House.Rooms
        l_len = len(l_rooms)
        for l_key, l_val in l_rooms.items():
            if l_val.UUID == p_room_obj.UUID:
                LOG.info('Updating room {}'.format(p_room_obj.Name))
                # BUG FIX: the original stored the OLD entry back
                # (``l_rooms[l_key] = l_val``), a no-op that dropped the
                # incoming changes; store the updated room object.
                l_rooms[l_key] = p_room_obj
                l_rooms[l_key].LastUpdate = datetime.datetime.now()
                # Mqtt().send_message(p_pyhouse_obj, "update", p_room_obj)
                return l_rooms
        LOG.info('Adding room {}'.format(p_room_obj.Name))
        # NOTE(review): `Api` is resolved at call time from elsewhere in
        # this module; this guard is currently a no-op (``pass``).
        if Api(p_pyhouse_obj).find_room_uuid(p_pyhouse_obj, p_room_obj.UUID) is None and p_room_obj._DeleteFlag:
            pass
        p_room_obj.Key = l_len
        p_room_obj.LastUpdate = datetime.datetime.now()
        l_rooms[len(l_rooms)] = p_room_obj
        p_pyhouse_obj.House.Rooms = l_rooms
        # Mqtt().send_message(p_pyhouse_obj, "add", p_room_obj)
        return l_rooms

    @staticmethod
    def _delete_room(p_pyhouse_obj, p_room_obj):
        """ Remove a room from the in-memory database by its Key index. """
        l_room_ix = int(p_room_obj.Key)
        try:
            del p_pyhouse_obj.House.Rooms[l_room_ix]
        except AttributeError:
            LOG.error("web_rooms - Failed to delete - JSON: {}".format(p_room_obj.Name))
        # Mqtt().send_message(p_pyhouse_obj, "delete", p_room_obj)
        return
class Sync:
    """ Used to sync the rooms between all the nodes. """

    @staticmethod
    def find_room_name(p_pyhouse_obj, p_name):
        """ Return the room whose Name equals p_name, or None if absent. """
        l_candidates = (l_room for l_room in p_pyhouse_obj.House.Rooms.values()
                        if l_room.Name == p_name)
        return next(l_candidates, None)

    @staticmethod
    def find_room_uuid(p_pyhouse_obj, p_uuid):
        """ Return the room whose UUID equals p_uuid, or None if absent. """
        l_candidates = (l_room for l_room in p_pyhouse_obj.House.Rooms.values()
                        if l_room.UUID == p_uuid)
        return next(l_candidates, None)
class Utility:
    """ Helper holding references used by the rooms module.

    NOTE(review): only stores the PyHouse object and a config_tools.Yaml
    helper; the list attributes below are never used in the visible code.
    """

    m_config_tools = None  # config_tools.Yaml instance (set in __init__)
    m_module_needed = []
    m_parts_needed = []
    m_pyhouse_obj = None  # the master PyHouse data object
    m_debugging_skip = []

    def __init__(self, p_pyhouse_obj):
        """ Save the PyHouse object and build a Yaml config helper. """
        self.m_pyhouse_obj = p_pyhouse_obj
        self.m_config_tools = config_tools.Yaml(p_pyhouse_obj)
class LocalConfig:
    """ This will handle the rooms.yaml file (loading and saving). """

    m_config = None       # configApi instance used for yaml file I/O
    m_pyhouse_obj = None  # the master PyHouse data object

    def __init__(self, p_pyhouse_obj):
        self.m_pyhouse_obj = p_pyhouse_obj
        self.m_config = configApi(p_pyhouse_obj)

    def _extract_one_room(self, p_config):
        """ Extract the config info for one room.
        Warn if there are extra attributes in the config.
        Warn if there are missing attributes in the config.
        @param p_config: is the config fragment containing one room's information.
        @return: a RoomInformation() obj filled in.
        """
        # (The original '-> dict' annotation was wrong: this returns a
        # RoomInformation instance, not a dict.)
        l_required = ['Name']
        l_obj = RoomInformation()
        for l_key, l_value in p_config.items():
            # Check for extra attributes in the config file.
            try:
                _l_x = getattr(l_obj, l_key)
            except AttributeError:
                LOG.warning('rooms config file contains a bad room item "{}" = {} - Ignored.'.format(l_key, l_value))
                continue
            setattr(l_obj, l_key, l_value)
        # Check for data missing from the config file.
        for l_key in [l_attr for l_attr in dir(l_obj) if not l_attr.startswith('_') and not callable(getattr(l_obj, l_attr))]:
            # 'is None' (identity test) instead of the original '== None'.
            if getattr(l_obj, l_key) is None and l_key in l_required:
                LOG.warning('Location Yaml is missing an entry for "{}"'.format(l_key))
        LOG.info('Extracted room "{}"'.format(l_obj.Name))
        return l_obj

    def _extract_all_rooms(self, p_config):
        """ Copies the data from the yaml config file to the Rooms part of the PyHouse obj.
        @param p_config: the list of per-room fragments under the 'Rooms' key.
        @return: a dict {index: RoomInformation()}
        """
        l_rooms = {}
        for l_ix, l_value in enumerate(p_config):
            l_obj = self._extract_one_room(l_value)
            l_rooms.update({l_ix: l_obj})
        self.m_pyhouse_obj.House.Rooms = l_rooms
        LOG.info('Extracted {} rooms'.format(len(l_rooms)))
        return l_rooms  # For testing.

    def load_yaml_config(self):
        """ Read the Rooms.Yaml file.
        It contains Rooms data for all rooms in the house.
        @return: the Rooms dict (also installed on m_pyhouse_obj.House.Rooms).
        """
        # LOG.info('Loading Config - Version:{}'.format(__version__))
        self.m_pyhouse_obj.House.Rooms = None
        l_yaml = self.m_config.read_config_file(CONFIG_NAME)
        if l_yaml is None:
            LOG.error('{}.yaml is missing.'.format(CONFIG_NAME))
            l_rooms = {}
        else:
            try:
                l_yaml = l_yaml['Rooms']
                l_rooms = self._extract_all_rooms(l_yaml)
            except (KeyError, TypeError):
                # Bug fix: was a bare 'except:', which also hid unrelated
                # errors (including KeyboardInterrupt/SystemExit).
                LOG.warning('The config file does not start with "Rooms:"')
                l_rooms = {}
        self.m_pyhouse_obj.House.Rooms = l_rooms
        return l_rooms

    # ----------

    def _copy_to_yaml(self, p_pyhouse_obj):
        """ Update the yaml information.
        The information in the YamlTree is updated to be the same as the running pyhouse_obj info.
        The running info is a dict and the yaml is a list!

        NOTE(review): incomplete - this walks the Rooms attributes but builds
        nothing and returns None, so save_yaml_config writes no data.  TODO.
        @return: the updated yaml ready information (currently always None).
        """
        _l_node = None
        l_working = p_pyhouse_obj.House.Rooms
        for l_key in [l_attr for l_attr in dir(l_working) if not l_attr.startswith('_') and not callable(getattr(l_working, l_attr))]:
            _l_val = getattr(l_working, l_key)

    def save_yaml_config(self):
        """ Save the rooms config (effectively a no-op; see _copy_to_yaml). """
        LOG.info('Saving Config - Version:{}'.format(__version__))
        l_config = self._copy_to_yaml(self.m_pyhouse_obj)
        # self.m_config.write_config(CONFIG_NAME, l_config, addnew=True)
        return l_config
class Api:
    """ External interface of the rooms component. """

    m_pyhouse_obj = None   # the master PyHouse data object
    m_local_config = None  # LocalConfig helper for rooms.yaml

    def __init__(self, p_pyhouse_obj):
        self.m_pyhouse_obj = p_pyhouse_obj
        self.m_local_config = LocalConfig(p_pyhouse_obj)
        p_pyhouse_obj.House.Rooms = RoomInformation()
        LOG.info("Initialized ")

    def LoadConfig(self):
        """ Load the rooms from the yaml config file into the PyHouse obj. """
        LOG.info('Loading Config - Version:{}'.format(__version__))
        l_rooms = self.m_local_config.load_yaml_config()
        self.m_pyhouse_obj.House.Rooms = l_rooms
        # LOG.info('Loaded {} Rooms'.format(len(self.m_pyhouse_obj.House.Rooms)))

    def Start(self):
        pass

    def SaveConfig(self):
        """ Save the rooms back to the yaml config file. """
        LOG.info('Saving Config - Version:{}'.format(__version__))
        self.m_local_config.save_yaml_config()

    def Stop(self):
        _x = PrettyFormatAny.form('', '')

    def MqttDispatch(self, p_msg):
        """ Handle an incoming rooms/... MQTT message.

        Consumes one topic segment and appends a summary line to
        p_msg.LogMessage for the node's log.
        """
        l_topic = p_msg.UnprocessedTopic
        p_msg.UnprocessedTopic = p_msg.UnprocessedTopic[1:]
        p_msg.LogMessage += '\tRooms:\n'
        if l_topic[0] == 'update':
            p_msg.LogMessage += '\tName: {}\n'.format(extract_tools.get_mqtt_field(p_msg.Payload, 'Name'))
        elif l_topic[0] == 'delete':
            p_msg.LogMessage += '\tName: {}\n'.format(extract_tools.get_mqtt_field(p_msg.Payload, 'Name'))
        elif l_topic[0] == 'add':
            # Bug fix: this branch duplicated 'update', so 'add' messages fell
            # through to the unknown-sub-topic branch.  ('add' matches the
            # commented-out send_message topics above - confirm with senders.)
            p_msg.LogMessage += '\tName: {}\n'.format(extract_tools.get_mqtt_field(p_msg.Payload, 'Name'))
        elif l_topic[0] == 'request':
            p_msg.LogMessage += '\tName: {}\n'.format(extract_tools.get_mqtt_field(p_msg.Payload, 'Name'))
        else:
            p_msg.LogMessage += '\tUnknown sub-topic {}'.format(PrettyFormatAny.form(p_msg.Payload, 'Rooms msg', 160))
# ## END DBK
|
mit
|
onitake/ansible
|
lib/ansible/utils/module_docs_fragments/proxysql.py
|
29
|
1293
|
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    # Shared Ansible documentation fragments for the ProxySQL modules.
    # Each constant is a YAML string that module plugins splice into their
    # own DOCUMENTATION via 'extends_documentation_fragment'.

    # Documentation fragment for ProxySQL connectivity
    CONNECTIVITY = '''
options:
  login_user:
    description:
      - The username used to authenticate to ProxySQL admin interface.
  login_password:
    description:
      - The password used to authenticate to ProxySQL admin interface.
  login_host:
    description:
      - The host used to connect to ProxySQL admin interface.
    default: '127.0.0.1'
  login_port:
    description:
      - The port used to connect to ProxySQL admin interface.
    default: 6032
  config_file:
    description:
      - Specify a config file from which I(login_user) and I(login_password)
        are to be read.
    default: ''
requirements:
  - PyMySQL (Python 2.7 and Python 3.X), or
  - MySQLdb (Python 2.x)
'''

    # Documentation fragment for managing ProxySQL configuration
    MANAGING_CONFIG = '''
options:
  save_to_disk:
    description:
      - Save config to sqlite db on disk to persist the configuration.
    type: bool
    default: 'yes'
  load_to_runtime:
    description:
      - Dynamically load config to runtime memory.
    type: bool
    default: 'yes'
'''
|
gpl-3.0
|
Kriechi/mitmproxy
|
docs/scripts/api-events.py
|
1
|
4042
|
#!/usr/bin/env python3
import contextlib
import inspect
import textwrap
from pathlib import Path
from typing import List, Type
import mitmproxy.addons.next_layer # noqa
from mitmproxy import hooks, log, addonmanager
from mitmproxy.proxy import server_hooks, layer
from mitmproxy.proxy.layers import http, tcp, tls, websocket
# Hook names already emitted; used to detect duplicates and, at the end of
# the script, hooks that were never documented.
known = set()


def category(name: str, desc: str, hooks: List[Type[hooks.Hook]]) -> None:
    """Print one generated `<name>Events` addon class documenting `hooks`.

    For every hook class, emits an empty addon method with the hook's
    signature (taken from its __init__) and docstring.  Output goes to
    stdout; the caller redirects it into the generated file.
    """
    # Per-hook parameter lists, minus 'self'.
    all_params = [
        list(inspect.signature(hook.__init__).parameters.values())[1:]
        for hook in hooks
    ]
    # slightly overengineered, but this was fun to write. ¯\_(ツ)_/¯
    # Collect the modules (and typing names) needed to annotate the
    # generated signatures.
    imports = set()
    types = set()
    for params in all_params:
        for param in params:
            try:
                mod = inspect.getmodule(param.annotation).__name__
                if mod == "typing":
                    # this is ugly, but can be removed once we are on Python 3.9+ only
                    imports.add(inspect.getmodule(param.annotation.__args__[0]).__name__)
                    types.add(param.annotation._name)
                else:
                    imports.add(mod)
            except AttributeError:
                raise ValueError(f"Missing type annotation: {params}")
    imports.discard("builtins")
    if types:
        print(f"from typing import {', '.join(sorted(types))}")
    print("from mitmproxy import ctx")
    for imp in sorted(imports):
        print(f"import {imp}")
    print()
    print(f"class {name}Events:")
    print(f'    """{desc}"""')
    first = True
    for hook, params in zip(hooks, all_params):
        if first:
            first = False
        else:
            print()
        if hook.name in known:
            raise RuntimeError(f"Already documented: {hook}")
        known.add(hook.name)
        doc = inspect.getdoc(hook)
        # Emit the method signature, its docstring, and a ctx.log call that
        # echoes the hook name plus (if any) its arguments.
        print(f"    def {hook.name}({', '.join(str(p) for p in ['self'] + params)}):")
        print(textwrap.indent(f'"""\n{doc}\n"""', "        "))
        if params:
            print(f'        ctx.log(f"{hook.name}: {" ".join("{" + p.name + "=}" for p in params)}")')
        else:
            print(f'        ctx.log("{hook.name}")')
    print("")
# Generate docs/src/generated/events.py: every mitmproxy hook, grouped by
# category, rendered as an empty addon method with signature and docstring.
# Everything printed inside the 'with' block is redirected into the file.
outfile = Path(__file__).parent.parent / "src" / "generated" / "events.py"
with outfile.open("w") as f, contextlib.redirect_stdout(f):
    print("# This file is autogenerated, do not edit manually.")
    category(
        "Lifecycle",
        "",
        [
            addonmanager.LoadHook,
            hooks.RunningHook,
            hooks.ConfigureHook,
            hooks.DoneHook,
        ]
    )
    category(
        "Connection",
        "",
        [
            server_hooks.ClientConnectedHook,
            server_hooks.ClientDisconnectedHook,
            server_hooks.ServerConnectHook,
            server_hooks.ServerConnectedHook,
            server_hooks.ServerDisconnectedHook,
        ]
    )
    category(
        "HTTP",
        "",
        [
            http.HttpRequestHeadersHook,
            http.HttpRequestHook,
            http.HttpResponseHeadersHook,
            http.HttpResponseHook,
            http.HttpErrorHook,
            http.HttpConnectHook,
        ]
    )
    category(
        "TCP",
        "",
        [
            tcp.TcpStartHook,
            tcp.TcpMessageHook,
            tcp.TcpEndHook,
            tcp.TcpErrorHook,
        ]
    )
    category(
        "TLS",
        "",
        [
            tls.TlsClienthelloHook,
            tls.TlsStartHook,
        ]
    )
    category(
        "WebSocket",
        "",
        [
            websocket.WebsocketStartHook,
            websocket.WebsocketMessageHook,
            websocket.WebsocketEndHook,
            websocket.WebsocketErrorHook,
        ]
    )
    category(
        "AdvancedLifecycle",
        "",
        [
            layer.NextLayerHook,
            hooks.UpdateHook,
            log.AddLogHook,
        ]
    )

# Safety net: fail loudly if any registered hook was not documented above.
not_documented = set(hooks.all_hooks.keys()) - known
if not_documented:
    raise RuntimeError(f"Not documented: {not_documented}")
|
mit
|
bottompawn/kbengine
|
kbe/res/scripts/common/Lib/site-packages/pip/_vendor/distlib/markers.py
|
1261
|
6282
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""
import ast
import os
import sys
import platform
from .compat import python_implementation, string_types
from .util import in_venv
__all__ = ['interpret']
class Evaluator(object):
    """
    A limited evaluator for Python expressions.

    Supports only the subset needed by PEP 345 environment markers: string
    literals, the names in ``allowed_values`` (or a caller-supplied context),
    boolean operators and comparisons.  Anything else raises SyntaxError.
    """

    # Maps lowercased AST comparison-operator class names to implementations.
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }

    # Names a marker expression may reference, with their values on this host.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }

    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None

    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s

    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        return getattr(self, 'do_%s' % node_type, None)

    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)

    def get_attr_key(self, node):
        """Return the dotted name (e.g. 'os.name') for an Attribute node."""
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)

    def do_attribute(self, node):
        """Evaluate a dotted name by looking it up in context/allowed_values."""
        if not isinstance(node.value, ast.Name):
            # Bug fix: the original set valid=False here but then used the
            # unbound local 'key' in the error message, raising NameError
            # (UnboundLocalError) instead of the intended SyntaxError.
            raise SyntaxError('invalid expression: %s' %
                              self.get_fragment(node.col_offset))
        key = self.get_attr_key(node)
        if key not in self.context and key not in self.allowed_values:
            raise SyntaxError('invalid expression: %s' % key)
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result

    def do_boolop(self, node):
        """Evaluate 'and'/'or' with short-circuiting, like Python itself."""
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result

    def do_compare(self, node):
        """Evaluate a (possibly chained) comparison such as a < b < c."""
        def sanity_check(lhsnode, rhsnode):
            # Comparing two literals is meaningless in a marker; reject it.
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            #      and isinstance(rhsnode, ast.Attribute)):
            #    klhs = self.get_attr_key(lhsnode)
            #    krhs = self.get_attr_key(rhsnode)
            #    valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)

        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                break
            lhs = rhs
            lhsnode = rhsnode
        return result

    def do_expression(self, node):
        return self.evaluate(node.body)

    def do_name(self, node):
        """Resolve a bare name from the context or allowed_values."""
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result

    def do_str(self, node):
        return node.s
def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    evaluator = Evaluator(execution_context)
    return evaluator.evaluate(marker.strip())
|
lgpl-3.0
|
medspx/QGIS
|
python/plugins/processing/algs/qgis/TextToFloat.py
|
3
|
3143
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
TextToFloat.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsField,
QgsProcessing,
QgsProcessingParameterField)
from processing.algs.qgis.QgisAlgorithm import QgisFeatureBasedAlgorithm
class TextToFloat(QgisFeatureBasedAlgorithm):
    """Processing algorithm: convert a text attribute to a float field.

    The selected string field is replaced by a Double field of the same
    name; values containing '%' are interpreted as percentages.
    """

    FIELD = 'FIELD'

    def group(self):
        return self.tr('Vector table')

    def groupId(self):
        return 'vectortable'

    def __init__(self):
        super().__init__()
        self.field_name = None   # name of the field selected by the user
        self.field_idx = -1      # its index in the layer, set in outputFields

    def initParameters(self, config=None):
        self.addParameter(QgsProcessingParameterField(self.FIELD,
                                                      self.tr('Text attribute to convert to float'),
                                                      parentLayerParameterName='INPUT',
                                                      type=QgsProcessingParameterField.String
                                                      ))

    def name(self):
        return 'texttofloat'

    def displayName(self):
        return self.tr('Text to float')

    def outputName(self):
        return self.tr('Float from text')

    def inputLayerTypes(self):
        return [QgsProcessing.TypeVector]

    def outputFields(self, inputFields):
        """Replace the chosen field with a Double field (24, 15) in place."""
        self.field_idx = inputFields.lookupField(self.field_name)
        if self.field_idx >= 0:
            inputFields[self.field_idx] = QgsField(self.field_name, QVariant.Double, '', 24, 15)
        return inputFields

    def prepareAlgorithm(self, parameters, context, feedback):
        self.field_name = self.parameterAsString(parameters, self.FIELD, context)
        return True

    def processFeature(self, feature, context, feedback):
        """Convert the configured attribute of one feature to float.

        Values containing '%' are treated as percentages (divided by 100);
        unparsable or NULL values become None (NULL in the output layer).
        """
        value = feature[self.field_idx]
        try:
            if '%' in value:
                feature[self.field_idx] = float(value.replace('%', '')) / 100.0
            else:
                feature[self.field_idx] = float(value)
        except (ValueError, TypeError):
            # Bug fix: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit.  ValueError: non-numeric text;
            # TypeError: NULL / non-string attribute value.
            feature[self.field_idx] = None
        return feature
|
gpl-2.0
|
lukeiwanski/tensorflow
|
tensorflow/python/kernel_tests/variable_scope_test.py
|
4
|
64569
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for variable store."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import threading
import numpy
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class VariableScopeTest(test.TestCase):
def tearDown(self):
gc.collect()
# This will only contain uncollectable garbage, i.e. reference cycles
# involving objects with __del__ defined.
self.assertEqual(0, len(gc.garbage))
def testGetVar(self):
vs = variable_scope._get_default_variable_store()
v = vs.get_variable("v", [1])
v1 = vs.get_variable("v", [1])
self.assertEqual(v, v1)
@test_util.run_in_graph_and_eager_modes
def testResource(self):
vs = variable_scope._get_default_variable_store()
v1 = vs.get_variable("v", [1], use_resource=True)
self.assertTrue(isinstance(v1, resource_variable_ops.ResourceVariable))
def testNameExists(self):
vs = variable_scope._get_default_variable_store()
# No check by default, so we can both create and get existing names.
v = vs.get_variable("v", [1])
v1 = vs.get_variable("v", [1])
self.assertEqual(v, v1)
# When reuse is False, we fail when variables are already there.
vs.get_variable("w", [1], reuse=False) # That's ok.
with self.assertRaises(ValueError):
vs.get_variable("v", [1], reuse=False) # That fails.
# When reuse is True, we fail when variables are new.
vs.get_variable("v", [1], reuse=True) # That's ok.
with self.assertRaises(ValueError):
vs.get_variable("u", [1], reuse=True) # That fails.
def testNamelessStore(self):
vs = variable_scope._get_default_variable_store()
vs.get_variable("v1", [2])
vs.get_variable("v2", [2])
expected_names = ["%s:0" % name for name in ["v1", "v2"]]
self.assertEqual(
set(expected_names), set([v.name for v in vs._vars.values()]))
@test_util.run_in_graph_and_eager_modes
def testVarScopeInitializer(self):
init = init_ops.constant_initializer(0.3)
with variable_scope.variable_scope("tower0") as tower:
with variable_scope.variable_scope("foo", initializer=init):
v = variable_scope.get_variable("v", [])
self.evaluate(variables_lib.variables_initializer([v]))
self.assertAllClose(self.evaluate(v.value()), 0.3)
with variable_scope.variable_scope(tower, initializer=init):
w = variable_scope.get_variable("w", [])
self.evaluate(variables_lib.variables_initializer([w]))
self.assertAllClose(self.evaluate(w.value()), 0.3)
@test_util.run_in_graph_and_eager_modes
def testVarScopeConstraint(self):
constraint = lambda x: 0. * x
with variable_scope.variable_scope("tower1") as tower:
with variable_scope.variable_scope("foo", constraint=constraint):
v = variable_scope.get_variable("v", [])
self.assertEqual(v.constraint, constraint)
with variable_scope.variable_scope(tower, constraint=constraint):
w = variable_scope.get_variable("w", [])
self.assertEqual(w.constraint, constraint)
def testStringDefaultInitializer(self):
with self.test_session():
v = variable_scope.get_variable("string", shape=[], dtype=dtypes.string)
variables_lib.global_variables_initializer().run()
self.assertAllEqual(compat.as_bytes(v.eval()), b"")
@test_util.run_in_graph_and_eager_modes
def testVarScopeDType(self):
with variable_scope.variable_scope("tower2") as tower:
with variable_scope.variable_scope("foo", dtype=dtypes.float16):
v = variable_scope.get_variable("v", [])
self.assertEqual(v.dtype.base_dtype, dtypes.float16)
with variable_scope.variable_scope(tower, dtype=dtypes.float16):
w = variable_scope.get_variable("w", [])
self.assertEqual(w.dtype.base_dtype, dtypes.float16)
def testGetVariableInGraphNestedUnderEagerContext(self):
with context.eager_mode():
@function.defun
def f():
v = variable_scope.get_variable("should_be_resource", [])
self.assertEqual(type(v), resource_variable_ops.ResourceVariable)
f()
def testEagerVariableStore(self):
with context.eager_mode():
store = variable_scope.EagerVariableStore()
with store.as_default():
v = variable_scope.get_variable("v", shape=(), trainable=True)
w = variable_scope.get_variable("w", shape=(), trainable=False)
self.assertTrue(v in store.variables())
self.assertTrue(w in store.variables())
self.assertTrue(v in store.trainable_variables())
self.assertFalse(w in store.trainable_variables())
self.assertFalse(v in store.non_trainable_variables())
self.assertTrue(w in store.non_trainable_variables())
# Test copying.
new_store = store.copy()
with new_store.as_default():
new_v = variable_scope.get_variable("v")
new_w = variable_scope.get_variable("w")
self.assertEqual(new_v.numpy(), v.numpy())
self.assertEqual(new_w.numpy(), w.numpy())
self.assertTrue(new_v in new_store.variables())
self.assertTrue(new_w in new_store.variables())
self.assertTrue(new_v in new_store.trainable_variables())
self.assertFalse(new_w in new_store.trainable_variables())
self.assertFalse(new_v in new_store.non_trainable_variables())
self.assertTrue(new_w in new_store.non_trainable_variables())
# Check that variables are separate instances.
for v in store.variables():
v.assign(-1)
for v in new_store.variables():
v.assign(1)
for v in store.variables():
self.assertEqual(v.numpy(), -1)
for v in new_store.variables():
self.assertEqual(v.numpy(), 1)
def testEagerVariableStoreWithEagerDefun(self):
with context.eager_mode():
@function.defun
def f():
x = constant_op.constant([[2.0]])
d1 = core_layers.Dense(
1, name="my_dense", kernel_initializer=init_ops.ones_initializer())
_ = d1(x) # create variables
self.assertEqual(len(d1.variables), 2)
v1, v2 = d1.variables
d2 = core_layers.Dense(
1,
name="my_dense",
kernel_initializer=init_ops.ones_initializer(),
_reuse=True)
_ = d2(x)
self.assertEqual(len(d2.variables), 2)
v3, v4 = d2.variables
self.assertAllEqual([v1, v2], [v3, v4])
f()
@test_util.run_in_graph_and_eager_modes
def testEagerVariablesStoreAddsToCollections(self):
store = variable_scope.EagerVariableStore()
with store.as_default():
trainable = variable_scope.get_variable("v1", [], trainable=True)
not_trainable = variable_scope.get_variable("v2", [], trainable=False)
concat = variable_scope.get_variable(
"v3", [], collections=[ops.GraphKeys.CONCATENATED_VARIABLES])
self.assertEqual(
ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES),
[trainable, not_trainable])
self.assertEqual(
ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES),
[trainable, concat])
self.assertEqual(
ops.get_collection(ops.GraphKeys.CONCATENATED_VARIABLES), [concat])
@test_util.run_in_graph_and_eager_modes
def testEagerVariablesOutsideStoreNotAddedToCollections(self):
if not context.executing_eagerly():
return
variable_scope.get_variable("v1", [], trainable=True)
variable_scope.get_variable("v2", [], trainable=False)
self.assertFalse(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
self.assertFalse(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
@test_util.run_in_graph_and_eager_modes
def testInitFromNonTensorValue(self):
v = variable_scope.get_variable("v4", initializer=4, dtype=dtypes.int32)
self.evaluate(variables_lib.variables_initializer([v]))
self.assertAllClose(self.evaluate(v.value()), 4)
w = variable_scope.get_variable(
"w4", initializer=numpy.array([1, 2, 3]), dtype=dtypes.int64)
self.evaluate(variables_lib.variables_initializer([w]))
self.assertAllClose(self.evaluate(w.value()), [1, 2, 3])
# A quirk to be revisited?
error = ValueError if context.executing_eagerly() else TypeError
with self.assertRaises(error):
variable_scope.get_variable("x4", initializer={})
@test_util.run_in_graph_and_eager_modes
def testInitFromNonInitializer(self):
# Test various dtypes with zeros initializer as following:
types = [
dtypes.int8, dtypes.uint8, dtypes.int16, dtypes.uint16, dtypes.int32,
dtypes.int64, dtypes.bool
]
# Use different variable_name to distinguish various dtypes
for (i, dtype) in enumerate(types):
x = variable_scope.get_variable(
name="xx%d" % i, shape=(3, 4), dtype=dtype)
y = variable_scope.get_variable(
name="yy%d" % i,
shape=(3, 4),
dtype=dtype,
initializer=init_ops.zeros_initializer(dtype=dtype))
self.evaluate(variables_lib.global_variables_initializer())
self.assertAllEqual(self.evaluate(x.value()), self.evaluate(y.value()))
# TODO(alive): support variable partitioning/caching in eager mode.
def testVarScopeCachingDevice(self):
with self.test_session():
caching_device = "/job:moo"
with variable_scope.variable_scope("tower"):
with variable_scope.variable_scope(
"caching", caching_device=caching_device):
v = variable_scope.get_variable("v", [])
self.assertTrue(v.value().device.startswith(caching_device))
with variable_scope.variable_scope("child"):
v2 = variable_scope.get_variable("v", [])
self.assertTrue(v2.value().device.startswith(caching_device))
with variable_scope.variable_scope("not_cached", caching_device=""):
v2_not_cached = variable_scope.get_variable("v", [])
self.assertFalse(
v2_not_cached.value().device.startswith(caching_device))
with variable_scope.variable_scope(
"not_cached_identity_device",
caching_device=lambda op: op.device):
v2_identity_device = variable_scope.get_variable("v", [])
self.assertFalse(
v2_identity_device.value().device.startswith(caching_device))
with variable_scope.variable_scope("we_will_do_it_live") as vs_live:
vs_live.set_caching_device("/job:live")
v_live = variable_scope.get_variable("v", [])
self.assertTrue(v_live.value().device.startswith("/job:live"))
v_tower = variable_scope.get_variable("v", [])
self.assertFalse(v_tower.value().device.startswith(caching_device))
@test_util.run_in_graph_and_eager_modes
def testVarScopeRegularizer(self):
init = init_ops.constant_initializer(0.3)
def regularizer1(v):
return math_ops.reduce_mean(v) + 0.1
def regularizer2(v):
return math_ops.reduce_mean(v) + 0.2
with variable_scope.variable_scope(
"tower3", regularizer=regularizer1) as tower:
with variable_scope.variable_scope("foo", initializer=init):
v = variable_scope.get_variable("v", [])
self.evaluate(variables_lib.variables_initializer([v]))
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(1, len(losses))
self.assertAllClose(self.evaluate(losses[0]), 0.4)
with variable_scope.variable_scope(tower, initializer=init) as vs:
u = variable_scope.get_variable("u", [])
vs.set_regularizer(regularizer2)
w = variable_scope.get_variable("w", [])
# Next 3 variable not regularized to test disabling regularization.
x = variable_scope.get_variable(
"x", [], regularizer=variable_scope.no_regularizer)
with variable_scope.variable_scope(
"baz", regularizer=variable_scope.no_regularizer):
y = variable_scope.get_variable("y", [])
vs.set_regularizer(variable_scope.no_regularizer)
z = variable_scope.get_variable("z", [])
# Check results.
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(3, len(losses))
self.evaluate(variables_lib.variables_initializer([u, w, x, y, z]))
self.assertAllClose(self.evaluate(losses[0]), 0.4)
self.assertAllClose(self.evaluate(losses[1]), 0.4)
self.assertAllClose(self.evaluate(losses[2]), 0.5)
with variable_scope.variable_scope("foo", reuse=True):
# reuse=True is for now only supported when eager execution is disabled.
if not context.executing_eagerly():
v = variable_scope.get_variable("v",
[]) # "v" is alredy there, reused
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(3, len(losses)) # No new loss added.
@test_util.run_in_graph_and_eager_modes
def testInitializeFromValue(self):
init = constant_op.constant(0.1)
w = variable_scope.get_variable("v", initializer=init)
self.evaluate(variables_lib.variables_initializer([w]))
self.assertAllClose(self.evaluate(w.value()), 0.1)
with self.assertRaisesRegexp(ValueError, "shape"):
# We disallow explicit shape specification when initializer is constant.
variable_scope.get_variable("u", [1], initializer=init)
with variable_scope.variable_scope("foo", initializer=init):
# Constant initializer can be passed through scopes if needed.
v = variable_scope.get_variable("v")
self.evaluate(variables_lib.variables_initializer([v]))
self.assertAllClose(self.evaluate(v.value()), 0.1)
# Check that non-float32 initializer creates a non-float32 variable.
init = constant_op.constant(1, dtype=dtypes.int32)
t = variable_scope.get_variable("t", initializer=init)
self.assertEqual(t.dtype.base_dtype, dtypes.int32)
# Raise error if `initializer` dtype and `dtype` are not identical.
with self.assertRaisesRegexp(ValueError, "don't match"):
variable_scope.get_variable("s", initializer=init, dtype=dtypes.float64)
def testControlDeps(self):
with self.test_session() as sess:
v0 = variable_scope.get_variable(
"v0", [1], initializer=init_ops.constant_initializer(0))
with ops.control_dependencies([v0.value()]):
v1 = variable_scope.get_variable(
"v1", [1], initializer=init_ops.constant_initializer(1))
add = v1 + v0
# v0 should be uninitialized.
with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
sess.run(v0)
# We should be able to initialize and run v1 without initializing
# v0, even if the variable was created with a control dep on v0.
sess.run(v1.initializer)
self.assertEqual(1, sess.run(v1))
# v0 should still be uninitialized.
with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
sess.run(v0)
with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
sess.run(add)
# If we initialize v0 we should be able to run 'add'.
sess.run(v0.initializer)
sess.run(add)
  def testControlFlow(self):
    """Variables created inside cond branches behave like ordinary variables.

    Both branch functions are traced when the cond is built, so both v1 and
    v2 exist in var_dict afterwards and can be initialized independently.
    """
    with self.test_session() as sess:
      v0 = variable_scope.get_variable(
          "v0", [], initializer=init_ops.constant_initializer(0))
      var_dict = {}
      # Call get_variable in each of the cond clauses.
      def var_in_then_clause():
        v1 = variable_scope.get_variable(
            "v1", [1], initializer=init_ops.constant_initializer(1))
        var_dict["v1"] = v1
        return v1 + v0
      def var_in_else_clause():
        v2 = variable_scope.get_variable(
            "v2", [1], initializer=init_ops.constant_initializer(2))
        var_dict["v2"] = v2
        return v2 + v0
      add = control_flow_ops.cond(
          math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause)
      v1 = var_dict["v1"]
      v2 = var_dict["v2"]
      # We should be able to initialize and run v1 and v2 without initializing
      # v0, even if the variable was created with a control dep on v0.
      sess.run(v1.initializer)
      self.assertEqual([1], sess.run(v1))
      sess.run(v2.initializer)
      self.assertEqual([2], sess.run(v2))
      # v0 should still be uninitialized.
      with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
        sess.run(v0)
      # We should not be able to run 'add' yet.
      with self.assertRaisesRegexp(errors.OpError, "uninitialized"):
        sess.run(add)
      # If we initialize v0 we should be able to run 'add'.
      sess.run(v0.initializer)
      sess.run(add)
  @test_util.run_in_graph_and_eager_modes
  def testGetVariableScope(self):
    """get_variable_scope() exposes the active scope and is mutable in place."""
    # Test the get_variable_scope() function and setting properties of result.
    init = init_ops.constant_initializer(0.3)
    with variable_scope.variable_scope("bar"):
      new_init1 = variable_scope.get_variable_scope().initializer
      self.assertEqual(new_init1, None)
      # Check that we can set initializer like this.
      variable_scope.get_variable_scope().set_initializer(init)
      v = variable_scope.get_variable("v", [])
      self.evaluate(variables_lib.variables_initializer([v]))
      self.assertAllClose(self.evaluate(v.value()), 0.3)
      if not context.executing_eagerly():
        # Check that we can set reuse.
        variable_scope.get_variable_scope().reuse_variables()
        with self.assertRaises(ValueError):  # Fail, w does not exist yet.
          variable_scope.get_variable("w", [1])
    # Check that the set initializer goes away.
    new_init = variable_scope.get_variable_scope().initializer
    self.assertEqual(new_init, None)
  @test_util.run_in_graph_and_eager_modes
  def testVarScope(self):
    """Variable scopes nest for naming; re-entering a scope keeps its name."""
    with variable_scope.variable_scope("tower4") as tower:
      self.assertEqual(tower.name, "tower4")
      with ops.name_scope("scope") as sc:
        self.assertEqual(sc, "tower4/scope/")
    with variable_scope.variable_scope("tower5"):
      with variable_scope.variable_scope("bar") as bar:
        self.assertEqual(bar.name, "tower5/bar")
        with ops.name_scope("scope") as sc:
          self.assertEqual(sc, "tower5/bar/scope/")
    with variable_scope.variable_scope("tower6"):
      # Re-entering a captured scope keeps its variable name ("tower4"),
      # while the name scope reflects the current nesting ("tower6/tower4").
      with variable_scope.variable_scope(tower, reuse=True) as tower_shared:
        self.assertEqual(tower_shared.name, "tower4")
        with ops.name_scope("scope") as sc:
          self.assertEqual(sc, "tower6/tower4/scope/")
  @test_util.run_in_graph_and_eager_modes
  def testVarScopeNameScope(self):
    """Re-entering a variable scope uniquifies its name scope (_1, _2, ...)."""
    with ops.name_scope("testVarScopeNameScope1"):
      with variable_scope.variable_scope("tower") as tower:
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope1/tower/scope2/")
      if not context.executing_eagerly():
        with variable_scope.variable_scope(
            tower):  # Re-entering acts like another "tower".
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "testVarScopeNameScope1/tower_1/scope2/")
        with variable_scope.variable_scope(
            "tower"):  # Re-entering by string acts the same.
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "testVarScopeNameScope1/tower_2/scope2/")
    with ops.name_scope("testVarScopeNameScope2"):
      with variable_scope.variable_scope("tower"):
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope2/tower/scope2/")
      if not context.executing_eagerly():
        with variable_scope.variable_scope(tower):
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "testVarScopeNameScope2/tower_1/scope2/")
    root_var_scope = variable_scope.get_variable_scope()
    with ops.name_scope("testVarScopeNameScope3"):
      # Entering the root variable scope does not add a name-scope component.
      with variable_scope.variable_scope(root_var_scope):
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "testVarScopeNameScope3/scope2/")
  def testVarScopeOriginalNameScope(self):
    """original_name_scope records the name scope at first scope creation."""
    with self.test_session():
      with ops.name_scope("scope1"):
        with variable_scope.variable_scope("tower") as tower:
          self.assertEqual(tower.original_name_scope, "scope1/tower/")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "scope1/tower/scope2/")
      with ops.name_scope("scope2"):
        with variable_scope.variable_scope(tower) as tower1:
          # Re-entering preserves original name scope.
          self.assertEqual(tower1.original_name_scope, "scope1/tower/")
          with ops.name_scope("foo") as sc2:
            self.assertEqual(sc2, "scope2/tower/foo/")
          # Test re-entering original name scope.
          with ops.name_scope(tower.original_name_scope):
            with ops.name_scope("bar") as sc3:
              self.assertEqual(sc3, "scope1/tower/bar/")
      with ops.name_scope("scope2"):
        with variable_scope.variable_scope(tower):
          with ops.name_scope(tower.original_name_scope):
            # "bar" was already used above, so it is uniquified to "bar_1".
            with ops.name_scope("bar") as sc3:
              self.assertEqual(sc3, "scope1/tower/bar_1/")
  def testVarScopeObjectReuse(self):
    """reuse=True is inherited when re-entering a scope object; False is not."""
    with self.test_session():
      vs = None
      with variable_scope.variable_scope("jump", reuse=True) as scope:
        vs = scope
      with variable_scope.variable_scope(vs) as jump:
        self.assertTrue(jump.reuse)
      with variable_scope.variable_scope(vs, reuse=True) as jump_reuse:
        self.assertTrue(jump_reuse.reuse)
      with variable_scope.variable_scope(vs, reuse=False) as jump_no_reuse:
        self.assertTrue(jump_no_reuse.reuse)  # Inherited, cannot be undone.
      # A scope created with reuse=False can later be opted into reuse.
      with variable_scope.variable_scope("jump", reuse=False) as scope:
        vs = scope
      with variable_scope.variable_scope(vs) as jump:
        self.assertFalse(jump.reuse)
      with variable_scope.variable_scope(vs, reuse=True) as jump_reuse:
        self.assertTrue(jump_reuse.reuse)
      with variable_scope.variable_scope(vs, reuse=False) as jump_no_reuse:
        self.assertFalse(jump_no_reuse.reuse)
  def testVarScopeGetOrCreateReuse(self):
    """AUTO_REUSE creates the variable on first use and reuses it afterwards."""
    with self.test_session():
      def test_value(value):
        # Assign `value` into the (created-or-reused) variable, then read it
        # back through a second AUTO_REUSE scope to confirm it is the same one.
        x = constant_op.constant(value)
        with variable_scope.variable_scope(
            "testVarScopeGetOrCreateReuse_bar",
            reuse=variable_scope.AUTO_REUSE):
          _ = state_ops.assign(variable_scope.get_variable("var", []), x)
        with variable_scope.variable_scope(
            "testVarScopeGetOrCreateReuse_bar",
            reuse=variable_scope.AUTO_REUSE):
          _ = variable_scope.get_variable("var", [])
        self.assertEqual(value, x.eval())
      test_value(42.)  # Variable is created.
      test_value(13.)  # Variable is reused hereafter.
      test_value(17.)
  def testVarOpScope(self):
    """variable_scope(name, default_name) naming, with and without a name."""
    with self.test_session():
      with ops.name_scope("testVarOpScope1"):
        with variable_scope.variable_scope("tower", "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "tower/w:0")
          with ops.name_scope("testVarOpScope2") as sc2:
            self.assertEqual(sc2, "testVarOpScope1/tower/testVarOpScope2/")
        with variable_scope.variable_scope("tower", "default", []):
          # Same explicit name without reuse: creating "w" again must fail.
          with self.assertRaises(ValueError):
            variable_scope.get_variable("w", [])
          with ops.name_scope("testVarOpScope2") as sc2:
            self.assertEqual(sc2, "testVarOpScope1/tower_1/testVarOpScope2/")
      with ops.name_scope("testVarOpScope2"):
        # name=None falls back to default_name, uniquified on repetition.
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "default/w:0")
          with ops.name_scope("testVarOpScope2") as sc2:
            self.assertEqual(sc2, "testVarOpScope2/default/testVarOpScope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "default_1/w:0")
          with ops.name_scope("testVarOpScope2") as sc2:
            self.assertEqual(sc2, "testVarOpScope2/default_1/testVarOpScope2/")
  def testVarOpScopeUniqueNamesInterleavedSubstringScopes(self):
    """Uniquification of default-named scopes is exact-name, not prefix-based.

    "defaultScope" does not collide with "defaultScope1", so the latter's
    counter advances independently (defaultScope1, _1, _2).
    """
    with self.test_session():
      with variable_scope.variable_scope(None, "defaultScope1"):
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name,
              "defaultScope1/layer/w:0")
      with variable_scope.variable_scope(None, "defaultScope1"):
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name,
              "defaultScope1_1/layer/w:0")
      with variable_scope.variable_scope(None, "defaultScope"):
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name,
              "defaultScope/layer/w:0")
      with variable_scope.variable_scope(None, "defaultScope1"):
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name,
              "defaultScope1_2/layer/w:0")
  def testVarOpScopeUniqueNamesWithJump(self):
    """Re-entering the parent scope does not reset default-name numbering."""
    with self.test_session():
      with variable_scope.variable_scope("default") as default:
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "default/layer/w:0")
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "default/layer_1/w:0")
        with variable_scope.variable_scope(default):
          pass
        # No matter the jump in the middle, unique numbering continues.
        with variable_scope.variable_scope(None, "layer"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "default/layer_2/w:0")
  def testVarOpScopeReuse(self):
    """reuse=True on an outer scope propagates to inner default-named scopes."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        with variable_scope.variable_scope("tower", "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/tower/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/tower/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/default/scope2/")
      # Re-entering with reuse=True: same variable names, but the name scope
      # is uniquified to "outer_1".
      with variable_scope.variable_scope(outer, reuse=True) as outer:
        with variable_scope.variable_scope("tower", "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/tower/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/tower/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/default/scope2/")
def testVarScopeGetVar(self):
with self.test_session():
with variable_scope.variable_scope("root"):
with variable_scope.variable_scope("towerA") as tower_a:
va = variable_scope.get_variable("v", [1])
self.assertEqual(va.name, "root/towerA/v:0")
with variable_scope.variable_scope(tower_a, reuse=True):
va2 = variable_scope.get_variable("v", [1])
self.assertEqual(va2, va)
with variable_scope.variable_scope("towerB"):
vb = variable_scope.get_variable("v", [1])
self.assertEqual(vb.name, "root/towerB/v:0")
with self.assertRaises(ValueError):
with variable_scope.variable_scope("towerA"):
va2 = variable_scope.get_variable("v", [1])
with variable_scope.variable_scope("towerA", reuse=True):
va2 = variable_scope.get_variable("v", [1])
self.assertEqual(va2, va)
with variable_scope.variable_scope("foo"):
with variable_scope.variable_scope("bar"):
v = variable_scope.get_variable("v", [1])
self.assertEqual(v.name, "root/foo/bar/v:0")
with variable_scope.variable_scope(tower_a, reuse=True):
va3 = variable_scope.get_variable("v", [1])
self.assertEqual(va, va3)
with self.assertRaises(ValueError):
with variable_scope.variable_scope(tower_a, reuse=True):
with variable_scope.variable_scope("baz"):
variable_scope.get_variable("v", [1])
with self.assertRaises(ValueError) as exc:
with variable_scope.variable_scope(tower_a, reuse=True):
variable_scope.get_variable("v", [2]) # Different shape.
self.assertEqual("shape" in str(exc.exception), True)
with self.assertRaises(ValueError) as exc:
with variable_scope.variable_scope(tower_a, reuse=True):
variable_scope.get_variable("v", [1], dtype=dtypes.int32)
self.assertEqual("dtype" in str(exc.exception), True)
  def testVarScopeOuterScope(self):
    """Re-entering an outer scope object reuses names but uniquifies scopes."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        pass
      with variable_scope.variable_scope(outer):
        self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/scope2/")
        with variable_scope.variable_scope("default"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/default/scope2/")
      with variable_scope.variable_scope(outer, reuse=True):
        self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_2/scope2/")
        with variable_scope.variable_scope("default", reuse=True):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_2/default/scope2/")
  def testVarScopeNestedOuterScope(self):
    """Re-entering a scope from inside itself nests the name scope."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        with variable_scope.variable_scope(outer):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/outer/scope2/")
        with variable_scope.variable_scope("default"):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/default/scope2/")
        with variable_scope.variable_scope(outer, reuse=True):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/outer_1/scope2/")
        with variable_scope.variable_scope("default", reuse=True):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/default_1/scope2/")
  def testVarOpScopeReuseParam(self):
    """reuse can come from the scope call or from reuse_variables()."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        with variable_scope.variable_scope("tower", "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/tower/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/tower/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/default/scope2/")
      with variable_scope.variable_scope(outer) as outer:
        with variable_scope.variable_scope("tower", "default", reuse=True):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/tower/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/tower/scope2/")
        # Switch the (non-reusing) outer scope into reuse mode in place.
        outer.reuse_variables()
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/default/scope2/")
def testVarOpScopeReuseError(self):
with self.test_session():
with self.assertRaises(ValueError):
with variable_scope.variable_scope(None, "default", reuse=True):
self.assertEqual(
variable_scope.get_variable("w", []).name, "outer/tower/w:0")
  def testVarOpScopeOuterScope(self):
    """Passing a scope object plus default_name re-enters the named scope."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        pass
      with variable_scope.variable_scope(outer, "default", []):
        self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/default/scope2/")
      with variable_scope.variable_scope(outer, "default", reuse=True):
        self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_2/scope2/")
        # Flip the captured outer scope to reuse for the nested default scope.
        outer.reuse_variables()
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_2/default/scope2/")
  def testVarOpScopeNestedOuterScope(self):
    """Scope-object re-entry nested inside itself, with and without reuse."""
    with self.test_session():
      with variable_scope.variable_scope("outer") as outer:
        with variable_scope.variable_scope(outer, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/outer/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer/default/scope2/")
      with variable_scope.variable_scope(outer, "default", reuse=True):
        self.assertEqual(variable_scope.get_variable("w", []).name, "outer/w:0")
        with ops.name_scope("scope2") as sc2:
          self.assertEqual(sc2, "outer_1/scope2/")
        with variable_scope.variable_scope(None, "default", []):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          with ops.name_scope("scope2") as sc2:
            self.assertEqual(sc2, "outer_1/default/scope2/")
  def testBasicWhenAuxiliaryNameScopeIsFalse(self):
    """auxiliary_name_scope=False opens the scope without a new name scope."""
    with self.test_session():
      with variable_scope.variable_scope(
          "scope", auxiliary_name_scope=False) as scope:
        self.assertEqual(scope.original_name_scope, "")
        self.assertEqual(variable_scope.get_variable("w", []).name, "scope/w:0")
        # Ops are created in the surrounding (here: root) name scope.
        self.assertEqual(constant_op.constant([], name="c").name, "c:0")
      with variable_scope.variable_scope(scope, auxiliary_name_scope=False):
        self.assertEqual(scope.original_name_scope, "")
        self.assertEqual(
            variable_scope.get_variable("w1", []).name, "scope/w1:0")
        self.assertEqual(constant_op.constant([], name="c1").name, "c1:0")
      # Recheck: new name scope is NOT created before
      with ops.name_scope("scope"):
        self.assertEqual(constant_op.constant([], name="c").name, "scope/c:0")
      with variable_scope.variable_scope("outer"):
        with variable_scope.variable_scope(
            "inner", auxiliary_name_scope=False) as inner:
          self.assertEqual(inner.original_name_scope, "outer/")
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/inner/w:0")
          self.assertEqual(constant_op.constant([], name="c").name, "outer/c:0")
        with variable_scope.variable_scope(
            inner, auxiliary_name_scope=False) as inner1:
          self.assertEqual(inner1.original_name_scope, "outer/")
          self.assertEqual(
              variable_scope.get_variable("w1", []).name, "outer/inner/w1:0")
          self.assertEqual(
              constant_op.constant([], name="c1").name, "outer/c1:0")
        # Recheck: new name scope is NOT created before
        with ops.name_scope("inner"):
          self.assertEqual(
              constant_op.constant([], name="c").name, "outer/inner/c:0")
  def testCreatedByDefaultNameWhenAuxiliaryNameScopeIsFalse(self):
    """auxiliary_name_scope=False also works with default_name scopes."""
    with self.test_session():
      with variable_scope.variable_scope(
          None, default_name="default", auxiliary_name_scope=False) as scope:
        self.assertEqual(scope.original_name_scope, "")
        self.assertEqual(
            variable_scope.get_variable("w", []).name, "default/w:0")
        self.assertEqual(constant_op.constant([], name="c").name, "c:0")
      # Recheck: new name scope is NOT created before
      with ops.name_scope("default"):
        self.assertEqual(constant_op.constant([], name="c").name, "default/c:0")
      with variable_scope.variable_scope("outer"):
        with variable_scope.variable_scope(
            None, default_name="default", auxiliary_name_scope=False) as inner:
          self.assertEqual(inner.original_name_scope, "outer/")
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/default/w:0")
          self.assertEqual(constant_op.constant([], name="c").name, "outer/c:0")
        # Recheck: new name scope is NOT created before
        with ops.name_scope("default"):
          self.assertEqual(
              constant_op.constant([], name="c").name, "outer/default/c:0")
  def testReenterRootScopeWhenAuxiliaryNameScopeIsFalse(self):
    """The root scope can be re-entered with auxiliary_name_scope=False."""
    with self.test_session():
      root_scope = variable_scope.get_variable_scope()
      with variable_scope.variable_scope(
          root_scope, auxiliary_name_scope=False) as scope:
        self.assertEqual(scope.original_name_scope, "")
        self.assertEqual(variable_scope.get_variable("w", []).name, "w:0")
        self.assertEqual(constant_op.constant([], name="c").name, "c:0")
      with variable_scope.variable_scope("outer"):
        with variable_scope.variable_scope(
            root_scope, auxiliary_name_scope=False) as inner:
          self.assertEqual(inner.original_name_scope, "")
          # Variables go to the root; ops stay in the surrounding name scope.
          self.assertEqual(variable_scope.get_variable("w1", []).name, "w1:0")
          self.assertEqual(
              constant_op.constant([], name="c1").name, "outer/c1:0")
  def testAuxiliaryNameScopeIsInvalid(self):
    """Non-bool auxiliary_name_scope raises TypeError for every scope form."""
    with self.test_session():
      with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
        with variable_scope.variable_scope(
            None, default_name="scope", auxiliary_name_scope="invalid"):
          pass
      with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
        with variable_scope.variable_scope(
            "scope", auxiliary_name_scope="invalid"):
          pass
      with variable_scope.variable_scope("scope") as scope:
        pass
      # Same check when re-entering an existing scope object.
      with self.assertRaisesRegexp(TypeError, "auxiliary_name_scope"):
        with variable_scope.variable_scope(
            scope, auxiliary_name_scope="invalid"):
          pass
  def testReuseScopeWithoutNameScopeCollision(self):
    """Re-entering a scope via its original_name_scope avoids _1 suffixes."""
    # Github issue: #13429
    with self.test_session():
      with variable_scope.variable_scope("outer"):
        with variable_scope.variable_scope("inner") as inner:
          pass
      with variable_scope.variable_scope(
          inner, auxiliary_name_scope=False) as scope:
        with ops.name_scope(scope.original_name_scope):
          self.assertEqual(
              variable_scope.get_variable("w", []).name, "outer/inner/w:0")
          self.assertEqual(
              constant_op.constant([], name="c").name, "outer/inner/c:0")
      # The plain name scope "inner" is untouched by the re-entry above.
      with ops.name_scope("inner"):
        self.assertEqual(constant_op.constant([], name="c").name, "inner/c:0")
      with variable_scope.variable_scope("another"):
        with variable_scope.variable_scope(
            inner, auxiliary_name_scope=False) as scope1:
          with ops.name_scope(scope1.original_name_scope):
            self.assertEqual(
                variable_scope.get_variable("w1", []).name, "outer/inner/w1:0")
            self.assertEqual(
                constant_op.constant([], name="c1").name, "outer/inner/c1:0")
        with ops.name_scope("inner"):
          self.assertEqual(
              constant_op.constant([], name="c").name, "another/inner/c:0")
  @test_util.run_in_graph_and_eager_modes
  def testGetLocalVar(self):
    """get_local_variable follows scope naming, collections, and reuse."""
    # Check that local variable respects naming.
    with variable_scope.variable_scope("outer") as outer:
      with variable_scope.variable_scope(outer, "default", []):
        local_var = variable_scope.get_local_variable(
            "w", [], collections=["foo"])
        self.assertEqual(local_var.name, "outer/w:0")
    if not context.executing_eagerly():
      # Since variable is local, it should be in the local variable collection
      # but not the trainable collection.
      self.assertIn(local_var,
                    ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES))
      self.assertIn(local_var, ops.get_collection("foo"))
      self.assertNotIn(local_var,
                       ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
      # Check that local variable respects `reuse`.
      with variable_scope.variable_scope(outer, "default", reuse=True):
        self.assertEqual(
            variable_scope.get_local_variable("w", []).name, "outer/w:0")
  def testGetVarWithDevice(self):
    """A device function sees the variable ops created by get_variable."""
    g = ops.Graph()
    varname_type = []
    def device_func(op):
      # Record (name, dtype) for each variable op routed through the device
      # function; all three variable op types are matched.
      if op.type in ["Variable", "VariableV2", "VarHandleOp"]:
        varname_type.append((op.name, op.get_attr("dtype")))
      return "/device:GPU:0"
    with g.as_default():
      with ops.device(device_func):
        _ = variable_scope.get_variable("x", (100, 200))
        _ = variable_scope.get_variable(
            "y", dtype=dtypes.int64, initializer=numpy.arange(73))
    self.assertEqual(varname_type[0], ("x", dtypes.float32))
    self.assertEqual(varname_type[1], ("y", dtypes.int64))
  def testGetCollection(self):
    """VariableScope.get_collection filters graph collections by scope prefix."""
    with self.test_session():
      _ = variable_scope.get_variable("testGetCollection_a", [])
      _ = variable_scope.get_variable(
          "testGetCollection_b", [], trainable=False)
      with variable_scope.variable_scope("testGetCollection_foo_") as scope1:
        _ = variable_scope.get_variable("testGetCollection_a", [])
        _ = variable_scope.get_variable(
            "testGetCollection_b", [], trainable=False)
        self.assertEqual([
            v.name
            for v in scope1.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
        ], ["testGetCollection_foo_/testGetCollection_a:0"])
        self.assertEqual([
            v.name
            for v in scope1.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
        ], [
            "testGetCollection_foo_/testGetCollection_a:0",
            "testGetCollection_foo_/testGetCollection_b:0"
        ])
      with variable_scope.variable_scope("testGetCollection_foo") as scope2:
        _ = variable_scope.get_variable("testGetCollection_a", [])
        _ = variable_scope.get_variable(
            "testGetCollection_b", [], trainable=False)
        self.assertEqual([
            v.name
            for v in scope2.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
        ], ["testGetCollection_foo/testGetCollection_a:0"])
        self.assertEqual([
            v.name
            for v in scope2.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
        ], [
            "testGetCollection_foo/testGetCollection_a:0",
            "testGetCollection_foo/testGetCollection_b:0"
        ])
      # The root scope sees every variable, including the scoped ones.
      scope = variable_scope.get_variable_scope()
      self.assertEqual([
          v.name for v in scope.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      ], [
          "testGetCollection_a:0", "testGetCollection_b:0",
          "testGetCollection_foo_/testGetCollection_a:0",
          "testGetCollection_foo_/testGetCollection_b:0",
          "testGetCollection_foo/testGetCollection_a:0",
          "testGetCollection_foo/testGetCollection_b:0"
      ])
      self.assertEqual([
          v.name
          for v in scope.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
      ], [
          "testGetCollection_a:0",
          "testGetCollection_foo_/testGetCollection_a:0",
          "testGetCollection_foo/testGetCollection_a:0"
      ])
  def testGetTrainableVariables(self):
    """scope.trainable_variables() excludes trainable=False variables."""
    with self.test_session():
      _ = variable_scope.get_variable("testGetTrainableVariables_a", [])
      with variable_scope.variable_scope(
          "testGetTrainableVariables_foo") as scope:
        _ = variable_scope.get_variable("testGetTrainableVariables_b", [])
        _ = variable_scope.get_variable(
            "testGetTrainableVariables_c", [], trainable=False)
      self.assertEqual(
          [v.name for v in scope.trainable_variables()],
          ["testGetTrainableVariables_foo/"
           "testGetTrainableVariables_b:0"])
  def testGetGlobalVariables(self):
    """scope.global_variables() only returns variables under the scope."""
    with self.test_session():
      _ = variable_scope.get_variable("testGetGlobalVariables_a", [])
      with variable_scope.variable_scope("testGetGlobalVariables_foo") as scope:
        _ = variable_scope.get_variable("testGetGlobalVariables_b", [])
        self.assertEqual(
            [v.name for v in scope.global_variables()],
            ["testGetGlobalVariables_foo/"
             "testGetGlobalVariables_b:0"])
  def testGetLocalVariables(self):
    """scope.local_variables() returns only LOCAL_VARIABLES under the scope."""
    with self.test_session():
      _ = variable_scope.get_variable(
          "a", [], collections=[ops.GraphKeys.LOCAL_VARIABLES])
      with variable_scope.variable_scope("foo") as scope:
        _ = variable_scope.get_variable(
            "b", [], collections=[ops.GraphKeys.LOCAL_VARIABLES])
        _ = variable_scope.get_variable("c", [])
      self.assertEqual([v.name for v in scope.local_variables()], ["foo/b:0"])
  def testGetVariableWithRefDtype(self):
    """get_variable accepts the dtype taken from another variable."""
    v = variable_scope.get_variable("v", shape=[3, 4], dtype=dtypes.float32)
    # Ensure it is possible to do get_variable with a _ref dtype passed in.
    _ = variable_scope.get_variable("w", shape=[5, 6], dtype=v.dtype)
def testTwoGraphs(self):
def f():
g1 = ops.Graph()
g2 = ops.Graph()
with g1.as_default():
with g2.as_default():
with variable_scope.variable_scope("_"):
pass
self.assertRaisesRegexp(ValueError, "'_' is not a valid scope name", f)
def axis0_into1_partitioner(shape=None, **unused_kwargs):
  """Partitioner that requests a single partition along every axis."""
  return [1 for _ in shape]
def axis0_into2_partitioner(shape=None, **unused_kwargs):
  """Partitioner that splits axis 0 into 2 parts, leaving other axes whole."""
  num_parts = [1 for _ in shape]
  num_parts[0] = 2
  return num_parts
def axis0_into3_partitioner(shape=None, **unused_kwargs):
  """Partitioner that splits axis 0 into 3 parts, leaving other axes whole."""
  num_parts = [1 for _ in shape]
  num_parts[0] = 3
  return num_parts
class VariableScopeWithPartitioningTest(test.TestCase):
  """Tests for get_variable / variable_scope with a `partitioner` set."""
  def testResultNameMatchesRequested(self):
    """The partitioned variable keeps the requested name; parts get suffixes."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=(3, 1, 1))
      self.assertEqual(v.name, "scope0/name0")
      v_concat = v.as_tensor()
      self.assertEqual(v_concat.name, "scope0/name0:0")
      variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      # Two partitions along axis 0 -> exactly part_0 and part_1 exist.
      self.assertIn("scope0/name0/part_0:0", [x.name for x in variables])
      self.assertIn("scope0/name0/part_1:0", [x.name for x in variables])
      self.assertNotIn("scope0/name0/part_2:0", [x.name for x in variables])
  def testBreaksIfPartitioningChanges(self):
    """Reusing a partitioned variable with different partitioning fails."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      variable_scope.get_variable("name0", shape=(3, 1, 1))
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into3_partitioner, reuse=True):
      with self.assertRaisesRegexp(
          ValueError,
          "Trying to reuse partitioned variable .* but specified partitions .* "
          "and found partitions .*"):
        variable_scope.get_variable("name0", shape=(3, 1, 1))
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into1_partitioner, reuse=True):
      with self.assertRaisesRegexp(
          ValueError,
          "Trying to reuse partitioned variable .* but specified partitions .* "
          "and found partitions .*"):
        variable_scope.get_variable("name0", shape=(3, 1, 1))
  def testReturnsExistingConcatenatedValueIfReuse(self):
    """Reuse with the same partitioner returns the same partitioned variable."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v_concat = variable_scope.get_variable("name0", shape=(3, 1, 1))
      variable_scope.get_variable_scope().reuse_variables()
      v_concat_2 = variable_scope.get_variable("name0", shape=(3, 1, 1))
      self.assertEqual(v_concat, v_concat_2)
  def testAllowsReuseWithoutPartitioner(self):
    """A partitioned variable can be reused without re-specifying shape."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=(3, 1, 1))
    with variable_scope.variable_scope("scope0", reuse=True):
      v_reused = variable_scope.get_variable("name0")
    self.assertEqual(v, v_reused)
  def testPropagatePartitionerOnReopening(self):
    """Re-entering a scope object carries its partitioner along."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner) as vs:
      self.assertEqual(axis0_into2_partitioner, vs.partitioner)
      with variable_scope.variable_scope(vs) as vs1:
        self.assertEqual(axis0_into2_partitioner, vs1.partitioner)
  def testScalarIgnoresPartitioner(self):
    """Scalar variables are created unpartitioned despite a partitioner."""
    with variable_scope.variable_scope(
        "scope0", partitioner=axis0_into2_partitioner):
      v = variable_scope.get_variable("name0", shape=())
      self.assertEqual(v.name, "scope0/name0:0")
      variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      self.assertIn("scope0/name0:0", [x.name for x in variables])
  def _testPartitionConcatenatesAlongCorrectAxis(self, use_resource):
    """Helper: per-variable partitioners split along their designated axis."""
    def _part_axis_0(**unused_kwargs):
      return (2, 1, 1)
    def _part_axis_1(**unused_kwargs):
      return (1, 2, 1)
    with variable_scope.variable_scope("root", use_resource=use_resource):
      v0 = variable_scope.get_variable(
          "n0", shape=(2, 2, 2), partitioner=_part_axis_0)
      v1 = variable_scope.get_variable(
          "n1", shape=(2, 2, 2), partitioner=_part_axis_1)
      # The concatenated shape is the full requested shape either way.
      self.assertEqual(v0.get_shape(), (2, 2, 2))
      self.assertEqual(v1.get_shape(), (2, 2, 2))
      n0_0 = list(v0)[0]
      n0_1 = list(v0)[1]
      self.assertEqual(n0_0.get_shape(), (1, 2, 2))
      self.assertEqual(n0_1.get_shape(), (1, 2, 2))
      n1_0 = list(v1)[0]
      n1_1 = list(v1)[1]
      self.assertEqual(n1_0.get_shape(), (2, 1, 2))
      self.assertEqual(n1_1.get_shape(), (2, 1, 2))
  def testPartitionConcatenatesAlongCorrectAxis(self):
    self._testPartitionConcatenatesAlongCorrectAxis(use_resource=False)
  def testPartitionConcatenatesAlongCorrectAxisResource(self):
    self._testPartitionConcatenatesAlongCorrectAxis(use_resource=True)
class VariableScopeWithCustomGetterTest(test.TestCase):
  def testNonCallableGetterFails(self):
    """A non-callable custom_getter is rejected, via scope or direct call."""
    with self.assertRaisesRegexp(ValueError, r"custom_getter .* not callable:"):
      with variable_scope.variable_scope("scope0", custom_getter=3):
        variable_scope.get_variable("name0")
    with self.assertRaisesRegexp(ValueError, r"custom_getter .* not callable:"):
      variable_scope.get_variable("name0", custom_getter=3)
  def testNoSideEffectsWithIdentityCustomGetter(self):
    """An identity custom getter changes nothing but is invoked when set."""
    called = [0]
    def custom_getter(getter, *args, **kwargs):
      # Pass-through getter that only counts its invocations.
      called[0] += 1
      return getter(*args, **kwargs)
    with variable_scope.variable_scope(
        "scope", custom_getter=custom_getter) as scope:
      v = variable_scope.get_variable("v", [1])
    with variable_scope.variable_scope(scope, reuse=True):
      v2 = variable_scope.get_variable("v", [1])
    # new_scope has no custom getter, so creating v3 does not bump the count.
    with variable_scope.variable_scope("new_scope") as new_scope:
      v3 = variable_scope.get_variable("v3", [1])
    with variable_scope.variable_scope(
        new_scope, reuse=True, custom_getter=custom_getter):
      v4 = variable_scope.get_variable("v3", [1])
    self.assertEqual(v, v2)
    self.assertEqual(v3, v4)
    self.assertEqual(3, called[0])  # skipped one in the first new_scope
def testCustomGetterWithReuse(self):
# Custom getter can choose to behave differently on reused variables.
def custom_getter(getter, *args, **kwargs):
var = getter(*args, **kwargs)
if kwargs["reuse"]:
# This can be used, e.g., for changing the caching device if needed.
return array_ops.identity(var, name="reused")
else:
return array_ops.identity(var, name="not_reused")
with variable_scope.variable_scope(
"scope", custom_getter=custom_getter) as scope:
v = variable_scope.get_variable("v", [1])
with variable_scope.variable_scope(scope, reuse=True):
v2 = variable_scope.get_variable("v", [1])
self.assertEqual(v.name, "not_reused:0")
self.assertEqual(v2.name, "reused:0")
def testGetterThatCreatesTwoVariablesAndSumsThem(self):
def custom_getter(getter, name, *args, **kwargs):
g_0 = getter("%s/0" % name, *args, **kwargs)
g_1 = getter("%s/1" % name, *args, **kwargs)
with ops.name_scope("custom_getter"):
return g_0 + g_1
with variable_scope.variable_scope("scope", custom_getter=custom_getter):
v = variable_scope.get_variable("v", [1, 2, 3])
self.assertEqual([1, 2, 3], v.get_shape())
true_vars = variables_lib.trainable_variables()
self.assertEqual(2, len(true_vars))
self.assertEqual("scope/v/0:0", true_vars[0].name)
self.assertEqual("scope/v/1:0", true_vars[1].name)
self.assertEqual("custom_getter/add:0", v.name)
with self.test_session() as sess:
variables_lib.global_variables_initializer().run()
np_vars, np_v = sess.run([true_vars, v])
self.assertAllClose(np_v, sum(np_vars))
def testNestedCustomGetters(self):
def sum_getter(getter, name, *args, **kwargs):
g_0 = getter("%s/sum_0" % name, *args, **kwargs)
g_1 = getter("%s/sum_1" % name, *args, **kwargs)
with ops.name_scope("sum_getter"):
return g_0 + g_1
def prod_getter(getter, name, *args, **kwargs):
g_0 = getter("%s/prod_0" % name, *args, **kwargs)
g_1 = getter("%s/prod_1" % name, *args, **kwargs)
with ops.name_scope("prod_getter"):
return g_0 * g_1
with variable_scope.variable_scope("prod_scope", custom_getter=prod_getter):
with variable_scope.variable_scope("sum_scope", custom_getter=sum_getter):
with variable_scope.variable_scope(
"inner_sum_scope", custom_getter=sum_getter):
# take sums of sums of products
v = variable_scope.get_variable("v", [1, 2, 3])
self.assertEqual([1, 2, 3], v.get_shape())
true_vars = variables_lib.trainable_variables()
self.assertEqual(8, len(true_vars))
template = (
"prod_scope/sum_scope/inner_sum_scope/v/sum_%d/sum_%d/prod_%d:0")
self.assertEqual(template % (0, 0, 0), true_vars[0].name)
self.assertEqual(template % (0, 0, 1), true_vars[1].name)
self.assertEqual(template % (0, 1, 0), true_vars[2].name)
self.assertEqual(template % (0, 1, 1), true_vars[3].name)
self.assertEqual(template % (1, 0, 0), true_vars[4].name)
self.assertEqual(template % (1, 0, 1), true_vars[5].name)
self.assertEqual(template % (1, 1, 0), true_vars[6].name)
self.assertEqual(template % (1, 1, 1), true_vars[7].name)
with self.test_session() as sess:
variables_lib.global_variables_initializer().run()
np_vars, np_v = sess.run([true_vars, v])
# take products of sums of products
self.assertAllClose(
np_v, (((np_vars[0] * np_vars[1]) + (np_vars[2] * np_vars[3])) + (
(np_vars[4] * np_vars[5]) + (np_vars[6] * np_vars[7]))))
def testVariableCreator(self):
variable_names = []
def creator_a(next_creator, **kwargs):
variable_names.append(kwargs.get("name", ""))
return next_creator(**kwargs)
def creator_b(next_creator, **kwargs):
kwargs["name"] = "forced_name"
return next_creator(**kwargs)
with variable_scope.variable_creator_scope(creator_a):
with variable_scope.variable_creator_scope(creator_b):
variable_scope.variable(1.0, name="one_name")
self.assertAllEqual(variable_names, ["forced_name"])
class PartitionInfoTest(test.TestCase):
  """Tests for variable_scope._PartitionInfo (offset/slice bookkeeping)."""
  def testConstructorChecks(self):
    """Argument validation: types, matching ranks, offset < shape."""
    # Invalid arg types.
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=None, var_offset=[0, 1])
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset=None)
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape="foo", var_offset=[0, 1])
    with self.assertRaises(TypeError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset="foo")
    # full_shape and var_offset must have same length.
    with self.assertRaises(ValueError):
      variable_scope._PartitionInfo(full_shape=[0, 1], var_offset=[0])
    # Offset must always be less than shape.
    with self.assertRaises(ValueError):
      variable_scope._PartitionInfo(full_shape=[1, 1], var_offset=[0, 1])
  def testSingleOffset(self):
    """single_offset returns the offset along the (single) sliced dimension."""
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    self.assertEqual(4, partition_info.single_offset([1, 3]))
    # Tests when the variable isn't partitioned at all.
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[0, 0])
    self.assertEqual(0, partition_info.single_offset([9, 3]))
  def testSingleSliceDim(self):
    """single_slice_dim validates the shard shape and finds the slice axis."""
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    # Invalid shape.
    with self.assertRaises(TypeError):
      partition_info.single_slice_dim(None)
    # Rank of shape differs from full_shape.
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([1, 2, 3])
    # Shape is too large given var_offset (4+6 > 9).
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([6, 3])
    # Multiple possible slice dim from shape.
    with self.assertRaises(ValueError):
      partition_info.single_slice_dim([1, 1])
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[0, 0])
    self.assertEqual(1, partition_info.single_slice_dim([9, 2]))
    partition_info = variable_scope._PartitionInfo(
        full_shape=[9, 3], var_offset=[4, 0])
    self.assertEqual(0, partition_info.single_slice_dim([2, 3]))
class VariableScopeMultithreadedTest(test.TestCase):
  """Variable scopes are per-thread state; these tests pin the cross-thread
  behavior: separate threads entering the same scope name, interleaved
  (paused) threads, and explicitly re-entering another thread's scope."""
  def testTwoThreadsDisjointScopeEntry(self):
    """Sequential threads share the graph: the second create must fail."""
    def thread_fn(i, graph):
      with graph.as_default():
        with variable_scope.variable_scope("foo"):
          if i == 0:
            v = variable_scope.get_variable("v", [])
            self.assertEquals("foo/v:0", v.name)
          else:
            # Any thread after the first one should fail to create variable
            # with the same name.
            with self.assertRaises(ValueError):
              variable_scope.get_variable("v", [])
    graph = ops.get_default_graph()
    threads = [
        threading.Thread(target=thread_fn, args=(
            i,
            graph,
        )) for i in range(2)
    ]
    threads[0].start()
    # Allow thread 0 to finish before starting thread 1.
    threads[0].join()
    threads[1].start()
    threads[1].join()
  def testTwoThreadsNestedScopeEntry(self):
    """Same check, but with both threads alive and paused inside the scope."""
    def thread_fn(i, graph, run_event, pause_event):
      with graph.as_default():
        with variable_scope.variable_scope("foo"):
          if i == 0:
            v = variable_scope.get_variable("v", [])
            self.assertEquals("foo/v:0", v.name)
          else:
            # Any thread after the first one should fail to create variable
            # with the same name.
            with self.assertRaises(ValueError):
              variable_scope.get_variable("v", [])
          # Signal the main thread we are parked inside the scope, then
          # wait for permission to exit it.
          pause_event.set()
          run_event.wait()
    graph = ops.get_default_graph()
    run_events = [threading.Event() for _ in range(2)]
    pause_events = [threading.Event() for _ in range(2)]
    threads = [
        threading.Thread(
            target=thread_fn, args=(i, graph, run_events[i], pause_events[i]))
        for i in range(2)
    ]
    # Start first thread.
    threads[0].start()
    pause_events[0].wait()
    # Start next thread once the first thread has paused.
    threads[1].start()
    pause_events[1].wait()
    # Resume both threads.
    run_events[0].set()
    run_events[1].set()
    threads[0].join()
    threads[1].join()
  def testReenterMainScope(self):
    """A worker thread can re-enter a scope object created on the main thread."""
    def thread_fn(graph, main_thread_scope):
      with graph.as_default():
        # Variable created with main scope will have prefix "main".
        with variable_scope.variable_scope(main_thread_scope):
          with variable_scope.variable_scope("foo"):
            v = variable_scope.get_variable("v", [])
            self.assertEquals("main/foo/v:0", v.name)
        # Variable created outside main scope will not have prefix "main".
        with variable_scope.variable_scope("bar"):
          v = variable_scope.get_variable("v", [])
          self.assertEquals("bar/v:0", v.name)
    graph = ops.get_default_graph()
    with variable_scope.variable_scope("main") as main_thread_scope:
      thread = threading.Thread(
          target=thread_fn, args=(graph, main_thread_scope))
      thread.start()
      thread.join()
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
|
apache-2.0
|
saimn/astropy
|
astropy/io/votable/tests/util_test.py
|
7
|
1648
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A set of tests for the util.py module
"""
# LOCAL
from astropy.io.votable import util
from astropy.tests.helper import raises
# Tests for util.coerce_range_list_param: coercion of tuples/strings into
# the STC range-list string form, returning (string, n_elements).
def test_range_list():
    # A single number is normalized to its float repr.
    assert util.coerce_range_list_param((5,)) == ("5.0", 1)
def test_range_list2():
    assert util.coerce_range_list_param((5e-7, 8e-7)) == ("5e-07,8e-07", 2)
def test_range_list3():
    # A trailing bare string becomes the ";FRAME" qualifier.
    assert util.coerce_range_list_param((5e-7, 8e-7, "FOO")) == (
        "5e-07,8e-07;FOO", 3)
@raises(ValueError)
def test_range_list4a():
    # "J" is not numeric, so the default numeric=True coercion must fail.
    util.coerce_range_list_param(
        (5e-7, (None, 8e-7), (4, None), (4, 5), "J", "FOO"))
def test_range_list4():
    # Open-ended pairs map to "/x", "x/"; closed pairs to "x/y".
    assert (util.coerce_range_list_param(
        (5e-7, (None, 8e-7), (4, None), (4, 5), "J", "FOO"), numeric=False) ==
            ("5e-07,/8e-07,4/,4/5,J;FOO", 6))
@raises(ValueError)
def test_range_list5():
    util.coerce_range_list_param(('FOO', ))
@raises(ValueError)
def test_range_list6():
    # "FOO" is not in the allowed reference-frame list.
    print(util.coerce_range_list_param((5, 'FOO'), util.stc_reference_frames))
def test_range_list7():
    assert util.coerce_range_list_param(("J",), numeric=False) == ("J", 1)
def test_range_list8():
    # Already-canonical strings pass through unchanged.
    for s in ["5.0",
              "5e-07,8e-07",
              "5e-07,8e-07;FOO",
              "5e-07,/8e-07,4.0/,4.0/5.0;FOO",
              "J"]:
        assert util.coerce_range_list_param(s, numeric=False)[0] == s
@raises(ValueError)
def test_range_list9a():
    util.coerce_range_list_param("52,-27.8;FOO", util.stc_reference_frames)
def test_range_list9():
    # NOTE(review): only checks truthiness, not the exact coerced value.
    assert util.coerce_range_list_param(
        "52,-27.8;GALACTIC", util.stc_reference_frames)
|
bsd-3-clause
|
apark263/tensorflow
|
tensorflow/contrib/feature_column/__init__.py
|
42
|
1502
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental utilities for tf.feature_column."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.feature_column.python.feature_column.sequence_feature_column import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
# Public API of this contrib package: everything not listed here is
# stripped from the module namespace by remove_undocumented() below.
_allowed_symbols = [
    'sequence_categorical_column_with_hash_bucket',
    'sequence_categorical_column_with_identity',
    'sequence_categorical_column_with_vocabulary_list',
    'sequence_categorical_column_with_vocabulary_file',
    'sequence_input_layer',
    'sequence_numeric_column',
]
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
|
apache-2.0
|
Alwnikrotikz/volatility
|
volatility/plugins/crashinfo.py
|
44
|
5868
|
# Volatility
# Copyright (C) 2009-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.utils as utils
import volatility.plugins.common as common
import volatility.cache as cache
import volatility.debug as debug
import volatility.obj as obj
import datetime
class _DMP_HEADER(obj.CType):
    """Crash-dump header object with a friendlier uptime accessor."""

    @property
    def SystemUpTime(self):
        """Return the system uptime as a timedelta, or NoneObject if absent."""
        raw_ticks = self.m('SystemUpTime')
        # Some dump utilities stamp the ASCII marker "PAGEPAGE" into this
        # field instead of a real uptime when writing the header.
        if raw_ticks == 0x4547415045474150:
            return obj.NoneObject("No uptime recorded")
        # One tick is 100 ns; timedelta takes microseconds, hence / 10.
        return datetime.timedelta(microseconds = raw_ticks / 10)
class CrashInfoModification(obj.ProfileModification):
    """Applies overlays for crash dump headers"""
    # Only meaningful on Windows profiles.
    conditions = {'os': lambda x: x == 'windows'}
    # Must run before these modifications so our overlay is in place first.
    before = ["WindowsVTypes", "WindowsObjectClasses"]
    def modification(self, profile):
        # Give the raw header fields typed interpretations: fixed-length
        # Comment string, an enumerated DumpType, and a UTC timestamp.
        # The x86 and x64 headers share the same overlay fields.
        profile.merge_overlay({
            '_DMP_HEADER' : [ None, {
            'Comment' : [ None, ['String', dict(length = 128)]],
            'DumpType' : [ None, ['Enumeration', dict(choices = {0x1: "Full Dump", 0x2: "Kernel Dump"})]],
            'SystemTime' : [ None, ['WinTimeStamp', dict(is_utc = True)]],
            }],
            '_DMP_HEADER64' : [ None, {
            'Comment' : [ None, ['String', dict(length = 128)]],
            'DumpType' : [ None, ['Enumeration', dict(choices = {0x1: "Full Dump", 0x2: "Kernel Dump"})]],
            'SystemTime' : [ None, ['WinTimeStamp', dict(is_utc = True)]],
            }],
            })
        ## Both x86 and x64 use the same structure for now, just
        ## so they can share the same SystemUpTime property.
        profile.object_classes.update({'_DMP_HEADER' : _DMP_HEADER, '_DMP_HEADER64' : _DMP_HEADER})
class CrashInfo(common.AbstractWindowsCommand):
    """Dump crash-dump information"""
    # Address-space class names we accept as a crash-dump layer.
    target_as = ['WindowsCrashDumpSpace32', 'WindowsCrashDumpSpace64']
    @cache.CacheDecorator("tests/crashinfo")
    def calculate(self):
        """Determines the address space"""
        addr_space = utils.load_as(self._config, astype = 'physical')
        result = None
        # Walk down the stacked address spaces (each wraps a .base) until we
        # find a crash-dump layer; keep the deepest match found.
        adrs = addr_space
        while adrs:
            if adrs.__class__.__name__ in self.target_as:
                result = adrs
            adrs = adrs.base
        if result is None:
            debug.error("Memory Image could not be identified as {0}".format(self.target_as))
        return result
    def render_text(self, outfd, data):
        """Renders the crashdump header as text"""
        hdr = data.get_header()
        outfd.write("{0}:\n".format(hdr.obj_name))
        outfd.write(" Majorversion: 0x{0:08x} ({1})\n".format(hdr.MajorVersion, hdr.MajorVersion))
        outfd.write(" Minorversion: 0x{0:08x} ({1})\n".format(hdr.MinorVersion, hdr.MinorVersion))
        outfd.write(" KdSecondaryVersion 0x{0:08x}\n".format(hdr.KdSecondaryVersion))
        outfd.write(" DirectoryTableBase 0x{0:08x}\n".format(hdr.DirectoryTableBase))
        outfd.write(" PfnDataBase 0x{0:08x}\n".format(hdr.PfnDataBase))
        outfd.write(" PsLoadedModuleList 0x{0:08x}\n".format(hdr.PsLoadedModuleList))
        outfd.write(" PsActiveProcessHead 0x{0:08x}\n".format(hdr.PsActiveProcessHead))
        outfd.write(" MachineImageType 0x{0:08x}\n".format(hdr.MachineImageType))
        outfd.write(" NumberProcessors 0x{0:08x}\n".format(hdr.NumberProcessors))
        outfd.write(" BugCheckCode 0x{0:08x}\n".format(hdr.BugCheckCode))
        # PaeEnabled only exists in the 32-bit header.
        if hdr.obj_name != "_DMP_HEADER64":
            outfd.write(" PaeEnabled 0x{0:08x}\n".format(hdr.PaeEnabled))
        outfd.write(" KdDebuggerDataBlock 0x{0:08x}\n".format(hdr.KdDebuggerDataBlock))
        outfd.write(" ProductType 0x{0:08x}\n".format(hdr.ProductType))
        outfd.write(" SuiteMask 0x{0:08x}\n".format(hdr.SuiteMask))
        outfd.write(" WriterStatus 0x{0:08x}\n".format(hdr.WriterStatus))
        outfd.write(" Comment {0}\n".format(hdr.Comment))
        outfd.write(" DumpType {0}\n".format(hdr.DumpType))
        outfd.write(" SystemTime {0}\n".format(str(hdr.SystemTime or '')))
        outfd.write(" SystemUpTime {0}\n".format(str(hdr.SystemUpTime or '')))
        outfd.write("\nPhysical Memory Description:\n")
        outfd.write("Number of runs: {0}\n".format(len(data.get_runs())))
        outfd.write("FileOffset Start Address Length\n")
        # The runs start after the header: one page for x86, two for x64.
        if hdr.obj_name != "_DMP_HEADER64":
            foffset = 0x1000
        else:
            foffset = 0x2000
        run = []
        ## FIXME. These runs differ for x86 vs x64. This is a reminder
        ## for MHL or AW to fix it.
        # Each run appears to be (start address, ?, length) -- column headings
        # above imply run[0] = start, run[2] = length.  TODO confirm layout.
        for run in data.get_runs():
            outfd.write("{0:08x} {1:08x} {2:08x}\n".format(foffset, run[0], run[2]))
            foffset += (run[2])
        # Final line: file offset and start address of the last page of the
        # last run.  NOTE(review): uses the loop variable after the loop, so
        # output is odd if there were no runs -- presumed never to happen.
        outfd.write("{0:08x} {1:08x}\n".format(foffset - 0x1000, (run[0] + run[2] - 0x1000)))
|
gpl-2.0
|
juanyaw/PTVS
|
Python/Tests/TestData/Grammar/DedentErrorLargeFile.py
|
18
|
3050
|
# On the Insert tab, the galleries include items that are designed to coordinate with the overall look of your document. You can use these galleries to insert tables, headers, footers, lists, cover pages, and other document building blocks. When you create pictures, charts, or diagrams, they also coordinate with your current document look. You can easily change the formatting of selected text in the document text by choosing a look for the selected text from the Quick Styles gallery on the Home tab. You can also format text directly by using the other controls on the Home tab.
# Most controls offer a choice of using the look from the current theme or using a format that you specify directly. To change the overall look of your document, choose new Theme elements on the Page Layout tab. To change the looks available in the Quick Style gallery, use the Change Current Quick Style Set command. Both the Themes gallery and the Quick Styles gallery provide reset commands so that you can always restore the look of your document to the original contained in your current template. On the Insert tab, the galleries include items that are designed to coordinate with the overall look of your document.
# You can use these galleries to insert tables, headers, footers, lists, cover pages, and other document building blocks. When you create pictures, charts, or diagrams, they also coordinate with your current document look. You can easily change the formatting of selected text in the document text by choosing a look for the selected text from the Quick Styles gallery on the Home tab. You can also format text directly by using the other controls on the Home tab. Most controls offer a choice of using the look from the current theme or using a format that you specify directly.
# To change the overall look of your document, choose new Theme elements on the Page Layout tab. To change the looks available in the Quick Style gallery, use the Change Current Quick Style Set command. Both the Themes gallery and the Quick Styles gallery provide reset commands so that you can always restore the look of your document to the original contained in your current template. On the Insert tab, the galleries include items that are designed to coordinate with the overall look of your document. You can use these galleries to insert tables, headers, footers, lists, cover pages, and other document building blocks.
# When you create pictures, charts, or diagrams, they also coordinate with your current document look. You can easily change the formatting of selected text in the document text by choosing a look for the selected text from the Quick Styles gallery on the Home tab. You can also format text directly by using the other controls on the Home tab. Most controls offer a choice of using the look from the current theme or using a format that you specify directly. To change the overall look of your document, choose new Theme elements on the Page Layout tab.
def f():
if True:
pass
pass
|
apache-2.0
|
ellipsis14/dolfin
|
demo/documented/tensor-weighted-poisson/python/demo_tensor-weighted-poisson.py
|
3
|
3252
|
"""This demo program solves Poisson's equation
- div C grad u(x, y) = f(x, y)
on the unit square with source f given by
f(x, y) = 10*exp(-((x - 0.5)^2 + (y - 0.5)^2) / 0.02)
and boundary conditions given by
u(x, y) = 0 for x = 0 or x = 1
du/dn(x, y) = 0 for y = 0 or y = 1
The conductivity C is a symmetric 2 x 2 matrix which
varies throughout the domain. In the left part of the
domain, the conductivity is
C = ((1, 0.3), (0.3, 2))
and in the right part it is
C = ((3, 0.5), (0.5, 4))
The data files where these values are stored are generated
by the program generate_data.py
This demo is dedicated to BF and Marius... ;-)
"""
# Copyright (C) 2009-2011 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2009-12-16
# Last changed: 2011-06-28
# Begin demo
from dolfin import *
# Read mesh from file and create function space
mesh = Mesh("../unitsquare_32_32.xml.gz")
V = FunctionSpace(mesh, "Lagrange", 1)
# Define Dirichlet boundary (x = 0 or x = 1)
def boundary(x):
    # True on the left (x=0) and right (x=1) edges of the unit square,
    # within machine tolerance.
    return x[0] < DOLFIN_EPS or x[0] > 1.0 - DOLFIN_EPS
# Define boundary condition
u0 = Constant(0.0)
bc = DirichletBC(V, u0, boundary)
# Code for C++ evaluation of conductivity
# (JIT-compiled Expression: reads the three independent entries of the
# symmetric 2x2 conductivity from per-cell mesh functions c00, c01, c11.)
conductivity_code = """
class Conductivity : public Expression
{
public:
// Create expression with 3 components
Conductivity() : Expression(3) {}
// Function for evaluating expression on each cell
void eval(Array<double>& values, const Array<double>& x, const ufc::cell& cell) const
{
const uint D = cell.topological_dimension;
const uint cell_index = cell.index;
values[0] = (*c00)[cell_index];
values[1] = (*c01)[cell_index];
values[2] = (*c11)[cell_index];
}
// The data stored in mesh functions
std::shared_ptr<MeshFunction<double> > c00;
std::shared_ptr<MeshFunction<double> > c01;
std::shared_ptr<MeshFunction<double> > c11;
};
"""
# Define conductivity expression and matrix
c00 = MeshFunction("double", mesh, "../unitsquare_32_32_c00.xml.gz")
c01 = MeshFunction("double", mesh, "../unitsquare_32_32_c01.xml.gz")
c11 = MeshFunction("double", mesh, "../unitsquare_32_32_c11.xml.gz")
c = Expression(cppcode=conductivity_code)
c.c00 = c00
c.c01 = c01
c.c11 = c11
# Assemble the symmetric conductivity matrix from its three components.
C = as_matrix(((c[0], c[1]), (c[1], c[2])))
# Define variational problem
u = TrialFunction(V)
v = TestFunction(V)
f = Expression("10*exp(-(pow(x[0] - 0.5, 2) + pow(x[1] - 0.5, 2)) / 0.02)")
a = inner(C*grad(u), grad(v))*dx
L = f*v*dx
# Compute solution
u = Function(V)
solve(a == L, u, bc)
# Save solution in VTK format
file = File("poisson.pvd")
file << u
# Plot solution
plot(u, interactive=True)
|
gpl-3.0
|
soylentdeen/Graffity
|
src/SQLTools/CIAO_Browser.py
|
1
|
3413
|
import scipy
import numpy
import sys
from astropy import time as aptime
sys.path.append('../')
import CIAO_DatabaseTools
import Graffity
import tkinter
def getGRAVITY_OBS(GRAVITY_values, frame):
    """Render a "GRAVITY Observations" label into the given tkinter frame.

    NOTE(review): everything after the Label call is a dead string literal
    (note the stray fourth quote on the opening quadruple-quote) preserving
    an older interactive Python-2 implementation (raw_input/print).  It is
    never executed and GRAVITY_values is currently unused.
    """
    tkinter.Label(frame, text="GRAVITY Observations", width = 3, borderwidth="1",
                  relief="solid").grid(row=0, column = 0)
    """"
    print("GRAVITY Observations")
    i = 0
    print("i | Filename | Strehl | Seeing")
    order = numpy.argsort(GRAVITY_values[:, -2])
    for val in GRAVITY_values[order]:
        print("%03d | %s " % (i, aptime.Time(float(val[-2]), format='mjd').iso))
        i += 1
    choice = raw_input("Enter comma separated choices ('END' to quit): ")
    try:
        choices = [int(r.strip()) for r in choice.split(',')]
        retval = []
        for c in choices:
            retval.append(GRAVITY_values[order][c,-2])
        return retval
    except:
        return "END"
    """
def getDataLoggers(GRAVITY_OBS, CIAO_DB, UTS=(1, 2, 3, 4)):
    """Collect CIAO data loggers recorded close in time to GRAVITY observations.

    Parameters:
        GRAVITY_OBS: iterable of GRAVITY observation times (MJD, float-able).
        CIAO_DB: CIAO database handle providing ``query``.
        UTS: telescope unit numbers to process.  (Changed from a mutable
            list default to a tuple; it is only iterated, so callers are
            unaffected.)

    Returns:
        (Values, DataLoggers): per-keyword, per-UT lists of header values,
        and the matching Graffity.DataLogger objects.

    NOTE(review): reads the module-level ``startTime`` global -- TODO pass
    it in explicitly.
    """
    keywords = ['STREHL', 'SEEING', 'ASM_SEEING', 'M10_POSANG',
                'WINDDIR', 'WINDSP', 'PRLTIC', 'TIP_RESIDUALS',
                'TILT_RESIDUALS', 'ALT', 'AZ']
    CIAO_values = CIAO_DB.query(keywords=keywords, timeOfDay='NIGHT',
                                startTime=startTime)
    DataLoggers = {1: [], 2: [], 3: [], 4: []}
    Values = {key: {1: [], 2: [], 3: [], 4: []} for key in keywords}
    for GRAVOBS in GRAVITY_OBS:
        for UT in UTS:
            # Column -4 holds the logger's MJD; pick the record closest in
            # time to this GRAVITY observation.
            deltas = numpy.abs(numpy.array(CIAO_values[UT][:, -4],
                                           dtype=numpy.float32) - float(GRAVOBS))
            timeStamp = numpy.argmin(deltas)
            timeDistance = (float(CIAO_values[UT][timeStamp, -4]) -
                            float(GRAVOBS)) * 24 * 3600
            # Bug fix: compare the *absolute* separation.  The original signed
            # test (timeDistance < 90) accepted arbitrarily old loggers, whose
            # distances are large and negative.
            if abs(timeDistance) < 90:
                print("Distance for UT %d : %.3f seconds" % (UT, timeDistance))
                DataLoggers[UT].append(
                    Graffity.DataLogger(directory=CIAO_values[UT][timeStamp, -3]))
                DataLoggers[UT][-1].loadData()
                for i, key in enumerate(keywords):
                    try:
                        Values[key][UT].append(float(CIAO_values[UT][timeStamp, i]))
                    except (TypeError, ValueError):
                        # Non-numeric / missing entries default to 0.0.
                        Values[key][UT].append(0.0)
            else:
                # Message now matches the 90 s threshold above (was "30 seconds").
                print("Error! Datalogger within 90 seconds does not exist for this observation!")
    return Values, DataLoggers
def onFrameConfigure(canvas):
    """Reset the canvas scroll region to span everything drawn on it."""
    bounding_box = canvas.bbox("all")
    canvas.configure(scrollregion=bounding_box)
# Build the scrollable browser window and enter the tkinter event loop.
Root = tkinter.Tk()
CIAO_DB = CIAO_DatabaseTools.CIAO_Database()
GRAVITY_DB = CIAO_DatabaseTools.GRAVITY_Database()
UTS = [2]
startTime = '2017-07-01 00:00:00'
GRAVITY_values = GRAVITY_DB.query(keywords = [], timeOfDay='NIGHT',
                                  startTime=startTime)
canvas = tkinter.Canvas(Root, borderwidth=0, background='#ffffff')
frame = tkinter.Frame(canvas, background='#ffffff')
vsb = tkinter.Scrollbar(Root, orient='vertical', command=canvas.yview)
canvas.configure(yscrollcommand=vsb.set)
vsb.pack(side='right', fill='y')
canvas.pack(side='left', fill='both', expand=True)
canvas.create_window((4,4), window=frame, anchor='nw')
# Keep the scroll region in sync as the inner frame grows.
frame.bind("<Configure>", lambda event, canvas=canvas: onFrameConfigure(canvas))
#getGRAVITY_OBS(GRAVITY_values, frame)
# Bug fix: mainloop() takes an optional int, not a widget; passing the
# frame raised a TypeError at startup.
Root.mainloop()
"""
while True:
    GRAVITY_OBS = getGRAVITY_OBS(GRAVITY_values)
    if GRAVITY_OBS == "END":
        break
    CIAO_OBS, CIAO_DLS = getDataLoggers(GRAVITY_OBS, CIAO_DB)
    print asdf
"""
|
mit
|
Immortalin/python-for-android
|
python3-alpha/python3-src/Lib/test/test_datetime.py
|
49
|
1656
|
import unittest
import sys
from test.support import import_fresh_module, run_unittest
TESTS = 'test.datetimetester'
# XXX: import_fresh_module() is supposed to leave sys.module cache untouched,
# XXX: but it does not, so we have to save and restore it ourselves.
save_sys_modules = sys.modules.copy()
try:
    # Import the shared test suite twice: once against the pure-Python
    # datetime implementation, once against the C accelerator.
    pure_tests = import_fresh_module(TESTS, fresh=['datetime', '_strptime'],
                                     blocked=['_datetime'])
    fast_tests = import_fresh_module(TESTS, fresh=['datetime',
                                                   '_datetime', '_strptime'])
finally:
    sys.modules.clear()
    sys.modules.update(save_sys_modules)
test_modules = [pure_tests, fast_tests]
test_suffixes = ["_Pure", "_Fast"]
# Clone every TestCase from both module flavors into this module's globals,
# suffixed so unittest discovers each implementation separately.
for module, suffix in zip(test_modules, test_suffixes):
    for name, cls in module.__dict__.items():
        if isinstance(cls, type) and issubclass(cls, unittest.TestCase):
            name += suffix
            cls.__name__ = name
            globals()[name] = cls
            # module/setup/teardown are bound as default arguments on
            # purpose: defaults capture the *current* loop values, avoiding
            # Python's late-binding closure pitfall.
            def setUp(self, module=module, setup=cls.setUp):
                self._save_sys_modules = sys.modules.copy()
                # Point 'datetime' at the flavor under test for the duration
                # of each test, restoring sys.modules in tearDown.
                sys.modules[TESTS] = module
                sys.modules['datetime'] = module.datetime_module
                sys.modules['_strptime'] = module._strptime
                setup(self)
            def tearDown(self, teardown=cls.tearDown):
                teardown(self)
                sys.modules.clear()
                sys.modules.update(self._save_sys_modules)
            cls.setUp = setUp
            cls.tearDown = tearDown
def test_main():
    # Run every TestCase defined (cloned) in this module.
    run_unittest(__name__)
if __name__ == "__main__":
    test_main()
|
apache-2.0
|
grumpycoders/googletest
|
xcode/Scripts/versiongenerate.py
|
3088
|
4536
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A script to prepare version informtion for use the gtest Info.plist file.
This script extracts the version information from the configure.ac file and
uses it to generate a header file containing the same information. The
#defines in this header file will be included in during the generation of
the Info.plist of the framework, giving the correct value to the version
shown in the Finder.
This script makes the following assumptions (these are faults of the script,
not problems with the Autoconf):
1. The AC_INIT macro will be contained within the first 1024 characters
of configure.ac
2. The version string will be 3 integers separated by periods and will be
surrounded by squre brackets, "[" and "]" (e.g. [1.0.1]). The first
segment represents the major version, the second represents the minor
version and the third represents the fix version.
3. No ")" character exists between the opening "(" and closing ")" of
AC_INIT, including in comments and character strings.
"""
# NOTE(review): this script is Python 2 (print statement below); it is run
# as an Xcode build-phase script, not imported.
import sys
import re
# Read the command line argument (the output directory for Version.h)
if (len(sys.argv) < 3):
  print "Usage: versiongenerate.py input_dir output_dir"
  sys.exit(1)
else:
  input_dir = sys.argv[1]
  output_dir = sys.argv[2]
# Read the first 1024 characters of the configure.ac file
config_file = open("%s/configure.ac" % input_dir, 'r')
buffer_size = 1024
opening_string = config_file.read(buffer_size)
config_file.close()
# Extract the version string from the AC_INIT macro
# The following init_expression means:
# Extract three integers separated by periods and surrounded by squre
# brackets(e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy
# (*? is the non-greedy flag) since that would pull in everything between
# the first "(" and the last ")" in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
                                re.DOTALL)
version_values = version_expression.search(opening_string)
major_version = version_values.group(1)
minor_version = version_values.group(2)
fix_version = version_values.group(3)
# Write the version information to a header file to be included in the
# Info.plist file.
file_data = """//
// DO NOT MODIFY THIS FILE (but you can delete it)
//
// This file is autogenerated by the versiongenerate.py script. This script
// is executed in a "Run Script" build phase when creating gtest.framework. This
// header file is not used during compilation of C-source. Rather, it simply
// defines some version strings for substitution in the Info.plist. Because of
// this, we are not not restricted to C-syntax nor are we using include guards.
//
#define GTEST_VERSIONINFO_SHORT %s.%s
#define GTEST_VERSIONINFO_LONG %s.%s.%s
""" % (major_version, minor_version, major_version, minor_version, fix_version)
version_file = open("%s/Version.h" % output_dir, 'w')
version_file.write(file_data)
version_file.close()
|
bsd-3-clause
|
alexston/calibre-webserver
|
src/calibre/utils/fonts/win_fonts.py
|
8
|
6186
|
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import os, sys, atexit
from itertools import product
from calibre import prints, isbytestring
from calibre.constants import plugins, filesystem_encoding
from calibre.utils.fonts.utils import (is_truetype_font, get_font_names,
get_font_characteristics)
class WinFonts(object):
    """Enumerate and read fonts installed on Windows via the compiled
    ``winfonts`` plugin, plus the Liberation families bundled with calibre.

    ``winfonts`` is the plugin module (or a compatible object) exposing the
    FW_NORMAL/FW_BOLD constants and enum_font_families(), font_data(),
    add_system_font() and remove_system_font().
    """

    def __init__(self, winfonts):
        self.w = winfonts

        # Windows requires font files to be executable for them to be loaded,
        # so instead we use this hack: map the bundled Liberation families to
        # their calibre resource paths and serve the data ourselves.
        self.app_font_families = {}
        for f in ('Serif', 'Sans', 'Mono'):
            base = 'fonts/liberation/Liberation%s-%s.ttf'
            self.app_font_families['Liberation %s'%f] = m = {}
            for weight, is_italic in product((self.w.FW_NORMAL, self.w.FW_BOLD), (False, True)):
                name = {(self.w.FW_NORMAL, False):'Regular',
                        (self.w.FW_NORMAL, True):'Italic',
                        (self.w.FW_BOLD, False):'Bold',
                        (self.w.FW_BOLD, True):'BoldItalic'}[(weight, is_italic)]
                m[(weight, is_italic)] = base%(f, name)

    def font_families(self):
        """Return a sorted list of family names: system TrueType fonts plus
        the bundled Liberation families."""
        names = set()
        for font in self.w.enum_font_families():
            if (
                    font['is_truetype'] and
                    # Fonts with names starting with @ are designed for
                    # vertical text
                    not font['name'].startswith('@')
            ):
                names.add(font['name'])
        return sorted(names.union(frozenset(self.app_font_families)))

    def get_normalized_name(self, is_italic, weight):
        """Map (is_italic, weight) to one of 'normal', 'bold', 'italic', 'bi'."""
        if is_italic:
            ft = 'bi' if weight == self.w.FW_BOLD else 'italic'
        else:
            ft = 'bold' if weight == self.w.FW_BOLD else 'normal'
        return ft

    def fonts_for_family(self, family, normalize=True):
        """Return a dict of variant -> (ext, name, raw font data) for every
        available variant of *family*.

        :param normalize: if True keys are 'normal'/'bold'/'italic'/'bi',
            otherwise (is_italic, weight//10) tuples.
        """
        family = type(u'')(family)
        ans = {}
        for weight, is_italic in product((self.w.FW_NORMAL, self.w.FW_BOLD), (False, True)):
            if family in self.app_font_families:
                # Bundled Liberation family: read from calibre resources
                # instead of asking Windows.
                m = self.app_font_families[family]
                path = m.get((weight, is_italic), None)
                if path is None:
                    continue
                data = P(path, data=True)
            else:
                try:
                    data = self.w.font_data(family, is_italic, weight)
                except Exception as e:
                    prints('Failed to get font data for font: %s [%s] with error: %s'%
                            (family, self.get_normalized_name(is_italic, weight), e))
                    continue

            ok, sig = is_truetype_font(data)
            if not ok:
                prints('Not a supported font, sfnt_version: %r'%sig)
                continue
            ext = 'otf' if sig == b'OTTO' else 'ttf'

            try:
                weight, is_italic, is_bold, is_regular = get_font_characteristics(data)[:4]
            except Exception as e:
                prints('Failed to get font characteristic for font: %s [%s]'
                        ' with error: %s'%(family,
                            self.get_normalized_name(is_italic, weight), e))
                continue

            try:
                family_name, sub_family_name, full_name = get_font_names(data)
            except Exception:
                # Bug fix: the previous bare "except: pass" left these names
                # undefined on the first iteration (NameError below) or stale
                # from the previously processed font.
                family_name = sub_family_name = full_name = None

            if normalize:
                ft = {(True, True):'bi', (True, False):'italic', (False,
                    True):'bold', (False, False):'normal'}[(is_italic, is_bold)]
            else:
                ft = (1 if is_italic else 0, weight//10)

            if not (family_name or full_name):
                # Font has no usable name records; fall back to the queried name
                family_name = family

            name = full_name or family + ' ' + (sub_family_name or '')

            try:
                name.encode('ascii')
            except ValueError:
                # Non-ASCII full name: prefer an ASCII-safe constructed name
                try:
                    sub_family_name.encode('ascii')
                    subf = sub_family_name
                except Exception:
                    subf = ''
                name = family + ((' ' + subf) if subf else '')

            ans[ft] = (ext, name, data)
        return ans

    def add_system_font(self, path):
        '''
        Add the font at *path* to the system font collection for this session.

        WARNING: The file you are adding must have execute permissions or
        windows will fail to add it. (ls -l in cygwin to check)
        '''
        if isbytestring(path):
            path = path.decode(filesystem_encoding)
        path = os.path.abspath(path)
        ret = self.w.add_system_font(path)
        if ret > 0:
            # Make sure we remove the font again when the process exits
            atexit.register(self.remove_system_font, path)
        return ret

    def remove_system_font(self, path):
        """Remove a font previously added with add_system_font()."""
        return self.w.remove_system_font(path)
def load_winfonts():
    """Instantiate WinFonts from the compiled winfonts plugin.

    Raises RuntimeError if the plugin failed to load.
    """
    winfonts, load_error = plugins['winfonts']
    if winfonts is None:
        raise RuntimeError('Failed to load the winfonts module: %s' % load_error)
    return WinFonts(winfonts)
def test_ttf_reading():
    # Dump the font characteristics of every font file named on the command
    # line; used for manual testing of the TTF parsing code.
    for f in sys.argv[1:]:
        # Bug fix: open in binary mode and close deterministically. The
        # original leaked the file handle and read binary font data in text
        # mode, which corrupts the bytes on Windows.
        with open(f, 'rb') as fobj:
            raw = fobj.read()
        print (os.path.basename(f))
        get_font_characteristics(raw)
        print()
def test():
    # Smoke test: load the winfonts plugin (preferring a freshly built
    # winfonts.pyd sitting next to the source tree) and dump every font
    # family with its variants.
    base = os.path.abspath(__file__)
    d = os.path.dirname
    # plugins dir is three levels up from this file
    pluginsd = os.path.join(d(d(d(base))), 'plugins')
    if os.path.exists(os.path.join(pluginsd, 'winfonts.pyd')):
        sys.path.insert(0, pluginsd)
        import winfonts
        w = WinFonts(winfonts)
    else:
        w = load_winfonts()
    print (w.w)
    families = w.font_families()
    print (families)
    for family in families:
        prints(family + ':')
        # NOTE: iteritems() -- this module targets Python 2
        for font, data in w.fonts_for_family(family).iteritems():
            prints('  ', font, data[0], data[1], len(data[2]))
        print ()
if __name__ == '__main__':
test()
|
gpl-3.0
|
proversity-org/edx-platform
|
lms/djangoapps/badges/events/tests/test_course_complete.py
|
10
|
2548
|
"""
Tests for the course completion helper functions.
"""
from datetime import datetime
from badges.events import course_complete
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class CourseCompleteTestCase(ModuleStoreTestCase):
    """
    Tests for the course completion helper functions.
    """
    def setUp(self, **kwargs):
        # **kwargs accepted for signature parity with other ModuleStoreTestCase
        # subclasses; note it is intentionally not forwarded to super().
        super(CourseCompleteTestCase, self).setUp()
        # Need key to be deterministic to test slugs.
        self.course = CourseFactory.create(
            org='edX', course='course_test', run='test_run', display_name='Badged',
            start=datetime(year=2015, month=5, day=19),
            end=datetime(year=2015, month=5, day=20)
        )
        self.course_key = self.course.location.course_key

    def test_slug(self):
        """
        Verify slug generation is working as expected. If this test fails, the algorithm has changed, and it will cause
        the handler to lose track of all badges it made in the past.
        """
        # The trailing hex fragment is a hash of the course key; these exact
        # strings pin the algorithm.
        self.assertEqual(
            course_complete.course_slug(self.course_key, 'honor'),
            'edxcourse_testtest_run_honor_fc5519b'
        )
        self.assertEqual(
            course_complete.course_slug(self.course_key, 'verified'),
            'edxcourse_testtest_run_verified_a199ec0'
        )

    def test_dated_description(self):
        """
        Verify that a course with start/end dates contains a description with them.
        """
        self.assertEqual(
            course_complete.badge_description(self.course, 'honor'),
            'Completed the course "Badged" (honor, 2015-05-19 - 2015-05-20)'
        )

    def test_self_paced_description(self):
        """
        Verify that a badge created for a course with no end date gets a different description.
        """
        self.course.end = None
        self.assertEqual(
            course_complete.badge_description(self.course, 'honor'),
            'Completed the course "Badged" (honor)'
        )

    def test_evidence_url(self):
        """
        Make sure the evidence URL points to the right place.
        """
        user = UserFactory.create()
        self.assertEqual(
            'https://edx.org/certificates/user/{user_id}/course/{course_key}?evidence_visit=1'.format(
                user_id=user.id, course_key=self.course_key
            ),
            course_complete.evidence_url(user.id, self.course_key)
        )
|
agpl-3.0
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/eggs/bx_python-0.7.2-py2.7-linux-x86_64-ucs4.egg/bx/intervals/intersection_tests.py
|
7
|
6579
|
import sys, os
import unittest
try:
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
except:
sys.path.insert(0, os.path.dirname(os.path.abspath(".")))
from bx.intervals.intersection import Interval
from bx.intervals.intersection import IntervalNode
from bx.intervals.intersection import IntervalTree
class NeighborTestCase(unittest.TestCase):
    """Tests for IntervalNode.left()/right() nearest-neighbor queries."""

    def setUp(self):
        # Build a tree of eleven intervals [0,9], [10,19], ... [100,109],
        # inserting [50,59] first so it becomes the root.
        iv = IntervalNode(50, 59, Interval(50, 59))
        for i in range(0, 110, 10):
            if i == 50:
                continue
            f = Interval(i, i + 9)
            iv = iv.insert(f.start, f.end, f)
        self.intervals = iv

    def test_left(self):
        iv = self.intervals
        self.assertEqual(str(iv.left(60, n=2)), str([Interval(50, 59), Interval(40, 49)]))
        for i in range(10, 100, 10):
            # nearest interval strictly to the left ends at i - 1
            r = iv.left(i, max_dist=10, n=1)
            self.assertEqual(r[0].end, i - 1)

    def test_toomany(self):
        # asking for more neighbors than exist within range returns only 6
        iv = self.intervals
        self.assertEqual(len(iv.left(60, n=200)), 6)

    def test_right(self):
        iv = self.intervals
        self.assertEqual(str(iv.left(60, n=2)), str([Interval(50, 59), Interval(40, 49)]))

        def get_right_start(b10):
            r = iv.right(b10 + 1, n=1)
            assert len(r) == 1
            return r[0].start

        for i in range(10, 100, 10):
            self.assertEqual(get_right_start(i), i + 10)
        for i in range(0, 100, 10):
            r = iv.right(i - 1, max_dist=10, n=1)
            # Bug fix: removed a stray Python 2 debug statement ("print r")
            # that spammed test output and is a syntax error under Python 3.
            self.assertEqual(r[0].start, i)
class UpDownStreamTestCase(unittest.TestCase):
    # Tests for strand-aware upstream/downstream queries on IntervalTree.
    def setUp(self):
        # Same layout as NeighborTestCase, but via the IntervalTree wrapper.
        iv = IntervalTree()
        iv.add_interval(Interval(50, 59))
        for i in range(0, 110, 10):
            if i == 50: continue
            f = Interval(i, i + 9)
            iv.add_interval(f)
        self.intervals = iv
    def test_upstream(self):
        iv = self.intervals
        # default (+) strand: upstream means strictly to the left
        upstreams = iv.upstream_of_interval(Interval(59, 60), num_intervals=200)
        for u in upstreams:
            self.assertTrue(u.end < 59)
        # reverse (-) strand: upstream means strictly to the right
        upstreams = iv.upstream_of_interval(Interval(60, 70, strand=-1),
                                            num_intervals=200)
        for u in upstreams:
            self.assertTrue(u.start > 70)
        upstreams = iv.upstream_of_interval(Interval(58, 58, strand=-1),
                                            num_intervals=200)
        for u in upstreams:
            self.assertTrue(u.start > 59)
    def test_downstream(self):
        iv = self.intervals
        # default (+) strand: downstream means strictly to the right
        downstreams = iv.downstream_of_interval(Interval(59, 60),
                                                num_intervals=200)
        for d in downstreams:
            self.assertTrue(d.start > 60)
        # reverse (-) strand: downstream means strictly to the left
        downstreams = iv.downstream_of_interval(Interval(59, 60, strand=-1),
                                                num_intervals=200)
        for d in downstreams:
            self.assertTrue(d.start < 59)
    def test_n(self):
        # after()/after_interval() return the requested number of intervals,
        # nearest first
        iv = self.intervals
        for i in range(0, 90, 10):
            r = iv.after(i, max_dist=20, num_intervals=2)
            self.assertEqual(r[0].start, i + 10)
            self.assertEqual(r[1].start, i + 20)
            r = iv.after_interval(Interval(i, i), max_dist=20, num_intervals=2)
            self.assertEqual(r[0].start, i + 10)
            self.assertEqual(r[1].start, i + 20)
class LotsaTestCase(unittest.TestCase):
    """ put lotsa data in the tree and make sure it works"""
    def setUp(self):
        # 100k zero-width intervals every 10 units, plus 600 duplicates of
        # (0, 1) to stress duplicate handling.
        iv = IntervalNode(1, 2, Interval(1, 2))
        self.max = 1000000
        for i in range(0, self.max, 10):
            f = Interval(i, i)
            iv = iv.insert(f.start, f.end, f)
        for i in range(600):
            iv = iv.insert( 0, 1, Interval(0, 1) )
        self.intervals = iv
    def test_count(self):
        iv = self.intervals
        r = iv.right(1, n=33)
        self.assertEqual(len(r), 33)
        l = iv.left(1, n=33)
        self.assertEqual(len(l), 1)
        # default max_dist caps how many results can actually be reached
        u = iv.right(1, n=9999)
        self.assertEqual(len(u), 250)
        # now increase max_dist
        u = iv.right(1, n=9999, max_dist=99999)
        self.assertEqual(len(u), 9999)
    def test_max_dist(self):
        iv = self.intervals
        r = iv.right(1, max_dist=0, n=10)
        self.assertEqual(len(r), 0)
        # each extra 10 units of max_dist exposes exactly one more interval
        for n, d in enumerate(range(10, 1000, 10)):
            r = iv.right(1, max_dist=d, n=10000)
            self.assertEqual(len(r), n + 1)
    def test_find(self):
        iv = self.intervals
        # NOTE(review): this sys.path juggling + __import__ dance appears to
        # work around a local module shadowing stdlib "random" -- confirm
        # before simplifying to a plain import.
        path = sys.path[:]
        sys.path = sys.path[2:]
        ## import random
        random = __import__("random")
        sys.path = path
        for t in range(25):
            start = random.randint(0, self.max - 10000)
            end = start + random.randint(100, 10000)
            results = iv.find(start, end)
            # every reported feature must overlap the query window
            for feat in results:
                self.assertTrue(
                        (feat.end >= start and feat.end <= end)
                            or
                        (feat.start <= end and feat.start >= start)
                        )
class IntervalTreeTest(unittest.TestCase):
    # Exercises the four insertion spellings of IntervalTree and its
    # find/traverse behavior, including on an empty tree.
    def setUp(self):
        iv = IntervalTree()
        n = 0
        for i in range(1, 1000, 80):
            iv.insert(i, i + 10, dict(value=i*i))
            # add is synonym for insert.
            iv.add(i + 20, i + 30, dict(astr=str(i*i)))
            # or insert/add an interval object with start, end attrs.
            iv.insert_interval(Interval(i + 40, i + 50,
                value=dict(astr=str(i*i))))
            iv.add_interval(Interval(i + 60, i + 70,
                value=dict(astr=str(i*i))))
            n += 4
        self.intervals = self.iv = iv
        self.nintervals = n
    def test_find(self):
        r = self.iv.find(100, 200)
        self.assertEqual(len(r), 5)
    def test_traverse(self):
        # traverse must visit every inserted interval exactly once
        a = []
        fn = a.append
        self.iv.traverse(fn)
        self.assertEqual(len(a), self.nintervals)
    def test_empty(self):
        # all query methods must return empty results on an empty tree,
        # not raise
        iv = IntervalTree()
        self.assertEqual([], iv.find(100, 300))
        self.assertEqual([], iv.after(100))
        self.assertEqual([], iv.before(100))
        self.assertEqual([], iv.after_interval(100))
        self.assertEqual([], iv.before_interval(100))
        self.assertEqual([], iv.upstream_of_interval(100))
        self.assertEqual([], iv.downstream_of_interval(100))
        self.assertEqual(None, iv.traverse(lambda x: x.append(1)))
    def test_public_interval(self):
        # every traversed node must expose its Interval via .interval
        # NOTE: assert_ is the deprecated alias of assertTrue
        fn = lambda ival: self.assert_(ival.interval)
        self.iv.traverse(fn)
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
|
coursemdetw/2014c2
|
exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/sre_parse.py
|
111
|
29657
|
#
# Secret Labs' Regular Expression Engine
#
# convert re-style regular expression to sre pattern
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# XXX: show string offset and offending character for all errors
import sys
from sre_constants import *
from _sre import MAXREPEAT
SPECIAL_CHARS = ".\\[{()*+?^$|"
REPEAT_CHARS = "*+?{"
DIGITS = set("0123456789")
OCTDIGITS = set("01234567")
HEXDIGITS = set("0123456789abcdefABCDEF")
WHITESPACE = set(" \t\n\r\v\f")
ESCAPES = {
r"\a": (LITERAL, ord("\a")),
r"\b": (LITERAL, ord("\b")),
r"\f": (LITERAL, ord("\f")),
r"\n": (LITERAL, ord("\n")),
r"\r": (LITERAL, ord("\r")),
r"\t": (LITERAL, ord("\t")),
r"\v": (LITERAL, ord("\v")),
r"\\": (LITERAL, ord("\\"))
}
CATEGORIES = {
r"\A": (AT, AT_BEGINNING_STRING), # start of string
r"\b": (AT, AT_BOUNDARY),
r"\B": (AT, AT_NON_BOUNDARY),
r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]),
r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]),
r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]),
r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]),
r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]),
r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]),
r"\Z": (AT, AT_END_STRING), # end of string
}
FLAGS = {
# standard flags
"i": SRE_FLAG_IGNORECASE,
"L": SRE_FLAG_LOCALE,
"m": SRE_FLAG_MULTILINE,
"s": SRE_FLAG_DOTALL,
"x": SRE_FLAG_VERBOSE,
# extensions
"a": SRE_FLAG_ASCII,
"t": SRE_FLAG_TEMPLATE,
"u": SRE_FLAG_UNICODE,
}
class Pattern:
    """Master state shared while parsing one pattern: the accumulated global
    flags plus bookkeeping for capturing groups (numbers, names, open set)."""

    def __init__(self):
        self.flags = 0          # SRE_FLAG_* bits seen so far
        self.open = []          # group numbers opened but not yet closed
        self.groups = 1         # next group number (group 0 is the whole match)
        self.groupdict = {}     # group name -> group number

    def opengroup(self, name=None):
        """Allocate the next group number (optionally named) and mark it open."""
        group_id = self.groups
        self.groups += 1
        if name is not None:
            previous = self.groupdict.get(name, None)
            if previous is not None:
                raise error("redefinition of group name %s as group %d; "
                            "was group %d" % (repr(name), group_id, previous))
            self.groupdict[name] = group_id
        self.open.append(group_id)
        return group_id

    def closegroup(self, gid):
        """Mark a previously opened group as closed."""
        self.open.remove(gid)

    def checkgroup(self, gid):
        """Return True if *gid* names a fully defined (already closed) group."""
        return gid < self.groups and gid not in self.open
class SubPattern:
    # a subpattern, in intermediate form: a list of (opcode, argument)
    # tuples owned by a master Pattern object
    def __init__(self, pattern, data=None):
        # pattern: the master Pattern object this subpattern belongs to
        self.pattern = pattern
        if data is None:
            data = []
        self.data = data
        # cached (min, max) width; filled in lazily by getwidth()
        self.width = None
    def __iter__(self):
        return iter(self.data)
    def dump(self, level=0):
        # pretty-print the parsed form for debugging (used by parse() when
        # SRE_FLAG_DEBUG is set); level controls the indentation depth
        nl = 1
        seqtypes = (tuple, list)
        for op, av in self.data:
            print(level*"  " + op, end=' '); nl = 0
            if op == "in":
                # member sublanguage
                print(); nl = 1
                for op, a in av:
                    print((level+1)*"  " + op, a)
            elif op == "branch":
                print(); nl = 1
                i = 0
                for a in av[1]:
                    if i > 0:
                        print(level*"  " + "or")
                    a.dump(level+1); nl = 1
                    i = i + 1
            elif isinstance(av, seqtypes):
                for a in av:
                    if isinstance(a, SubPattern):
                        if not nl: print()
                        a.dump(level+1); nl = 1
                    else:
                        print(a, end=' ') ; nl = 0
            else:
                print(av, end=' ') ; nl = 0
        if not nl: print()
    def __repr__(self):
        return repr(self.data)
    def __len__(self):
        return len(self.data)
    def __delitem__(self, index):
        del self.data[index]
    def __getitem__(self, index):
        # slicing yields a new SubPattern sharing the same master Pattern
        if isinstance(index, slice):
            return SubPattern(self.pattern, self.data[index])
        return self.data[index]
    def __setitem__(self, index, code):
        self.data[index] = code
    def insert(self, index, code):
        self.data.insert(index, code)
    def append(self, code):
        self.data.append(code)
    def getwidth(self):
        # determine the width (min, max) for this subpattern
        if self.width:
            return self.width
        lo = hi = 0
        UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY)
        REPEATCODES = (MIN_REPEAT, MAX_REPEAT)
        for op, av in self.data:
            if op is BRANCH:
                # width of a branch is (min over alternatives, max over them)
                i = sys.maxsize
                j = 0
                for av in av[1]:
                    l, h = av.getwidth()
                    i = min(i, l)
                    j = max(j, h)
                lo = lo + i
                hi = hi + j
            elif op is CALL:
                i, j = av.getwidth()
                lo = lo + i
                hi = hi + j
            elif op is SUBPATTERN:
                i, j = av[1].getwidth()
                lo = lo + i
                hi = hi + j
            elif op in REPEATCODES:
                # av is (min, max, item): scale the item's width by the counts
                i, j = av[2].getwidth()
                lo = lo + int(i) * av[0]
                hi = hi + int(j) * av[1]
            elif op in UNITCODES:
                # single-character codes contribute exactly one position
                lo = lo + 1
                hi = hi + 1
            elif op == SUCCESS:
                break
        # clamp to sys.maxsize and cache
        self.width = int(min(lo, sys.maxsize)), int(min(hi, sys.maxsize))
        return self.width
class Tokenizer:
    # Lexer over the pattern string. self.next always holds the upcoming
    # token -- a single character, or a two-character escape such as "\\d" --
    # or None at the end of input.
    def __init__(self, string):
        # istext: True for str patterns, False for bytes-like patterns
        self.istext = isinstance(string, str)
        self.string = string
        self.index = 0
        self.__next()
    def __next(self):
        # advance self.index past the current token and load the next one
        if self.index >= len(self.string):
            self.next = None
            return
        char = self.string[self.index:self.index+1]
        # Special case for the str8, since indexing returns a integer
        # XXX This is only needed for test_bug_926075 in test_re.py
        if char and not self.istext:
            char = chr(char[0])
        if char == "\\":
            # a backslash always pairs with the following character
            try:
                c = self.string[self.index + 1]
            except IndexError:
                raise error("bogus escape (end of line)")
            if not self.istext:
                c = chr(c)
            char = char + c
        self.index = self.index + len(char)
        self.next = char
    def match(self, char, skip=1):
        # return 1 if the next token equals char; consume it unless skip is 0
        if char == self.next:
            if skip:
                self.__next()
            return 1
        return 0
    def get(self):
        # consume and return the next token (None at end of input)
        this = self.next
        self.__next()
        return this
    def getwhile(self, n, charset):
        # consume up to n tokens as long as each is in charset; return them
        # concatenated
        result = ''
        for _ in range(n):
            c = self.next
            if c not in charset:
                break
            result += c
            self.__next()
        return result
    def tell(self):
        # snapshot of the current position, usable with seek()
        return self.index, self.next
    def seek(self, index):
        # restore a position previously returned by tell()
        self.index, self.next = index
def isident(char):
    # True for characters allowed to start a group name: "_" or ASCII letters.
    return char == "_" or "a" <= char <= "z" or "A" <= char <= "Z"
def isdigit(char):
    # True only for the ASCII digits "0".."9" (unlike str.isdigit, which
    # also accepts other Unicode digit characters).
    return not (char < "0" or "9" < char)
def isname(name):
    # check that group name is a valid identifier-style string:
    # first character "_" or an ASCII letter, the rest may also be digits
    # (logic of the isident/isdigit helpers, inlined)
    first = name[0]
    if not (first == "_" or "a" <= first <= "z" or "A" <= first <= "Z"):
        return False
    for ch in name[1:]:
        ok = (ch == "_" or "a" <= ch <= "z" or "A" <= ch <= "Z"
              or "0" <= ch <= "9")
        if not ok:
            return False
    return True
def _class_escape(source, escape):
    # handle escape code inside character class; escape is the
    # two-character token ("\\x") already read, source supplies any
    # further characters the escape consumes
    code = ESCAPES.get(escape)
    if code:
        return code
    code = CATEGORIES.get(escape)
    # only IN-style categories (\d, \w, ...) make sense inside [...]
    if code and code[0] == IN:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape (exactly two digits)
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise ValueError
            return LITERAL, int(escape[2:], 16) & 0xff
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise ValueError
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise ValueError
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c in OCTDIGITS:
            # octal escape (up to three digits)
            escape += source.getwhile(2, OCTDIGITS)
            return LITERAL, int(escape[1:], 8) & 0xff
        elif c in DIGITS:
            # \8 and \9 are not valid escapes inside a class
            raise ValueError
        if len(escape) == 2:
            # any other single escaped character is a literal
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise error("bogus escape: %s" % repr(escape))
def _escape(source, escape, state):
    # handle escape code in expression (outside a character class);
    # state is the master Pattern, needed to resolve group references
    code = CATEGORIES.get(escape)
    if code:
        return code
    code = ESCAPES.get(escape)
    if code:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise ValueError
            return LITERAL, int(escape[2:], 16) & 0xff
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise ValueError
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise ValueError
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c == "0":
            # octal escape
            escape += source.getwhile(2, OCTDIGITS)
            return LITERAL, int(escape[1:], 8) & 0xff
        elif c in DIGITS:
            # octal escape *or* decimal group reference (sigh)
            if source.next in DIGITS:
                escape = escape + source.get()
                if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and
                    source.next in OCTDIGITS):
                    # got three octal digits; this is an octal escape
                    escape = escape + source.get()
                    return LITERAL, int(escape[1:], 8) & 0xff
            # not an octal escape, so this is a group reference
            group = int(escape[1:])
            if group < state.groups:
                if not state.checkgroup(group):
                    raise error("cannot refer to open group")
                return GROUPREF, group
            raise ValueError
        if len(escape) == 2:
            # any other single escaped character is a literal
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise error("bogus escape: %s" % repr(escape))
def _parse_sub(source, state, nested=1):
    # parse an alternation: a|b|c
    # nested=0 means we are at the top level, so a ")" in the input is an
    # error rather than the end of this subpattern
    items = []
    itemsappend = items.append
    sourcematch = source.match
    while 1:
        itemsappend(_parse(source, state))
        if sourcematch("|"):
            continue
        if not nested:
            break
        if not source.next or sourcematch(")", 0):
            break
        else:
            raise error("pattern not properly closed")
    if len(items) == 1:
        # single alternative: no BRANCH wrapper needed
        return items[0]
    subpattern = SubPattern(state)
    subpatternappend = subpattern.append
    # check if all items share a common prefix
    while 1:
        prefix = None
        for item in items:
            if not item:
                break
            if prefix is None:
                prefix = item[0]
            elif item[0] != prefix:
                break
        else:
            # all subitems start with a common "prefix".
            # move it out of the branch
            for item in items:
                del item[0]
            subpatternappend(prefix)
            continue # check next one
        break
    # check if the branch can be replaced by a character set
    for item in items:
        if len(item) != 1 or item[0][0] != LITERAL:
            break
    else:
        # we can store this as a character set instead of a
        # branch (the compiler may optimize this even more)
        set = []
        setappend = set.append
        for item in items:
            setappend(item[0])
        subpatternappend((IN, set))
        return subpattern
    subpattern.append((BRANCH, (None, items)))
    return subpattern
def _parse_sub_cond(source, state, condgroup):
    # parse the body of a conditional backreference, (?(group)yes|no):
    # at most two branches, emitted as a GROUPREF_EXISTS node
    item_yes = _parse(source, state)
    if source.match("|"):
        item_no = _parse(source, state)
        if source.match("|"):
            raise error("conditional backref with more than two branches")
    else:
        item_no = None
    if source.next and not source.match(")", 0):
        raise error("pattern not properly closed")
    subpattern = SubPattern(state)
    subpattern.append((GROUPREF_EXISTS, (condgroup, item_yes, item_no)))
    return subpattern
_PATTERNENDERS = set("|)")
_ASSERTCHARS = set("=!<")
_LOOKBEHINDASSERTCHARS = set("=!")
_REPEATCODES = set([MIN_REPEAT, MAX_REPEAT])
def _parse(source, state):
    # parse a simple pattern: one alternative of an alternation, i.e.
    # everything up to the next "|", ")" or end of input. Returns a
    # SubPattern of (opcode, argument) tuples.
    subpattern = SubPattern(state)

    # precompute constants into local variables
    subpatternappend = subpattern.append
    sourceget = source.get
    sourcematch = source.match
    _len = len
    PATTERNENDERS = _PATTERNENDERS
    ASSERTCHARS = _ASSERTCHARS
    LOOKBEHINDASSERTCHARS = _LOOKBEHINDASSERTCHARS
    REPEATCODES = _REPEATCODES

    while 1:

        if source.next in PATTERNENDERS:
            break # end of subpattern
        this = sourceget()
        if this is None:
            break # end of pattern

        if state.flags & SRE_FLAG_VERBOSE:
            # skip whitespace and comments
            if this in WHITESPACE:
                continue
            if this == "#":
                while 1:
                    this = sourceget()
                    if this in (None, "\n"):
                        break
                continue

        if this and this[0] not in SPECIAL_CHARS:
            # ordinary character
            subpatternappend((LITERAL, ord(this)))

        elif this == "[":
            # character set
            set = []
            setappend = set.append
##          if sourcematch(":"):
##              pass # handle character classes
            if sourcematch("^"):
                setappend((NEGATE, None))
            # check remaining characters
            # (start is remembered so a literal "]" directly after "[" or
            # "[^" is treated as a member, not the closing bracket)
            start = set[:]
            while 1:
                this = sourceget()
                if this == "]" and set != start:
                    break
                elif this and this[0] == "\\":
                    code1 = _class_escape(source, this)
                elif this:
                    code1 = LITERAL, ord(this)
                else:
                    raise error("unexpected end of regular expression")
                if sourcematch("-"):
                    # potential range
                    this = sourceget()
                    if this == "]":
                        # trailing "-" before "]": both are literals
                        if code1[0] is IN:
                            code1 = code1[1][0]
                        setappend(code1)
                        setappend((LITERAL, ord("-")))
                        break
                    elif this:
                        if this[0] == "\\":
                            code2 = _class_escape(source, this)
                        else:
                            code2 = LITERAL, ord(this)
                        if code1[0] != LITERAL or code2[0] != LITERAL:
                            raise error("bad character range")
                        lo = code1[1]
                        hi = code2[1]
                        if hi < lo:
                            raise error("bad character range")
                        setappend((RANGE, (lo, hi)))
                    else:
                        raise error("unexpected end of regular expression")
                else:
                    if code1[0] is IN:
                        code1 = code1[1][0]
                    setappend(code1)

            # XXX: <fl> should move set optimization to compiler!
            if _len(set)==1 and set[0][0] is LITERAL:
                subpatternappend(set[0]) # optimization
            elif _len(set)==2 and set[0][0] is NEGATE and set[1][0] is LITERAL:
                subpatternappend((NOT_LITERAL, set[1][1])) # optimization
            else:
                # XXX: <fl> should add charmap optimization here
                subpatternappend((IN, set))

        elif this and this[0] in REPEAT_CHARS:
            # repeat previous item
            if this == "?":
                min, max = 0, 1
            elif this == "*":
                min, max = 0, MAXREPEAT

            elif this == "+":
                min, max = 1, MAXREPEAT
            elif this == "{":
                if source.next == "}":
                    # "{}" is a literal brace
                    subpatternappend((LITERAL, ord(this)))
                    continue
                here = source.tell()
                min, max = 0, MAXREPEAT
                lo = hi = ""
                while source.next in DIGITS:
                    lo = lo + source.get()
                if sourcematch(","):
                    while source.next in DIGITS:
                        hi = hi + sourceget()
                else:
                    hi = lo
                if not sourcematch("}"):
                    # not a well-formed {m,n}: treat "{" as a literal and
                    # rewind to just after it
                    subpatternappend((LITERAL, ord(this)))
                    source.seek(here)
                    continue
                if lo:
                    min = int(lo)
                    if min >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                if hi:
                    max = int(hi)
                    if max >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                if max < min:
                    raise error("bad repeat interval")
            else:
                raise error("not supported")
            # figure out which item to repeat
            if subpattern:
                item = subpattern[-1:]
            else:
                item = None
            if not item or (_len(item) == 1 and item[0][0] == AT):
                raise error("nothing to repeat")
            if item[0][0] in REPEATCODES:
                raise error("multiple repeat")
            if sourcematch("?"):
                # trailing "?" makes the repeat non-greedy
                subpattern[-1] = (MIN_REPEAT, (min, max, item))
            else:
                subpattern[-1] = (MAX_REPEAT, (min, max, item))

        elif this == ".":
            subpatternappend((ANY, None))

        elif this == "(":
            # group = 1: capturing, 2: non-capturing, 0: not a group at all
            group = 1
            name = None
            condgroup = None
            if sourcematch("?"):
                group = 0
                # options
                if sourcematch("P"):
                    # python extensions
                    if sourcematch("<"):
                        # named group: skip forward to end of name
                        name = ""
                        while 1:
                            char = sourceget()
                            if char is None:
                                raise error("unterminated name")
                            if char == ">":
                                break
                            name = name + char
                        group = 1
                        if not name:
                            raise error("missing group name")
                        if not isname(name):
                            raise error("bad character in group name")
                    elif sourcematch("="):
                        # named backreference
                        name = ""
                        while 1:
                            char = sourceget()
                            if char is None:
                                raise error("unterminated name")
                            if char == ")":
                                break
                            name = name + char
                        if not name:
                            raise error("missing group name")
                        if not isname(name):
                            raise error("bad character in group name")
                        gid = state.groupdict.get(name)
                        if gid is None:
                            raise error("unknown group name")
                        subpatternappend((GROUPREF, gid))
                        continue
                    else:
                        char = sourceget()
                        if char is None:
                            raise error("unexpected end of pattern")
                        raise error("unknown specifier: ?P%s" % char)
                elif sourcematch(":"):
                    # non-capturing group
                    group = 2
                elif sourcematch("#"):
                    # comment
                    while 1:
                        if source.next is None or source.next == ")":
                            break
                        sourceget()
                    if not sourcematch(")"):
                        raise error("unbalanced parenthesis")
                    continue
                elif source.next in ASSERTCHARS:
                    # lookahead assertions
                    char = sourceget()
                    dir = 1
                    if char == "<":
                        if source.next not in LOOKBEHINDASSERTCHARS:
                            raise error("syntax error")
                        dir = -1 # lookbehind
                        char = sourceget()
                    p = _parse_sub(source, state)
                    if not sourcematch(")"):
                        raise error("unbalanced parenthesis")
                    if char == "=":
                        subpatternappend((ASSERT, (dir, p)))
                    else:
                        subpatternappend((ASSERT_NOT, (dir, p)))
                    continue
                elif sourcematch("("):
                    # conditional backreference group
                    condname = ""
                    while 1:
                        char = sourceget()
                        if char is None:
                            raise error("unterminated name")
                        if char == ")":
                            break
                        condname = condname + char
                    group = 2
                    if not condname:
                        raise error("missing group name")
                    if isname(condname):
                        condgroup = state.groupdict.get(condname)
                        if condgroup is None:
                            raise error("unknown group name")
                    else:
                        try:
                            condgroup = int(condname)
                        except ValueError:
                            raise error("bad character in group name")
                else:
                    # flags
                    if not source.next in FLAGS:
                        raise error("unexpected end of pattern")
                    while source.next in FLAGS:
                        state.flags = state.flags | FLAGS[sourceget()]
            if group:
                # parse group contents
                if group == 2:
                    # anonymous group
                    group = None
                else:
                    group = state.opengroup(name)
                if condgroup:
                    p = _parse_sub_cond(source, state, condgroup)
                else:
                    p = _parse_sub(source, state)
                if not sourcematch(")"):
                    raise error("unbalanced parenthesis")
                if group is not None:
                    state.closegroup(group)
                subpatternappend((SUBPATTERN, (group, p)))
            else:
                # inline flags only, e.g. "(?i)": expect an immediate ")"
                while 1:
                    char = sourceget()
                    if char is None:
                        raise error("unexpected end of pattern")
                    if char == ")":
                        break
                    raise error("unknown extension")

        elif this == "^":
            subpatternappend((AT, AT_BEGINNING))

        elif this == "$":
            subpattern.append((AT, AT_END))

        elif this and this[0] == "\\":
            code = _escape(source, this, state)
            subpatternappend(code)

        else:
            raise error("parser error")

    return subpattern
def fix_flags(src, flags):
    """Check and fix flags according to the type of pattern (str or bytes).

    For str patterns, UNICODE is implied unless ASCII was requested; the two
    are mutually exclusive. Bytes patterns may never carry UNICODE.
    """
    if not isinstance(src, str):
        # bytes pattern
        if flags & SRE_FLAG_UNICODE:
            raise ValueError("can't use UNICODE flag with a bytes pattern")
        return flags
    # str pattern
    if flags & SRE_FLAG_ASCII:
        if flags & SRE_FLAG_UNICODE:
            raise ValueError("ASCII and UNICODE flags are incompatible")
        return flags
    return flags | SRE_FLAG_UNICODE
def parse(str, flags=0, pattern=None):
    # parse 're' pattern into list of (opcode, argument) tuples; this is
    # the module entry point used by the sre compiler. pattern, if given,
    # is the master Pattern object to populate (a fresh one otherwise).
    source = Tokenizer(str)

    if pattern is None:
        pattern = Pattern()
    pattern.flags = flags
    pattern.str = str

    p = _parse_sub(source, pattern, 0)
    p.pattern.flags = fix_flags(str, p.pattern.flags)

    tail = source.get()
    if tail == ")":
        raise error("unbalanced parenthesis")
    elif tail:
        raise error("bogus characters at end of regular expression")

    if flags & SRE_FLAG_DEBUG:
        p.dump()

    if not (flags & SRE_FLAG_VERBOSE) and p.pattern.flags & SRE_FLAG_VERBOSE:
        # the VERBOSE flag was switched on inside the pattern.  to be
        # on the safe side, we'll parse the whole thing again...
        return parse(str, p.pattern.flags)

    return p
def parse_template(source, pattern):
    # parse 're' replacement string into list of literals and
    # group references; returns (groups, literals) where groups is a list
    # of (literal-slot-index, group-number) pairs and literals is a list
    # with None placeholders at the group slots
    s = Tokenizer(source)
    sget = s.get
    p = []
    a = p.append
    def literal(literal, p=p, pappend=a):
        # append a literal, merging it with a preceding LITERAL entry
        if p and p[-1][0] is LITERAL:
            p[-1] = LITERAL, p[-1][1] + literal
        else:
            pappend((LITERAL, literal))
    sep = source[:0]
    # NOTE(review): in CPython the bytes branch differs (latin-1 decode);
    # here both branches are chr -- presumably a Brython simplification,
    # relying on the re-encode step at the bottom. Confirm before changing.
    if isinstance(sep, str):
        makechar = chr
    else:
        makechar = chr
    while 1:
        this = sget()
        if this is None:
            break # end of replacement string
        if this and this[0] == "\\":
            # group
            c = this[1:2]
            if c == "g":
                # \g<name> or \g<number> reference
                name = ""
                if s.match("<"):
                    while 1:
                        char = sget()
                        if char is None:
                            raise error("unterminated group name")
                        if char == ">":
                            break
                        name = name + char
                if not name:
                    raise error("missing group name")
                try:
                    index = int(name)
                    if index < 0:
                        raise error("negative group number")
                except ValueError:
                    if not isname(name):
                        raise error("bad character in group name")
                    try:
                        index = pattern.groupindex[name]
                    except KeyError:
                        raise IndexError("unknown group name")
                a((MARK, index))
            elif c == "0":
                # \0 plus up to two octal digits: an octal character escape
                if s.next in OCTDIGITS:
                    this = this + sget()
                    if s.next in OCTDIGITS:
                        this = this + sget()
                literal(makechar(int(this[1:], 8) & 0xff))
            elif c in DIGITS:
                # \1.. : octal escape (three octal digits) or group number
                isoctal = False
                if s.next in DIGITS:
                    this = this + sget()
                    if (c in OCTDIGITS and this[2] in OCTDIGITS and
                        s.next in OCTDIGITS):
                        this = this + sget()
                        isoctal = True
                        literal(makechar(int(this[1:], 8) & 0xff))
                if not isoctal:
                    a((MARK, int(this[1:])))
            else:
                # standard escape (\n, \t, ...) or a literal backslash pair
                try:
                    this = makechar(ESCAPES[this][1])
                except KeyError:
                    pass
                literal(this)
        else:
            literal(this)
    # convert template to groups and literals lists
    i = 0
    groups = []
    groupsappend = groups.append
    literals = [None] * len(p)
    if isinstance(source, str):
        encode = lambda x: x
    else:
        # The tokenizer implicitly decodes bytes objects as latin-1, we must
        # therefore re-encode the final representation.
        encode = lambda x: x.encode('latin-1')
    for c, s in p:
        if c is MARK:
            groupsappend((i, s))
            # literal[i] is already None
        else:
            literals[i] = encode(s)
        i = i + 1
    return groups, literals
def expand_template(template, match):
    """Fill in a parsed replacement *template* with the groups of *match*.

    ``template`` is the (groups, literals) pair produced by
    parse_template().  The literal list is copied first so the cached
    template is never mutated.  Raises ``error`` for an unmatched or
    invalid group reference.
    """
    groups, literals = template
    parts = list(literals)
    group_of = match.group
    try:
        for position, number in groups:
            value = group_of(number)
            if value is None:
                raise error("unmatched group")
            parts[position] = value
    except IndexError:
        raise error("invalid group reference")
    # Joining on an empty slice of the subject keeps the str/bytes type.
    return match.string[:0].join(parts)
|
gpl-2.0
|
bdfoster/blumate
|
blumate/components/scsgate.py
|
1
|
5023
|
"""
Support for SCSGate components.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/scsgate/
"""
import logging
from threading import Lock
from blumate.core import EVENT_BLUMATE_STOP
# Third-party package this component requires at runtime.
REQUIREMENTS = ['scsgate==0.1.0']
# Configuration domain for this component.
DOMAIN = "scsgate"
# Module-level singleton holding the active SCSGate instance; set by setup().
SCSGATE = None
_LOGGER = logging.getLogger(__name__)
class SCSGate:
    """The class for dealing with the SCSGate device via scsgate.Reactor."""

    def __init__(self, device, logger):
        """Initialize the SCSGate.

        :param device: serial device the SCSGate hardware is attached to.
        :param logger: logger instance used for all diagnostics.
        """
        self._logger = logger
        # Devices already known to the gate, keyed by SCS bus ID.
        self._devices = {}
        # Devices queued for registration, keyed by SCS bus ID.
        self._devices_to_register = {}
        self._devices_to_register_lock = Lock()
        # SCS ID of the device whose status request is currently in flight.
        self._device_being_registered = None
        self._device_being_registered_lock = Lock()

        # Imported lazily so the module can be imported before the
        # scsgate requirement is installed.
        from scsgate.connection import Connection
        connection = Connection(device=device, logger=self._logger)

        from scsgate.reactor import Reactor
        self._reactor = Reactor(
            connection=connection,
            logger=self._logger,
            handle_message=self.handle_message)

    def handle_message(self, message):
        """Method called whenever a message is seen on the bus."""
        from scsgate.messages import StateMessage, ScenarioTriggeredMessage

        self._logger.debug("Received message {}".format(message))
        if not isinstance(message, StateMessage) and \
                not isinstance(message, ScenarioTriggeredMessage):
            # fixed typo in the log message ("releavant" -> "relevant")
            msg = "Ignored message {} - not relevant type".format(
                message)
            self._logger.debug(msg)
            return

        if message.entity in self._devices:
            new_device_activated = False
            with self._devices_to_register_lock:
                if message.entity == self._device_being_registered:
                    # The pending registration is confirmed; continue with
                    # the next queued device.
                    self._device_being_registered = None
                    new_device_activated = True
            if new_device_activated:
                self._activate_next_device()

            # pylint: disable=broad-except
            try:
                self._devices[message.entity].process_event(message)
            except Exception as exception:
                msg = "Exception while processing event: {}".format(
                    exception)
                self._logger.error(msg)
        else:
            # fixed typo in the log message ("unknonw" -> "unknown")
            self._logger.info(
                "Ignoring state message for device {} because unknown".format(
                    message.entity))

    @property
    def devices(self):
        """Dictionary with known devices.

        Key is device ID, value is the device itself.
        """
        return self._devices

    def add_device(self, device):
        """Add the specified device.

        The list contain already registered ones.
        Beware: this is not what you usually want to do, take a look at
        `add_devices_to_register`
        """
        self._devices[device.scs_id] = device

    def add_devices_to_register(self, devices):
        """List of devices to be registered."""
        with self._devices_to_register_lock:
            for device in devices:
                self._devices_to_register[device.scs_id] = device
        self._activate_next_device()

    def _activate_next_device(self):
        """Start the activation of the first device."""
        from scsgate.tasks import GetStatusTask

        # NOTE(review): this loop drains the whole registration queue at
        # once rather than only the first device -- preserved as-is.
        with self._devices_to_register_lock:
            while len(self._devices_to_register) != 0:
                _, device = self._devices_to_register.popitem()
                self._devices[device.scs_id] = device
                self._device_being_registered = device.scs_id
                self._reactor.append_task(GetStatusTask(target=device.scs_id))

    def is_device_registered(self, device_id):
        """Check whether a device is already registered or not."""
        with self._devices_to_register_lock:
            if device_id in self._devices_to_register.keys():
                return False

        with self._device_being_registered_lock:
            if device_id == self._device_being_registered:
                return False

        return True

    def start(self):
        """Start the scsgate.Reactor."""
        self._reactor.start()

    def stop(self):
        """Stop the scsgate.Reactor."""
        self._reactor.stop()

    def append_task(self, task):
        """Register a new task to be executed."""
        self._reactor.append_task(task)
def setup(hass, config):
    """Setup the SCSGate component."""
    global SCSGATE

    serial_device = config[DOMAIN]['device']

    # pylint: disable=broad-except
    try:
        # Assign the global before start() so a failing start still leaves
        # the instance reachable (matches historical behavior).
        SCSGATE = SCSGate(device=serial_device, logger=_LOGGER)
        SCSGATE.start()
    except Exception as exception:
        _LOGGER.error("Cannot setup SCSGate component: %s", exception)
        return False

    def stop_monitor(event):
        """Stop the SCSGate."""
        _LOGGER.info("Stopping SCSGate monitor thread")
        SCSGATE.stop()

    # Shut the reactor down cleanly when the core stops.
    hass.bus.listen_once(EVENT_BLUMATE_STOP, stop_monitor)
    return True
|
mit
|
anusornc/vitess
|
test/queryservice_test.py
|
8
|
2628
|
#!/usr/bin/env python
import logging
import optparse
import traceback
import unittest
import sys
import os
import utils
import framework
from queryservice_tests import cache_tests
from queryservice_tests import nocache_tests
from queryservice_tests import stream_tests
from queryservice_tests import status_tests
from queryservice_tests import test_env
from mysql_flavor import set_mysql_flavor
from protocols_flavor import set_protocols_flavor
from topo_flavor.server import set_topo_server_flavor
def main():
    """Parse command-line options and run the query service test suite."""
    parser = optparse.OptionParser(usage="usage: %prog [options] [test_names]")
    # fixed typo in the help string: "memcache d" -> "memcached"
    parser.add_option("-m", "--memcache", action="store_true", default=False,
                      help="starts a memcached, and tests rowcache")
    parser.add_option("-e", "--env", default='vttablet',
                      help="Environment that will be used. Valid options: vttablet, vtocc")
    utils.add_options(parser)
    (options, args) = parser.parse_args()
    # Only show errors from the framework; the test runner does its own output.
    logging.getLogger().setLevel(logging.ERROR)
    utils.set_options(options)
    run_tests(options, args)
def run_tests(options, args):
    # Build and run the query-service test suite.
    #
    # args:   optional list of individual test names (or the special name
    #         'teardown' to clean up a leftover environment and exit).
    # options: parsed optparse options; .env selects the test environment,
    #         .memcache enables the rowcache test modules.
    suite = unittest.TestSuite()
    if args:
        if args[0] == 'teardown':
            # Pseudo test name: tear down a previous environment and stop.
            test_env.TestEnv(options.env).tearDown()
            exit(0)
        # Look each requested test name up in the known test classes;
        # cache tests are only reachable when --memcache was given.
        for arg in args:
            if hasattr(nocache_tests.TestNocache, arg):
                suite.addTest(nocache_tests.TestNocache(arg))
            elif hasattr(stream_tests.TestStream, arg):
                suite.addTest(stream_tests.TestStream(arg))
            elif hasattr(cache_tests.TestCache, arg) and options.memcache:
                suite.addTest(cache_tests.TestCache(arg))
            elif hasattr(cache_tests.TestWillNotBeCached, arg) and options.memcache:
                suite.addTest(cache_tests.TestWillNotBeCached(arg))
            else:
                raise Exception(arg, "not found in tests")
    else:
        # No explicit names: load every test from the default modules.
        modules = [nocache_tests, stream_tests, status_tests]
        if options.memcache:
            modules.append(cache_tests)
        for m in modules:
            suite.addTests(unittest.TestLoader().loadTestsFromModule(m))
    env = test_env.TestEnv(options.env)
    try:
        env.memcache = options.memcache
        env.setUp()
        print "Starting queryservice_test.py: %s" % options.env
        sys.stdout.flush()
        framework.TestCase.setenv(env)
        result = unittest.TextTestRunner(verbosity=options.verbose, failfast=True).run(suite)
        if not result.wasSuccessful():
            raise Exception("test failures")
    finally:
        # Always tear down (unless asked to keep the environment running).
        if not options.skip_teardown:
            env.tearDown()
        if options.keep_logs:
            print("Leaving temporary files behind (--keep-logs), please "
                  "clean up before next run: " + os.environ["VTDATAROOT"])
# Script entry point.
if __name__ == "__main__":
    main()
|
bsd-3-clause
|
samtx/whatsmyrankine
|
venv/lib/python2.7/site-packages/werkzeug/datastructures.py
|
122
|
87447
|
# -*- coding: utf-8 -*-
"""
werkzeug.datastructures
~~~~~~~~~~~~~~~~~~~~~~~
This module provides mixins and classes with an immutable interface.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import codecs
import mimetypes
from copy import deepcopy
from itertools import repeat
from werkzeug._internal import _missing, _empty_stream
from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \
PY2, text_type, integer_types, string_types, make_literal_wrapper, \
to_native
from werkzeug.filesystem import get_filesystem_encoding
# Splits locale identifiers on '_' or '-' (e.g. "en-US" / "en_US").
_locale_delim_re = re.compile(r'[_-]')
def is_immutable(self):
    """Abort a mutating call on an immutable object with a ``TypeError``."""
    name = self.__class__.__name__
    raise TypeError('%r objects are immutable' % name)
def iter_multi_items(mapping):
    """Iterates over the items of a mapping yielding keys and values
    without dropping any from more complex structures.
    """
    if isinstance(mapping, MultiDict):
        # A MultiDict can expand its own value lists.
        for pair in iteritems(mapping, multi=True):
            yield pair
    elif isinstance(mapping, dict):
        for key, value in iteritems(mapping):
            if not isinstance(value, (tuple, list)):
                yield key, value
            else:
                for item in value:
                    yield key, item
    else:
        # Anything else is assumed to already be an iterable of pairs.
        for pair in mapping:
            yield pair
def native_itermethods(names):
    # Class decorator used for Python 2/3 compatibility: on Python 3 the
    # decorated class is returned unchanged, while on Python 2 every
    # generator method listed in *names* is exposed as ``iter<name>`` and
    # its original name is rebound to a list-returning wrapper.
    if not PY2:
        return lambda x: x

    def setmethod(cls, name):
        itermethod = getattr(cls, name)
        # Keep the lazy generator available under the iter* name.
        setattr(cls, 'iter%s' % name, itermethod)
        listmethod = lambda self, *a, **kw: list(itermethod(self, *a, **kw))
        listmethod.__doc__ = \
            'Like :py:meth:`iter%s`, but returns a list.' % name
        setattr(cls, name, listmethod)

    def wrap(cls):
        for name in names:
            setmethod(cls, name)
        return cls
    return wrap
class ImmutableListMixin(object):
    """Makes a :class:`list` immutable.

    .. versionadded:: 0.5

    :private:
    """

    # Cached result of __hash__; computed lazily on first use.
    _hash_cache = None

    def __hash__(self):
        if self._hash_cache is not None:
            return self._hash_cache
        rv = self._hash_cache = hash(tuple(self))
        return rv

    def __reduce_ex__(self, protocol):
        # Pickle as a plain list; unpickling rebuilds this type from it.
        return type(self), (list(self),)

    def __delitem__(self, key):
        is_immutable(self)

    def __delslice__(self, i, j):
        # Python 2 only slice-deletion hook.
        is_immutable(self)

    def __iadd__(self, other):
        is_immutable(self)
    __imul__ = __iadd__

    def __setitem__(self, key, value):
        is_immutable(self)

    def __setslice__(self, i, j, value):
        # Python 2 only slice-assignment hook.
        is_immutable(self)

    def append(self, item):
        is_immutable(self)
    remove = append

    def extend(self, iterable):
        is_immutable(self)

    def insert(self, pos, value):
        is_immutable(self)

    def pop(self, index=-1):
        is_immutable(self)

    def reverse(self):
        is_immutable(self)

    def sort(self, cmp=None, key=None, reverse=None):
        is_immutable(self)
class ImmutableList(ImmutableListMixin, list):
    """An immutable :class:`list`.

    .. versionadded:: 0.5

    :private:
    """

    def __repr__(self):
        # e.g. ImmutableList([1, 2, 3])
        return '%s(%s)' % (
            self.__class__.__name__,
            list.__repr__(self),
        )
class ImmutableDictMixin(object):
    """Makes a :class:`dict` immutable.

    .. versionadded:: 0.5

    :private:
    """

    # Cached result of __hash__; computed lazily on first use.
    _hash_cache = None

    @classmethod
    def fromkeys(cls, keys, value=None):
        # Build through __init__ directly, bypassing the blocked mutators.
        instance = super(cls, cls).__new__(cls)
        instance.__init__(zip(keys, repeat(value)))
        return instance

    def __reduce_ex__(self, protocol):
        return type(self), (dict(self),)

    def _iter_hashitems(self):
        # Hook for subclasses to control which items enter the hash.
        return iteritems(self)

    def __hash__(self):
        if self._hash_cache is not None:
            return self._hash_cache
        rv = self._hash_cache = hash(frozenset(self._iter_hashitems()))
        return rv

    def setdefault(self, key, default=None):
        is_immutable(self)

    def update(self, *args, **kwargs):
        is_immutable(self)

    def pop(self, key, default=None):
        is_immutable(self)

    def popitem(self):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def __delitem__(self, key):
        is_immutable(self)

    def clear(self):
        is_immutable(self)
class ImmutableMultiDictMixin(ImmutableDictMixin):
    """Makes a :class:`MultiDict` immutable.

    .. versionadded:: 0.5

    :private:
    """

    def __reduce_ex__(self, protocol):
        # Pickle the full multi-value item list, not just the first values.
        return type(self), (list(iteritems(self, multi=True)),)

    def _iter_hashitems(self):
        # Hash over every (key, value) pair so equal multi dicts hash equal.
        return iteritems(self, multi=True)

    def add(self, key, value):
        is_immutable(self)

    def popitemlist(self):
        is_immutable(self)

    def poplist(self, key):
        is_immutable(self)

    def setlist(self, key, new_list):
        is_immutable(self)

    def setlistdefault(self, key, default_list=None):
        is_immutable(self)
class UpdateDictMixin(object):
    """Makes dicts call `self.on_update` on modifications.

    .. versionadded:: 0.5

    :private:
    """

    # Callback fired (with the dict itself) after every modification.
    on_update = None

    def calls_update(name):
        # Factory producing a wrapper that forwards *name* to the parent
        # dict implementation and then fires on_update.  Only used during
        # class creation; deleted below.
        def oncall(self, *args, **kw):
            rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw)
            if self.on_update is not None:
                self.on_update(self)
            return rv
        oncall.__name__ = name
        return oncall

    def setdefault(self, key, default=None):
        # Fires on_update only when the key was actually inserted.
        modified = key not in self
        rv = super(UpdateDictMixin, self).setdefault(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    def pop(self, key, default=_missing):
        # Fires on_update only when a key was actually removed.
        modified = key in self
        if default is _missing:
            rv = super(UpdateDictMixin, self).pop(key)
        else:
            rv = super(UpdateDictMixin, self).pop(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    __setitem__ = calls_update('__setitem__')
    __delitem__ = calls_update('__delitem__')
    clear = calls_update('clear')
    popitem = calls_update('popitem')
    update = calls_update('update')
    del calls_update
class TypeConversionDict(dict):
    """A regular dict whose :meth:`get` can additionally coerce values.
    :class:`MultiDict` and :class:`CombinedMultiDict` subclass it and
    inherit the same feature.

    .. versionadded:: 0.5
    """

    def get(self, key, default=None, type=None):
        """Return the value for *key*, optionally converted with *type*.

        When *type* is a callable it is applied to the value; if it raises
        a :exc:`ValueError` (or the key is missing) *default* is returned
        instead:

        >>> d = TypeConversionDict(foo='42', bar='blub')
        >>> d.get('foo', type=int)
        42
        >>> d.get('bar', -1, type=int)
        -1

        :param key: The key to be looked up.
        :param default: The default value to be returned if the key can't
                        be looked up.  If not further specified `None` is
                        returned.
        :param type: A callable that is used to cast the value in the
                     :class:`MultiDict`.  If a :exc:`ValueError` is raised
                     by this callable the default value is returned.
        """
        try:
            value = self[key]
            return value if type is None else type(value)
        except (KeyError, ValueError):
            return default
class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict):
    """Works like a :class:`TypeConversionDict` but does not support
    modifications.

    .. versionadded:: 0.5
    """

    def copy(self):
        """Return a shallow mutable copy of this object.  Keep in mind that
        the standard library's :func:`copy` function is a no-op for this class
        like for any other python immutable type (eg: :class:`tuple`).
        """
        return TypeConversionDict(self)

    def __copy__(self):
        # The object is immutable, so copying can safely return self.
        return self
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class MultiDict(TypeConversionDict):
    """A :class:`MultiDict` is a dictionary subclass customized to deal with
    multiple values for the same key which is for example used by the parsing
    functions in the wrappers.  This is necessary because some HTML form
    elements pass multiple values for the same key.

    :class:`MultiDict` implements all standard dictionary methods.
    Internally, it saves all values for a key as a list, but the standard
    dict access methods will only return the first value for a key.  If you
    want to gain access to the other values, too, you have to use the `list`
    methods as explained below.

    Basic Usage:

    >>> d = MultiDict([('a', 'b'), ('a', 'c')])
    >>> d
    MultiDict([('a', 'b'), ('a', 'c')])
    >>> d['a']
    'b'
    >>> d.getlist('a')
    ['b', 'c']
    >>> 'a' in d
    True

    It behaves like a normal dict thus all dict functions will only return
    the first value when multiple values for one key are found.

    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
    HTTP exceptions.

    A :class:`MultiDict` can be constructed from an iterable of
    ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug
    0.2 onwards some keyword parameters.

    :param mapping: the initial value for the :class:`MultiDict`.  Either a
                    regular dict, an iterable of ``(key, value)`` tuples
                    or `None`.
    """

    # Internal storage is always ``key -> list of values``; the plain dict
    # API returns only the first element of each list.

    def __init__(self, mapping=None):
        if isinstance(mapping, MultiDict):
            # Copy the per-key value lists so the two dicts don't share them.
            dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping)))
        elif isinstance(mapping, dict):
            tmp = {}
            for key, value in iteritems(mapping):
                if isinstance(value, (tuple, list)):
                    value = list(value)
                else:
                    value = [value]
                tmp[key] = value
            dict.__init__(self, tmp)
        else:
            # Iterable of (key, value) pairs (or None).
            tmp = {}
            for key, value in mapping or ():
                tmp.setdefault(key, []).append(value)
            dict.__init__(self, tmp)

    def __getstate__(self):
        return dict(self.lists())

    def __setstate__(self, value):
        dict.clear(self)
        dict.update(self, value)

    def __getitem__(self, key):
        """Return the first data value for this key;
        raises KeyError if not found.

        :param key: The key to be looked up.
        :raise KeyError: if the key does not exist.
        """
        if key in self:
            return dict.__getitem__(self, key)[0]
        raise exceptions.BadRequestKeyError(key)

    def __setitem__(self, key, value):
        """Like :meth:`add` but removes an existing key first.

        :param key: the key for the value.
        :param value: the value to set.
        """
        dict.__setitem__(self, key, [value])

    def add(self, key, value):
        """Adds a new value for the key.

        .. versionadded:: 0.6

        :param key: the key for the value.
        :param value: the value to add.
        """
        dict.setdefault(self, key, []).append(value)

    def getlist(self, key, type=None):
        """Return the list of items for a given key. If that key is not in the
        `MultiDict`, the return value will be an empty list.  Just as `get`
        `getlist` accepts a `type` parameter.  All items will be converted
        with the callable defined there.

        :param key: The key to be looked up.
        :param type: A callable that is used to cast the value in the
                     :class:`MultiDict`.  If a :exc:`ValueError` is raised
                     by this callable the value will be removed from the list.
        :return: a :class:`list` of all the values for the key.
        """
        try:
            rv = dict.__getitem__(self, key)
        except KeyError:
            return []
        if type is None:
            return list(rv)
        result = []
        for item in rv:
            try:
                result.append(type(item))
            except ValueError:
                # Unconvertible values are silently dropped from the list.
                pass
        return result

    def setlist(self, key, new_list):
        """Remove the old values for a key and add new ones.  Note that the
        list you pass the values in will be shallow-copied before it is
        inserted in the dictionary.

        >>> d = MultiDict()
        >>> d.setlist('foo', ['1', '2'])
        >>> d['foo']
        '1'
        >>> d.getlist('foo')
        ['1', '2']

        :param key: The key for which the values are set.
        :param new_list: An iterable with the new values for the key.  Old
                         values are removed first.
        """
        dict.__setitem__(self, key, list(new_list))

    def setdefault(self, key, default=None):
        """Returns the value for the key if it is in the dict, otherwise it
        returns `default` and sets that value for `key`.

        :param key: The key to be looked up.
        :param default: The default value to be returned if the key is not
                        in the dict.  If not further specified it's `None`.
        """
        if key not in self:
            self[key] = default
        else:
            default = self[key]
        return default

    def setlistdefault(self, key, default_list=None):
        """Like `setdefault` but sets multiple values.  The list returned
        is not a copy, but the list that is actually used internally.  This
        means that you can put new values into the dict by appending items
        to the list:

        >>> d = MultiDict({"foo": 1})
        >>> d.setlistdefault("foo").extend([2, 3])
        >>> d.getlist("foo")
        [1, 2, 3]

        :param key: The key to be looked up.
        :param default: An iterable of default values.  It is either copied
                        (in case it was a list) or converted into a list
                        before returned.
        :return: a :class:`list`
        """
        if key not in self:
            default_list = list(default_list or ())
            dict.__setitem__(self, key, default_list)
        else:
            default_list = dict.__getitem__(self, key)
        return default_list

    def items(self, multi=False):
        """Return an iterator of ``(key, value)`` pairs.

        :param multi: If set to `True` the iterator returned will have a pair
                      for each value of each key.  Otherwise it will only
                      contain pairs for the first value of each key.
        """
        # NOTE(review): ``iteritems(dict, self)`` appears to dispatch to the
        # plain dict implementation bound to self, bypassing the overridden
        # methods -- confirm against werkzeug._compat.
        for key, values in iteritems(dict, self):
            if multi:
                for value in values:
                    yield key, value
            else:
                yield key, values[0]

    def lists(self):
        """Return a list of ``(key, values)`` pairs, where values is the list
        of all values associated with the key."""
        for key, values in iteritems(dict, self):
            yield key, list(values)

    def keys(self):
        return iterkeys(dict, self)
    __iter__ = keys

    def values(self):
        """Returns an iterator of the first value on every key's value list."""
        for values in itervalues(dict, self):
            yield values[0]

    def listvalues(self):
        """Return an iterator of all values associated with a key.  Zipping
        :meth:`keys` and this is the same as calling :meth:`lists`:

        >>> d = MultiDict({"foo": [1, 2, 3]})
        >>> zip(d.keys(), d.listvalues()) == d.lists()
        True
        """
        return itervalues(dict, self)

    def copy(self):
        """Return a shallow copy of this object."""
        return self.__class__(self)

    def deepcopy(self, memo=None):
        """Return a deep copy of this object."""
        return self.__class__(deepcopy(self.to_dict(flat=False), memo))

    def to_dict(self, flat=True):
        """Return the contents as regular dict.  If `flat` is `True` the
        returned dict will only have the first item present, if `flat` is
        `False` all values will be returned as lists.

        :param flat: If set to `False` the dict returned will have lists
                     with all the values in it.  Otherwise it will only
                     contain the first value for each key.
        :return: a :class:`dict`
        """
        if flat:
            return dict(iteritems(self))
        return dict(self.lists())

    def update(self, other_dict):
        """update() extends rather than replaces existing key lists:

        >>> a = MultiDict({'x': 1})
        >>> b = MultiDict({'x': 2, 'y': 3})
        >>> a.update(b)
        >>> a
        MultiDict([('y', 3), ('x', 1), ('x', 2)])

        If the value list for a key in ``other_dict`` is empty, no new values
        will be added to the dict and the key will not be created:

        >>> x = {'empty_list': []}
        >>> y = MultiDict()
        >>> y.update(x)
        >>> y
        MultiDict([])
        """
        for key, value in iter_multi_items(other_dict):
            MultiDict.add(self, key, value)

    def pop(self, key, default=_missing):
        """Pop the first item for a list on the dict.  Afterwards the
        key is removed from the dict, so additional values are discarded:

        >>> d = MultiDict({"foo": [1, 2, 3]})
        >>> d.pop("foo")
        1
        >>> "foo" in d
        False

        :param key: the key to pop.
        :param default: if provided the value to return if the key was
                        not in the dictionary.
        """
        try:
            return dict.pop(self, key)[0]
        except KeyError as e:
            if default is not _missing:
                return default
            raise exceptions.BadRequestKeyError(str(e))

    def popitem(self):
        """Pop an item from the dict."""
        try:
            item = dict.popitem(self)
            # Return only the first value of the popped key's list.
            return (item[0], item[1][0])
        except KeyError as e:
            raise exceptions.BadRequestKeyError(str(e))

    def poplist(self, key):
        """Pop the list for a key from the dict.  If the key is not in the
        dict an empty list is returned.

        .. versionchanged:: 0.5
           If the key does no longer exist a list is returned instead of
           raising an error.
        """
        return dict.pop(self, key, [])

    def popitemlist(self):
        """Pop a ``(key, list)`` tuple from the dict."""
        try:
            return dict.popitem(self)
        except KeyError as e:
            raise exceptions.BadRequestKeyError(str(e))

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo):
        return self.deepcopy(memo=memo)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, list(iteritems(self, multi=True)))
class _omd_bucket(object):
    """Wraps values in the :class:`OrderedMultiDict`.  This makes it
    possible to keep an order over multiple different keys.  It requires
    a lot of extra memory and slows down access a lot, but makes it
    possible to access elements in O(1) and iterate in O(n).
    """
    # Slots: doubly linked list pointers plus the stored pair.
    __slots__ = ('prev', 'key', 'value', 'next')

    def __init__(self, omd, key, value):
        # Link this bucket at the tail of omd's doubly linked bucket list.
        self.prev = omd._last_bucket
        self.key = key
        self.value = value
        self.next = None
        if omd._first_bucket is None:
            omd._first_bucket = self
        if omd._last_bucket is not None:
            omd._last_bucket.next = self
        omd._last_bucket = self

    def unlink(self, omd):
        # Splice this bucket out of the linked list, fixing head/tail refs.
        if self.prev:
            self.prev.next = self.next
        if self.next:
            self.next.prev = self.prev
        if omd._first_bucket is self:
            omd._first_bucket = self.next
        if omd._last_bucket is self:
            omd._last_bucket = self.prev
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class OrderedMultiDict(MultiDict):
    """Works like a regular :class:`MultiDict` but preserves the
    order of the fields.  To convert the ordered multi dict into a
    list you can use the :meth:`items` method and pass it ``multi=True``.

    In general an :class:`OrderedMultiDict` is an order of magnitude
    slower than a :class:`MultiDict`.

    .. admonition:: note

       Due to a limitation in Python you cannot convert an ordered
       multi dict into a regular dict by using ``dict(multidict)``.
       Instead you have to use the :meth:`to_dict` method, otherwise
       the internal bucket objects are exposed.
    """

    # Internally every dict value is a list of _omd_bucket objects; the
    # buckets additionally form a doubly linked list that records global
    # insertion order across all keys.

    def __init__(self, mapping=None):
        dict.__init__(self)
        self._first_bucket = self._last_bucket = None
        if mapping is not None:
            OrderedMultiDict.update(self, mapping)

    def __eq__(self, other):
        if not isinstance(other, MultiDict):
            return NotImplemented
        if isinstance(other, OrderedMultiDict):
            # Ordered comparison: same pairs in the same order.
            iter1 = iteritems(self, multi=True)
            iter2 = iteritems(other, multi=True)
            try:
                for k1, v1 in iter1:
                    k2, v2 = next(iter2)
                    if k1 != k2 or v1 != v2:
                        return False
            except StopIteration:
                return False
            try:
                next(iter2)
            except StopIteration:
                return True
            return False
        # Plain MultiDict: compare per-key value lists, order-insensitive.
        if len(self) != len(other):
            return False
        for key, values in iterlists(self):
            if other.getlist(key) != values:
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __reduce_ex__(self, protocol):
        return type(self), (list(iteritems(self, multi=True)),)

    def __getstate__(self):
        return list(iteritems(self, multi=True))

    def __setstate__(self, values):
        dict.clear(self)
        for key, value in values:
            self.add(key, value)

    def __getitem__(self, key):
        if key in self:
            # Unwrap the first bucket's stored value.
            return dict.__getitem__(self, key)[0].value
        raise exceptions.BadRequestKeyError(key)

    def __setitem__(self, key, value):
        self.poplist(key)
        self.add(key, value)

    def __delitem__(self, key):
        self.pop(key)

    def keys(self):
        return (key for key, value in iteritems(self))
    __iter__ = keys

    def values(self):
        return (value for key, value in iteritems(self))

    def items(self, multi=False):
        # Walk the bucket chain so pairs come out in insertion order.
        ptr = self._first_bucket
        if multi:
            while ptr is not None:
                yield ptr.key, ptr.value
                ptr = ptr.next
        else:
            returned_keys = set()
            while ptr is not None:
                if ptr.key not in returned_keys:
                    returned_keys.add(ptr.key)
                    yield ptr.key, ptr.value
                ptr = ptr.next

    def lists(self):
        returned_keys = set()
        ptr = self._first_bucket
        while ptr is not None:
            if ptr.key not in returned_keys:
                yield ptr.key, self.getlist(ptr.key)
                returned_keys.add(ptr.key)
            ptr = ptr.next

    def listvalues(self):
        for key, values in iterlists(self):
            yield values

    def add(self, key, value):
        # The bucket constructor also appends itself to the linked list.
        dict.setdefault(self, key, []).append(_omd_bucket(self, key, value))

    def getlist(self, key, type=None):
        try:
            rv = dict.__getitem__(self, key)
        except KeyError:
            return []
        if type is None:
            return [x.value for x in rv]
        result = []
        for item in rv:
            try:
                result.append(type(item.value))
            except ValueError:
                # Unconvertible values are silently dropped from the list.
                pass
        return result

    def setlist(self, key, new_list):
        self.poplist(key)
        for value in new_list:
            self.add(key, value)

    def setlistdefault(self, key, default_list=None):
        raise TypeError('setlistdefault is unsupported for '
                        'ordered multi dicts')

    def update(self, mapping):
        for key, value in iter_multi_items(mapping):
            OrderedMultiDict.add(self, key, value)

    def poplist(self, key):
        buckets = dict.pop(self, key, ())
        for bucket in buckets:
            bucket.unlink(self)
        return [x.value for x in buckets]

    def pop(self, key, default=_missing):
        try:
            buckets = dict.pop(self, key)
        except KeyError as e:
            if default is not _missing:
                return default
            raise exceptions.BadRequestKeyError(str(e))
        # Keep the linked list consistent with the dict.
        for bucket in buckets:
            bucket.unlink(self)
        return buckets[0].value

    def popitem(self):
        try:
            key, buckets = dict.popitem(self)
        except KeyError as e:
            raise exceptions.BadRequestKeyError(str(e))
        for bucket in buckets:
            bucket.unlink(self)
        return key, buckets[0].value

    def popitemlist(self):
        try:
            key, buckets = dict.popitem(self)
        except KeyError as e:
            raise exceptions.BadRequestKeyError(str(e))
        for bucket in buckets:
            bucket.unlink(self)
        return key, [x.value for x in buckets]
def _options_header_vkw(value, kw):
    """Build an options header from *value*, mapping ``_`` in keyword
    argument names to ``-`` (Python identifiers can't contain dashes)."""
    options = dict((k.replace('_', '-'), v) for k, v in kw.items())
    return dump_options_header(value, options)
def _unicodify_header_value(value):
    """Coerce a header value to a text string, latin-1 decoding bytes."""
    if isinstance(value, bytes):
        return value.decode('latin-1')
    if isinstance(value, text_type):
        return value
    return text_type(value)
@native_itermethods(['keys', 'values', 'items'])
class Headers(object):
"""An object that stores some headers. It has a dict-like interface
but is ordered and can store the same keys multiple times.
This data structure is useful if you want a nicer way to handle WSGI
headers which are stored as tuples in a list.
From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is
also a subclass of the :class:`~exceptions.BadRequest` HTTP exception
and will render a page for a ``400 BAD REQUEST`` if caught in a
catch-all for HTTP exceptions.
Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers`
class, with the exception of `__getitem__`. :mod:`wsgiref` will return
`None` for ``headers['missing']``, whereas :class:`Headers` will raise
a :class:`KeyError`.
To create a new :class:`Headers` object pass it a list or dict of headers
which are used as default values. This does not reuse the list passed
to the constructor for internal usage.
:param defaults: The list of default values for the :class:`Headers`.
.. versionchanged:: 0.9
This data structure now stores unicode values similar to how the
multi dicts do it. The main difference is that bytes can be set as
well which will automatically be latin1 decoded.
.. versionchanged:: 0.9
The :meth:`linked` function was removed without replacement as it
was an API that does not support the changes to the encoding model.
"""
def __init__(self, defaults=None):
self._list = []
if defaults is not None:
if isinstance(defaults, (list, Headers)):
self._list.extend(defaults)
else:
self.extend(defaults)
def __getitem__(self, key, _get_mode=False):
if not _get_mode:
if isinstance(key, integer_types):
return self._list[key]
elif isinstance(key, slice):
return self.__class__(self._list[key])
if not isinstance(key, string_types):
raise exceptions.BadRequestKeyError(key)
ikey = key.lower()
for k, v in self._list:
if k.lower() == ikey:
return v
# micro optimization: if we are in get mode we will catch that
# exception one stack level down so we can raise a standard
# key error instead of our special one.
if _get_mode:
raise KeyError()
raise exceptions.BadRequestKeyError(key)
def __eq__(self, other):
return other.__class__ is self.__class__ and \
set(other._list) == set(self._list)
def __ne__(self, other):
return not self.__eq__(other)
def get(self, key, default=None, type=None, as_bytes=False):
"""Return the default value if the requested data doesn't exist.
If `type` is provided and is a callable it should convert the value,
return it or raise a :exc:`ValueError` if that is not possible. In
this case the function will return the default as if the value was not
found:
>>> d = Headers([('Content-Length', '42')])
>>> d.get('Content-Length', type=int)
42
If a headers object is bound you must not add unicode strings
because no encoding takes place.
.. versionadded:: 0.9
Added support for `as_bytes`.
:param key: The key to be looked up.
:param default: The default value to be returned if the key can't
be looked up. If not further specified `None` is
returned.
:param type: A callable that is used to cast the value in the
:class:`Headers`. If a :exc:`ValueError` is raised
by this callable the default value is returned.
:param as_bytes: return bytes instead of unicode strings.
"""
try:
rv = self.__getitem__(key, _get_mode=True)
except KeyError:
return default
if as_bytes:
rv = rv.encode('latin1')
if type is None:
return rv
try:
return type(rv)
except ValueError:
return default
def getlist(self, key, type=None, as_bytes=False):
"""Return the list of items for a given key. If that key is not in the
:class:`Headers`, the return value will be an empty list. Just as
:meth:`get` :meth:`getlist` accepts a `type` parameter. All items will
be converted with the callable defined there.
.. versionadded:: 0.9
Added support for `as_bytes`.
:param key: The key to be looked up.
:param type: A callable that is used to cast the value in the
:class:`Headers`. If a :exc:`ValueError` is raised
by this callable the value will be removed from the list.
:return: a :class:`list` of all the values for the key.
:param as_bytes: return bytes instead of unicode strings.
"""
ikey = key.lower()
result = []
for k, v in self:
if k.lower() == ikey:
if as_bytes:
v = v.encode('latin1')
if type is not None:
try:
v = type(v)
except ValueError:
continue
result.append(v)
return result
def get_all(self, name):
"""Return a list of all the values for the named field.
This method is compatible with the :mod:`wsgiref`
:meth:`~wsgiref.headers.Headers.get_all` method.
"""
return self.getlist(name)
def items(self, lower=False):
for key, value in self:
if lower:
key = key.lower()
yield key, value
    def keys(self, lower=False):
        """Yield all header names in insertion order.
        :param lower: if set to `True` the keys are lowercased.
        """
        for key, _ in iteritems(self, lower):
            yield key
    def values(self):
        """Yield all header values in insertion order."""
        for _, value in iteritems(self):
            yield value
def extend(self, iterable):
"""Extend the headers with a dict or an iterable yielding keys and
values.
"""
if isinstance(iterable, dict):
for key, value in iteritems(iterable):
if isinstance(value, (tuple, list)):
for v in value:
self.add(key, v)
else:
self.add(key, value)
else:
for key, value in iterable:
self.add(key, value)
def __delitem__(self, key, _index_operation=True):
if _index_operation and isinstance(key, (integer_types, slice)):
del self._list[key]
return
key = key.lower()
new = []
for k, v in self._list:
if k.lower() != key:
new.append((k, v))
self._list[:] = new
    def remove(self, key):
        """Remove a key.
        :param key: The key to be removed.
        """
        # _index_operation=False forces name-based removal even if the key
        # happens to be an integer.
        return self.__delitem__(key, _index_operation=False)
def pop(self, key=None, default=_missing):
"""Removes and returns a key or index.
:param key: The key to be popped. If this is an integer the item at
that position is removed, if it's a string the value for
that key is. If the key is omitted or `None` the last
item is removed.
:return: an item.
"""
if key is None:
return self._list.pop()
if isinstance(key, integer_types):
return self._list.pop(key)
try:
rv = self[key]
self.remove(key)
except KeyError:
if default is not _missing:
return default
raise
return rv
    def popitem(self):
        """Removes a key or index and returns a (key, value) item."""
        # delegates to pop() without arguments, i.e. removes the last tuple
        return self.pop()
    def __contains__(self, key):
        """Check if a key is present."""
        # _get_mode makes __getitem__ treat the key as a header name even
        # if it is an integer or slice (mirrors :meth:`get`).
        try:
            self.__getitem__(key, _get_mode=True)
        except KeyError:
            return False
        return True
    # Python 2 style alias kept for backwards compatibility.
    has_key = __contains__
    def __iter__(self):
        """Yield ``(key, value)`` tuples."""
        return iter(self._list)
    def __len__(self):
        """Return the number of header tuples stored."""
        return len(self._list)
    def add(self, _key, _value, **kw):
        """Add a new header tuple to the list.
        Keyword arguments can specify additional parameters for the header
        value, with underscores converted to dashes::
        >>> d = Headers()
        >>> d.add('Content-Type', 'text/plain')
        >>> d.add('Content-Disposition', 'attachment', filename='foo.png')
        The keyword argument dumping uses :func:`dump_options_header`
        behind the scenes.
        .. versionadded:: 0.4.1
        keyword arguments were added for :mod:`wsgiref` compatibility.
        """
        if kw:
            # fold keyword options into the value, e.g. filename='foo.png'
            _value = _options_header_vkw(_value, kw)
        _value = _unicodify_header_value(_value)
        # rejects values containing CR/LF (header injection protection)
        self._validate_value(_value)
        self._list.append((_key, _value))
def _validate_value(self, value):
if not isinstance(value, text_type):
raise TypeError('Value should be unicode.')
if u'\n' in value or u'\r' in value:
raise ValueError('Detected newline in header value. This is '
'a potential security problem')
    def add_header(self, _key, _value, **_kw):
        """Add a new header tuple to the list.
        An alias for :meth:`add` for compatibility with the :mod:`wsgiref`
        :meth:`~wsgiref.headers.Headers.add_header` method.
        """
        self.add(_key, _value, **_kw)
    def clear(self):
        """Clears all headers."""
        # clear in place so external references to the list stay valid
        del self._list[:]
    def set(self, _key, _value, **kw):
        """Remove all header tuples for `key` and add a new one. The newly
        added key either appears at the end of the list if there was no
        entry or replaces the first one.
        Keyword arguments can specify additional parameters for the header
        value, with underscores converted to dashes. See :meth:`add` for
        more information.
        .. versionchanged:: 0.6.1
           :meth:`set` now accepts the same arguments as :meth:`add`.
        :param key: The key to be inserted.
        :param value: The value to be inserted.
        """
        if kw:
            _value = _options_header_vkw(_value, kw)
        _value = _unicodify_header_value(_value)
        self._validate_value(_value)
        if not self._list:
            self._list.append((_key, _value))
            return
        # The loop and the slice assignment below intentionally share
        # ``listiter``: once the loop breaks, the iterator is positioned
        # just past the replaced entry.
        listiter = iter(self._list)
        ikey = _key.lower()
        for idx, (old_key, old_value) in enumerate(listiter):
            if old_key.lower() == ikey:
                # replace first occurrence
                self._list[idx] = (_key, _value)
                break
        else:
            # key not present at all: append at the end
            self._list.append((_key, _value))
            return
        # drop any remaining duplicates after the replaced entry
        self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey]
    def setdefault(self, key, value):
        """Returns the value for the key if it is in the dict, otherwise it
        returns `value` and sets that value for `key`.
        :param key: The key to be looked up.
        :param value: The default value to be stored and returned if the
                      key is not in the dict.
        """
        if key in self:
            return self[key]
        self.set(key, value)
        return value
def __setitem__(self, key, value):
"""Like :meth:`set` but also supports index/slice based setting."""
if isinstance(key, (slice, integer_types)):
if isinstance(key, integer_types):
value = [value]
value = [(k, _unicodify_header_value(v)) for (k, v) in value]
[self._validate_value(v) for (k, v) in value]
if isinstance(key, integer_types):
self._list[key] = value[0]
else:
self._list[key] = value
else:
self.set(key, value)
    def to_list(self, charset='iso-8859-1'):
        """Convert the headers into a list suitable for WSGI.
        Deprecated: emits a :exc:`DeprecationWarning` and delegates to
        :meth:`to_wsgi_list`.  The `charset` argument is ignored.
        """
        from warnings import warn
        warn(DeprecationWarning('Method removed, use to_wsgi_list instead'),
             stacklevel=2)
        return self.to_wsgi_list()
    def to_wsgi_list(self):
        """Convert the headers into a list suitable for WSGI.
        The values are byte strings in Python 2 converted to latin1 and unicode
        strings in Python 3 for the WSGI server to encode.
        :return: list
        """
        if PY2:
            # PEP 3333: native-string keys, latin-1 encoded byte values
            return [(to_native(k), v.encode('latin1')) for k, v in self]
        return list(self)
    def copy(self):
        """Return a shallow copy of this :class:`Headers` object."""
        return self.__class__(self._list)
    def __copy__(self):
        """Support for :func:`copy.copy`."""
        return self.copy()
def __str__(self):
"""Returns formatted headers suitable for HTTP transmission."""
strs = []
for key, value in self.to_wsgi_list():
strs.append('%s: %s' % (key, value))
strs.append('\r\n')
return '\r\n'.join(strs)
    def __repr__(self):
        # mirrors the constructor form: Headers([(key, value), ...])
        return '%s(%r)' % (
            self.__class__.__name__,
            list(self)
        )
class ImmutableHeadersMixin(object):
    """Makes a :class:`Headers` immutable. We do not mark them as
    hashable though since the only usecase for this datastructure
    in Werkzeug is a view on a mutable structure.
    .. versionadded:: 0.5
    :private:
    """
    # Every mutating method is overridden to raise via is_immutable();
    # read access is inherited unchanged from the wrapped Headers class.
    def __delitem__(self, key):
        is_immutable(self)
    def __setitem__(self, key, value):
        is_immutable(self)
    set = __setitem__
    def add(self, item):
        is_immutable(self)
    remove = add_header = add
    def extend(self, iterable):
        is_immutable(self)
    def insert(self, pos, value):
        is_immutable(self)
    def pop(self, index=-1):
        is_immutable(self)
    def popitem(self):
        is_immutable(self)
    def setdefault(self, key, default):
        is_immutable(self)
class EnvironHeaders(ImmutableHeadersMixin, Headers):
    """Read only version of the headers from a WSGI environment. This
    provides the same interface as `Headers` and is constructed from
    a WSGI environment.
    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
    HTTP exceptions.
    """
    def __init__(self, environ):
        # Keep a reference to the live environ dict; lookups read through
        # to it, so later environ changes are visible in this view.
        self.environ = environ
    def __eq__(self, other):
        # Identity comparison: two views are equal only when they wrap the
        # very same environ dict.
        return self.environ is other.environ
    def __getitem__(self, key, _get_mode=False):
        # _get_mode is a no-op for this class as there is no index but
        # used because get() calls it.
        key = key.upper().replace('-', '_')
        if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
            # these two CGI variables are not prefixed with HTTP_
            return _unicodify_header_value(self.environ[key])
        return _unicodify_header_value(self.environ['HTTP_' + key])
    def __len__(self):
        # the iter is necessary because otherwise list calls our
        # len which would call list again and so forth.
        return len(list(iter(self)))
    def __iter__(self):
        for key, value in iteritems(self.environ):
            if key.startswith('HTTP_') and key not in \
                    ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
                yield (key[5:].replace('_', '-').title(),
                       _unicodify_header_value(value))
            elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
                yield (key.replace('_', '-').title(),
                       _unicodify_header_value(value))
    def copy(self):
        raise TypeError('cannot create %r copies' % self.__class__.__name__)
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict):
    """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict`
    instances as sequence and it will combine the return values of all wrapped
    dicts:
    >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict
    >>> post = MultiDict([('foo', 'bar')])
    >>> get = MultiDict([('blub', 'blah')])
    >>> combined = CombinedMultiDict([get, post])
    >>> combined['foo']
    'bar'
    >>> combined['blub']
    'blah'
    This works for all read operations and will raise a `TypeError` for
    methods that usually change data which isn't possible.
    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
    exceptions.
    """
    def __reduce_ex__(self, protocol):
        # pickle support: rebuild from the wrapped dicts
        return type(self), (self.dicts,)
    def __init__(self, dicts=None):
        self.dicts = dicts or []
    @classmethod
    def fromkeys(cls):
        raise TypeError('cannot create %r instances by fromkeys' %
                        cls.__name__)
    def __getitem__(self, key):
        # earlier dicts take precedence for single-value lookups
        for d in self.dicts:
            if key in d:
                return d[key]
        raise exceptions.BadRequestKeyError(key)
    def get(self, key, default=None, type=None):
        for d in self.dicts:
            if key in d:
                if type is not None:
                    try:
                        return type(d[key])
                    except ValueError:
                        # a failed cast moves on to the next wrapped dict
                        continue
                return d[key]
        return default
    def getlist(self, key, type=None):
        """Collect the values for `key` from every wrapped dict."""
        rv = []
        for d in self.dicts:
            rv.extend(d.getlist(key, type))
        return rv
    def _keys_impl(self):
        """This function exists so __len__ can be implemented more efficiently,
        saving one list creation from an iterator.
        Using this for Python 2's ``dict.keys`` behavior would be useless since
        `dict.keys` in Python 2 returns a list, while we have a set here.
        """
        rv = set()
        for d in self.dicts:
            rv.update(iterkeys(d))
        return rv
    def keys(self):
        return iter(self._keys_impl())
    __iter__ = keys
    def items(self, multi=False):
        # without `multi` only the first occurrence of each key is yielded
        found = set()
        for d in self.dicts:
            for key, value in iteritems(d, multi):
                if multi:
                    yield key, value
                elif key not in found:
                    found.add(key)
                    yield key, value
    def values(self):
        for key, value in iteritems(self):
            yield value
    def lists(self):
        rv = {}
        for d in self.dicts:
            for key, values in iterlists(d):
                rv.setdefault(key, []).extend(values)
        return iteritems(rv)
    def listvalues(self):
        return (x[1] for x in self.lists())
    def copy(self):
        """Return a shallow copy of this object."""
        return self.__class__(self.dicts[:])
    def to_dict(self, flat=True):
        """Return the contents as regular dict. If `flat` is `True` the
        returned dict will only have the first item present, if `flat` is
        `False` all values will be returned as lists.
        :param flat: If set to `False` the dict returned will have lists
                     with all the values in it. Otherwise it will only
                     contain the first item for each key.
        :return: a :class:`dict`
        """
        rv = {}
        # reversed so earlier dicts overwrite later ones, matching the
        # precedence used by __getitem__
        for d in reversed(self.dicts):
            rv.update(d.to_dict(flat))
        return rv
    def __len__(self):
        return len(self._keys_impl())
    def __contains__(self, key):
        for d in self.dicts:
            if key in d:
                return True
        return False
    has_key = __contains__
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.dicts)
class FileMultiDict(MultiDict):
    """A special :class:`MultiDict` that has convenience methods to add
    files to it. This is used for :class:`EnvironBuilder` and generally
    useful for unittesting.
    .. versionadded:: 0.5
    """
    def add_file(self, name, file, filename=None, content_type=None):
        """Adds a new file to the dict. `file` can be a file name or
        a :class:`file`-like or a :class:`FileStorage` object.
        :param name: the name of the field.
        :param file: a filename or :class:`file`-like object
        :param filename: an optional filename
        :param content_type: an optional content type
        """
        if isinstance(file, FileStorage):
            value = file
        else:
            if isinstance(file, string_types):
                if filename is None:
                    filename = file
                # NOTE(review): the handle opened here is handed to
                # FileStorage and never explicitly closed in this method --
                # presumably FileStorage/consumers own its lifetime; confirm.
                file = open(file, 'rb')
            if filename and content_type is None:
                # guess from the filename, falling back to a generic type
                content_type = mimetypes.guess_type(filename)[0] or \
                    'application/octet-stream'
            value = FileStorage(file, filename, name, content_type)
        self.add(name, value)
class ImmutableDict(ImmutableDictMixin, dict):
    """An immutable :class:`dict`.
    .. versionadded:: 0.5
    """
    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            dict.__repr__(self),
        )
    def copy(self):
        """Return a shallow mutable copy of this object.  Keep in mind that
        the standard library's :func:`copy` function is a no-op for this class
        like for any other python immutable type (eg: :class:`tuple`).
        """
        # deliberately returns a plain mutable dict, not ImmutableDict
        return dict(self)
    def __copy__(self):
        # immutable, so copy.copy() can safely return the same object
        return self
class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict):
    """An immutable :class:`MultiDict`.
    .. versionadded:: 0.5
    """
    def copy(self):
        """Return a shallow mutable copy of this object.  Keep in mind that
        the standard library's :func:`copy` function is a no-op for this class
        like for any other python immutable type (eg: :class:`tuple`).
        """
        # deliberately returns a mutable MultiDict, not ImmutableMultiDict
        return MultiDict(self)
    def __copy__(self):
        # immutable, so copy.copy() can safely return the same object
        return self
class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
    """An immutable :class:`OrderedMultiDict`.
    .. versionadded:: 0.6
    """
    def _iter_hashitems(self):
        # include positions so the hash reflects ordering, which is
        # significant for an ordered multi dict
        return enumerate(iteritems(self, multi=True))
    def copy(self):
        """Return a shallow mutable copy of this object.  Keep in mind that
        the standard library's :func:`copy` function is a no-op for this class
        like for any other python immutable type (eg: :class:`tuple`).
        """
        return OrderedMultiDict(self)
    def __copy__(self):
        # immutable, so copy.copy() can safely return the same object
        return self
@native_itermethods(['values'])
class Accept(ImmutableList):
    """An :class:`Accept` object is just a list subclass for lists of
    ``(value, quality)`` tuples. It is automatically sorted by quality.
    All :class:`Accept` objects work similar to a list but provide extra
    functionality for working with the data. Containment checks are
    normalized to the rules of that header:
    >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)])
    >>> a.best
    'ISO-8859-1'
    >>> 'iso-8859-1' in a
    True
    >>> 'UTF8' in a
    True
    >>> 'utf7' in a
    False
    To get the quality for an item you can use normal item lookup:
    >>> print a['utf-8']
    0.7
    >>> a['utf7']
    0
    .. versionchanged:: 0.5
       :class:`Accept` objects are forced immutable now.
    """
    def __init__(self, values=()):
        if values is None:
            # header absent: empty list, mark as not provided
            list.__init__(self)
            self.provided = False
        elif isinstance(values, Accept):
            self.provided = values.provided
            list.__init__(self, values)
        else:
            self.provided = True
            # Sort by quality, highest first: swap each pair to
            # (quality, value) so tuple ordering compares quality first,
            # sort ascending, reverse, then swap back.
            values = [(a, b) for b, a in values]
            values.sort()
            values.reverse()
            list.__init__(self, [(a, b) for b, a in values])
    def _value_matches(self, value, item):
        """Check if a value matches a given accept item."""
        return item == '*' or item.lower() == value.lower()
    def __getitem__(self, key):
        """Besides index lookup (getting item n) you can also pass it a string
        to get the quality for the item. If the item is not in the list, the
        returned quality is ``0``.
        """
        if isinstance(key, string_types):
            return self.quality(key)
        return list.__getitem__(self, key)
    def quality(self, key):
        """Returns the quality of the key.
        .. versionadded:: 0.6
           In previous versions you had to use the item-lookup syntax
           (eg: ``obj[key]`` instead of ``obj.quality(key)``)
        """
        for item, quality in self:
            if self._value_matches(key, item):
                return quality
        return 0
    def __contains__(self, value):
        for item, quality in self:
            if self._value_matches(value, item):
                return True
        return False
    def __repr__(self):
        return '%s([%s])' % (
            self.__class__.__name__,
            ', '.join('(%r, %s)' % (x, y) for x, y in self)
        )
    def index(self, key):
        """Get the position of an entry or raise :exc:`ValueError`.
        :param key: The key to be looked up.
        .. versionchanged:: 0.5
           This used to raise :exc:`IndexError`, which was inconsistent
           with the list API.
        """
        if isinstance(key, string_types):
            for idx, (item, quality) in enumerate(self):
                if self._value_matches(key, item):
                    return idx
            raise ValueError(key)
        return list.index(self, key)
    def find(self, key):
        """Get the position of an entry or return -1.
        :param key: The key to be looked up.
        """
        try:
            return self.index(key)
        except ValueError:
            return -1
    def values(self):
        """Iterate over all values."""
        for item in self:
            yield item[0]
    def to_header(self):
        """Convert the header set into an HTTP header string."""
        result = []
        for value, quality in self:
            if quality != 1:
                # quality 1 is the default and is omitted from the header
                value = '%s;q=%s' % (value, quality)
            result.append(value)
        return ','.join(result)
    def __str__(self):
        return self.to_header()
    def best_match(self, matches, default=None):
        """Returns the best match from a list of possible matches based
        on the quality of the client. If two items have the same quality,
        the one is returned that comes first.
        :param matches: a list of matches to check for
        :param default: the value that is returned if none match
        """
        best_quality = -1
        result = default
        for server_item in matches:
            for client_item, quality in self:
                if quality <= best_quality:
                    # self is sorted by quality descending, so no later
                    # client item can beat the current best -- stop early
                    break
                if self._value_matches(server_item, client_item) \
                        and quality > 0:
                    best_quality = quality
                    result = server_item
        return result
    @property
    def best(self):
        """The best match as value."""
        # implicitly returns None when the list is empty
        if self:
            return self[0][0]
class MIMEAccept(Accept):
    """Like :class:`Accept` but with special methods and behavior for
    mimetypes.
    """
    def _value_matches(self, value, item):
        """Return `True` if the server-provided mimetype `value` matches
        the client accept entry `item`, honoring ``*`` wildcards on either
        the type or subtype part.
        :param value: a full ``type/subtype`` mimetype from the application.
        :param item: an accept entry from the client (may contain ``*``).
        """
        def _normalize(x):
            x = x.lower()
            # A bare '*' means a full wildcard pair; replaced the legacy
            # ``cond and a or b`` idiom with a conditional expression.
            return ('*', '*') if x == '*' else x.split('/', 1)
        # this is from the application which is trusted. to avoid developer
        # frustration we actually check these for valid values
        if '/' not in value:
            raise ValueError('invalid mimetype %r' % value)
        value_type, value_subtype = _normalize(value)
        if value_type == '*' and value_subtype != '*':
            raise ValueError('invalid mimetype %r' % value)
        if '/' not in item:
            return False
        item_type, item_subtype = _normalize(item)
        if item_type == '*' and item_subtype != '*':
            return False
        return (
            (item_type == item_subtype == '*' or
             value_type == value_subtype == '*') or
            (item_type == value_type and (item_subtype == '*' or
                                          value_subtype == '*' or
                                          item_subtype == value_subtype))
        )
    @property
    def accept_html(self):
        """True if this object accepts HTML."""
        return (
            'text/html' in self or
            'application/xhtml+xml' in self or
            self.accept_xhtml
        )
    @property
    def accept_xhtml(self):
        """True if this object accepts XHTML."""
        return (
            'application/xhtml+xml' in self or
            'application/xml' in self
        )
    @property
    def accept_json(self):
        """True if this object accepts JSON."""
        return 'application/json' in self
class LanguageAccept(Accept):
    """Like :class:`Accept` but with normalization for languages."""
    def _value_matches(self, value, item):
        # Split on locale delimiters and compare case-insensitively --
        # presumably so "en-US" matches "en_us"; the exact delimiters come
        # from the module-level _locale_delim_re (verify against it).
        def _normalize(language):
            return _locale_delim_re.split(language.lower())
        return item == '*' or _normalize(value) == _normalize(item)
class CharsetAccept(Accept):
    """Like :class:`Accept` but with normalization for charsets."""
    def _value_matches(self, value, item):
        def _normalize(name):
            try:
                # canonical codec name, e.g. "utf8" -> "utf-8"
                return codecs.lookup(name).name
            except LookupError:
                # unknown charset: fall back to case-insensitive compare
                return name.lower()
        return item == '*' or _normalize(value) == _normalize(item)
def cache_property(key, empty, type):
    """Return a new property object for a cache header. Useful if you
    want to add support for a cache extension in a subclass."""
    def fget(x):
        return x._get_cache_value(key, empty, type)
    def fset(x, v):
        x._set_cache_value(key, v, type)
    def fdel(x):
        x._del_cache_value(key)
    return property(fget, fset, fdel, 'accessor for %r' % key)
class _CacheControl(UpdateDictMixin, dict):
    """Subclass of a dict that stores values for a Cache-Control header. It
    has accessors for all the cache-control directives specified in RFC 2616.
    The class does not differentiate between request and response directives.
    Because the cache-control directives in the HTTP header use dashes the
    python descriptors use underscores for that.
    To get a header of the :class:`CacheControl` object again you can convert
    the object into a string or call the :meth:`to_header` method. If you plan
    to subclass it and add your own items have a look at the sourcecode for
    that class.
    .. versionchanged:: 0.4
       Setting `no_cache` or `private` to boolean `True` will set the implicit
       none-value which is ``*``:
       >>> cc = ResponseCacheControl()
       >>> cc.no_cache = True
       >>> cc
       <ResponseCacheControl 'no-cache'>
       >>> cc.no_cache
       '*'
       >>> cc.no_cache = None
       >>> cc
       <ResponseCacheControl ''>
       In versions before 0.5 the behavior documented here affected the now
       no longer existing `CacheControl` class.
    """
    no_cache = cache_property('no-cache', '*', None)
    no_store = cache_property('no-store', None, bool)
    max_age = cache_property('max-age', -1, int)
    no_transform = cache_property('no-transform', None, None)
    def __init__(self, values=(), on_update=None):
        dict.__init__(self, values or ())
        self.on_update = on_update
        self.provided = values is not None
    def _get_cache_value(self, key, empty, type):
        """Used internally by the accessor properties."""
        if type is bool:
            # boolean directives are valueless: presence means True
            return key in self
        if key in self:
            value = self[key]
            if value is None:
                # directive present without a value -> implicit value
                return empty
            elif type is not None:
                try:
                    value = type(value)
                except ValueError:
                    # leave the raw string if the cast fails
                    pass
            return value
    def _set_cache_value(self, key, value, type):
        """Used internally by the accessor properties."""
        if type is bool:
            if value:
                self[key] = None
            else:
                self.pop(key, None)
        else:
            if value is None:
                # Bug fix: supply a default so clearing a directive that is
                # not currently set does not raise a KeyError.
                self.pop(key, None)
            elif value is True:
                self[key] = None
            else:
                self[key] = value
    def _del_cache_value(self, key):
        """Used internally by the accessor properties."""
        if key in self:
            del self[key]
    def to_header(self):
        """Convert the stored values into a cache control header."""
        return dump_header(self)
    def __str__(self):
        return self.to_header()
    def __repr__(self):
        return '<%s %s>' % (
            self.__class__.__name__,
            " ".join(
                "%s=%r" % (k, v) for k, v in sorted(self.items())
            ),
        )
class RequestCacheControl(ImmutableDictMixin, _CacheControl):
    """A cache control for requests.  This is immutable and gives access
    to all the request-relevant cache control headers.
    To get a header of the :class:`RequestCacheControl` object again you can
    convert the object into a string or call the :meth:`to_header` method.  If
    you plan to subclass it and add your own items have a look at the sourcecode
    for that class.
    .. versionadded:: 0.5
       In previous versions a `CacheControl` class existed that was used
       both for request and response.
    """
    # request-only directives in addition to those on _CacheControl
    max_stale = cache_property('max-stale', '*', int)
    min_fresh = cache_property('min-fresh', '*', int)
    no_transform = cache_property('no-transform', None, None)
    only_if_cached = cache_property('only-if-cached', None, bool)
class ResponseCacheControl(_CacheControl):
    """A cache control for responses.  Unlike :class:`RequestCacheControl`
    this is mutable and gives access to response-relevant cache control
    headers.
    To get a header of the :class:`ResponseCacheControl` object again you can
    convert the object into a string or call the :meth:`to_header` method.  If
    you plan to subclass it and add your own items have a look at the sourcecode
    for that class.
    .. versionadded:: 0.5
       In previous versions a `CacheControl` class existed that was used
       both for request and response.
    """
    # response-only directives in addition to those on _CacheControl
    public = cache_property('public', None, bool)
    private = cache_property('private', '*', None)
    must_revalidate = cache_property('must-revalidate', None, bool)
    proxy_revalidate = cache_property('proxy-revalidate', None, bool)
    s_maxage = cache_property('s-maxage', None, None)
# Attach cache_property to _CacheControl as a staticmethod so that
# subclasses (and user code) can define additional directives without
# importing the module-level helper.
_CacheControl.cache_property = staticmethod(cache_property)
class CallbackDict(UpdateDictMixin, dict):
    """A dict that calls a function passed every time something is changed.
    The function is passed the dict instance.
    """
    def __init__(self, initial=None, on_update=None):
        dict.__init__(self, initial or ())
        # on_update is invoked by the UpdateDictMixin mutation hooks
        # (defined elsewhere in this module) -- not by this class directly.
        self.on_update = on_update
    def __repr__(self):
        return '<%s %s>' % (
            self.__class__.__name__,
            dict.__repr__(self)
        )
class HeaderSet(object):
    """Similar to the :class:`ETags` class this implements a set-like structure.
    Unlike :class:`ETags` this is case insensitive and used for vary, allow, and
    content-language headers.
    If not constructed using the :func:`parse_set_header` function the
    instantiation works like this:
    >>> hs = HeaderSet(['foo', 'bar', 'baz'])
    >>> hs
    HeaderSet(['foo', 'bar', 'baz'])
    """
    def __init__(self, headers=None, on_update=None):
        # _headers keeps original casing and insertion order; _set keeps
        # the lowercased names for O(1) membership checks.
        self._headers = list(headers or ())
        self._set = set([x.lower() for x in self._headers])
        self.on_update = on_update
    def add(self, header):
        """Add a new header to the set."""
        self.update((header,))
    def remove(self, header):
        """Remove a header from the set.  This raises an :exc:`KeyError` if the
        header is not in the set.
        .. versionchanged:: 0.5
            In older versions a :exc:`IndexError` was raised instead of a
            :exc:`KeyError` if the object was missing.
        :param header: the header to be removed.
        """
        key = header.lower()
        if key not in self._set:
            raise KeyError(header)
        self._set.remove(key)
        for idx, stored in enumerate(self._headers):
            # Bug fix: compare against the lowercased name.  The old code
            # compared ``stored.lower()`` to the caller's original-case
            # ``header``, so removing e.g. 'Content-Type' deleted the name
            # from _set but left it in _headers, desyncing the two.
            if stored.lower() == key:
                del self._headers[idx]
                break
        if self.on_update is not None:
            self.on_update(self)
    def update(self, iterable):
        """Add all the headers from the iterable to the set.
        :param iterable: updates the set with the items from the iterable.
        """
        inserted_any = False
        for header in iterable:
            key = header.lower()
            if key not in self._set:
                self._headers.append(header)
                self._set.add(key)
                inserted_any = True
        # notify only once, and only if something actually changed
        if inserted_any and self.on_update is not None:
            self.on_update(self)
    def discard(self, header):
        """Like :meth:`remove` but ignores errors.
        :param header: the header to be discarded.
        """
        try:
            return self.remove(header)
        except KeyError:
            pass
    def find(self, header):
        """Return the index of the header in the set or return -1 if not found.
        :param header: the header to be looked up.
        """
        header = header.lower()
        for idx, item in enumerate(self._headers):
            if item.lower() == header:
                return idx
        return -1
    def index(self, header):
        """Return the index of the header in the set or raise an
        :exc:`IndexError`.
        :param header: the header to be looked up.
        """
        rv = self.find(header)
        if rv < 0:
            raise IndexError(header)
        return rv
    def clear(self):
        """Clear the set."""
        self._set.clear()
        del self._headers[:]
        if self.on_update is not None:
            self.on_update(self)
    def as_set(self, preserve_casing=False):
        """Return the set as real python set type.  When calling this, all
        the items are converted to lowercase and the ordering is lost.
        :param preserve_casing: if set to `True` the items in the set returned
                                will have the original case like in the
                                :class:`HeaderSet`, otherwise they will
                                be lowercase.
        """
        if preserve_casing:
            return set(self._headers)
        return set(self._set)
    def to_header(self):
        """Convert the header set into an HTTP header string."""
        return ', '.join(map(quote_header_value, self._headers))
    def __getitem__(self, idx):
        return self._headers[idx]
    def __delitem__(self, idx):
        rv = self._headers.pop(idx)
        self._set.remove(rv.lower())
        if self.on_update is not None:
            self.on_update(self)
    def __setitem__(self, idx, value):
        old = self._headers[idx]
        self._set.remove(old.lower())
        self._headers[idx] = value
        self._set.add(value.lower())
        if self.on_update is not None:
            self.on_update(self)
    def __contains__(self, header):
        return header.lower() in self._set
    def __len__(self):
        return len(self._set)
    def __iter__(self):
        # iteration preserves original casing and insertion order
        return iter(self._headers)
    def __nonzero__(self):
        # Python 2 truth hook; on Python 3 truthiness falls back to __len__
        return bool(self._set)
    def __str__(self):
        return self.to_header()
    def __repr__(self):
        return '%s(%r)' % (
            self.__class__.__name__,
            self._headers
        )
class ETags(object):
    """A set that can be used to check if one etag is present in a collection
    of etags.
    """
    def __init__(self, strong_etags=None, weak_etags=None, star_tag=False):
        # A star tag matches everything, so concrete strong tags are
        # discarded when it is set.
        if star_tag or not strong_etags:
            self._strong = frozenset()
        else:
            self._strong = frozenset(strong_etags)
        self._weak = frozenset(weak_etags or ())
        self.star_tag = star_tag
    def as_set(self, include_weak=False):
        """Return the etags as a plain python set.  Weak tags are left out
        unless `include_weak` is set."""
        tags = set(self._strong)
        if include_weak:
            tags |= self._weak
        return tags
    def is_weak(self, etag):
        """Check if an etag is weak."""
        return etag in self._weak
    def contains_weak(self, etag):
        """Check if an etag is part of the set including weak and strong tags."""
        if self.is_weak(etag):
            return True
        return self.contains(etag)
    def contains(self, etag):
        """Check if an etag is part of the set ignoring weak tags.
        It is also possible to use the ``in`` operator.
        """
        if self.star_tag:
            # '*' matches any etag
            return True
        return etag in self._strong
    def contains_raw(self, etag):
        """Check a *quoted* tag against the set.  Weak tags are checked
        against both collections, strong tags only against the strong one."""
        etag, weak = unquote_etag(etag)
        return self.contains_weak(etag) if weak else self.contains(etag)
    def to_header(self):
        """Convert the etags set into a HTTP header string."""
        if self.star_tag:
            return '*'
        quoted = ['"%s"' % tag for tag in self._strong]
        quoted.extend('w/"%s"' % tag for tag in self._weak)
        return ', '.join(quoted)
    def __call__(self, etag=None, data=None, include_weak=False):
        if [etag, data].count(None) != 1:
            raise TypeError('either tag or data required, but at least one')
        if etag is None:
            etag = generate_etag(data)
        if include_weak and etag in self._weak:
            return True
        return etag in self._strong
    def __bool__(self):
        return bool(self.star_tag or self._strong or self._weak)
    __nonzero__ = __bool__
    def __str__(self):
        return self.to_header()
    def __iter__(self):
        # iteration only covers the strong tags
        return iter(self._strong)
    def __contains__(self, etag):
        return self.contains(etag)
    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, str(self))
class IfRange(object):
    """Very simple object that represents the `If-Range` header in parsed
    form.  It will either have neither a etag or date or one of either but
    never both.
    .. versionadded:: 0.7
    """
    def __init__(self, etag=None, date=None):
        #: The etag parsed and unquoted.  Ranges always operate on strong
        #: etags so the weakness information is not necessary.
        self.etag = etag
        #: The date in parsed format or `None`.
        self.date = date
    def to_header(self):
        """Converts the object back into an HTTP header."""
        if self.date is None:
            if self.etag is None:
                # neither field set: empty header value
                return ''
            return quote_etag(self.etag)
        return http_date(self.date)
    def __str__(self):
        return self.to_header()
    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, str(self))
class Range(object):
    """Represents a range header. All the methods are only supporting bytes
    as unit.  It does store multiple ranges but :meth:`range_for_length` will
    only work if only one range is provided.
    .. versionadded:: 0.7
    """
    def __init__(self, units, ranges):
        #: The units of this range.  Usually "bytes".
        self.units = units
        #: A list of ``(begin, end)`` tuples for the range header provided.
        #: The ranges are non-inclusive.
        self.ranges = ranges
    def range_for_length(self, length):
        """If the range is for bytes, the length is not None and there is
        exactly one range and it is satisfiable it returns a ``(start, stop)``
        tuple, otherwise `None`.
        :param length: total length of the underlying resource, or `None`.
        """
        if self.units != 'bytes' or length is None or len(self.ranges) != 1:
            return None
        start, end = self.ranges[0]
        if end is None:
            # open-ended range runs to the end of the resource
            end = length
        if start < 0:
            # suffix range (e.g. ``-500``) counts back from the end
            start += length
        if is_byte_range_valid(start, end, length):
            return start, min(end, length)
    def make_content_range(self, length):
        """Creates a :class:`~werkzeug.datastructures.ContentRange` object
        from the current range and given content length.
        """
        rng = self.range_for_length(length)
        if rng is not None:
            return ContentRange(self.units, rng[0], rng[1], length)
    def to_header(self):
        """Converts the object back into an HTTP header."""
        ranges = []
        for begin, end in self.ranges:
            if end is None:
                # Replaced the error-prone ``cond and a or b`` idiom with a
                # conditional expression.  Negative begin means a suffix
                # range and is emitted as-is (e.g. ``-500``).
                ranges.append('%s-' % begin if begin >= 0 else str(begin))
            else:
                # stored ranges are non-inclusive; the header is inclusive
                ranges.append('%s-%s' % (begin, end - 1))
        return '%s=%s' % (self.units, ','.join(ranges))
    def __str__(self):
        return self.to_header()
    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, str(self))
class ContentRange(object):
    """Represents the content range header.
    .. versionadded:: 0.7
    """
    def __init__(self, units, start, stop, length=None, on_update=None):
        # NOTE(review): assert-based validation is stripped when Python runs
        # with -O; invalid ranges would then pass through silently.
        assert is_byte_range_valid(start, stop, length), \
            'Bad range provided'
        self.on_update = on_update
        self.set(start, stop, length, units)
    # Class-scope helper (not a method): builds a property whose setter
    # fires the on_update callback.  It remains reachable as a class
    # attribute after class creation.
    def _callback_property(name):
        def fget(self):
            return getattr(self, name)
        def fset(self, value):
            setattr(self, name, value)
            if self.on_update is not None:
                self.on_update(self)
        return property(fget, fset)
    #: The units to use, usually "bytes"
    units = _callback_property('_units')
    #: The start point of the range or `None`.
    start = _callback_property('_start')
    #: The stop point of the range (non-inclusive) or `None`. Can only be
    #: `None` if also start is `None`.
    stop = _callback_property('_stop')
    #: The length of the range or `None`.
    length = _callback_property('_length')
    def set(self, start, stop, length=None, units='bytes'):
        """Simple method to update the ranges."""
        assert is_byte_range_valid(start, stop, length), \
            'Bad range provided'
        # assign the backing slots directly so on_update fires only once
        self._units = units
        self._start = start
        self._stop = stop
        self._length = length
        if self.on_update is not None:
            self.on_update(self)
    def unset(self):
        """Sets the units to `None` which indicates that the header should
        no longer be used.
        """
        self.set(None, None, units=None)
    def to_header(self):
        """Convert the object back into a ``Content-Range`` header value."""
        if self.units is None:
            return ''
        if self.length is None:
            # unknown total length is spelled '*'
            length = '*'
        else:
            length = self.length
        if self.start is None:
            return '%s */%s' % (self.units, length)
        return '%s %s-%s/%s' % (
            self.units,
            self.start,
            self.stop - 1,
            length
        )
    def __nonzero__(self):
        return self.units is not None
    __bool__ = __nonzero__
    def __str__(self):
        return self.to_header()
    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, str(self))
class Authorization(ImmutableDictMixin, dict):
    """Represents an `Authorization` header sent by the client. You should
    not create this kind of object yourself but use it when it's returned by
    the `parse_authorization_header` function.

    This object is a dict subclass and can be altered by setting dict items
    but it should be considered immutable as it's returned by the client and
    not meant for modifications.

    .. versionchanged:: 0.5
       This object became immutable.
    """

    def __init__(self, auth_type, data=None):
        dict.__init__(self, data or {})
        self.type = auth_type

    @property
    def username(self):
        """The username transmitted. This is set for both basic and digest
        auth all the time."""
        return self.get('username')

    @property
    def password(self):
        """When the authentication type is basic this is the password
        transmitted by the client, else `None`."""
        return self.get('password')

    @property
    def realm(self):
        """This is the server realm sent back for HTTP digest auth."""
        return self.get('realm')

    @property
    def nonce(self):
        """The nonce the server sent for digest auth, sent back by the client.
        A nonce should be unique for every 401 response for HTTP digest
        auth."""
        return self.get('nonce')

    @property
    def uri(self):
        """The URI from Request-URI of the Request-Line; duplicated because
        proxies are allowed to change the Request-Line in transit. HTTP
        digest auth only."""
        return self.get('uri')

    @property
    def nc(self):
        """The nonce count value transmitted by clients if a qop-header is
        also transmitted. HTTP digest auth only."""
        return self.get('nc')

    @property
    def cnonce(self):
        """If the server sent a qop-header in the ``WWW-Authenticate``
        header, the client has to provide this value for HTTP digest auth.
        See the RFC for more details."""
        return self.get('cnonce')

    @property
    def response(self):
        """A string of 32 hex digits computed as defined in RFC 2617, which
        proves that the user knows a password. Digest auth only."""
        return self.get('response')

    @property
    def opaque(self):
        """The opaque header from the server returned unchanged by the client.
        It is recommended that this string be base64 or hexadecimal data.
        Digest auth only."""
        return self.get('opaque')

    @property
    def qop(self):
        """Indicates what "quality of protection" the client has applied to
        the message for HTTP digest auth."""
        def on_update(header_set):
            # Mirror changes to the parsed set back into the dict entry.
            if not header_set and 'qop' in self:
                del self['qop']
            elif header_set:
                self['qop'] = header_set.to_header()
        return parse_set_header(self.get('qop'), on_update)
class WWWAuthenticate(UpdateDictMixin, dict):
    """Provides simple access to `WWW-Authenticate` headers."""

    #: list of keys that require quoting in the generated header
    _require_quoting = frozenset(['domain', 'nonce', 'opaque', 'realm', 'qop'])

    def __init__(self, auth_type=None, values=None, on_update=None):
        dict.__init__(self, values or ())
        if auth_type:
            # The auth scheme is stored under a reserved key so it shares
            # the dict with the challenge parameters.
            self['__auth_type__'] = auth_type
        self.on_update = on_update

    def set_basic(self, realm='authentication required'):
        """Clear the auth info and enable basic auth."""
        # `dict.clear`/`dict.update` are used directly to bypass
        # UpdateDictMixin, so `on_update` fires exactly once below.
        dict.clear(self)
        dict.update(self, {'__auth_type__': 'basic', 'realm': realm})
        if self.on_update:
            self.on_update(self)

    def set_digest(self, realm, nonce, qop=('auth',), opaque=None,
                   algorithm=None, stale=False):
        """Clear the auth info and enable digest auth."""
        d = {
            '__auth_type__': 'digest',
            'realm': realm,
            'nonce': nonce,
            'qop': dump_header(qop)
        }
        if stale:
            d['stale'] = 'TRUE'
        if opaque is not None:
            d['opaque'] = opaque
        if algorithm is not None:
            d['algorithm'] = algorithm
        dict.clear(self)
        dict.update(self, d)
        if self.on_update:
            self.on_update(self)

    def to_header(self):
        """Convert the stored values into a WWW-Authenticate header."""
        d = dict(self)
        # Default to "basic" when no scheme was ever set.
        auth_type = d.pop('__auth_type__', None) or 'basic'
        return '%s %s' % (auth_type.title(), ', '.join([
            '%s=%s' % (key, quote_header_value(value,
                       allow_token=key not in self._require_quoting))
            for key, value in iteritems(d)
        ]))

    def __str__(self):
        return self.to_header()

    def __repr__(self):
        return '<%s %r>' % (
            self.__class__.__name__,
            self.to_header()
        )

    # NOTE: the following two are class-body helper functions, not methods.
    # They run at class creation time to build the property objects below
    # and are cleaned up / converted at the end of the class body.
    def auth_property(name, doc=None):
        """A static helper function for subclasses to add extra authentication
        system properties onto a class::

            class FooAuthenticate(WWWAuthenticate):
                special_realm = auth_property('special_realm')

        For more information have a look at the sourcecode to see how the
        regular properties (:attr:`realm` etc.) are implemented.
        """
        def _set_value(self, value):
            # Assigning None removes the key entirely; everything else is
            # stored as its string form.
            if value is None:
                self.pop(name, None)
            else:
                self[name] = str(value)
        return property(lambda x: x.get(name), _set_value, doc=doc)

    def _set_property(name, doc=None):
        # Like auth_property, but for comma-separated set-valued parameters
        # (domain, qop): exposes a parsed HeaderSet that writes back on change.
        def fget(self):
            def on_update(header_set):
                if not header_set and name in self:
                    del self[name]
                elif header_set:
                    self[name] = header_set.to_header()
            return parse_set_header(self.get(name), on_update)
        return property(fget, doc=doc)

    type = auth_property('__auth_type__', doc='''
        The type of the auth mechanism. HTTP currently specifies
        `Basic` and `Digest`.''')
    realm = auth_property('realm', doc='''
        A string to be displayed to users so they know which username and
        password to use. This string should contain at least the name of
        the host performing the authentication and might additionally
        indicate the collection of users who might have access.''')
    domain = _set_property('domain', doc='''
        A list of URIs that define the protection space. If a URI is an
        absolute path, it is relative to the canonical root URL of the
        server being accessed.''')
    nonce = auth_property('nonce', doc='''
        A server-specified data string which should be uniquely generated
        each time a 401 response is made. It is recommended that this
        string be base64 or hexadecimal data.''')
    opaque = auth_property('opaque', doc='''
        A string of data, specified by the server, which should be returned
        by the client unchanged in the Authorization header of subsequent
        requests with URIs in the same protection space. It is recommended
        that this string be base64 or hexadecimal data.''')
    algorithm = auth_property('algorithm', doc='''
        A string indicating a pair of algorithms used to produce the digest
        and a checksum. If this is not present it is assumed to be "MD5".
        If the algorithm is not understood, the challenge should be ignored
        (and a different one used, if there is more than one).''')
    qop = _set_property('qop', doc='''
        A set of quality-of-privacy directives such as auth and auth-int.''')

    def _get_stale(self):
        val = self.get('stale')
        if val is not None:
            return val.lower() == 'true'

    def _set_stale(self, value):
        if value is None:
            self.pop('stale', None)
        else:
            # The header value is the literal token TRUE/FALSE.
            self['stale'] = value and 'TRUE' or 'FALSE'

    stale = property(_get_stale, _set_stale, doc='''
        A flag, indicating that the previous request from the client was
        rejected because the nonce value was stale.''')
    del _get_stale, _set_stale

    # make auth_property a staticmethod so that subclasses of
    # `WWWAuthenticate` can use it for new properties.
    auth_property = staticmethod(auth_property)
    del _set_property
class FileStorage(object):
    """The :class:`FileStorage` class is a thin wrapper over incoming files.
    It is used by the request object to represent uploaded files. All the
    attributes of the wrapper stream are proxied by the file storage so
    it's possible to do ``storage.read()`` instead of the long form
    ``storage.stream.read()``.
    """

    def __init__(self, stream=None, filename=None, name=None,
                 content_type=None, content_length=None,
                 headers=None):
        self.name = name
        self.stream = stream or _empty_stream

        # No filename given: fall back to the stream's ``name`` attribute.
        # Pseudo names that Python wraps in angular brackets (``<fdopen>``,
        # ``<stderr>`` ...) are discarded.
        if filename is None:
            filename = getattr(stream, 'name', None)
            wrap = make_literal_wrapper(filename)
            if filename and filename[0] == wrap('<') and filename[-1] == wrap('>'):
                filename = None

            # On Python 3 the filename must always be unicode; it may be
            # bytes if the file was opened through the bytes API.
            if not PY2 and isinstance(filename, bytes):
                filename = filename.decode(get_filesystem_encoding(),
                                           'replace')

        self.filename = filename

        self.headers = Headers() if headers is None else headers
        if content_type is not None:
            self.headers['Content-Type'] = content_type
        if content_length is not None:
            self.headers['Content-Length'] = str(content_length)

    def _parse_content_type(self):
        # Parse lazily and cache; the header never changes afterwards.
        if not hasattr(self, '_parsed_content_type'):
            self._parsed_content_type = \
                parse_options_header(self.content_type)

    @property
    def content_type(self):
        """The content-type sent in the header. Usually not available"""
        return self.headers.get('content-type')

    @property
    def content_length(self):
        """The content-length sent in the header. Usually not available"""
        return int(self.headers.get('content-length') or 0)

    @property
    def mimetype(self):
        """Like :attr:`content_type`, but without parameters (eg, without
        charset, type etc.) and always lowercase. For example if the content
        type is ``text/HTML; charset=utf-8`` the mimetype would be
        ``'text/html'``.

        .. versionadded:: 0.7
        """
        self._parse_content_type()
        return self._parsed_content_type[0].lower()

    @property
    def mimetype_params(self):
        """The mimetype parameters as dict. For example if the content
        type is ``text/html; charset=utf-8`` the params would be
        ``{'charset': 'utf-8'}``.

        .. versionadded:: 0.7
        """
        self._parse_content_type()
        return self._parsed_content_type[1]

    def save(self, dst, buffer_size=16384):
        """Save the file to a destination path or file object. If the
        destination is a file object you have to close it yourself after the
        call. The buffer size is the number of bytes held in memory during
        the copy process. It defaults to 16KB.

        For secure file saving also have a look at :func:`secure_filename`.

        :param dst: a filename or open file object the uploaded file
                    is saved to.
        :param buffer_size: the size of the buffer. This works the same as
                            the `length` parameter of
                            :func:`shutil.copyfileobj`.
        """
        from shutil import copyfileobj
        # Only close the destination if we opened it ourselves.
        opened_here = isinstance(dst, string_types)
        if opened_here:
            dst = open(dst, 'wb')
        try:
            copyfileobj(self.stream, dst, buffer_size)
        finally:
            if opened_here:
                dst.close()

    def close(self):
        """Close the underlying file if possible."""
        try:
            self.stream.close()
        except Exception:
            # Best effort: some stream objects cannot be closed twice or at all.
            pass

    def __nonzero__(self):
        return bool(self.filename)

    __bool__ = __nonzero__

    def __getattr__(self, name):
        # Proxy every unknown attribute to the wrapped stream.
        return getattr(self.stream, name)

    def __iter__(self):
        return iter(self.readline, '')

    def __repr__(self):
        return '<%s: %r (%r)>' % (
            self.__class__.__name__,
            self.filename,
            self.content_type
        )
# circular dependencies
from werkzeug.http import dump_options_header, dump_header, generate_etag, \
quote_header_value, parse_set_header, unquote_etag, quote_etag, \
parse_options_header, http_date, is_byte_range_valid
from werkzeug import exceptions
|
mit
|
ThirdProject/android_external_chromium_org
|
tools/perf/benchmarks/indexeddb_perf.py
|
23
|
1654
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Chromium's IndexedDB performance test. These test:
Databases:
create/delete
Keys:
create/delete
Indexes:
create/delete
Data access:
Random read/write
Sporadic writes
Read cache
Cursors:
Read & random writes
Walking multiple
Seeking.
"""
import json
import os
from telemetry import test
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_set
class _IndexedDbMeasurement(page_measurement.PageMeasurement):
  """Extracts per-section timings from Chromium's IndexedDB perf page."""

  def MeasurePage(self, _, tab, results):
    """Waits for the page to finish, then reports each timing in ms."""
    tab.WaitForDocumentReadyStateToBeComplete()
    # The perf page sets a "__done=1" cookie when all sections have run.
    tab.WaitForJavaScriptExpression(
        'window.document.cookie.indexOf("__done=1") >= 0', 600)

    result_dict = json.loads(tab.EvaluateJavaScript(
        "JSON.stringify(automation.getResults());"))

    total = 0.0
    for key, value in result_dict.items():
      # The page's own overall figure is skipped; we compute our own total.
      if key == 'OverallTestDuration':
        continue
      msec = float(value)
      results.Add(key, 'ms', msec, data_type='unimportant')
      total += msec
    results.Add('Total', 'ms', total)
class IndexedDb(test.Test):
  """Chromium's IndexedDB Performance tests."""
  test = _IndexedDbMeasurement

  def CreatePageSet(self, options):
    """Builds a one-page set pointing at the local IndexedDB perf page."""
    data_dir = os.path.join(util.GetChromiumSrcDir(), 'chrome', 'test',
                            'data', 'indexeddb')
    page_spec = {'pages': [{'url': 'file://perf_test.html'}]}
    return page_set.PageSet.FromDict(page_spec, data_dir)
|
bsd-3-clause
|
vipul-sharma20/oh-mainline
|
vendor/packages/django-inplaceedit/inplaceeditform/perms.py
|
19
|
1387
|
# Copyright (c) 2010-2013 by Yaco Sistemas <[email protected]> or <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this programe. If not, see <http://www.gnu.org/licenses/>.
class SuperUserPermEditInline(object):
    """Inline-edit permission adaptor: only authenticated superusers."""

    @classmethod
    def can_edit(cls, field):
        """Truthy when the requesting user is an authenticated superuser."""
        user = field.request.user
        return user.is_authenticated and user.is_superuser


class AdminDjangoPermEditInline(SuperUserPermEditInline):
    """Inline-edit permission adaptor: superusers, or any user holding the
    model's standard Django ``change`` permission."""

    @classmethod
    def can_edit(cls, field):
        """Truthy when the user may edit the model behind ``field``."""
        granted = super(AdminDjangoPermEditInline, cls).can_edit(field)
        if granted:
            return granted
        # Fall back to the conventional "<app_label>.change_<module>" perm.
        meta = field.model._meta
        return field.request.user.has_perm(
            '%s.change_%s' % (meta.app_label, meta.module_name))
|
agpl-3.0
|
pk-sam/crosswalk-test-suite
|
webapi/tct-datasync-tizen-tests/inst.apk.py
|
903
|
3180
|
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
    """Run *cmd* through the shell, echoing its merged stdout/stderr live.

    Returns a ``(return_code, output_lines)`` tuple. (Python 2 script.)
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        # Read line-by-line so output appears while the command runs.
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # poll() is None until the process exits; an empty line after exit
        # means the pipe has been fully drained.
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def uninstPKGs():
    """Uninstall org.xwalk.<name> for every .apk under SCRIPT_DIR.

    Returns False as soon as any adb uninstall reports "Failure".
    """
    status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for entry in files:
            if not entry.endswith(".apk"):
                continue
            # Package name is the apk file name without its extension.
            pkg = os.path.basename(os.path.splitext(entry)[0])
            cmd = "%s -s %s uninstall org.xwalk.%s" % (
                ADB_CMD, PARAMETERS.device, pkg)
            (return_code, output) = doCMD(cmd)
            for line in output:
                if "Failure" in line:
                    status = False
                    break
    return status
def instPKGs():
    """Install every .apk found under SCRIPT_DIR via adb.

    Returns False as soon as any adb install reports "Failure".
    """
    status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for entry in files:
            if not entry.endswith(".apk"):
                continue
            cmd = "%s -s %s install %s" % (ADB_CMD,
                                           PARAMETERS.device,
                                           os.path.join(root, entry))
            (return_code, output) = doCMD(cmd)
            for line in output:
                if "Failure" in line:
                    status = False
                    break
    return status
def main():
    """Parse command-line options, auto-pick an adb device when none is
    given, and run the requested install/uninstall action. (Python 2.)
    """
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)

    if not PARAMETERS.device:
        # No -s given: take the first device listed by "adb devices".
        (return_code, output) = doCMD("adb devices")
        for line in output:
            if str.find(line, "\tdevice") != -1:
                PARAMETERS.device = line.split("\t")[0]
                break

    if not PARAMETERS.device:
        print "No device found"
        sys.exit(1)

    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)

    # Default action (including plain "-i") is install.
    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
if __name__ == "__main__":
    main()
    # Reached only when main() did not already exit with an error code.
    sys.exit(0)
|
bsd-3-clause
|
sursum/buckanjaren
|
buckanjaren/lib/python3.5/site-packages/django/contrib/admin/templatetags/admin_modify.py
|
129
|
3006
|
import json
from django import template
from django.template.context import Context
# Tag library that collects this module's inclusion tags and filters.
register = template.Library()
@register.inclusion_tag('admin/prepopulated_fields_js.html', takes_context=True)
def prepopulated_fields_js(context):
    """
    Creates a list of prepopulated_fields that should render Javascript for
    the prepopulated fields for both the admin form and inlines.
    """
    # Collect field descriptors from the main admin form and from inline
    # forms that represent not-yet-saved objects.
    prepopulated_fields = []
    if 'adminform' in context:
        prepopulated_fields.extend(context['adminform'].prepopulated_fields)
    if 'inline_admin_formsets' in context:
        for formset in context['inline_admin_formsets']:
            for inline_form in formset:
                if inline_form.original is None:
                    prepopulated_fields.extend(inline_form.prepopulated_fields)

    # Serialize what the client-side prepopulate script needs for each field.
    prepopulated_fields_json = [
        {
            "id": "#%s" % descriptor["field"].auto_id,
            "name": descriptor["field"].name,
            "dependency_ids": ["#%s" % dep.auto_id
                               for dep in descriptor["dependencies"]],
            "dependency_list": [dep.name
                                for dep in descriptor["dependencies"]],
            "maxLength": descriptor["field"].field.max_length or 50,
            "allowUnicode": getattr(descriptor["field"].field,
                                    "allow_unicode", False),
        }
        for descriptor in prepopulated_fields
    ]

    context.update({
        'prepopulated_fields': prepopulated_fields,
        'prepopulated_fields_json': json.dumps(prepopulated_fields_json),
    })
    return context
@register.inclusion_tag('admin/submit_line.html', takes_context=True)
def submit_row(context):
    """
    Displays the row of buttons for delete and save.
    """
    change = context['change']
    is_popup = context['is_popup']
    save_as = context['save_as']
    show_save = context.get('show_save', True)
    show_save_and_continue = context.get('show_save_and_continue', True)

    # Compute each visibility flag up front, then hand them to the template.
    show_delete_link = (
        not is_popup and context['has_delete_permission'] and
        change and context.get('show_delete', True)
    )
    show_save_as_new = not is_popup and change and save_as
    show_save_and_add_another = (
        context['has_add_permission'] and not is_popup and
        (not save_as or context['add'])
    )

    ctx = Context(context)
    ctx.update({
        'show_delete_link': show_delete_link,
        'show_save_as_new': show_save_as_new,
        'show_save_and_add_another': show_save_and_add_another,
        'show_save_and_continue': not is_popup and context['has_change_permission'] and show_save_and_continue,
        'show_save': show_save,
    })
    return ctx
@register.filter
def cell_count(inline_admin_form):
    """Returns the number of cells used in a tabular inline"""
    # One hidden cell (the hidden 'id' field) plus one cell per field.
    count = 1 + sum(
        1
        for fieldset in inline_admin_form
        for line in fieldset
        for field in line
    )
    if inline_admin_form.formset.can_delete:
        # Delete checkbox occupies its own cell.
        count += 1
    return count
|
mit
|
1st/django
|
tests/db_typecasts/tests.py
|
206
|
2412
|
# Unit tests for typecast functions in django.db.backends.util
import datetime
import unittest
from django.db.backends import utils as typecasts
from django.utils import six
# Maps the name of a typecast function in django.db.backends.utils to a
# sequence of (input_string, expected_python_value) pairs; DBTypeCasts below
# calls each function by name and compares against the expected value.
TEST_CASES = {
    'typecast_date': (
        ('', None),
        (None, None),
        ('2005-08-11', datetime.date(2005, 8, 11)),
        ('1990-01-01', datetime.date(1990, 1, 1)),
    ),
    'typecast_time': (
        ('', None),
        (None, None),
        ('0:00:00', datetime.time(0, 0)),
        ('0:30:00', datetime.time(0, 30)),
        ('8:50:00', datetime.time(8, 50)),
        ('08:50:00', datetime.time(8, 50)),
        ('12:00:00', datetime.time(12, 00)),
        ('12:30:00', datetime.time(12, 30)),
        ('13:00:00', datetime.time(13, 00)),
        ('23:59:00', datetime.time(23, 59)),
        ('00:00:12', datetime.time(0, 0, 12)),
        ('00:00:12.5', datetime.time(0, 0, 12, 500000)),
        ('7:22:13.312', datetime.time(7, 22, 13, 312000)),
    ),
    'typecast_timestamp': (
        ('', None),
        (None, None),
        ('2005-08-11 0:00:00', datetime.datetime(2005, 8, 11)),
        ('2005-08-11 0:30:00', datetime.datetime(2005, 8, 11, 0, 30)),
        ('2005-08-11 8:50:30', datetime.datetime(2005, 8, 11, 8, 50, 30)),
        ('2005-08-11 8:50:30.123', datetime.datetime(2005, 8, 11, 8, 50, 30, 123000)),
        ('2005-08-11 8:50:30.9', datetime.datetime(2005, 8, 11, 8, 50, 30, 900000)),
        # Trailing UTC offsets are dropped, not applied.
        ('2005-08-11 8:50:30.312-05', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
        ('2005-08-11 8:50:30.312+02', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
        # ticket 14453
        ('2010-10-12 15:29:22.063202', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.063202-03', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.063202+04', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        # Fractions longer than 6 digits are truncated to microseconds.
        ('2010-10-12 15:29:22.0632021', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.0632029', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
    ),
}
class DBTypeCasts(unittest.TestCase):
    """Drives every typecast function through its TEST_CASES table."""

    def test_typeCasts(self):
        for func_name, cases in six.iteritems(TEST_CASES):
            typecast = getattr(typecasts, func_name)
            for inpt, expected in cases:
                got = typecast(inpt)
                self.assertEqual(
                    got, expected,
                    "In %s: %r doesn't match %r. Got %r instead." % (func_name, inpt, expected, got))
|
bsd-3-clause
|
stroykova/luigi
|
test/fib_test.py
|
21
|
2262
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from helpers import unittest
import luigi
import luigi.interface
from luigi.mock import MockTarget
# Calculates Fibonacci numbers :)
class Fib(luigi.Task):
    """Task computing the n-th Fibonacci number from its two predecessors."""
    n = luigi.IntParameter(default=100)

    def requires(self):
        # n = 0 and n = 1 are base cases with no upstream tasks.
        return [Fib(self.n - 1), Fib(self.n - 2)] if self.n >= 2 else []

    def output(self):
        return MockTarget('/tmp/fib_%d' % self.n)

    def run(self):
        if self.n == 0:
            value = 0
        elif self.n == 1:
            value = 1
        else:
            # Sum the single number written by each of the two dependencies.
            value = sum(
                int(line.strip())
                for target in self.input()
                for line in target.open('r'))
        out = self.output().open('w')
        out.write('%d\n' % value)
        out.close()
class FibTestBase(unittest.TestCase):
    """Shared fixture: start every test with an empty in-memory target fs."""

    def setUp(self):
        # MockTarget's filesystem is class-level state shared across tests.
        MockTarget.fs.clear()
class FibTest(FibTestBase):
    """Runs Fib(100) through the three luigi entry points and checks both
    an intermediate (fib_10) and the final (fib_100) output target."""

    def test_invoke(self):
        # Programmatic build with the in-process scheduler.
        luigi.build([Fib(100)], local_scheduler=True)
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_10'), b'55\n')
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_100'), b'354224848179261915075\n')

    def test_cmdline(self):
        # Same run driven through the command-line interface.
        luigi.run(['--local-scheduler', '--no-lock', 'Fib', '--n', '100'])
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_10'), b'55\n')
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_100'), b'354224848179261915075\n')

    def test_build_internal(self):
        luigi.build([Fib(100)], local_scheduler=True)
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_10'), b'55\n')
        self.assertEqual(MockTarget.fs.get_data('/tmp/fib_100'), b'354224848179261915075\n')
|
apache-2.0
|
emonty/ansible
|
lib/ansible/playbook/play.py
|
19
|
13265
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleParserError, AnsibleAssertionError
from ansible.module_utils._text import to_native
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.vars.manager import preprocess_vars
from ansible.utils.display import Display
display = Display()
__all__ = ['Play']
class Play(Base, Taggable, CollectionSearch):

    """
    A play is a language feature that represents a list of roles and/or
    task/handler blocks to execute on a given set of hosts.

    Usage:

       Play.load(datastructure) -> Play
       Play.something(...)
    """

    # =================================================================================
    # Each FieldAttribute becomes a post-validated play keyword of the same
    # name (without the leading underscore) via the Base metaclass machinery.
    _hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True, priority=-1)

    # Facts
    _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True)
    _gather_subset = FieldAttribute(isa='list', default=(lambda: C.DEFAULT_GATHER_SUBSET), listof=string_types, always_post_validate=True)
    _gather_timeout = FieldAttribute(isa='int', default=C.DEFAULT_GATHER_TIMEOUT, always_post_validate=True)
    _fact_path = FieldAttribute(isa='string', default=C.DEFAULT_FACT_PATH)

    # Variable Attributes
    _vars_files = FieldAttribute(isa='list', default=list, priority=99)
    _vars_prompt = FieldAttribute(isa='list', default=list, always_post_validate=False)

    # Role Attributes
    _roles = FieldAttribute(isa='list', default=list, priority=90)

    # Block (Task) Lists Attributes
    _handlers = FieldAttribute(isa='list', default=list)
    _pre_tasks = FieldAttribute(isa='list', default=list)
    _post_tasks = FieldAttribute(isa='list', default=list)
    _tasks = FieldAttribute(isa='list', default=list)

    # Flag/Setting Attributes
    _force_handlers = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('force_handlers'), always_post_validate=True)
    _max_fail_percentage = FieldAttribute(isa='percent', always_post_validate=True)
    _serial = FieldAttribute(isa='list', default=list, always_post_validate=True)
    _strategy = FieldAttribute(isa='string', default=C.DEFAULT_STRATEGY, always_post_validate=True)
    _order = FieldAttribute(isa='string', always_post_validate=True)

    # =================================================================================
    def __init__(self):
        super(Play, self).__init__()

        # Metadata carried over from an include, if this play came from one.
        self._included_conditional = None
        self._included_path = None
        # Hosts removed from the play at runtime (e.g. on failure).
        self._removed_hosts = []
        # Cache of compiled Role objects, keyed per role, shared by the executor.
        self.ROLE_CACHE = {}

        # Tag selection from the CLI; default to running everything.
        self.only_tags = set(context.CLIARGS.get('tags', [])) or frozenset(('all',))
        self.skip_tags = set(context.CLIARGS.get('skip_tags', []))
    def __repr__(self):
        # A play is represented by its (possibly hosts-derived) name.
        return self.get_name()

    def get_name(self):
        ''' return the name of the Play '''
        return self.name
@staticmethod
def load(data, variable_manager=None, loader=None, vars=None):
if ('name' not in data or data['name'] is None) and 'hosts' in data:
if data['hosts'] is None or all(host is None for host in data['hosts']):
raise AnsibleParserError("Hosts list cannot be empty - please check your playbook")
if isinstance(data['hosts'], list):
data['name'] = ','.join(data['hosts'])
else:
data['name'] = data['hosts']
p = Play()
if vars:
p.vars = vars.copy()
return p.load_data(data, variable_manager=variable_manager, loader=loader)
    def preprocess_data(self, ds):
        '''
        Adjusts play datastructure to cleanup old/legacy items
        '''

        if not isinstance(ds, dict):
            # Should have been validated upstream; fail loudly with the actual type.
            raise AnsibleAssertionError('while preprocessing data (%s), ds should be a dict but was a %s' % (ds, type(ds)))

        # The use of 'user' in the Play datastructure was deprecated to
        # line up with the same change for Tasks, due to the fact that
        # 'user' conflicted with the user module.
        if 'user' in ds:
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. "
                                         "The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)

            # Silently migrate the legacy key to its modern name.
            ds['remote_user'] = ds['user']
            del ds['user']

        return super(Play, self).preprocess_data(ds)
    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
        except AssertionError as e:
            # Surface the underlying assertion text so the user sees what was malformed.
            raise AnsibleParserError("A malformed block was encountered while loading tasks: %s" % to_native(e), obj=self._ds, orig_exc=e)
def _load_pre_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
Bare tasks outside of a block are given an implicit block.
'''
try:
return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
except AssertionError as e:
raise AnsibleParserError("A malformed block was encountered while loading pre_tasks", obj=self._ds, orig_exc=e)
def _load_post_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
Bare tasks outside of a block are given an implicit block.
'''
try:
return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
except AssertionError as e:
raise AnsibleParserError("A malformed block was encountered while loading post_tasks", obj=self._ds, orig_exc=e)
def _load_handlers(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed handlers/blocks.
Bare handlers outside of a block are given an implicit block.
'''
try:
return self._extend_value(
self.handlers,
load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader),
prepend=True
)
except AssertionError as e:
raise AnsibleParserError("A malformed block was encountered while loading handlers", obj=self._ds, orig_exc=e)
def _load_roles(self, attr, ds):
'''
Loads and returns a list of RoleInclude objects from the datastructure
list of role definitions and creates the Role from those objects
'''
if ds is None:
ds = []
try:
role_includes = load_list_of_roles(ds, play=self, variable_manager=self._variable_manager,
loader=self._loader, collection_search_list=self.collections)
except AssertionError as e:
raise AnsibleParserError("A malformed role declaration was encountered.", obj=self._ds, orig_exc=e)
roles = []
for ri in role_includes:
roles.append(Role.load(ri, play=self))
self.roles[:0] = roles
return self.roles
def _load_vars_prompt(self, attr, ds):
    '''
    Validate the play's vars_prompt entries and return them as a list.
    Each entry must carry a 'name' key and may only use the supported keys.
    '''
    valid_keys = ('name', 'prompt', 'default', 'private', 'confirm', 'encrypt', 'salt_size', 'salt', 'unsafe')
    prompts = []
    processed = preprocess_vars(ds)
    if processed is not None:
        for entry in processed:
            if 'name' not in entry:
                raise AnsibleParserError("Invalid vars_prompt data structure, missing 'name' key", obj=ds)
            for key in entry:
                if key not in valid_keys:
                    raise AnsibleParserError("Invalid vars_prompt data structure, found unsupported key '%s'" % key, obj=ds)
            prompts.append(entry)
    return prompts
def _compile_roles(self):
'''
Handles the role compilation step, returning a flat list of tasks
with the lowest level dependencies first. For example, if a role R
has a dependency D1, which also has a dependency D2, the tasks from
D2 are merged first, followed by D1, and lastly by the tasks from
the parent role R last. This is done for all roles in the Play.
'''
block_list = []
if len(self.roles) > 0:
for r in self.roles:
# Don't insert tasks from ``import/include_role``, preventing
# duplicate execution at the wrong time
if r.from_include:
continue
block_list.extend(r.compile(play=self))
return block_list
def compile_roles_handlers(self):
    '''
    Handles the role handler compilation step, returning a flat list of
    handler blocks gathered from every role in the play.
    '''
    handler_blocks = []
    for role in self.roles:
        # Handlers of include/import-ed roles are handled at inclusion time.
        if role.from_include:
            continue
        handler_blocks.extend(role.get_handler_blocks(play=self))
    return handler_blocks
def compile(self):
    '''
    Compiles and returns the task list for this play, compiled from the
    roles (which are themselves compiled recursively) and/or the list of
    tasks specified in the play.

    Execution order of the returned blocks is:
    pre_tasks, flush, roles, tasks, flush, post_tasks, flush.
    '''
    # create a block containing a single flush handlers meta
    # task, so we can be sure to run handlers at certain points
    # of the playbook execution
    flush_block = Block.load(
        data={'meta': 'flush_handlers'},
        play=self,
        variable_manager=self._variable_manager,
        loader=self._loader
    )
    block_list = []
    block_list.extend(self.pre_tasks)
    # NOTE: the very same flush_block instance is appended at each of the
    # three flush points below.
    block_list.append(flush_block)
    block_list.extend(self._compile_roles())
    block_list.extend(self.tasks)
    block_list.append(flush_block)
    block_list.extend(self.post_tasks)
    block_list.append(flush_block)
    return block_list
def get_vars(self):
    '''Return a shallow copy of the play's variables dict.'''
    vars_copy = self.vars.copy()
    return vars_copy
def get_vars_files(self):
    '''Return vars_files normalized to a list (None -> [], scalar -> [scalar]).'''
    vars_files = self.vars_files
    if vars_files is None:
        return []
    if isinstance(vars_files, list):
        return vars_files
    return [vars_files]
def get_handlers(self):
    '''Return a shallow copy of the play's handler list.'''
    return list(self.handlers)
def get_roles(self):
    '''Return a shallow copy of the play's role list.'''
    return list(self.roles)
def get_tasks(self):
    '''
    Return the play's pre_tasks, tasks and post_tasks in one list.

    NOTE: for a Block entry, the concatenation of its block/rescue/always
    task lists is appended as a single (nested) list element, so the result
    may contain a mix of tasks and lists of tasks.
    '''
    tasklist = []
    for task in self.pre_tasks + self.tasks + self.post_tasks:
        if isinstance(task, Block):
            tasklist.append(task.block + task.rescue + task.always)
        else:
            tasklist.append(task)
    return tasklist
def serialize(self):
    '''
    Serialize the play to a picklable dict, extending the base-class
    serialization with the play's roles and the include bookkeeping path.
    '''
    data = super(Play, self).serialize()
    roles = []
    for role in self.get_roles():
        roles.append(role.serialize())
    data['roles'] = roles
    data['included_path'] = self._included_path
    return data
def deserialize(self, data):
    '''
    Restore play state from a dict produced by serialize(), rebuilding
    Role objects from their serialized form.
    '''
    super(Play, self).deserialize(data)
    self._included_path = data.get('included_path', None)
    if 'roles' in data:
        role_data = data.get('roles', [])
        roles = []
        for role in role_data:
            r = Role()
            r.deserialize(role)
            roles.append(r)
        # Attach the rebuilt Role objects and drop the raw entry so the
        # serialized role data is not processed a second time.
        setattr(self, 'roles', roles)
        del data['roles']
def copy(self):
    '''
    Return a copy of this play, carrying over the role cache and the
    include-related bookkeeping that the base-class copy does not handle.
    '''
    new_me = super(Play, self).copy()
    # Shallow-copy the role cache so the copy can evolve independently.
    new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
    new_me._included_conditional = self._included_conditional
    new_me._included_path = self._included_path
    return new_me
|
gpl-3.0
|
JJediny/python-social-auth
|
examples/pyramid_example/example/settings.py
|
51
|
2415
|
# Settings consumed by python-social-auth; merged into the Pyramid
# registry settings by includeme() below.
SOCIAL_AUTH_SETTINGS = {
    'SOCIAL_AUTH_LOGIN_URL': '/',
    'SOCIAL_AUTH_LOGIN_REDIRECT_URL': '/done',
    # Dotted paths to the app's user model and auth helper functions.
    'SOCIAL_AUTH_USER_MODEL': 'example.models.User',
    'SOCIAL_AUTH_LOGIN_FUNCTION': 'example.auth.login_user',
    'SOCIAL_AUTH_LOGGEDIN_FUNCTION': 'example.auth.login_required',
    # Every backend listed here is importable from the `social` package.
    'SOCIAL_AUTH_AUTHENTICATION_BACKENDS': (
        'social.backends.twitter.TwitterOAuth',
        'social.backends.open_id.OpenIdAuth',
        'social.backends.google.GoogleOpenId',
        'social.backends.google.GoogleOAuth2',
        'social.backends.google.GoogleOAuth',
        'social.backends.yahoo.YahooOpenId',
        'social.backends.stripe.StripeOAuth2',
        'social.backends.persona.PersonaAuth',
        'social.backends.facebook.FacebookOAuth2',
        'social.backends.facebook.FacebookAppOAuth2',
        'social.backends.yahoo.YahooOAuth',
        'social.backends.angel.AngelOAuth2',
        'social.backends.behance.BehanceOAuth2',
        'social.backends.bitbucket.BitbucketOAuth',
        'social.backends.box.BoxOAuth2',
        'social.backends.linkedin.LinkedinOAuth',
        'social.backends.github.GithubOAuth2',
        'social.backends.foursquare.FoursquareOAuth2',
        'social.backends.instagram.InstagramOAuth2',
        'social.backends.live.LiveOAuth2',
        'social.backends.vk.VKOAuth2',
        'social.backends.dailymotion.DailymotionOAuth2',
        'social.backends.disqus.DisqusOAuth2',
        'social.backends.dropbox.DropboxOAuth',
        'social.backends.eveonline.EVEOnlineOAuth2',
        'social.backends.evernote.EvernoteSandboxOAuth',
        'social.backends.fitbit.FitbitOAuth',
        'social.backends.flickr.FlickrOAuth',
        'social.backends.livejournal.LiveJournalOpenId',
        'social.backends.soundcloud.SoundcloudOAuth2',
        'social.backends.thisismyjam.ThisIsMyJamOAuth1',
        'social.backends.stocktwits.StocktwitsOAuth2',
        'social.backends.tripit.TripItOAuth',
        'social.backends.twilio.TwilioAuth',
        'social.backends.clef.ClefOAuth2',
        'social.backends.xing.XingOAuth',
        'social.backends.yandex.YandexOAuth2',
        'social.backends.podio.PodioOAuth2',
        'social.backends.reddit.RedditOAuth2',
        'social.backends.mineid.MineIDOAuth2',
        'social.backends.wunderlist.WunderlistOAuth2',
    )
}
def includeme(config):
    """Pyramid inclusion hook: merge the social auth settings into the registry."""
    registry_settings = config.registry.settings
    registry_settings.update(SOCIAL_AUTH_SETTINGS)
|
bsd-3-clause
|
Suninus/NewsBlur
|
vendor/paypal/standard/ipn/signals.py
|
23
|
1129
|
"""
Note that sometimes you will get duplicate signals emitted, depending on configuration of your systems.
If you do encounter this, you will need to add the "dispatch_uid" to your connect handlers:
http://code.djangoproject.com/wiki/Signals#Helppost_saveseemstobeemittedtwiceforeachsave
"""
from django.dispatch import Signal
# Sent when a payment is successfully processed.
payment_was_successful = Signal()
# Sent when a payment is flagged.
payment_was_flagged = Signal()
# Sent when a payment was refunded by the seller.
payment_was_refunded = Signal()
# Sent when a payment was reversed by the buyer.
payment_was_reversed = Signal()
# Sent when a subscription was cancelled.
subscription_cancel = Signal()
# Sent when a subscription expires.
subscription_eot = Signal()
# Sent when a subscription was modified.
subscription_modify = Signal()
# Sent when a subscription is created.
subscription_signup = Signal()
# recurring_payment_profile_created
recurring_create = Signal()
# recurring_payment
recurring_payment = Signal()
# The remaining recurring_* signals mirror the PayPal IPN recurring-payment
# transaction types of the same names; the senders (not visible here)
# determine exactly when each fires.
recurring_cancel = Signal()
recurring_skipped = Signal()
recurring_failed = Signal()
|
mit
|
ac0x/googletest
|
scripts/release_docs.py
|
1167
|
6132
|
#!/usr/bin/env python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for branching Google Test/Mock wiki pages for a new version.
SYNOPSIS
release_docs.py NEW_RELEASE_VERSION
Google Test and Google Mock's external user documentation is in
interlinked wiki files. When we release a new version of
Google Test or Google Mock, we need to branch the wiki files
such that users of a specific version of Google Test/Mock can
look up documenation relevant for that version. This script
automates that process by:
- branching the current wiki pages (which document the
behavior of the SVN trunk head) to pages for the specified
version (e.g. branching FAQ.wiki to V2_6_FAQ.wiki when
NEW_RELEASE_VERSION is 2.6);
- updating the links in the branched files to point to the branched
version (e.g. a link in V2_6_FAQ.wiki that pointed to
Primer.wiki#Anchor will now point to V2_6_Primer.wiki#Anchor).
NOTE: NEW_RELEASE_VERSION must be a NEW version number for
which the wiki pages don't yet exist; otherwise you'll get SVN
errors like "svn: Path 'V1_7_PumpManual.wiki' is not a
directory" when running the script.
EXAMPLE
$ cd PATH/TO/GTEST_SVN_WORKSPACE/trunk
$ scripts/release_docs.py 2.6 # create wiki pages for v2.6
$ svn status # verify the file list
$ svn diff # verify the file contents
$ svn commit -m "release wiki pages for v2.6"
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sys
import common
# Wiki pages that shouldn't be branched for every gtest/gmock release.
# These are process/meta pages rather than version-specific user docs.
GTEST_UNVERSIONED_WIKIS = ['DevGuide.wiki']
GMOCK_UNVERSIONED_WIKIS = [
    'DesignDoc.wiki',
    'DevGuide.wiki',
    'KnownIssues.wiki'
]
def DropWikiSuffix(wiki_filename):
  """Removes the .wiki suffix (if any) from the given filename."""
  suffix = '.wiki'
  if wiki_filename.endswith(suffix):
    return wiki_filename[:-len(suffix)]
  return wiki_filename
class WikiBrancher(object):
  """Branches the wiki pages for a new Google Test/Mock release.

  Copies each versionable .wiki page to a V<major>_<minor>_-prefixed copy
  via `svn cp` and rewrites intra-wiki links in the copies to point at the
  versioned page names.
  """

  def __init__(self, dot_version):
    """Args:
      dot_version: the new release version in dotted form, e.g. '2.6'.
    """
    self.project, svn_root_path = common.GetSvnInfo()
    if self.project not in ('googletest', 'googlemock'):
      sys.exit('This script must be run in a gtest or gmock SVN workspace.')
    self.wiki_dir = svn_root_path + '/wiki'
    # Turn '2.6' to 'V2_6_'.
    self.version_prefix = 'V' + dot_version.replace('.', '_') + '_'
    self.files_to_branch = self.GetFilesToBranch()
    page_names = [DropWikiSuffix(f) for f in self.files_to_branch]
    # A link to Foo.wiki is in one of the following forms:
    #   [Foo words]
    #   [Foo#Anchor words]
    #   [http://code.google.com/.../wiki/Foo words]
    #   [http://code.google.com/.../wiki/Foo#Anchor words]
    # We want to replace 'Foo' with 'V2_6_Foo' in the above cases.
    self.search_for_re = re.compile(
        # This regex matches either
        #   [Foo
        # or
        #   /wiki/Foo
        # followed by a space or a #, where Foo is the name of an
        # unversioned wiki page.
        r'(\[|/wiki/)(%s)([ #])' % '|'.join(page_names))
    self.replace_with = r'\1%s\2\3' % (self.version_prefix,)

  def GetFilesToBranch(self):
    """Returns a list of .wiki file names that need to be branched."""
    unversioned_wikis = (GTEST_UNVERSIONED_WIKIS if self.project == 'googletest'
                         else GMOCK_UNVERSIONED_WIKIS)
    return [f for f in os.listdir(self.wiki_dir)
            if (f.endswith('.wiki') and
                not re.match(r'^V\d', f) and  # Excluded versioned .wiki files.
                f not in unversioned_wikis)]

  def BranchFiles(self):
    """Branches the .wiki files needed to be branched."""
    # print(...) works as intended under both Python 2 and Python 3.
    print('Branching %d .wiki files:' % (len(self.files_to_branch),))
    os.chdir(self.wiki_dir)
    for f in self.files_to_branch:
      command = 'svn cp %s %s%s' % (f, self.version_prefix, f)
      print(command)
      os.system(command)

  def UpdateLinksInBranchedFiles(self):
    """Rewrites wiki links in each branched copy to the versioned names."""
    for f in self.files_to_branch:
      source_file = os.path.join(self.wiki_dir, f)
      versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)
      print('Updating links in %s.' % (versioned_file,))
      # BUGFIX: the original used the Python-2-only file() builtin and never
      # closed the handles; open() in a with-block fixes the leak and is
      # portable to Python 3.
      with open(source_file, 'r') as source:
        text = source.read()
      new_text = self.search_for_re.sub(self.replace_with, text)
      with open(versioned_file, 'w') as versioned:
        versioned.write(new_text)
def main():
  # The sole command-line argument is the new release version, e.g. '2.6'.
  if len(sys.argv) != 2:
    # Print the module docstring as usage help and exit with an error.
    sys.exit(__doc__)
  brancher = WikiBrancher(sys.argv[1])
  brancher.BranchFiles()
  brancher.UpdateLinksInBranchedFiles()
if __name__ == '__main__':
  main()
|
bsd-3-clause
|
andreparrish/python-for-android
|
python3-alpha/python3-src/Lib/multiprocessing/synchronize.py
|
51
|
10685
|
#
# Module implementing synchronization primitives
#
# multiprocessing/synchronize.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event'
]
import threading
import os
import sys
from time import time as _time, sleep as _sleep
import _multiprocessing
from multiprocessing.process import current_process
from multiprocessing.util import Finalize, register_after_fork, debug
from multiprocessing.forking import assert_spawning, Popen
# Try to import the mp.synchronize module cleanly, if it fails
# raise ImportError for platforms lacking a working sem_open implementation.
# See issue 3770
try:
from _multiprocessing import SemLock
except (ImportError):
raise ImportError("This platform lacks a functioning sem_open" +
" implementation, therefore, the required" +
" synchronization primitives needed will not" +
" function, see issue 3770.")
#
# Constants
#
# Kind codes passed to _multiprocessing.SemLock: 0 selects a recursive
# mutex, 1 a counting semaphore.
RECURSIVE_MUTEX, SEMAPHORE = list(range(2))
# Platform-specific maximum value a semaphore may hold.
SEM_VALUE_MAX = _multiprocessing.SemLock.SEM_VALUE_MAX
#
# Base class for semaphores and mutexes; wraps `_multiprocessing.SemLock`
#
class SemLock(object):
    """Base class for semaphores and mutexes; wraps `_multiprocessing.SemLock`.

    Subclasses pick the kind (RECURSIVE_MUTEX or SEMAPHORE), the initial
    value and the maximum value. acquire/release are rebound directly to
    the underlying C object for speed.
    """
    def __init__(self, kind, value, maxvalue):
        sl = self._semlock = _multiprocessing.SemLock(kind, value, maxvalue)
        debug('created semlock with handle %s' % sl.handle)
        self._make_methods()
        if sys.platform != 'win32':
            # On POSIX the semaphore state must be refreshed in the child
            # after a fork; register a hook to do so.
            def _after_fork(obj):
                obj._semlock._after_fork()
            register_after_fork(self, _after_fork)
    def _make_methods(self):
        # Bind acquire/release straight through to the C-level object.
        self.acquire = self._semlock.acquire
        self.release = self._semlock.release
    def __enter__(self):
        return self._semlock.__enter__()
    def __exit__(self, *args):
        return self._semlock.__exit__(*args)
    def __getstate__(self):
        # Only picklable while being passed to a child process; the handle
        # is duplicated so the child can use it.
        assert_spawning(self)
        sl = self._semlock
        return (Popen.duplicate_for_child(sl.handle), sl.kind, sl.maxvalue)
    def __setstate__(self, state):
        self._semlock = _multiprocessing.SemLock._rebuild(*state)
        debug('recreated blocker with handle %r' % state[0])
        self._make_methods()
#
# Semaphore
#
class Semaphore(SemLock):
    """A counting semaphore shareable between processes."""

    def __init__(self, value=1):
        SemLock.__init__(self, SEMAPHORE, value, SEM_VALUE_MAX)

    def get_value(self):
        """Return the current value of the underlying semaphore."""
        return self._semlock._get_value()

    def __repr__(self):
        try:
            current = self._semlock._get_value()
        except Exception:
            current = 'unknown'
        return '<Semaphore(value=%s)>' % current
#
# Bounded semaphore
#
class BoundedSemaphore(Semaphore):
    """A semaphore whose value may never exceed its initial value."""

    def __init__(self, value=1):
        # Passing the initial value as maxvalue enforces the bound.
        SemLock.__init__(self, SEMAPHORE, value, value)

    def __repr__(self):
        try:
            current = self._semlock._get_value()
        except Exception:
            current = 'unknown'
        return '<BoundedSemaphore(value=%s, maxvalue=%s)>' % (
            current, self._semlock.maxvalue)
#
# Non-recursive lock
#
class Lock(SemLock):
    """Non-recursive lock: a semaphore with initial value 1 and maximum 1."""

    def __init__(self):
        SemLock.__init__(self, SEMAPHORE, 1, 1)

    def __repr__(self):
        try:
            if self._semlock._is_mine():
                # Held by this very thread: report "process" or
                # "process|thread" for non-main threads.
                owner = current_process().name
                thread_name = threading.current_thread().name
                if thread_name != 'MainThread':
                    owner += '|' + thread_name
            elif self._semlock._get_value() == 1:
                owner = 'None'
            elif self._semlock._count() > 0:
                owner = 'SomeOtherThread'
            else:
                owner = 'SomeOtherProcess'
        except Exception:
            owner = 'unknown'
        return '<Lock(owner=%s)>' % owner
#
# Recursive lock
#
class RLock(SemLock):
    """Recursive lock: may be acquired multiple times by the same owner."""
    def __init__(self):
        SemLock.__init__(self, RECURSIVE_MUTEX, 1, 1)
    def __repr__(self):
        try:
            if self._semlock._is_mine():
                # Held by this thread: report owner name and recursion count.
                name = current_process().name
                if threading.current_thread().name != 'MainThread':
                    name += '|' + threading.current_thread().name
                count = self._semlock._count()
            elif self._semlock._get_value() == 1:
                name, count = 'None', 0
            elif self._semlock._count() > 0:
                name, count = 'SomeOtherThread', 'nonzero'
            else:
                name, count = 'SomeOtherProcess', 'nonzero'
        except Exception:
            name, count = 'unknown', 'unknown'
        return '<RLock(%s, %s)>' % (name, count)
#
# Condition variable
#
class Condition(object):
    """Condition variable modeled on `threading.Condition`, built from
    semaphores so it can be shared between processes.

    Internal bookkeeping:
      _sleeping_count -- released by a waiter as it goes to sleep
      _woken_count    -- released by a waiter as it wakes up
      _wait_semaphore -- waiters block on this; notify() releases it
    """
    def __init__(self, lock=None):
        self._lock = lock or RLock()
        self._sleeping_count = Semaphore(0)
        self._woken_count = Semaphore(0)
        self._wait_semaphore = Semaphore(0)
        self._make_methods()
    def __getstate__(self):
        # Only picklable while being passed to a child process.
        assert_spawning(self)
        return (self._lock, self._sleeping_count,
                self._woken_count, self._wait_semaphore)
    def __setstate__(self, state):
        (self._lock, self._sleeping_count,
         self._woken_count, self._wait_semaphore) = state
        self._make_methods()
    def __enter__(self):
        return self._lock.__enter__()
    def __exit__(self, *args):
        return self._lock.__exit__(*args)
    def _make_methods(self):
        # acquire/release delegate straight to the underlying lock.
        self.acquire = self._lock.acquire
        self.release = self._lock.release
    def __repr__(self):
        try:
            num_waiters = (self._sleeping_count._semlock._get_value() -
                           self._woken_count._semlock._get_value())
        except Exception:
            # BUGFIX: was misspelled 'unkown'.
            num_waiters = 'unknown'
        return '<Condition(%s, %s)>' % (self._lock, num_waiters)
    def wait(self, timeout=None):
        assert self._lock._semlock._is_mine(), \
               'must acquire() condition before using wait()'
        # indicate that this thread is going to sleep
        self._sleeping_count.release()
        # release lock fully (it may be an RLock held recursively)
        count = self._lock._semlock._count()
        for i in range(count):
            self._lock.release()
        try:
            # wait for notification or timeout
            ret = self._wait_semaphore.acquire(True, timeout)
        finally:
            # indicate that this thread has woken
            self._woken_count.release()
        # reacquire lock to the same recursion depth as before
        for i in range(count):
            self._lock.acquire()
        return ret
    def notify(self):
        assert self._lock._semlock._is_mine(), 'lock is not owned'
        assert not self._wait_semaphore.acquire(False)
        # to take account of timeouts since last notify() we subtract
        # woken_count from sleeping_count and rezero woken_count
        while self._woken_count.acquire(False):
            res = self._sleeping_count.acquire(False)
            assert res
        if self._sleeping_count.acquire(False):  # try grabbing a sleeper
            self._wait_semaphore.release()       # wake up one sleeper
            self._woken_count.acquire()          # wait for the sleeper to wake
            # rezero _wait_semaphore in case a timeout just happened
            self._wait_semaphore.acquire(False)
    def notify_all(self):
        assert self._lock._semlock._is_mine(), 'lock is not owned'
        assert not self._wait_semaphore.acquire(False)
        # to take account of timeouts since last notify*() we subtract
        # woken_count from sleeping_count and rezero woken_count
        while self._woken_count.acquire(False):
            res = self._sleeping_count.acquire(False)
            assert res
        sleepers = 0
        while self._sleeping_count.acquire(False):
            self._wait_semaphore.release()       # wake up one sleeper
            sleepers += 1
        if sleepers:
            for i in range(sleepers):
                self._woken_count.acquire()      # wait for a sleeper to wake
            # rezero wait_semaphore in case some timeouts just happened
            while self._wait_semaphore.acquire(False):
                pass
#
# Event
#
class Event(object):
    """Event shareable between processes; the flag is a Semaphore whose
    value is exactly 1 when set and 0 when clear."""
    def __init__(self):
        self._cond = Condition(Lock())
        self._flag = Semaphore(0)
    def is_set(self):
        # Non-blocking acquire probes the flag; release restores it.
        self._cond.acquire()
        try:
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
        finally:
            self._cond.release()
    def set(self):
        self._cond.acquire()
        try:
            # acquire-then-release leaves the flag at exactly 1 whether or
            # not it was already set.
            self._flag.acquire(False)
            self._flag.release()
            self._cond.notify_all()
        finally:
            self._cond.release()
    def clear(self):
        self._cond.acquire()
        try:
            # Non-blocking acquire drops the flag to 0 if it was set.
            self._flag.acquire(False)
        finally:
            self._cond.release()
    def wait(self, timeout=None):
        self._cond.acquire()
        try:
            if self._flag.acquire(False):
                self._flag.release()
            else:
                self._cond.wait(timeout)
            # Re-check after waiting: a timeout may expire with the flag
            # still clear, in which case False is returned.
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
        finally:
            self._cond.release()
|
apache-2.0
|
agepoly/mezzanine
|
mezzanine/conf/admin.py
|
9
|
1866
|
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.messages import info
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
try:
from django.utils.encoding import force_text
except ImportError:
# Backward compatibility for Py2 and Django < 1.5
from django.utils.encoding import force_unicode as force_text
from mezzanine.conf.models import Setting
from mezzanine.conf.forms import SettingsForm
from mezzanine.utils.urls import admin_url
class SettingsAdmin(admin.ModelAdmin):
    """
    Admin class for settings model. Redirect add/change views to the list
    view where a single form is rendered for editing all settings.
    """
    class Media:
        css = {"all": ("mezzanine/css/admin/settings.css",)}
    def changelist_redirect(self):
        # Helper: redirect to this model's changelist view.
        changelist_url = admin_url(Setting, "changelist")
        return HttpResponseRedirect(changelist_url)
    def add_view(self, *args, **kwargs):
        # Individual settings are never added directly.
        return self.changelist_redirect()
    def change_view(self, *args, **kwargs):
        # Individual settings are never edited directly.
        return self.changelist_redirect()
    def changelist_view(self, request, extra_context=None):
        # On GET, request.POST is empty so the form is unbound (not valid)
        # and simply rendered; on a valid POST the settings are saved and
        # the user is redirected back to the list.
        if extra_context is None:
            extra_context = {}
        settings_form = SettingsForm(request.POST or None)
        if settings_form.is_valid():
            settings_form.save()
            info(request, _("Settings were successfully updated."))
            return self.changelist_redirect()
        extra_context["settings_form"] = settings_form
        extra_context["title"] = u"%s %s" % (
            _("Change"), force_text(Setting._meta.verbose_name_plural))
        return super(SettingsAdmin, self).changelist_view(request,
                                                          extra_context)
admin.site.register(Setting, SettingsAdmin)
|
bsd-2-clause
|
iDTLabssl/kitsune
|
kitsune/flagit/migrations/0001_initial.py
|
19
|
1986
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
    # Initial schema for the flagit app: a generic FlaggedObject pointing at
    # any content type, with a per-(content_type, object, creator) uniqueness
    # constraint so a user can flag a given object only once.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='FlaggedObject',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_id', models.PositiveIntegerField()),
                ('status', models.IntegerField(default=0, db_index=True, choices=[(0, 'Pending'), (1, 'Accepted and Fixed'), (2, 'Rejected')])),
                ('reason', models.CharField(max_length=64, choices=[(b'spam', 'Spam or other unrelated content'), (b'language', 'Inappropriate language/dialog'), (b'bug_support', 'Misplaced bug report or support request'), (b'abuse', 'Abusive content'), (b'other', 'Other (please specify)')])),
                ('notes', models.TextField(default=b'', blank=True)),
                ('created', models.DateTimeField(default=datetime.datetime.now, db_index=True)),
                ('handled', models.DateTimeField(default=datetime.datetime.now, db_index=True)),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
                ('creator', models.ForeignKey(related_name='flags', to=settings.AUTH_USER_MODEL)),
                ('handled_by', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ['created'],
                'permissions': (('can_moderate', 'Can moderate flagged objects'),),
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='flaggedobject',
            unique_together=set([('content_type', 'object_id', 'creator')]),
        ),
    ]
|
bsd-3-clause
|
pquentin/django
|
django/db/models/fields/subclassing.py
|
111
|
2015
|
"""
Convenience routines for creating non-trivial Field subclasses, as well as
backwards compatibility utilities.
Add SubfieldBase as the metaclass for your Field subclass, implement
to_python() and the other necessary methods and everything will work
seamlessly.
"""
import warnings
from django.utils.deprecation import RemovedInDjango20Warning
class SubfieldBase(type):
    """
    A metaclass for custom Field subclasses. This ensures the model's attribute
    has the descriptor protocol attached to it.

    Deprecated: emits RemovedInDjango20Warning; Field.from_db_value is the
    replacement.
    """
    def __new__(cls, name, bases, attrs):
        warnings.warn("SubfieldBase has been deprecated. Use Field.from_db_value instead.",
                      RemovedInDjango20Warning)
        new_class = super(SubfieldBase, cls).__new__(cls, name, bases, attrs)
        # Wrap (or synthesize) contribute_to_class so the field installs a
        # Creator descriptor on the model class.
        new_class.contribute_to_class = make_contrib(
            new_class, attrs.get('contribute_to_class')
        )
        return new_class
class Creator(object):
    """
    Descriptor that routes every assignment to the model attribute through
    the field's to_python(), so instances always hold converted values.
    """

    def __init__(self, field):
        self.field = field

    def __get__(self, obj, type=None):
        if obj is None:
            # Accessed on the class itself: return the descriptor.
            return self
        return obj.__dict__[self.field.name]

    def __set__(self, obj, value):
        converted = self.field.to_python(value)
        obj.__dict__[self.field.name] = converted
def make_contrib(superclass, func=None):
    """
    Build a contribute_to_class() method for a Field subclass.

    If `func` is given it is the subclass's existing contribute_to_class()
    and is invoked first (it is assumed to call the necessary superclass
    methods itself); otherwise the superclass implementation is invoked.
    Either way, a Creator descriptor is then installed on the model class.
    """
    def contribute_to_class(self, cls, name, **kwargs):
        if func is not None:
            func(self, cls, name, **kwargs)
        else:
            super(superclass, self).contribute_to_class(cls, name, **kwargs)
        setattr(cls, self.name, Creator(self))
    return contribute_to_class
|
bsd-3-clause
|
kalahbrown/HueBigSQL
|
desktop/core/ext-py/pysaml2-2.4.0/example/attributemaps/adfs_v20.py
|
40
|
3595
|
# See http://technet.microsoft.com/en-us/library/ee913589(v=ws.10).aspx
# for information regarding the default claim types supported by
# Microsoft ADFS v2.0.
# "fro" maps ADFS claim-type URIs to short attribute names; "to" is the
# exact inverse mapping.
MAP = {
    "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified",
    "fro": {
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress': 'emailAddress',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname': 'givenName',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name': 'name',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn': 'upn',
        'http://schemas.xmlsoap.org/claims/commonname': 'commonName',
        'http://schemas.xmlsoap.org/claims/group': 'group',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/role': 'role',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname': 'surname',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/privatepersonalidentifier': 'privatePersonalId',
        'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier': 'nameId',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/authenticationmethod': 'authenticationMethod',
        # NOTE(review): 'schemas.xmlsoap.com' (not .org) is used for
        # denyonlysid in both maps — looks like a typo but is at least
        # internally consistent; verify against the ADFS claim list.
        'http://schemas.xmlsoap.com/ws/2005/05/identity/claims/denyonlysid': 'denyOnlySid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/denyonlyprimarysid': 'denyOnlyPrimarySid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/denyonlyprimarygroupsid': 'denyOnlyPrimaryGroupSid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/groupsid': 'groupSid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/primarygroupsid': 'primaryGroupSid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/primarysid': 'primarySid',
        'http://schemas.microsoft.com/ws/2008/06/identity/claims/windowsaccountname': 'windowsAccountName',
    },
    "to": {
        'emailAddress': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress',
        'givenName': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname',
        'name': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name',
        'upn': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn',
        'commonName': 'http://schemas.xmlsoap.org/claims/commonname',
        'group': 'http://schemas.xmlsoap.org/claims/group',
        'role': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/role',
        'surname': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname',
        'privatePersonalId': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/privatepersonalidentifier',
        'nameId': 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier',
        'authenticationMethod': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/authenticationmethod',
        'denyOnlySid': 'http://schemas.xmlsoap.com/ws/2005/05/identity/claims/denyonlysid',
        'denyOnlyPrimarySid': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/denyonlyprimarysid',
        'denyOnlyPrimaryGroupSid': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/denyonlyprimarygroupsid',
        'groupSid': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/groupsid',
        'primaryGroupSid': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/primarygroupsid',
        'primarySid': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/primarysid',
        'windowsAccountName': 'http://schemas.microsoft.com/ws/2008/06/identity/claims/windowsaccountname',
    }
}
|
apache-2.0
|
zadgroup/edx-platform
|
common/djangoapps/util/model_utils.py
|
45
|
7133
|
"""
Utilities for django models.
"""
import unicodedata
import re
from eventtracking import tracker
from django.conf import settings
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django_countries.fields import Country
# The setting name used for events when "settings" (account settings, preferences, profile information) change.
# Emitted by emit_setting_changed_event() via the eventtracking tracker.
USER_SETTINGS_CHANGED_EVENT_NAME = u'edx.user.settings.changed'
def get_changed_fields_dict(instance, model_class):
    """
    Helper method for tracking field changes on a model.

    Given a model instance and class, return a dict whose keys are that
    instance's fields which differ from the last saved ones and whose values
    are the old values of those fields. Related fields are not considered.

    Args:
        instance (Model instance): the model instance with changes that are
            being tracked
        model_class (Model class): the class of the model instance we are
            tracking

    Returns:
        dict: a mapping of field names to current database values of those
            fields, or an empty dict if the model is new
    """
    try:
        saved = model_class.objects.get(pk=instance.pk)
    except model_class.DoesNotExist:
        # Object is new, so fields haven't technically changed.
        return {}
    field_names = [
        field[0].name for field in model_class._meta.get_fields_with_model()
    ]
    return {
        name: getattr(saved, name)
        for name in field_names
        if getattr(saved, name) != getattr(instance, name)
    }
def emit_field_changed_events(instance, user, db_table, excluded_fields=None, hidden_fields=None):
    """Emits a settings changed event for each field that has changed.

    Note that this function expects that a `_changed_fields` dict has been set
    as an attribute on `instance` (see `get_changed_fields_dict`).

    Args:
        instance (Model instance): the model instance that is being saved
        user (User): the user that this instance is associated with
        db_table (str): the name of the table that we're modifying
        excluded_fields (list): a list of field names for which events should
            not be emitted
        hidden_fields (list): a list of field names specifying fields whose
            values should not be included in the event (None will be used
            instead)

    Returns:
        None
    """
    def clean_field(field_name, value):
        """
        Prepare a field to be emitted in a JSON serializable format. If
        `field_name` is a hidden field, return None.
        """
        # `hidden_fields` is read from the enclosing scope; it is assigned
        # below, before this closure is ever called.
        if field_name in hidden_fields:
            return None
        # Country is not JSON serializable. Return the country code.
        if isinstance(value, Country):
            if value.code:
                return value.code
            else:
                return None
        return value
    excluded_fields = excluded_fields or []
    hidden_fields = hidden_fields or []
    changed_fields = getattr(instance, '_changed_fields', {})
    for field_name in changed_fields:
        if field_name not in excluded_fields:
            old_value = clean_field(field_name, changed_fields[field_name])
            new_value = clean_field(field_name, getattr(instance, field_name))
            emit_setting_changed_event(user, db_table, field_name, old_value, new_value)
    # Remove the now inaccurate _changed_fields attribute.
    if hasattr(instance, '_changed_fields'):
        del instance._changed_fields
def emit_setting_changed_event(user, db_table, setting_name, old_value, new_value):
    """Emits an event for a change in a setting.

    Args:
        user (User): the user that this setting is associated with.
        db_table (str): the name of the table that we're modifying.
        setting_name (str): the name of the setting being changed.
        old_value (object): the value before the change.
        new_value (object): the new value being saved.

    Returns:
        None
    """
    # Compute the maximum value length so that two copies can fit into the maximum event size
    # in addition to all the other fields recorded. Use floor division so that
    # max_value_length stays an int: a float here would break the string
    # slicing in _get_truncated_setting_value under Python 3 (or under
    # `from __future__ import division`).
    max_value_length = settings.TRACK_MAX_EVENT // 4
    serialized_old_value, old_was_truncated = _get_truncated_setting_value(old_value, max_length=max_value_length)
    serialized_new_value, new_was_truncated = _get_truncated_setting_value(new_value, max_length=max_value_length)
    truncated_values = []
    if old_was_truncated:
        truncated_values.append("old")
    if new_was_truncated:
        truncated_values.append("new")

    tracker.emit(
        USER_SETTINGS_CHANGED_EVENT_NAME,
        {
            "setting": setting_name,
            "old": serialized_old_value,
            "new": serialized_new_value,
            "truncated": truncated_values,
            "user_id": user.id,
            "table": db_table,
        }
    )
def _get_truncated_setting_value(value, max_length=None):
"""
Returns the truncated form of a setting value.
Returns:
truncated_value (object): the possibly truncated version of the value.
was_truncated (bool): returns true if the serialized value was truncated.
"""
if isinstance(value, basestring) and max_length is not None and len(value) > max_length:
return value[0:max_length], True
else:
return value, False
# Taken from Django 1.8 source code because it's not supported in 1.4
def slugify(value):
    """Converts value into a string suitable for readable URLs.

    Converts to ASCII and lowercase, turns runs of spaces/hyphens into a
    single hyphen, removes characters that aren't alphanumerics,
    underscores, or hyphens, and strips leading/trailing whitespace.

    Args:
        value (string): String to slugify.
    """
    text = force_unicode(value)
    # Decompose accented characters and drop anything outside ASCII.
    text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore').decode('ascii')
    text = re.sub(r'[^\w\s-]', '', text).strip().lower()
    return mark_safe(re.sub(r'[-\s]+', '-', text))
def generate_unique_readable_id(name, queryset, lookup_field):
    """Generates a unique readable id from name by appending a numeric suffix.

    Args:
        name (string): Name to generate the id from. May include spaces.
        queryset (QuerySet): QuerySet to check for uniqueness within.
        lookup_field (string): Field name on the model that corresponds to
            the unique identifier.

    Returns:
        string: generated unique identifier
    """
    base = slugify(name)
    # One query fetches every existing id sharing this prefix, so the loop
    # below can test membership without further database hits per check.
    taken = queryset.filter(**{lookup_field + '__startswith': base}).values_list(lookup_field, flat=True)
    if not taken or base not in taken:
        return base
    suffix = 2
    while base + '-' + str(suffix) in taken:
        suffix += 1
    return base + '-' + str(suffix)
|
agpl-3.0
|
LumPenPacK/NetworkExtractionFromImages
|
osx_build/nefi2_osx_amd64_xcode_2015/site-packages/numpy/core/fromnumeric.py
|
12
|
95796
|
"""Module containing non-deprecated functions borrowed from Numeric.
"""
from __future__ import division, absolute_import, print_function
import types
import warnings
import numpy as np
from .. import VisibleDeprecationWarning
from . import multiarray as mu
from . import umath as um
from . import numerictypes as nt
from .numeric import asarray, array, asanyarray, concatenate
from . import _methods
# Shorthand: map a scalar type object to its single-character type code.
_dt_ = nt.sctype2char
# functions that are methods
__all__ = [
    'alen', 'all', 'alltrue', 'amax', 'amin', 'any', 'argmax',
    'argmin', 'argpartition', 'argsort', 'around', 'choose', 'clip',
    'compress', 'cumprod', 'cumproduct', 'cumsum', 'diagonal', 'mean',
    'ndim', 'nonzero', 'partition', 'prod', 'product', 'ptp', 'put',
    'rank', 'ravel', 'repeat', 'reshape', 'resize', 'round_',
    'searchsorted', 'shape', 'size', 'sometrue', 'sort', 'squeeze',
    'std', 'sum', 'swapaxes', 'take', 'trace', 'transpose', 'var',
]
# Very old Pythons may lack GeneratorType; fall back to a type that
# matches nothing so isinstance checks simply fail.
try:
    _gentype = types.GeneratorType
except AttributeError:
    _gentype = type(None)
# save away Python sum, which `__all__` shows this module redefines
_sum_ = sum
# functions that are now methods
# functions that are now methods
def _wrapit(obj, method, *args, **kwds):
try:
wrap = obj.__array_wrap__
except AttributeError:
wrap = None
result = getattr(asarray(obj), method)(*args, **kwds)
if wrap:
if not isinstance(result, mu.ndarray):
result = asarray(result)
result = wrap(result)
return result
def take(a, indices, axis=None, out=None, mode='raise'):
    """
    Take elements from an array along an axis.

    Does the same thing as "fancy" indexing (indexing arrays using arrays),
    but can be easier to use when you need elements along a given axis.

    Parameters
    ----------
    a : array_like
        The source array.
    indices : array_like
        The indices of the values to extract; scalars are also allowed.
    axis : int, optional
        The axis over which to select values. By default, the flattened
        input array is used.
    out : ndarray, optional
        If provided, the result will be placed in this array. It should
        be of the appropriate shape and dtype.
    mode : {'raise', 'wrap', 'clip'}, optional
        How out-of-bounds indices behave: raise an error (default), wrap
        around, or clip to the valid range. 'clip' disables indexing with
        negative numbers.

    Returns
    -------
    subarray : ndarray
        The returned array has the same type as `a`.

    See Also
    --------
    compress : Take elements using a boolean mask
    ndarray.take : equivalent method
    """
    method = getattr(a, 'take', None)
    if method is None:
        # Not array-like enough to have .take: coerce, call, and re-wrap.
        return _wrapit(a, 'take', indices, axis, out, mode)
    return method(indices, axis, out, mode)
# not deprecated --- copy if necessary, view otherwise
def reshape(a, newshape, order='C'):
    """
    Gives a new shape to an array without changing its data.

    Parameters
    ----------
    a : array_like
        Array to be reshaped.
    newshape : int or tuple of ints
        The new shape, compatible with the original shape. One dimension
        may be -1, in which case its value is inferred from the length of
        the array and the remaining dimensions.
    order : {'C', 'F', 'A'}, optional
        Index order used both to read the elements of `a` and to place
        them into the reshaped array. 'C' is C-like (last index fastest),
        'F' is Fortran-like (first index fastest), and 'A' means
        Fortran-like if `a` is Fortran contiguous in memory, C-like
        otherwise. These refer only to indexing order, not memory layout.

    Returns
    -------
    reshaped_array : ndarray
        A new view object if possible; otherwise a copy. There is no
        guarantee of the memory layout of the returned array.

    See Also
    --------
    ndarray.reshape : Equivalent method.
    """
    method = getattr(a, 'reshape', None)
    if method is None:
        return _wrapit(a, 'reshape', newshape, order=order)
    return method(newshape, order=order)
def choose(a, choices, out=None, mode='raise'):
    """
    Construct an array from an index array and a set of arrays to choose from.

    `a` and each array in `choices` are broadcast to a common shape; each
    element of the result is taken from the choice array selected by the
    corresponding index in `a`. Roughly (with ndi = `numpy.lib.index_tricks`):
    ``np.choose(a,c) == np.array([c[a[I]][I] for I in ndi.ndindex(a.shape)])``.

    Parameters
    ----------
    a : int array
        Must contain integers in `[0, n-1]`, where `n` is the number of
        choices; with ``mode='wrap'`` or ``mode='clip'`` any integers are
        permissible.
    choices : sequence of arrays
        Choice arrays, each broadcastable to a common shape with `a`. If
        `choices` is itself an array (not recommended), its outermost
        dimension defines the "sequence". To avoid misinterpretation,
        prefer a list or tuple.
    out : array, optional
        If provided, the result will be inserted into this array. It
        should be of the appropriate shape and dtype.
    mode : {'raise' (default), 'wrap', 'clip'}, optional
        How indices outside `[0, n-1]` are treated: raise an exception,
        take the value mod `n`, or clip to the valid range.

    Returns
    -------
    merged_array : array
        The merged result.

    Raises
    ------
    ValueError: shape mismatch
        If `a` and each choice array are not all broadcastable to the same
        shape.

    See Also
    --------
    ndarray.choose : equivalent method
    """
    method = getattr(a, 'choose', None)
    if method is None:
        return _wrapit(a, 'choose', choices, out=out, mode=mode)
    return method(choices, out=out, mode=mode)
def repeat(a, repeats, axis=None):
    """
    Repeat elements of an array.

    Parameters
    ----------
    a : array_like
        Input array.
    repeats : int or array of ints
        The number of repetitions for each element. `repeats` is
        broadcasted to fit the shape of the given axis.
    axis : int, optional
        The axis along which to repeat values. By default, use the
        flattened input array and return a flat output array.

    Returns
    -------
    repeated_array : ndarray
        Output array which has the same shape as `a`, except along the
        given axis.

    See Also
    --------
    tile : Tile an array.
    """
    method = getattr(a, 'repeat', None)
    if method is None:
        return _wrapit(a, 'repeat', repeats, axis)
    return method(repeats, axis)
def put(a, ind, v, mode='raise'):
    """
    Replaces specified elements of an array with given values.

    The indexing works on the flattened target array; `put` is roughly
    equivalent to ``a.flat[ind] = v``.

    Parameters
    ----------
    a : ndarray
        Target array.
    ind : array_like
        Target indices, interpreted as integers.
    v : array_like
        Values to place in `a` at target indices. If `v` is shorter than
        `ind` it will be repeated as necessary.
    mode : {'raise', 'wrap', 'clip'}, optional
        How out-of-bounds indices behave: raise an error (default), wrap
        around, or clip to the valid range. 'clip' disables indexing with
        negative numbers.

    See Also
    --------
    putmask, place
    """
    return a.put(ind, v, mode=mode)
def swapaxes(a, axis1, axis2):
    """
    Interchange two axes of an array.

    Parameters
    ----------
    a : array_like
        Input array.
    axis1 : int
        First axis.
    axis2 : int
        Second axis.

    Returns
    -------
    a_swapped : ndarray
        For Numpy >= 1.10, if `a` is an ndarray, a view of `a` is
        returned; otherwise a new array is created. For earlier Numpy
        versions a view of `a` is returned only if the order of the axes
        is changed, otherwise the input array is returned.
    """
    method = getattr(a, 'swapaxes', None)
    if method is None:
        return _wrapit(a, 'swapaxes', axis1, axis2)
    return method(axis1, axis2)
def transpose(a, axes=None):
    """
    Permute the dimensions of an array.

    Parameters
    ----------
    a : array_like
        Input array.
    axes : list of ints, optional
        By default, reverse the dimensions, otherwise permute the axes
        according to the values given.

    Returns
    -------
    p : ndarray
        `a` with its axes permuted. A view is returned whenever possible.

    See Also
    --------
    rollaxis
    argsort

    Notes
    -----
    Use `transpose(a, argsort(axes))` to invert the transposition of
    tensors when using the `axes` keyword argument. Transposing a 1-D
    array returns an unchanged view of the original array.
    """
    method = getattr(a, 'transpose', None)
    if method is None:
        return _wrapit(a, 'transpose', axes)
    return method(axes)
def partition(a, kth, axis=-1, kind='introselect', order=None):
    """
    Return a partitioned copy of an array.

    Creates a copy of the array rearranged so that the element in the kth
    position is where it would be in a sorted array; all smaller elements
    are moved before it and all equal or greater elements behind it. The
    ordering within the two partitions is undefined.

    .. versionadded:: 1.8.0

    Parameters
    ----------
    a : array_like
        Array to be sorted.
    kth : int or sequence of ints
        Element index (or indices) to partition by; each named element
        ends up in its final sorted position.
    axis : int or None, optional
        Axis along which to sort. If None, the array is flattened before
        sorting. The default is -1, which sorts along the last axis.
    kind : {'introselect'}, optional
        Selection algorithm. Default is 'introselect'.
    order : str or list of str, optional
        For arrays with fields defined, which fields to compare first,
        second, etc.; unspecified fields are still used, in dtype order,
        to break ties.

    Returns
    -------
    partitioned_array : ndarray
        Array of the same type and shape as `a`.

    See Also
    --------
    ndarray.partition : Method to sort an array in-place.
    argpartition : Indirect partition.
    sort : Full sorting

    Notes
    -----
    Partitioning along any axis but the last makes temporary copies, so
    the last axis is fastest. Complex numbers sort lexicographically
    (real part first, then imaginary).
    """
    if axis is None:
        # flatten() copies, so the in-place partition below cannot touch
        # the caller's data.
        arr = asanyarray(a).flatten()
        axis = 0
    else:
        arr = asanyarray(a).copy(order="K")
    arr.partition(kth, axis=axis, kind=kind, order=order)
    return arr
def argpartition(a, kth, axis=-1, kind='introselect', order=None):
    """
    Perform an indirect partition along the given axis.

    Returns an array of indices of the same shape as `a` that index data
    along the given axis in partitioned order, using the algorithm
    specified by `kind`.

    .. versionadded:: 1.8.0

    Parameters
    ----------
    a : array_like
        Array to sort.
    kth : int or sequence of ints
        Element index (or indices) to partition by; each named element
        ends up in its final sorted position.
    axis : int or None, optional
        Axis along which to sort. The default is -1 (the last axis). If
        None, the flattened array is used.
    kind : {'introselect'}, optional
        Selection algorithm. Default is 'introselect'.
    order : str or list of str, optional
        For arrays with fields defined, which fields to compare first,
        second, etc.; unspecified fields are still used, in dtype order,
        to break ties.

    Returns
    -------
    index_array : ndarray, int
        Array of indices that partition `a` along the specified axis.

    See Also
    --------
    partition : Describes partition algorithms used.
    ndarray.partition : Inplace partition.
    argsort : Full indirect sort
    """
    method = getattr(a, 'argpartition', None)
    if method is None:
        return _wrapit(a, 'argpartition', kth, axis, kind, order)
    return method(kth, axis, kind=kind, order=order)
def sort(a, axis=-1, kind='quicksort', order=None):
    """
    Return a sorted copy of an array.

    Parameters
    ----------
    a : array_like
        Array to be sorted.
    axis : int or None, optional
        Axis along which to sort. If None, the array is flattened before
        sorting. The default is -1, which sorts along the last axis.
    kind : {'quicksort', 'mergesort', 'heapsort'}, optional
        Sorting algorithm. Default is 'quicksort'; only 'mergesort' is
        stable.
    order : str or list of str, optional
        For arrays with fields defined, which fields to compare first,
        second, etc.; unspecified fields are still used, in dtype order,
        to break ties.

    Returns
    -------
    sorted_array : ndarray
        Array of the same type and shape as `a`.

    See Also
    --------
    ndarray.sort : Method to sort an array in-place.
    argsort : Indirect sort.
    lexsort : Indirect stable sort on multiple keys.
    searchsorted : Find elements in a sorted array.
    partition : Partial sort.

    Notes
    -----
    Sorting along any axis but the last makes temporary copies, so the
    last axis is fastest. Complex numbers sort lexicographically (real
    part first, then imaginary). Since numpy 1.4.0 nan values are sorted
    to the end: Real order is [R, nan]; Complex order is
    [R + Rj, R + nanj, nan + Rj, nan + nanj].
    """
    if axis is None:
        # flatten() copies, so the in-place sort below cannot touch the
        # caller's data.
        arr = asanyarray(a).flatten()
        axis = 0
    else:
        arr = asanyarray(a).copy(order="K")
    arr.sort(axis, kind, order)
    return arr
def argsort(a, axis=-1, kind='quicksort', order=None):
    """
    Returns the indices that would sort an array.

    Performs an indirect sort along the given axis using the algorithm
    specified by `kind`, returning an array of indices of the same shape
    as `a` that index data along the given axis in sorted order.

    Parameters
    ----------
    a : array_like
        Array to sort.
    axis : int or None, optional
        Axis along which to sort. The default is -1 (the last axis). If
        None, the flattened array is used.
    kind : {'quicksort', 'mergesort', 'heapsort'}, optional
        Sorting algorithm.
    order : str or list of str, optional
        For arrays with fields defined, which fields to compare first,
        second, etc.; unspecified fields are still used, in dtype order,
        to break ties.

    Returns
    -------
    index_array : ndarray, int
        Array of indices that sort `a` along the specified axis; in other
        words, ``a[index_array]`` yields a sorted `a`.

    See Also
    --------
    sort : Describes sorting algorithms used.
    lexsort : Indirect stable sort with multiple keys.
    ndarray.sort : Inplace sort.
    argpartition : Indirect partial sort.

    Notes
    -----
    See `sort` for notes on the different sorting algorithms and on the
    sort order of nan values.
    """
    method = getattr(a, 'argsort', None)
    if method is None:
        return _wrapit(a, 'argsort', axis, kind, order)
    return method(axis, kind, order)
def argmax(a, axis=None, out=None):
    """
    Returns the indices of the maximum values along an axis.

    Parameters
    ----------
    a : array_like
        Input array.
    axis : int, optional
        By default, the index is into the flattened array, otherwise
        along the specified axis.
    out : array, optional
        If provided, the result will be inserted into this array. It
        should be of the appropriate shape and dtype.

    Returns
    -------
    index_array : ndarray of ints
        Array of indices into the array. It has the same shape as
        `a.shape` with the dimension along `axis` removed.

    See Also
    --------
    ndarray.argmax, argmin
    amax : The maximum value along a given axis.
    unravel_index : Convert a flat index into an index tuple.

    Notes
    -----
    In case of multiple occurrences of the maximum values, the indices
    corresponding to the first occurrence are returned.
    """
    method = getattr(a, 'argmax', None)
    if method is None:
        return _wrapit(a, 'argmax', axis, out)
    return method(axis, out)
def argmin(a, axis=None, out=None):
    """
    Returns the indices of the minimum values along an axis.

    Parameters
    ----------
    a : array_like
        Input array.
    axis : int, optional
        By default, the index is into the flattened array, otherwise
        along the specified axis.
    out : array, optional
        If provided, the result will be inserted into this array. It
        should be of the appropriate shape and dtype.

    Returns
    -------
    index_array : ndarray of ints
        Array of indices into the array. It has the same shape as
        `a.shape` with the dimension along `axis` removed.

    See Also
    --------
    ndarray.argmin, argmax
    amin : The minimum value along a given axis.
    unravel_index : Convert a flat index into an index tuple.

    Notes
    -----
    In case of multiple occurrences of the minimum values, the indices
    corresponding to the first occurrence are returned.
    """
    method = getattr(a, 'argmin', None)
    if method is None:
        return _wrapit(a, 'argmin', axis, out)
    return method(axis, out)
def searchsorted(a, v, side='left', sorter=None):
    """
    Find indices where elements should be inserted to maintain order.

    Find the indices into a sorted array `a` such that, if the
    corresponding elements in `v` were inserted before the indices, the
    order of `a` would be preserved.

    Parameters
    ----------
    a : 1-D array_like
        Input array. If `sorter` is None, then it must be sorted in
        ascending order, otherwise `sorter` must be an array of indices
        that sort it.
    v : array_like
        Values to insert into `a`.
    side : {'left', 'right'}, optional
        If 'left', the index of the first suitable location found is
        given.  If 'right', return the last such index.  If there is no
        suitable index, return either 0 or N (N being the length of `a`).
    sorter : 1-D array_like, optional
        Optional array of integer indices that sort `a` into ascending
        order.  Typically the result of argsort.

    Returns
    -------
    indices : array of ints
        Array of insertion points with the same shape as `v`.

    See Also
    --------
    sort : Return a sorted copy of an array.
    histogram : Produce histogram from 1-D data.

    Notes
    -----
    Binary search is used to find the required insertion points.
    """
    # Use the ndarray method when present; otherwise let _wrapit convert
    # the array-like and call the method on the result.
    method = getattr(a, 'searchsorted', None)
    if method is None:
        return _wrapit(a, 'searchsorted', v, side, sorter)
    return method(v, side, sorter)
def resize(a, new_shape):
    """
    Return a new array with the specified shape.

    If the new array is larger than the original array, then the new
    array is filled with repeated copies of `a`.  Note that this
    behavior is different from a.resize(new_shape) which fills with
    zeros instead of repeated copies of `a`.

    Parameters
    ----------
    a : array_like
        Array to be resized.
    new_shape : int or tuple of int
        Shape of resized array.

    Returns
    -------
    reshaped_array : ndarray
        The new array is formed from the data in the old array, repeated
        if necessary to fill out the required number of elements.  The
        data are repeated in the order that they are stored in memory.

    See Also
    --------
    ndarray.resize : resize an array in-place.

    Examples
    --------
    >>> a = np.array([[0, 1], [2, 3]])
    >>> np.resize(a, (2, 3))
    array([[0, 1, 2],
           [3, 0, 1]])
    >>> np.resize(a, (1, 4))
    array([[0, 1, 2, 3]])
    """
    if isinstance(new_shape, (int, nt.integer)):
        new_shape = (new_shape,)
    a = ravel(a)
    Na = len(a)
    if not Na:
        # Empty input: nothing to repeat, return zeros of the target shape.
        return mu.zeros(new_shape, a.dtype.char)
    total_size = um.multiply.reduce(new_shape)
    if total_size == 0:
        return a[:0]
    # BUG FIX: the previous `int(total_size / Na)` used float true division,
    # which silently loses precision once total_size exceeds 2**53.
    # divmod keeps the computation in exact integer arithmetic.
    n_copies, extra = divmod(total_size, Na)
    if extra != 0:
        # One partial copy is needed to cover the remainder.
        n_copies = n_copies + 1
    a = concatenate((a,) * n_copies)
    if extra != 0:
        # Trim the surplus elements contributed by the partial copy.
        a = a[:total_size]
    return reshape(a, new_shape)
def squeeze(a, axis=None):
    """
    Remove single-dimensional entries from the shape of an array.

    Parameters
    ----------
    a : array_like
        Input data.
    axis : None or int or tuple of ints, optional
        Selects a subset of the single-dimensional entries in the
        shape.  If an axis is selected with shape entry greater than
        one, an error is raised.

    Returns
    -------
    squeezed : ndarray
        The input array, but with all or a subset of the dimensions of
        length 1 removed.  This is always `a` itself or a view into `a`.

    Examples
    --------
    >>> x = np.array([[[0], [1], [2]]])
    >>> np.squeeze(x).shape
    (3,)
    """
    method = getattr(a, 'squeeze', None)
    if method is None:
        # Array-like without a squeeze method: convert and retry.
        # (The axis argument is intentionally dropped here, as before.)
        return _wrapit(a, 'squeeze')
    try:
        # Prefer the newer axis= keyword when the object supports it.
        return method(axis=axis)
    except TypeError:
        # Older array subclasses may not accept axis; keep working.
        return method()
def diagonal(a, offset=0, axis1=0, axis2=1):
    """
    Return specified diagonals.

    If `a` is 2-D, returns the diagonal of `a` with the given offset,
    i.e., the collection of elements of the form ``a[i, i+offset]``.  If
    `a` has more than two dimensions, then the axes specified by `axis1`
    and `axis2` are used to determine the 2-D sub-array whose diagonal
    is returned.  The shape of the resulting array can be determined by
    removing `axis1` and `axis2` and appending an index to the right
    equal to the size of the resulting diagonals.

    In NumPy 1.9 this returns a read-only view on the original array;
    in NumPy 1.10 it will return a read/write view.  If you depend on
    getting a writable copy, use ``np.diagonal(a).copy()`` explicitly —
    that works with both past and future versions of NumPy.

    Parameters
    ----------
    a : array_like
        Array from which the diagonals are taken.
    offset : int, optional
        Offset of the diagonal from the main diagonal.  Can be positive
        or negative.  Defaults to main diagonal (0).
    axis1 : int, optional
        Axis to be used as the first axis of the 2-D sub-arrays from
        which the diagonals should be taken.  Defaults to first axis (0).
    axis2 : int, optional
        Axis to be used as the second axis of the 2-D sub-arrays from
        which the diagonals should be taken.  Defaults to second axis (1).

    Returns
    -------
    array_of_diagonals : ndarray
        If `a` is 2-D and not a matrix, a 1-D array of the same type as
        `a` containing the diagonal is returned.  If `a` is a matrix, a
        1-D array is returned for backward compatibility.  If the
        dimension of `a` is greater than two, then an array of diagonals
        is returned, "packed" from left-most dimension to right-most.

    Raises
    ------
    ValueError
        If the dimension of `a` is less than 2.

    See Also
    --------
    diag : MATLAB work-a-like for 1-D and 2-D arrays.
    diagflat : Create diagonal arrays.
    trace : Sum along diagonals.

    Examples
    --------
    >>> a = np.arange(4).reshape(2, 2)
    >>> a.diagonal()
    array([0, 3])
    >>> a.diagonal(1)
    array([1])
    """
    if isinstance(a, np.matrix):
        # Matrices get plain-ndarray handling so the diagonal stays 1-D
        # (backward compatibility).
        arr = asarray(a)
    else:
        arr = asanyarray(a)
    return arr.diagonal(offset, axis1, axis2)
def trace(a, offset=0, axis1=0, axis2=1, dtype=None, out=None):
    """
    Return the sum along diagonals of the array.

    If `a` is 2-D, the sum along its diagonal with the given offset is
    returned, i.e., the sum of elements ``a[i, i+offset]`` for all i.
    If `a` has more than two dimensions, then the axes specified by
    `axis1` and `axis2` are used to determine the 2-D sub-arrays whose
    traces are returned.  The shape of the resulting array is the same
    as that of `a` with `axis1` and `axis2` removed.

    Parameters
    ----------
    a : array_like
        Input array, from which the diagonals are taken.
    offset : int, optional
        Offset of the diagonal from the main diagonal.  Can be both
        positive and negative.  Defaults to 0.
    axis1, axis2 : int, optional
        Axes to be used as the first and second axis of the 2-D
        sub-arrays from which the diagonals should be taken.  Defaults
        are the first two axes of `a`.
    dtype : dtype, optional
        Determines the data-type of the returned array and of the
        accumulator where the elements are summed.
    out : ndarray, optional
        Array into which the output is placed.  Its type is preserved
        and it must be of the right shape to hold the output.

    Returns
    -------
    sum_along_diagonals : ndarray
        If `a` is 2-D, the sum along the diagonal is returned.  If `a`
        has larger dimensions, then an array of sums along diagonals is
        returned.

    See Also
    --------
    diag, diagonal, diagflat

    Examples
    --------
    >>> np.trace(np.eye(3))
    3.0
    """
    # Matrices are routed through a plain ndarray so the result keeps
    # its historical (non-matrix) shape.
    arr = asarray(a) if isinstance(a, np.matrix) else asanyarray(a)
    return arr.trace(offset, axis1, axis2, dtype, out)
def ravel(a, order='C'):
    """Return a contiguous flattened array.

    A 1-D array, containing the elements of the input, is returned.  A
    copy is made only if needed.  As of NumPy 1.10, the returned array
    will have the same type as the input array (for example, a masked
    array will be returned for a masked array input).

    Parameters
    ----------
    a : array_like
        Input array.  The elements in `a` are read in the order
        specified by `order`, and packed as a 1-D array.
    order : {'C', 'F', 'A', 'K'}, optional
        The elements of `a` are read using this index order.  'C' means
        row-major (last axis fastest); 'F' means column-major (first
        axis fastest).  'C' and 'F' ignore the memory layout and only
        refer to axis indexing order.  'A' reads in Fortran-like order
        if `a` is Fortran contiguous in memory, C-like order otherwise.
        'K' reads the elements in the order they occur in memory,
        except for reversing the data when strides are negative.
        Default is 'C'.

    Returns
    -------
    y : array_like
        If `a` is a matrix, y is a 1-D ndarray, otherwise y is an array
        of the same subtype as `a`, with shape ``(a.size,)``.  Matrices
        are special cased for backward compatibility.

    See Also
    --------
    ndarray.flat : 1-D iterator over an array.
    ndarray.flatten : 1-D array copy of the elements of an array
        in row-major order.
    ndarray.reshape : Change the shape of an array without changing
        its data.

    Examples
    --------
    >>> x = np.array([[1, 2, 3], [4, 5, 6]])
    >>> np.ravel(x)
    array([1, 2, 3, 4, 5, 6])
    >>> np.ravel(x, order='F')
    array([1, 4, 2, 5, 3, 6])
    """
    # Matrices are flattened via a plain ndarray to keep the historical
    # 1-D result; everything else preserves its subtype.
    arr = asarray(a) if isinstance(a, np.matrix) else asanyarray(a)
    return arr.ravel(order)
def nonzero(a):
    """
    Return the indices of the elements that are non-zero.

    Returns a tuple of arrays, one for each dimension of `a`,
    containing the indices of the non-zero elements in that dimension.
    The values in `a` are always tested and returned in row-major,
    C-style order.  The corresponding non-zero values can be obtained
    with ``a[nonzero(a)]``.

    To group the indices by element, rather than dimension, use
    ``transpose(nonzero(a))`` — the result is always a 2-D array with a
    row for each non-zero element.

    Parameters
    ----------
    a : array_like
        Input array.

    Returns
    -------
    tuple_of_arrays : tuple
        Indices of elements that are non-zero.

    See Also
    --------
    flatnonzero :
        Return indices that are non-zero in the flattened version of
        the input array.
    ndarray.nonzero :
        Equivalent ndarray method.
    count_nonzero :
        Counts the number of non-zero elements in the input array.

    Examples
    --------
    >>> x = np.eye(3)
    >>> np.nonzero(x)
    (array([0, 1, 2]), array([0, 1, 2]))
    """
    method = getattr(a, 'nonzero', None)
    if method is None:
        # Plain array-likes: convert through _wrapit and use its result.
        return _wrapit(a, 'nonzero')
    return method()
def shape(a):
    """
    Return the shape of an array.

    Parameters
    ----------
    a : array_like
        Input array.

    Returns
    -------
    shape : tuple of ints
        The elements of the shape tuple give the lengths of the
        corresponding array dimensions.

    See Also
    --------
    alen
    ndarray.shape : Equivalent array method.

    Examples
    --------
    >>> np.shape(np.eye(3))
    (3, 3)
    >>> np.shape([[1, 2]])
    (1, 2)
    >>> np.shape(0)
    ()
    """
    # Use the attribute directly when the object exposes one; otherwise
    # fall back to converting the array-like first.
    if hasattr(a, 'shape'):
        return a.shape
    return asarray(a).shape
def compress(condition, a, axis=None, out=None):
    """
    Return selected slices of an array along given axis.

    When working along a given axis, a slice along that axis is
    returned in `output` for each index where `condition` evaluates to
    True.  When working on a 1-D array, `compress` is equivalent to
    `extract`.

    Parameters
    ----------
    condition : 1-D array of bools
        Array that selects which entries to return.  If len(condition)
        is less than the size of `a` along the given axis, then output
        is truncated to the length of the condition array.
    a : array_like
        Array from which to extract a part.
    axis : int, optional
        Axis along which to take slices.  If None (default), work on
        the flattened array.
    out : ndarray, optional
        Output array.  Its type is preserved and it must be of the
        right shape to hold the output.

    Returns
    -------
    compressed_array : ndarray
        A copy of `a` without the slices along axis for which
        `condition` is false.

    See Also
    --------
    take, choose, diag, diagonal, select
    ndarray.compress : Equivalent method in ndarray
    np.extract : Equivalent method when working on 1-D arrays

    Examples
    --------
    >>> a = np.array([[1, 2], [3, 4], [5, 6]])
    >>> np.compress([0, 1], a, axis=0)
    array([[3, 4]])
    """
    method = getattr(a, 'compress', None)
    if method is None:
        # Array-like without a compress method: convert and retry.
        return _wrapit(a, 'compress', condition, axis, out)
    return method(condition, axis, out)
def clip(a, a_min, a_max, out=None):
    """
    Clip (limit) the values in an array.

    Given an interval, values outside the interval are clipped to the
    interval edges.  For example, if an interval of ``[0, 1]`` is
    specified, values smaller than 0 become 0, and values larger than 1
    become 1.

    Parameters
    ----------
    a : array_like
        Array containing elements to clip.
    a_min : scalar or array_like
        Minimum value.
    a_max : scalar or array_like
        Maximum value.  If `a_min` or `a_max` are array_like, then they
        will be broadcasted to the shape of `a`.
    out : ndarray, optional
        The results will be placed in this array.  It may be the input
        array for in-place clipping.  `out` must be of the right shape
        to hold the output.  Its type is preserved.

    Returns
    -------
    clipped_array : ndarray
        An array with the elements of `a`, but where values < `a_min`
        are replaced with `a_min`, and those > `a_max` with `a_max`.

    Examples
    --------
    >>> np.clip(np.arange(10), 1, 8)
    array([1, 1, 2, 3, 4, 5, 6, 7, 8, 8])
    """
    method = getattr(a, 'clip', None)
    if method is None:
        # Array-like without a clip method: convert and retry.
        return _wrapit(a, 'clip', a_min, a_max, out)
    return method(a_min, a_max, out)
def sum(a, axis=None, dtype=None, out=None, keepdims=False):
    """
    Sum of array elements over a given axis.

    Parameters
    ----------
    a : array_like
        Elements to sum.
    axis : None or int or tuple of ints, optional
        Axis or axes along which a sum is performed.  The default
        (``axis=None``) is to sum over all the dimensions of the input
        array.  `axis` may be negative, in which case it counts from
        the last to the first axis.  A tuple of ints sums over
        multiple axes.
    dtype : dtype, optional
        The type of the returned array and of the accumulator in which
        the elements are summed.  By default, the dtype of `a` is used,
        except that integers with less precision than the default
        platform integer are promoted to the platform integer.
    out : ndarray, optional
        Array into which the output is placed.  By default, a new
        array is created.  If given, it must have the appropriate
        shape; its type is preserved.
    keepdims : bool, optional
        If this is set to True, the axes which are reduced are left in
        the result as dimensions with size one, so the result
        broadcasts correctly against the original array.

    Returns
    -------
    sum_along_axis : ndarray
        An array with the same shape as `a`, with the specified axis
        removed.  If `a` is a 0-d array, or if `axis` is None, a scalar
        is returned.  If an output array is specified, a reference to
        `out` is returned.

    See Also
    --------
    ndarray.sum : Equivalent method.
    cumsum : Cumulative sum of array elements.
    trapz : Integration of array values using the composite
        trapezoidal rule.
    mean, average

    Notes
    -----
    Arithmetic is modular when using integer types, and no error is
    raised on overflow.  The sum of an empty array is the neutral
    element 0.

    Examples
    --------
    >>> np.sum([0.5, 1.5])
    2.0
    >>> np.sum([[0, 1], [0, 5]], axis=0)
    array([0, 6])
    """
    if isinstance(a, _gentype):
        # Generators keep the historical builtin-style summation
        # (presumably _sum_ is the builtin sum — behavior unchanged).
        res = _sum_(a)
        if out is None:
            return res
        out[...] = res
        return out
    if type(a) is mu.ndarray:
        # Exact ndarray: go straight to the C-backed reduction.
        return _methods._sum(a, axis=axis, dtype=dtype,
                             out=out, keepdims=keepdims)
    try:
        sum = a.sum
    except AttributeError:
        return _methods._sum(a, axis=axis, dtype=dtype,
                             out=out, keepdims=keepdims)
    # NOTE: keepdims is dropped here — third-party .sum implementations
    # may not accept it (same caveat as the original code).
    return sum(axis=axis, dtype=dtype, out=out)
def product(a, axis=None, dtype=None, out=None, keepdims=False):
    """
    Return the product of array elements over a given axis.
    See Also
    --------
    prod : equivalent function; see for details.
    """
    # Thin alias kept for backward compatibility: reduce with the
    # multiply ufunc directly; `prod` is the preferred public spelling.
    return um.multiply.reduce(a, axis=axis, dtype=dtype,
                            out=out, keepdims=keepdims)
def sometrue(a, axis=None, out=None, keepdims=False):
    """
    Check whether some values are true.

    Refer to `any` for full documentation.

    See Also
    --------
    any : equivalent function
    """
    arr = asanyarray(a)
    kwargs = {'axis': axis, 'out': out}
    try:
        return arr.any(keepdims=keepdims, **kwargs)
    except TypeError:
        # Subclasses overriding any() may not support keepdims yet.
        return arr.any(**kwargs)
def alltrue(a, axis=None, out=None, keepdims=False):
    """
    Check if all elements of input array are true.

    See Also
    --------
    numpy.all : Equivalent function; see for details.
    """
    arr = asanyarray(a)
    kwargs = {'axis': axis, 'out': out}
    try:
        return arr.all(keepdims=keepdims, **kwargs)
    except TypeError:
        # Subclasses overriding all() may not support keepdims yet.
        return arr.all(**kwargs)
def any(a, axis=None, out=None, keepdims=False):
"""
Test whether any array element along a given axis evaluates to True.
Returns single boolean unless `axis` is not ``None``
Parameters
----------
a : array_like
Input array or object that can be converted to an array.
axis : None or int or tuple of ints, optional
Axis or axes along which a logical OR reduction is performed.
The default (`axis` = `None`) is to perform a logical OR over all
the dimensions of the input array. `axis` may be negative, in
which case it counts from the last to the first axis.
.. versionadded:: 1.7.0
If this is a tuple of ints, a reduction is performed on multiple
axes, instead of a single axis or all the axes as before.
out : ndarray, optional
Alternate output array in which to place the result. It must have
the same shape as the expected output and its type is preserved
(e.g., if it is of type float, then it will remain so, returning
1.0 for True and 0.0 for False, regardless of the type of `a`).
See `doc.ufuncs` (Section "Output arguments") for details.
keepdims : bool, optional
If this is set to True, the axes which are reduced are left
in the result as dimensions with size one. With this option,
the result will broadcast correctly against the original `arr`.
Returns
-------
any : bool or ndarray
A new boolean or `ndarray` is returned unless `out` is specified,
in which case a reference to `out` is returned.
See Also
--------
ndarray.any : equivalent method
all : Test whether all elements along a given axis evaluate to True.
Notes
-----
Not a Number (NaN), positive infinity and negative infinity evaluate
to `True` because these are not equal to zero.
Examples
--------
>>> np.any([[True, False], [True, True]])
True
>>> np.any([[True, False], [False, False]], axis=0)
array([ True, False], dtype=bool)
>>> np.any([-1, 0, 5])
True
>>> np.any(np.nan)
True
>>> o=np.array([False])
>>> z=np.any([-1, 4, 5], out=o)
>>> z, o
(array([ True], dtype=bool), array([ True], dtype=bool))
>>> # Check now that z is a reference to o
>>> z is o
True
>>> id(z), id(o) # identity of z and o # doctest: +SKIP
(191614240, 191614240)
"""
arr = asanyarray(a)
try:
return arr.any(axis=axis, out=out, keepdims=keepdims)
except TypeError:
return arr.any(axis=axis, out=out)
def all(a, axis=None, out=None, keepdims=False):
    """
    Test whether all array elements along a given axis evaluate to True.

    Parameters
    ----------
    a : array_like
        Input array or object that can be converted to an array.
    axis : None or int or tuple of ints, optional
        Axis or axes along which a logical AND reduction is performed.
        The default (``axis=None``) is to perform a logical AND over
        all the dimensions of the input array.  `axis` may be negative,
        in which case it counts from the last to the first axis.  A
        tuple of ints reduces over multiple axes.
    out : ndarray, optional
        Alternate output array in which to place the result.  It must
        have the same shape as the expected output and its type is
        preserved.
    keepdims : bool, optional
        If this is set to True, the axes which are reduced are left in
        the result as dimensions with size one, so the result
        broadcasts correctly against the original array.

    Returns
    -------
    all : ndarray, bool
        A new boolean or array is returned unless `out` is specified,
        in which case a reference to `out` is returned.

    See Also
    --------
    ndarray.all : equivalent method
    any : Test whether any element along a given axis evaluates to True.

    Notes
    -----
    Not a Number (NaN), positive infinity and negative infinity
    evaluate to `True` because these are not equal to zero.

    Examples
    --------
    >>> np.all([[True, False], [True, True]])
    False
    >>> np.all([[True, False], [True, True]], axis=0)
    array([ True, False], dtype=bool)
    """
    arr = asanyarray(a)
    kwargs = {'axis': axis, 'out': out}
    try:
        return arr.all(keepdims=keepdims, **kwargs)
    except TypeError:
        # Subclasses overriding all() may not support keepdims yet.
        return arr.all(**kwargs)
def cumsum(a, axis=None, dtype=None, out=None):
    """
    Return the cumulative sum of the elements along a given axis.

    Parameters
    ----------
    a : array_like
        Input array.
    axis : int, optional
        Axis along which the cumulative sum is computed.  The default
        (None) is to compute the cumsum over the flattened array.
    dtype : dtype, optional
        Type of the returned array and of the accumulator in which the
        elements are summed.  If `dtype` is not specified, it defaults
        to the dtype of `a`, unless `a` has an integer dtype with a
        precision less than that of the default platform integer.  In
        that case, the default platform integer is used.
    out : ndarray, optional
        Alternative output array in which to place the result.  It
        must have the same shape and buffer length as the expected
        output but the type will be cast if necessary.

    Returns
    -------
    cumsum_along_axis : ndarray
        A new array holding the result is returned unless `out` is
        specified, in which case a reference to `out` is returned.  The
        result has the same size as `a`, and the same shape as `a` if
        `axis` is not None or `a` is a 1-d array.

    See Also
    --------
    sum : Sum array elements.
    trapz : Integration of array values using the composite
        trapezoidal rule.
    diff : Calculate the n-th order discrete difference along
        given axis.

    Notes
    -----
    Arithmetic is modular when using integer types, and no error is
    raised on overflow.

    Examples
    --------
    >>> np.cumsum(np.array([[1, 2, 3], [4, 5, 6]]))
    array([ 1,  3,  6, 10, 15, 21])
    """
    method = getattr(a, 'cumsum', None)
    if method is None:
        # Array-like without a cumsum method: convert and retry.
        return _wrapit(a, 'cumsum', axis, dtype, out)
    return method(axis, dtype, out)
def cumproduct(a, axis=None, dtype=None, out=None):
    """
    Return the cumulative product over the given axis.

    See Also
    --------
    cumprod : equivalent function; see for details.
    """
    method = getattr(a, 'cumprod', None)
    if method is None:
        # Array-like without a cumprod method: convert and retry.
        return _wrapit(a, 'cumprod', axis, dtype, out)
    return method(axis, dtype, out)
def ptp(a, axis=None, out=None):
    """
    Range of values (maximum - minimum) along an axis.

    The name of the function comes from the acronym for 'peak to peak'.

    Parameters
    ----------
    a : array_like
        Input values.
    axis : int, optional
        Axis along which to find the peaks.  By default, flatten the
        array.
    out : array_like
        Alternative output array in which to place the result.  It
        must have the same shape and buffer length as the expected
        output, but the type of the output values will be cast if
        necessary.

    Returns
    -------
    ptp : ndarray
        A new array holding the result, unless `out` was specified, in
        which case a reference to `out` is returned.

    Examples
    --------
    >>> x = np.arange(4).reshape((2, 2))
    >>> np.ptp(x, axis=0)
    array([2, 2])
    """
    method = getattr(a, 'ptp', None)
    if method is None:
        # Array-like without a ptp method: convert and retry.
        return _wrapit(a, 'ptp', axis, out)
    return method(axis, out)
def amax(a, axis=None, out=None, keepdims=False):
    """
    Return the maximum of an array or maximum along an axis.

    Parameters
    ----------
    a : array_like
        Input data.
    axis : None or int or tuple of ints, optional
        Axis or axes along which to operate.  By default, flattened
        input is used.  A tuple of ints selects the maximum over
        multiple axes.
    out : ndarray, optional
        Alternative output array in which to place the result.  Must
        be of the same shape and buffer length as the expected output.
    keepdims : bool, optional
        If this is set to True, the axes which are reduced are left in
        the result as dimensions with size one, so the result
        broadcasts correctly against the original array.

    Returns
    -------
    amax : ndarray or scalar
        Maximum of `a`.  If `axis` is None, the result is a scalar
        value.  If `axis` is given, the result is an array of
        dimension ``a.ndim - 1``.

    See Also
    --------
    amin :
        The minimum value of an array along a given axis, propagating
        any NaNs.
    nanmax :
        The maximum value of an array along a given axis, ignoring any
        NaNs.
    maximum :
        Element-wise maximum of two arrays, propagating any NaNs.
    fmax :
        Element-wise maximum of two arrays, ignoring any NaNs.
    argmax :
        Return the indices of the maximum values.
    nanmin, minimum, fmin

    Notes
    -----
    NaN values are propagated: if at least one item is NaN, the
    corresponding max value will be NaN as well.  To ignore NaN values
    (MATLAB behavior), please use nanmax.

    Don't use `amax` for element-wise comparison of 2 arrays; when
    ``a.shape[0]`` is 2, ``maximum(a[0], a[1])`` is faster than
    ``amax(a, axis=0)``.

    Examples
    --------
    >>> a = np.arange(4).reshape((2, 2))
    >>> np.amax(a)
    3
    >>> np.amax(a, axis=0)
    array([2, 3])
    """
    if type(a) is mu.ndarray:
        # Exact ndarray: use the C-backed reduction directly.
        return _methods._amax(a, axis=axis,
                              out=out, keepdims=keepdims)
    try:
        method = a.max
    except AttributeError:
        return _methods._amax(a, axis=axis,
                              out=out, keepdims=keepdims)
    # NOTE: keepdims is dropped here — third-party .max implementations
    # may not accept it (same caveat as the original code).
    return method(axis=axis, out=out)
def amin(a, axis=None, out=None, keepdims=False):
    """
    Return the minimum of an array or minimum along an axis.

    Parameters
    ----------
    a : array_like
        Input data.
    axis : None or int or tuple of ints, optional
        Axis or axes along which to operate; the flattened input is used
        by default.  A tuple of ints selects the minimum over several axes.
    out : ndarray, optional
        Alternative output array; must match the expected output's shape
        and buffer length.
    keepdims : bool, optional
        If True, the reduced axes are kept as size-one dimensions so the
        result broadcasts against the original array.

    Returns
    -------
    amin : ndarray or scalar
        Minimum of `a`.  A scalar when `axis` is None, otherwise an array
        of dimension ``a.ndim - 1``.

    See Also
    --------
    amax, nanmin, minimum, fmin, argmin

    Notes
    -----
    NaN values propagate: if at least one element is NaN, the
    corresponding minimum is NaN as well (use `nanmin` to ignore NaNs).
    Don't use `amin` for element-wise comparison of two arrays;
    ``minimum(a[0], a[1])`` is faster than ``amin(a, axis=0)``.
    """
    if type(a) is mu.ndarray:
        # Fast path: plain ndarrays go straight to the reduction helper.
        return _methods._amin(a, axis=axis, out=out, keepdims=keepdims)
    try:
        bound_min = a.min
    except AttributeError:
        # Array-like without a .min method: coerce via the helper.
        return _methods._amin(a, axis=axis, out=out, keepdims=keepdims)
    # Subclass / duck-typed object: defer to its own method.
    # NOTE: keepdims is intentionally not forwarded here.
    return bound_min(axis=axis, out=out)
def alen(a):
    """
    Return the length of the first dimension of the input array.

    Parameters
    ----------
    a : array_like
        Input array.

    Returns
    -------
    alen : int
        Length of the first dimension of `a`.

    See Also
    --------
    shape, size
    """
    # EAFP: most inputs support len() directly; anything else (e.g. a
    # scalar) is promoted to an array of at least one dimension first.
    try:
        n = len(a)
    except TypeError:
        n = len(array(a, ndmin=1))
    return n
def prod(a, axis=None, dtype=None, out=None, keepdims=False):
    """
    Return the product of array elements over a given axis.

    Parameters
    ----------
    a : array_like
        Input data.
    axis : None or int or tuple of ints, optional
        Axis or axes along which a product is performed.  The default
        (``axis=None``) multiplies over all dimensions; a negative axis
        counts from the last dimension, and a tuple of ints selects
        several axes at once.
    dtype : data-type, optional
        Type of the returned array and of the accumulator.  Defaults to
        the platform integer for integer inputs (keeping unsignedness)
        and to the dtype of `a` otherwise.
    out : ndarray, optional
        Alternative output array with the expected output's shape; output
        values are cast if necessary.
    keepdims : bool, optional
        If True, the reduced axes remain as size-one dimensions so the
        result broadcasts against the original array.

    Returns
    -------
    product_along_axis : ndarray, see `dtype` parameter above.
        Array shaped like `a` with the given axis removed; a reference to
        `out` when it is supplied.

    See Also
    --------
    ndarray.prod : equivalent method

    Notes
    -----
    Arithmetic is modular for integer types; no error is raised on
    overflow.  The product of an empty array is the neutral element 1.
    """
    if type(a) is mu.ndarray:
        return _methods._prod(a, axis=axis, dtype=dtype,
                              out=out, keepdims=keepdims)
    try:
        bound_prod = a.prod
    except AttributeError:
        return _methods._prod(a, axis=axis, dtype=dtype,
                              out=out, keepdims=keepdims)
    # Duck-typed object: use its own prod (keepdims not forwarded).
    return bound_prod(axis=axis, dtype=dtype, out=out)
def cumprod(a, axis=None, dtype=None, out=None):
    """
    Return the cumulative product of elements along a given axis.

    Parameters
    ----------
    a : array_like
        Input array.
    axis : int, optional
        Axis along which the cumulative product is computed; the input is
        flattened by default.
    dtype : dtype, optional
        Type of the returned array and of the accumulator.  Defaults to
        the dtype of `a`, promoted to the platform integer when `a` has a
        lower-precision integer dtype.
    out : ndarray, optional
        Alternative output array with the expected output's shape and
        buffer length; resulting values are cast if necessary.

    Returns
    -------
    cumprod : ndarray
        A new array holding the result, or a reference to `out` when it
        is supplied.

    Notes
    -----
    Arithmetic is modular for integer types; no error is raised on
    overflow.
    """
    method = getattr(a, 'cumprod', None)
    if method is None:
        # Not an array-like with its own cumprod: wrap and retry.
        return _wrapit(a, 'cumprod', axis, dtype, out)
    return method(axis, dtype, out)
def ndim(a):
    """
    Return the number of dimensions of an array.

    Parameters
    ----------
    a : array_like
        Input array.  If it is not already an ndarray, a conversion is
        attempted.

    Returns
    -------
    number_of_dimensions : int
        The number of dimensions in `a`.  Scalars are zero-dimensional.

    See Also
    --------
    ndarray.ndim : equivalent method
    shape, ndarray.shape : dimensions of array
    """
    _missing = object()
    nd = getattr(a, 'ndim', _missing)
    if nd is _missing:
        # Not array-like enough to carry .ndim; coerce first.
        return asarray(a).ndim
    return nd
def rank(a):
    """
    Return the number of dimensions of an array (deprecated).

    .. deprecated:: 1.9
        Easily confused with `numpy.linalg.matrix_rank`; use the `ndim`
        attribute or function instead.

    Parameters
    ----------
    a : array_like
        Array whose number of dimensions is desired.  If `a` is not an
        array, a conversion is attempted.  Scalars are zero dimensional.

    Returns
    -------
    number_of_dimensions : int
        The number of dimensions in the array.

    See Also
    --------
    ndim : equivalent function
    ndarray.ndim : equivalent property
    """
    # 2014-04-12, 1.9
    warnings.warn(
        "`rank` is deprecated; use the `ndim` attribute or function instead. "
        "To find the rank of a matrix see `numpy.linalg.matrix_rank`.",
        VisibleDeprecationWarning)
    _missing = object()
    nd = getattr(a, 'ndim', _missing)
    if nd is _missing:
        return asarray(a).ndim
    return nd
def size(a, axis=None):
    """
    Return the number of elements along a given axis.

    Parameters
    ----------
    a : array_like
        Input data.
    axis : int, optional
        Axis along which the elements are counted; by default the total
        element count is returned.

    Returns
    -------
    element_count : int
        Number of elements along the specified axis.

    See Also
    --------
    shape, ndarray.shape, ndarray.size
    """
    try:
        return a.size if axis is None else a.shape[axis]
    except AttributeError:
        # Plain sequences: coerce once, then take the same measurement.
        arr = asarray(a)
        return arr.size if axis is None else arr.shape[axis]
def around(a, decimals=0, out=None):
    """
    Evenly round to the given number of decimals.

    Parameters
    ----------
    a : array_like
        Input data.
    decimals : int, optional
        Number of decimal places to round to (default: 0).  A negative
        value rounds to positions left of the decimal point.
    out : ndarray, optional
        Alternative output array with the expected output's shape; output
        values are cast if necessary.

    Returns
    -------
    rounded_array : ndarray
        Array of the same type as `a` containing the rounded values; a
        new array unless `out` was given, with a reference to the result
        returned either way.  Real and imaginary parts of complex numbers
        are rounded separately, and rounding a float yields a float.

    See Also
    --------
    ndarray.round : equivalent method
    ceil, fix, floor, rint, trunc

    Notes
    -----
    Values exactly halfway between rounded decimal values round to the
    nearest even value: 1.5 and 2.5 both round to 2.0, -0.5 and 0.5 to
    0.0.  Results may also be surprising due to the IEEE-754 binary
    representation of decimal fractions.
    """
    rounder = getattr(a, 'round', None)
    if rounder is None:
        # No .round method: coerce through the generic wrapper.
        return _wrapit(a, 'round', decimals, out)
    return rounder(decimals, out)
def round_(a, decimals=0, out=None):
    """
    Round an array to the given number of decimals.

    Refer to `around` for full documentation.

    See Also
    --------
    around : equivalent function
    """
    rounder = getattr(a, 'round', None)
    if rounder is not None:
        return rounder(decimals, out)
    return _wrapit(a, 'round', decimals, out)
def mean(a, axis=None, dtype=None, out=None, keepdims=False):
    """
    Compute the arithmetic mean along the specified axis.

    Parameters
    ----------
    a : array_like
        Numbers whose mean is desired; converted to an array if needed.
    axis : None or int or tuple of ints, optional
        Axis or axes along which the means are computed; by default the
        flattened array is used.
    dtype : data-type, optional
        Type used for the computation.  Defaults to float64 for integer
        inputs and to the input dtype for floating point inputs.
    out : ndarray, optional
        Alternate output array with the expected output's shape; the type
        is cast if necessary.
    keepdims : bool, optional
        If True, the reduced axes are kept as size-one dimensions so the
        result broadcasts against the original array.

    Returns
    -------
    m : ndarray, see dtype parameter above
        A new array of mean values, or a reference to `out` when given.

    See Also
    --------
    average : Weighted average
    std, var, nanmean, nanstd, nanvar

    Notes
    -----
    For floating-point input the mean is computed in the input's own
    precision, which can be inaccurate for float32; passing a
    higher-precision `dtype` alleviates this.
    """
    if type(a) is not mu.ndarray:
        # Subclasses and duck-typed objects get first crack via their own
        # method; keepdims is not forwarded in that case.
        try:
            return a.mean(axis=axis, dtype=dtype, out=out)
        except AttributeError:
            pass
    return _methods._mean(a, axis=axis, dtype=dtype,
                          out=out, keepdims=keepdims)
def std(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
    """
    Compute the standard deviation along the specified axis.

    Parameters
    ----------
    a : array_like
        Values whose standard deviation is computed.
    axis : None or int or tuple of ints, optional
        Axis or axes to reduce over; the flattened array by default.
    dtype : dtype, optional
        Computation type; float64 for integer inputs by default, the
        array's own type for float inputs.
    out : ndarray, optional
        Alternative output array with the expected output's shape; the
        calculated values are cast if necessary.
    ddof : int, optional
        Delta degrees of freedom: the divisor is ``N - ddof`` where N is
        the number of elements.  Zero by default.
    keepdims : bool, optional
        If True, keep the reduced axes as size-one dimensions so the
        result broadcasts against the original array.

    Returns
    -------
    standard_deviation : ndarray, see dtype parameter above.
        New array of standard deviations, or a reference to `out`.

    See Also
    --------
    var, mean, nanmean, nanstd, nanvar

    Notes
    -----
    ``std = sqrt(mean(abs(x - x.mean())**2))``.  With ``ddof=1`` the
    underlying variance estimate is unbiased, though its square root is
    not an unbiased standard-deviation estimate.  For complex input the
    absolute value is taken before squaring, so the result is always real
    and nonnegative.  Float32 input can give inaccurate results; pass a
    higher-precision `dtype` to compensate.
    """
    if type(a) is mu.ndarray:
        return _methods._std(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
                             keepdims=keepdims)
    try:
        # Subclass / duck-typed object: use its own method
        # (keepdims is not forwarded there).
        return a.std(axis=axis, dtype=dtype, out=out, ddof=ddof)
    except AttributeError:
        return _methods._std(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
                             keepdims=keepdims)
def var(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
    """
    Compute the variance along the specified axis.

    Parameters
    ----------
    a : array_like
        Numbers whose variance is desired; converted to an array if
        needed.
    axis : None or int or tuple of ints, optional
        Axis or axes to reduce over; the flattened array by default.
    dtype : data-type, optional
        Computation type; defaults to `float32` for integer-typed arrays
        and to the array's own type for float arrays.
    out : ndarray, optional
        Alternate output array with the expected output's shape; the type
        is cast if necessary.
    ddof : int, optional
        "Delta Degrees of Freedom": the divisor is ``N - ddof`` where N
        is the number of elements.  Zero by default.
    keepdims : bool, optional
        If True, keep the reduced axes as size-one dimensions so the
        result broadcasts against the original array.

    Returns
    -------
    variance : ndarray, see dtype parameter above
        New array containing the variance, or a reference to `out`.

    See Also
    --------
    std, mean, nanmean, nanstd, nanvar

    Notes
    -----
    ``var = mean(abs(x - x.mean())**2)`` with divisor ``N - ddof``;
    ``ddof=1`` gives an unbiased estimator for an infinite population,
    ``ddof=0`` the maximum-likelihood estimate for normal variables.
    For complex numbers the absolute value is taken before squaring, so
    the result is always real and nonnegative.  Float32 input can give
    inaccurate results; pass a higher-precision `dtype` to compensate.
    """
    if type(a) is mu.ndarray:
        return _methods._var(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
                             keepdims=keepdims)
    try:
        # Subclass / duck-typed object: use its own method
        # (keepdims is not forwarded there).
        return a.var(axis=axis, dtype=dtype, out=out, ddof=ddof)
    except AttributeError:
        return _methods._var(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
                             keepdims=keepdims)
|
bsd-2-clause
|
PrincetonUniversity/AdvNet-OF_Scripts
|
evaluation/switch/flowmod_test/pox/pox/samples/old/l2_modification_proactive_total.py
|
1
|
6001
|
# Copyright 2011 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
An L2 learning switch.
It is derived from one written live for an SDN crash course.
It is somewhat similar to NOX's pyswitch in that it installs
exact-match rules for each flow.
"""
from __future__ import division
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.util import dpid_to_str
from pox.lib.util import str_to_bool
import sys, os, commands, time
# Component-local logger.
log = core.getLogger()
# ------------------------- flow-mod rate settings -------------------------
# Target flow modifications per second, and the matching per-mod delay
# (true division courtesy of `from __future__ import division` above).
flow_rate = 10
interval = 1/flow_rate
print 'current flow modification rate is:', flow_rate
# We don't want to flood immediately when a switch connects.
# Can be overridden on the command line.
_flood_delay = 0
class LearningSwitch (object):
    """
    Flow-mod measurement harness dressed as an L2 switch component.

    Despite the original learning-switch description, this variant does
    not learn addresses: on construction it pre-installs 900 exact-match
    IP rules, starts a tcpdump capture via an external script, then sends
    512 OFPFC_MODIFY_STRICT flow mods while printing per-mod timestamps so
    the modification rate can be measured offline.
    """

    def __init__ (self, connection, transparent):
        # Switch we'll be adding L2 learning switch capabilities to
        self.connection = connection
        self.transparent = transparent

        # Our table (unused by this measurement variant)
        self.macToPort = {}

        # We want to hear PacketIn messages, so we listen
        # to the connection
        connection.addListeners(self)

        # We just use this to know when to log a helpful message
        self.hold_down_expired = _flood_delay == 0

        #log.debug("Initializing LearningSwitch, transparent=%s",
        #          str(self.transparent))

        # Phase 1: pre-install 900 exact-match rules for
        # 10.0.0.1 -> 192.168.{56 + k//256}.{k % 256} with no actions.
        for k in xrange(0,900):  # the number of rules to install
            msg = of.ofp_flow_mod()
            #msg.match = of.ofp_match.from_packet(packet, event.port)
            msg.priority = 42
            msg.match.dl_type = 0x800
            i = int(k / 256) + 56
            j = k % 256
            dst = '192.168.' + str(i) + '.' + str(j)
            msg.match.nw_src = '10.0.0.1'
            msg.match.nw_dst = dst
            print 'INFO',dst
            msg.idle_timeout = 0
            msg.hard_timeout = 0
            #msg.actions.append(of.ofp_action_output(port = port))
            #msg.data = event.ofp # 6a
            self.connection.send(msg)
        time.sleep(0.1)

        # Phase 2: start the packet capture (external helper script).
        print 'INFO: starting tcpdump...'
        os.system('sudo bash start_tcpdump.sh &')
        time.sleep(10) # wait for tcpdump to come up

        # Phase 3: modify the first 512 rules, adding an output action on
        # port 2, and print a timestamp per modification ('DATA:' lines
        # are parsed offline to compute the achieved rate).
        y = 0
        print 'INFO: starting sending flow mod...'
        for k in xrange(0,512):  # the number of rules to modify
            y = y + 1
            msg = of.ofp_flow_mod()
            #msg.match = of.ofp_match.from_packet(packet, event.port)
            msg.priority = 42
            msg.command = of.OFPFC_MODIFY_STRICT
            msg.match.dl_type = 0x800
            i = int(k / 256) + 56
            j = k % 256
            dst = '192.168.' + str(i) + '.' + str(j)
            msg.match.nw_src = '10.0.0.1'
            msg.match.nw_dst = dst
            msg.idle_timeout = 0
            msg.hard_timeout = 0
            msg.actions.append(of.ofp_action_output(port = 2))
            #msg.data = event.ofp # 6a
            self.connection.send(msg)
            print 'DATA: 10.0.0.1', dst, '%f' %time.time()
            #time.sleep(interval)
        print 'INFO:rules modified', y

        # Phase 4: give the switch time to apply everything, then stop
        # the capture.
        time.sleep(5) # wait for all the rules modified
        os.system("sudo bash close_tcpdump.sh")
        print 'INFO: flow mod measure finished...'

    def _handle_PacketIn (self, event):
        """
        Handle packet_in messages from the switch; intentionally a no-op
        in this measurement variant (the parse result is discarded).
        """
        packet = event.parsed
        #print 'PACKET_IN:', event.port, packet.next.dstip,'%f' % time.time()
class l2_learning (object):
    """
    Component that waits for OpenFlow switches to connect and attaches a
    LearningSwitch instance to each new connection.
    """

    def __init__ (self, transparent):
        # Remember the transparency flag, then subscribe to openflow
        # events (ConnectionUp in particular).
        self.transparent = transparent
        core.openflow.addListeners(self)

    def _handle_ConnectionUp (self, event):
        # A switch connected: wire a LearningSwitch brain onto it.
        log.debug("Connection %s" % (event.connection,))
        LearningSwitch(event.connection, self.transparent)
def launch (transparent=False, hold_down=_flood_delay):
    """
    Starts an L2 learning switch.

    transparent -- forwarded to l2_learning after str_to_bool conversion.
    hold_down   -- flood hold-down; must parse (base 10) as a
                   non-negative integer.

    Raises RuntimeError when hold_down is not a non-negative number.
    """
    global _flood_delay
    try:
        value = int(str(hold_down), 10)
    except ValueError:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only a failed int conversion belongs here.
        raise RuntimeError("Expected hold-down to be a number")
    if value < 0:
        # Explicit check instead of `assert`, which vanishes under -O.
        raise RuntimeError("Expected hold-down to be a number")
    _flood_delay = value
    core.registerNew(l2_learning, str_to_bool(transparent))
|
apache-2.0
|
qateam123/eq
|
tests/app/validation/test_textarea_type_check.py
|
1
|
1095
|
import unittest
from app.validation.abstract_validator import AbstractValidator
from app.validation.textarea_type_check import TextAreaTypeCheck
class TextAreaTest(unittest.TestCase):
    """Unit tests for the TextAreaTypeCheck validator."""

    def _assert_not_string(self, outcome):
        # Shared assertions for inputs rejected as not-a-string.
        self.assertFalse(outcome.is_valid)
        self.assertEqual(len(outcome.errors), 1)
        self.assertEqual(AbstractValidator.NOT_STRING, outcome.errors[0])

    def test_textarea_validator(self):
        validator = TextAreaTypeCheck()

        # An integer is rejected as not-a-string.
        self._assert_not_string(validator.validate(1))

        # None is rejected as not-a-string.
        self._assert_not_string(validator.validate(None))

        # A whitespace-only value is invalid.
        self.assertFalse(validator.validate(' ').is_valid)

        # A normal string passes with no errors or warnings.
        outcome = validator.validate('string')
        self.assertTrue(outcome.is_valid)
        self.assertEqual(len(outcome.errors), 0)
        self.assertEqual(len(outcome.warnings), 0)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
mit
|
pbrunet/pythran
|
pythran/tests/test_import_all.py
|
2
|
1424
|
from test_env import TestEnv
from textwrap import dedent
import pythran
class TestImportAll(TestEnv):
    # Tests covering pythran's handling of `from <module> import *`.

    def test_import_all(self):
        # Plain star-import: cos must resolve from math.
        self.run_test("from math import *\ndef import_all(l): return cos(l)", 3.3, import_all=[float])

    def test_import_cmath_all(self):
        # Star-import from cmath rather than math.
        self.run_test("from cmath import *\ndef import_cmath_all(l): return cos(l)", 2.2, import_cmath_all=[float])

    def test_import_all_cos(self):
        # Star-import combined with an explicit import of the same name.
        self.run_test("from math import *\nfrom math import cos\ndef import_all_cos(l): return cos(l)", 1.1, import_all_cos=[float])

    def test_import_all_twice(self):
        # Repeating the same star-import must be harmless.
        self.run_test("from math import *\nfrom math import *\ndef import_all_twice(l): return cos(l)", 0.1, import_all_twice=[float])

    def test_import_same_name(self):
        # A user-defined function may shadow a star-imported name.
        self.run_test("from math import *\ndef cos(l): return 100", 0.1, cos=[float])

    def test_import_collections(self):
        """
        Check correct error is returned for incorrect module import.
        Check is done for module as .py file.
        """
        code = """
            import collections
            def unsupported_module():
                return collections.Counter()"""
        with self.assertRaises(pythran.syntax.PythranSyntaxError) as ex:
            pythran.compile_pythrancode("flamby", dedent(code))
        self.assertEqual(ex.exception.message,
                         "Unpythranizable module: collections")
|
bsd-3-clause
|
sudkannan/xen-hv
|
tools/python/xen/remus/vm.py
|
43
|
4590
|
#!/usr/bin/env python
import xmlrpclib
from xen.xend.XendClient import server
from xen.xend import sxp, osdep
from xen.lowlevel.xc import xc
import vif
import blkdev
# need a nicer way to load disk drivers
import vbd
# Raised when a domain cannot be looked up via xend (see VM.__init__).
class VMException(Exception): pass
class VM(object):
    """Representation of a virtual machine known to xend."""

    def __init__(self, domid=None, dominfo=None):
        # Defaults for a VM we know nothing about yet.
        self.domid = -1
        self.name = 'unknown'
        self.dom = {}
        self.disks = []
        self.vifs = []
        self.dominfo = dominfo

        if domid:
            # Ask xend for the full record of this domain.
            try:
                self.dominfo = server.xend.domain(domid, 'all')
            except xmlrpclib.Fault:
                raise VMException('error looking up domain %s' % str(domid))

        if self.dominfo:
            self.loaddominfo()

    def loaddominfo(self):
        # Parse the sexpression record and derive devices from it.
        parsed = parsedominfo(self.dominfo)
        self.dom = parsed
        self.domid = parsed['domid']
        self.name = parsed['name']
        self.disks = getdisks(parsed)
        self.vifs = getvifs(parsed)

    def __str__(self):
        vifstr = ', '.join(str(v) for v in self.vifs)
        diskstr = ', '.join(str(d) for d in self.disks)
        return 'VM %d (%s), vifs: [%s], disks: [%s]' % (
            self.domid, self.name, vifstr, diskstr)
def parsedominfo(dominfo):
    """Parse a dominfo sexpression (nested python lists) into a dict tree.

    The first element of `dominfo` is the tag and is skipped; every
    following ['name', value...] element becomes a dict entry.
    """
    def s2d(s):
        # Fold a list of [name, value...] sexpr elements into a dict.
        r = {}
        for elem in s:
            if len(elem) == 0:
                continue
            name = elem[0]
            if len(elem) == 1:
                # Bare tag with no value.
                val = None
            else:
                val = elem[1]
            if isinstance(val, list):
                # Nested sexpression: recurse over the remaining elements.
                val = s2d(elem[1:])
            if isinstance(name, list):
                # hack for ['cpus', [[1]]] -- the "name" slot itself holds
                # a list, so parse the element as its own sexpression.
                return s2d(elem)
            if name in r:
                # Duplicate key: merge dict values, promoting repeated
                # sub-keys to lists.  NOTE(review): assumes the duplicate
                # value is a dict (val.iteritems()) -- confirm this holds
                # for every device type xend emits.
                for k, v in val.iteritems():
                    if k in r[name]:
                        if not isinstance(r[name][k], list):
                            r[name][k] = [r[name][k]]
                        r[name][k].append(v)
                    else:
                        r[name][k] = v
            else:
                r[name] = val
        return r
    return s2d(dominfo[1:])
def domtosxpr(dom):
    """Convert a dominfo dict (as produced by parsedominfo) back into a
    python sxpr list rooted at the 'domain' tag."""
    def d2s(d):
        # Inverse of parsedominfo's s2d: each dict entry becomes a
        # [key, value...] list element.
        r = []
        for k, v in d.iteritems():
            elem = [k]
            if isinstance(v, dict):
                # Nested dict: flatten recursively into child elements.
                elem.extend(d2s(v))
            else:
                if v is None:
                    # None marks a bare tag; emit an empty string value.
                    v = ''
                elem.append(v)
            r.append(elem)
        return r
    sxpr = ['domain']
    sxpr.extend(d2s(dom))
    return sxpr
def strtosxpr(s):
    """Parse a string into a python sxpr using xen's sxp parser."""
    parser = sxp.Parser()
    parser.input(s)
    return parser.get_val()
def sxprtostr(sxpr):
    """Serialize an sxpr (nested lists) to its string form via xen's sxp."""
    return sxp.to_string(sxpr)
def getvifs(dom):
    """Return vif objects for the network devices of dom."""
    entries = dom['device'].get('vif', [])
    if type(entries) != list:
        # A single vif record arrives unwrapped; normalize to a list.
        entries = [entries]
    # Each vif is numbered by its position within the domain.
    return [vif.parse(entry, dom['domid'], idx)
            for idx, entry in enumerate(entries)]
def getdisks(dom):
    """Return block device objects for the disk devices in *dom*.

    Collects plain vbd devices plus tapdisk1 ('tap') and tapdisk2
    ('tap2') entries and parses each with blkdev.parse.
    """
    def _aslist(v):
        # a single device comes out of parsedominfo as a bare dict
        return v if isinstance(v, list) else [v]

    # copy so the extends below cannot mutate the list stored inside
    # dom['device'] (the original code extended that list in place)
    disks = list(_aslist(dom['device'].get('vbd', [])))
    # tapdisk1 devices
    disks.extend(_aslist(dom['device'].get('tap', [])))
    # tapdisk2 devices
    disks.extend(_aslist(dom['device'].get('tap2', [])))
    return [blkdev.parse(disk) for disk in disks]
def fromxend(domid):
    """Create a VM object by querying xend for domain *domid*."""
    return VM(domid)
def getshadowmem(vm):
    """Balloon down domain0 to create free memory for shadow paging.

    Computes the shadow-memory shortfall for *vm* (all sizes in kB) and,
    if positive, lowers dom0's memory target accordingly.
    """
    maxmem = int(vm.dom['maxmem'])
    shadow = int(vm.dom['shadow_memory'])
    vcpus = int(vm.dom['vcpus'])
    # from XendDomainInfo.checkLiveMigrateMemory:
    # 1MB per vcpu plus 4Kib/Mib of RAM. This is higher than
    # the minimum that Xen would allocate if no value were given.
    shadowneeded = vcpus * 1024 + maxmem * 4 - shadow * 1024
    physinfo = xc().physinfo()
    freemem = int(physinfo['free_memory'])
    needed = shadowneeded - freemem
    if needed > 0:
        # function-call form of print is valid on both Python 2 and 3
        print("Freeing %d kB for shadow mode" % needed)
        dom0cur = osdep.lookup_balloon_stat('current')
        # target is in MB, not KB; floor division keeps the target an
        # integer under Python 3's true division as well
        target = (dom0cur - needed) // 1024
        server.xend.domain.setMemoryTarget(0, target)
|
gpl-2.0
|
AIML/scikit-learn
|
sklearn/utils/tests/test_validation.py
|
133
|
18339
|
"""Tests for input validation functions"""
import warnings
from tempfile import NamedTemporaryFile
from itertools import product
import numpy as np
from numpy.testing import assert_array_equal
import scipy.sparse as sp
from nose.tools import assert_raises, assert_true, assert_false, assert_equal
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_warns
from sklearn.utils import as_float_array, check_array, check_symmetric
from sklearn.utils import check_X_y
from sklearn.utils.mocking import MockDataFrame
from sklearn.utils.estimator_checks import NotAnArray
from sklearn.random_projection import sparse_random_matrix
from sklearn.linear_model import ARDRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
from sklearn.datasets import make_blobs
from sklearn.utils.validation import (
NotFittedError,
has_fit_parameter,
check_is_fitted,
check_consistent_length,
DataConversionWarning,
)
from sklearn.utils.testing import assert_raise_message
def test_as_float_array():
    """Check as_float_array conversions, copy semantics and ordering."""
    X = np.ones((3, 10), dtype=np.int32)
    X = X + np.arange(10, dtype=np.int32)
    # int32 input converts to float32
    X2 = as_float_array(X, copy=False)
    np.testing.assert_equal(X2.dtype, np.float32)
    # int64 input converts to float64
    X = X.astype(np.int64)
    X2 = as_float_array(X, copy=True)
    # the original array is not handed back when a conversion happened ...
    assert_true(as_float_array(X, False) is not X)
    # ... and the converted dtype is float64
    np.testing.assert_equal(X2.dtype, np.float64)
    # an array that is already float is passed through untouched
    X = np.ones((3, 2), dtype=np.float32)
    assert_true(as_float_array(X, copy=False) is X)
    # Fortran ordering survives the conversion
    X = np.asfortranarray(X)
    assert_true(np.isfortran(as_float_array(X, copy=True)))
    # copy=True must fully decouple the result from the input
    for mat in (np.matrix(np.arange(5)),
                sp.csc_matrix(np.arange(5)).toarray(),
                sparse_random_matrix(10, 10, density=0.10).toarray()):
        out = as_float_array(mat, copy=True)
        out[0, 0] = np.nan
        assert_false(np.isnan(mat).any())
def test_np_matrix():
    """as_float_array must never hand back an np.matrix instance."""
    X = np.arange(12).reshape(3, 4)
    for candidate in (X, np.matrix(X), sp.csc_matrix(X)):
        assert_false(isinstance(as_float_array(candidate), np.matrix))
def test_memmap():
    """Validation utilities must not copy memory-mapped arrays."""
    def asflt(x):
        return as_float_array(x, copy=False)

    with NamedTemporaryFile(prefix='sklearn-test') as tmp:
        M = np.memmap(tmp, shape=100, dtype=np.float32)
        M[:] = 0
        for validate in (check_array, np.asarray, asflt):
            X = validate(M)
            # writes through X must show up in M: same backing store
            X[:] = 1
            assert_array_equal(X.ravel(), M)
            X[:] = 0
def test_ordering():
    """Memory-layout requests ('C'/'F') are enforced by check_array."""
    # Each validation path needs checking: a 'copy' without order='K'
    # would otherwise destroy the ordering.
    X = np.ones((10, 5))
    for arr in (X, X.T):
        for copy in (True, False):
            checked = check_array(arr, order='C', copy=copy)
            assert_true(checked.flags['C_CONTIGUOUS'])
            checked = check_array(arr, order='F', copy=copy)
            assert_true(checked.flags['F_CONTIGUOUS'])
            if copy:
                assert_false(arr is checked)

    X = sp.csr_matrix(X)
    X.data = X.data[::-1]
    # the reversed data view is no longer C-contiguous
    assert_false(X.data.flags['C_CONTIGUOUS'])
def test_check_array():
    """Exercise check_array: sparse handling, ndim limits, finiteness
    checks, dtype/order enforcement and copy semantics."""
    # accept_sparse == None
    # raise error on sparse inputs
    X = [[1, 2], [3, 4]]
    X_csr = sp.csr_matrix(X)
    assert_raises(TypeError, check_array, X_csr)
    # ensure_2d
    X_array = check_array([0, 1, 2])
    assert_equal(X_array.ndim, 2)
    X_array = check_array([0, 1, 2], ensure_2d=False)
    assert_equal(X_array.ndim, 1)
    # don't allow ndim > 3
    X_ndim = np.arange(8).reshape(2, 2, 2)
    assert_raises(ValueError, check_array, X_ndim)
    check_array(X_ndim, allow_nd=True)  # doesn't raise
    # force_all_finite
    X_inf = np.arange(4).reshape(2, 2).astype(np.float)
    X_inf[0, 0] = np.inf
    assert_raises(ValueError, check_array, X_inf)
    check_array(X_inf, force_all_finite=False)  # no raise
    # nan check
    X_nan = np.arange(4).reshape(2, 2).astype(np.float)
    X_nan[0, 0] = np.nan
    assert_raises(ValueError, check_array, X_nan)
    # bug fix: this previously re-tested X_inf, leaving the nan case
    # with force_all_finite=False uncovered
    check_array(X_nan, force_all_finite=False)  # no raise

    # dtype and order enforcement.
    X_C = np.arange(4).reshape(2, 2).copy("C")
    X_F = X_C.copy("F")
    X_int = X_C.astype(np.int)
    X_float = X_C.astype(np.float)
    Xs = [X_C, X_F, X_int, X_float]
    dtypes = [np.int32, np.int, np.float, np.float32, None, np.bool, object]
    orders = ['C', 'F', None]
    copys = [True, False]
    for X, dtype, order, copy in product(Xs, dtypes, orders, copys):
        X_checked = check_array(X, dtype=dtype, order=order, copy=copy)
        if dtype is not None:
            assert_equal(X_checked.dtype, dtype)
        else:
            assert_equal(X_checked.dtype, X.dtype)
        if order == 'C':
            assert_true(X_checked.flags['C_CONTIGUOUS'])
            assert_false(X_checked.flags['F_CONTIGUOUS'])
        elif order == 'F':
            assert_true(X_checked.flags['F_CONTIGUOUS'])
            assert_false(X_checked.flags['C_CONTIGUOUS'])
        if copy:
            assert_false(X is X_checked)
        else:
            # doesn't copy if it was already good
            if (X.dtype == X_checked.dtype and
                    X_checked.flags['C_CONTIGUOUS'] == X.flags['C_CONTIGUOUS']
                    and X_checked.flags['F_CONTIGUOUS'] == X.flags['F_CONTIGUOUS']):
                assert_true(X is X_checked)

    # allowed sparse != None
    X_csc = sp.csc_matrix(X_C)
    X_coo = X_csc.tocoo()
    X_dok = X_csc.todok()
    X_int = X_csc.astype(np.int)
    X_float = X_csc.astype(np.float)
    Xs = [X_csc, X_coo, X_dok, X_int, X_float]
    accept_sparses = [['csr', 'coo'], ['coo', 'dok']]
    for X, dtype, accept_sparse, copy in product(Xs, dtypes, accept_sparses,
                                                 copys):
        with warnings.catch_warnings(record=True) as w:
            X_checked = check_array(X, dtype=dtype,
                                    accept_sparse=accept_sparse, copy=copy)
        # object-dtype and dok inputs may legitimately warn; anything
        # else must be silent
        if (dtype is object or sp.isspmatrix_dok(X)) and len(w):
            message = str(w[0].message)
            messages = ["object dtype is not supported by sparse matrices",
                        "Can't check dok sparse matrix for nan or inf."]
            assert_true(message in messages)
        else:
            assert_equal(len(w), 0)
        if dtype is not None:
            assert_equal(X_checked.dtype, dtype)
        else:
            assert_equal(X_checked.dtype, X.dtype)
        if X.format in accept_sparse:
            # no change if allowed
            assert_equal(X.format, X_checked.format)
        else:
            # got converted to the first accepted format
            assert_equal(X_checked.format, accept_sparse[0])
        if copy:
            assert_false(X is X_checked)
        else:
            # doesn't copy if it was already good
            if (X.dtype == X_checked.dtype and X.format == X_checked.format):
                assert_true(X is X_checked)

    # other input formats
    # convert lists to arrays
    X_dense = check_array([[1, 2], [3, 4]])
    assert_true(isinstance(X_dense, np.ndarray))
    # raise on too deep lists
    assert_raises(ValueError, check_array, X_ndim.tolist())
    check_array(X_ndim.tolist(), allow_nd=True)  # doesn't raise
    # convert weird stuff to arrays
    X_no_array = NotAnArray(X_dense)
    result = check_array(X_no_array)
    assert_true(isinstance(result, np.ndarray))
def test_check_array_pandas_dtype_object_conversion():
    """Object-dtype dataframe-like inputs are converted to float."""
    data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.object)
    frame = MockDataFrame(data)
    for ensure_2d in (True, False):
        assert_equal(check_array(frame, ensure_2d=ensure_2d).dtype.kind, "f")
    # smoke-test against dataframes with column named "dtype"
    frame.dtype = "Hans"
    assert_equal(check_array(frame, ensure_2d=False).dtype.kind, "f")
def test_check_array_dtype_stability():
    """Nested lists of ints must stay integer-typed after validation."""
    X = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    for ensure_2d in (True, False):
        assert_equal(check_array(X, ensure_2d=ensure_2d).dtype.kind, "i")
def test_check_array_dtype_warning():
    """Check warn_on_dtype: warn only when an actual conversion happens."""
    X_int_list = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    X_float64 = np.asarray(X_int_list, dtype=np.float64)
    X_float32 = np.asarray(X_int_list, dtype=np.float32)
    X_int64 = np.asarray(X_int_list, dtype=np.int64)
    X_csr_float64 = sp.csr_matrix(X_float64)
    X_csr_float32 = sp.csr_matrix(X_float32)
    X_csc_float32 = sp.csc_matrix(X_float32)
    X_csc_int32 = sp.csc_matrix(X_int64, dtype=np.int32)
    y = [0, 0, 1]
    integer_data = [X_int64, X_csc_int32]
    float64_data = [X_float64, X_csr_float64]
    float32_data = [X_float32, X_csr_float32, X_csc_float32]
    for X in integer_data:
        # int -> float64 conversion is silent unless warn_on_dtype=True
        X_checked = assert_no_warnings(check_array, X, dtype=np.float64,
                                       accept_sparse=True)
        assert_equal(X_checked.dtype, np.float64)
        X_checked = assert_warns(DataConversionWarning, check_array, X,
                                 dtype=np.float64,
                                 accept_sparse=True, warn_on_dtype=True)
        assert_equal(X_checked.dtype, np.float64)
        # Check that the warning message includes the name of the Estimator
        X_checked = assert_warns_message(DataConversionWarning,
                                         'SomeEstimator',
                                         check_array, X,
                                         dtype=[np.float64, np.float32],
                                         accept_sparse=True,
                                         warn_on_dtype=True,
                                         estimator='SomeEstimator')
        assert_equal(X_checked.dtype, np.float64)
        # an estimator instance works too: its class name appears in the
        # warning, and check_X_y forwards warn_on_dtype the same way
        X_checked, y_checked = assert_warns_message(
            DataConversionWarning, 'KNeighborsClassifier',
            check_X_y, X, y, dtype=np.float64, accept_sparse=True,
            warn_on_dtype=True, estimator=KNeighborsClassifier())
        assert_equal(X_checked.dtype, np.float64)
    for X in float64_data:
        # already float64: no warning regardless of warn_on_dtype
        X_checked = assert_no_warnings(check_array, X, dtype=np.float64,
                                       accept_sparse=True, warn_on_dtype=True)
        assert_equal(X_checked.dtype, np.float64)
        X_checked = assert_no_warnings(check_array, X, dtype=np.float64,
                                       accept_sparse=True, warn_on_dtype=False)
        assert_equal(X_checked.dtype, np.float64)
    for X in float32_data:
        # float32 is in the accepted dtype list: no conversion, no copy ...
        X_checked = assert_no_warnings(check_array, X,
                                       dtype=[np.float64, np.float32],
                                       accept_sparse=True)
        assert_equal(X_checked.dtype, np.float32)
        assert_true(X_checked is X)
        # ... unless a copy is explicitly requested
        X_checked = assert_no_warnings(check_array, X,
                                       dtype=[np.float64, np.float32],
                                       accept_sparse=['csr', 'dok'],
                                       copy=True)
        assert_equal(X_checked.dtype, np.float32)
        assert_false(X_checked is X)
        # csc is not in accept_sparse, so a converted csr copy is returned
        # even with copy=False
        X_checked = assert_no_warnings(check_array, X_csc_float32,
                                       dtype=[np.float64, np.float32],
                                       accept_sparse=['csr', 'dok'],
                                       copy=False)
        assert_equal(X_checked.dtype, np.float32)
        assert_false(X_checked is X_csc_float32)
        assert_equal(X_checked.format, 'csr')
def test_check_array_min_samples_and_features_messages():
    """Exact error messages for ensure_min_samples / ensure_min_features."""
    # empty list is considered 2D by default:
    msg = "0 feature(s) (shape=(1, 0)) while a minimum of 1 is required."
    assert_raise_message(ValueError, msg, check_array, [])
    # If considered a 1D collection when ensure_2d=False, then the minimum
    # number of samples will break:
    msg = "0 sample(s) (shape=(0,)) while a minimum of 1 is required."
    assert_raise_message(ValueError, msg, check_array, [], ensure_2d=False)
    # Invalid edge case when checking the default minimum sample of a scalar
    msg = "Singleton array array(42) cannot be considered a valid collection."
    assert_raise_message(TypeError, msg, check_array, 42, ensure_2d=False)
    # But this works if the input data is forced to look like a 2 array with
    # one sample and one feature:
    X_checked = check_array(42, ensure_2d=True)
    assert_array_equal(np.array([[42]]), X_checked)
    # Simulate a model that would need at least 2 samples to be well defined
    X = np.ones((1, 10))
    y = np.ones(1)
    msg = "1 sample(s) (shape=(1, 10)) while a minimum of 2 is required."
    assert_raise_message(ValueError, msg, check_X_y, X, y,
                         ensure_min_samples=2)
    # The same message is raised if the data has 2 dimensions even if this is
    # not mandatory
    assert_raise_message(ValueError, msg, check_X_y, X, y,
                         ensure_min_samples=2, ensure_2d=False)
    # Simulate a model that would require at least 3 features (e.g. SelectKBest
    # with k=3)
    X = np.ones((10, 2))
    y = np.ones(2)
    msg = "2 feature(s) (shape=(10, 2)) while a minimum of 3 is required."
    assert_raise_message(ValueError, msg, check_X_y, X, y,
                         ensure_min_features=3)
    # Only the feature check is enabled whenever the number of dimensions is 2
    # even if allow_nd is enabled:
    assert_raise_message(ValueError, msg, check_X_y, X, y,
                         ensure_min_features=3, allow_nd=True)
    # Simulate a case where a pipeline stage as trimmed all the features of a
    # 2D dataset.
    X = np.empty(0).reshape(10, 0)
    y = np.ones(10)
    msg = "0 feature(s) (shape=(10, 0)) while a minimum of 1 is required."
    assert_raise_message(ValueError, msg, check_X_y, X, y)
    # nd-data is not checked for any minimum number of features by default:
    X = np.ones((10, 0, 28, 28))
    y = np.ones(10)
    X_checked, y_checked = check_X_y(X, y, allow_nd=True)
    assert_array_equal(X, X_checked)
    assert_array_equal(y, y_checked)
def test_has_fit_parameter():
    """has_fit_parameter works on both classes and instances."""
    assert_false(has_fit_parameter(KNeighborsClassifier, "sample_weight"))
    for estimator in (RandomForestRegressor, SVR, SVR()):
        assert_true(has_fit_parameter(estimator, "sample_weight"))
def test_check_symmetric():
    """check_symmetric rejects 1-D input and symmetrizes asymmetric input
    across all sparse formats as well as dense arrays."""
    arr_sym = np.array([[0, 1], [1, 2]])
    arr_bad = np.ones(2)
    arr_asym = np.array([[0, 2], [0, 2]])

    sparse_makers = {'dok': sp.dok_matrix,
                     'csr': sp.csr_matrix,
                     'csc': sp.csc_matrix,
                     'coo': sp.coo_matrix,
                     'lil': sp.lil_matrix,
                     'bsr': sp.bsr_matrix}
    test_arrays = dict((fmt, make(arr_asym))
                       for fmt, make in sparse_makers.items())
    test_arrays['dense'] = arr_asym

    # a 1-D array cannot be checked for symmetry at all
    assert_raises(ValueError, check_symmetric, arr_bad)

    # asymmetric inputs warn by default, raise on request, and are
    # symmetrized (format preserved) when warnings are disabled
    for arr_format, arr in test_arrays.items():
        assert_warns(UserWarning, check_symmetric, arr)
        assert_raises(ValueError, check_symmetric, arr, raise_exception=True)
        output = check_symmetric(arr, raise_warning=False)
        if sp.issparse(output):
            assert_equal(output.format, arr_format)
            assert_array_equal(output.toarray(), arr_sym)
        else:
            assert_array_equal(output, arr_sym)
def test_check_is_fitted():
    """check_is_fitted: input validation, NotFittedError behaviour and the
    %(name)s message template."""
    # Check is ValueError raised when non estimator instance passed
    assert_raises(ValueError, check_is_fitted, ARDRegression, "coef_")
    assert_raises(TypeError, check_is_fitted, "SVR", "support_")
    ard = ARDRegression()
    svr = SVR()
    try:
        # unfitted estimators must raise NotFittedError specifically
        assert_raises(NotFittedError, check_is_fitted, ard, "coef_")
        assert_raises(NotFittedError, check_is_fitted, svr, "support_")
    except ValueError:
        assert False, "check_is_fitted failed with ValueError"
    # NotFittedError is a subclass of both ValueError and AttributeError
    try:
        # %(name)s in the template is replaced by the estimator class name
        check_is_fitted(ard, "coef_", "Random message %(name)s, %(name)s")
    except ValueError as e:
        assert_equal(str(e), "Random message ARDRegression, ARDRegression")
    try:
        check_is_fitted(svr, "support_", "Another message %(name)s, %(name)s")
    except AttributeError as e:
        assert_equal(str(e), "Another message SVR, SVR")
    ard.fit(*make_blobs())
    svr.fit(*make_blobs())
    # fitted estimators pass the check; the function returns None
    assert_equal(None, check_is_fitted(ard, "coef_"))
    assert_equal(None, check_is_fitted(svr, "support_"))
def test_check_consistent_length():
    """check_consistent_length accepts equal-length collections and raises
    informative errors for mismatched lengths and non-collections."""
    check_consistent_length([1], [2], [3], [4], [5])
    check_consistent_length([[1, 2], [[1, 2]]], [1, 2], ['a', 'b'])
    check_consistent_length([1], (2,), np.array([3]), sp.csr_matrix((1, 2)))
    assert_raise_message(ValueError, 'inconsistent numbers of samples',
                         check_consistent_length, [1, 2], [1])
    # raw strings: these regex patterns contain backslash escapes (\w)
    # that would otherwise trigger invalid-escape warnings
    assert_raises_regexp(TypeError, r'got <\w+ \'int\'>',
                         check_consistent_length, [1, 2], 1)
    assert_raises_regexp(TypeError, r'got <\w+ \'object\'>',
                         check_consistent_length, [1, 2], object())
    assert_raises(TypeError, check_consistent_length, [1, 2], np.array(1))
    # Despite ensembles having __len__ they must raise TypeError
    assert_raises_regexp(TypeError, 'estimator', check_consistent_length,
                         [1, 2], RandomForestRegressor())
    # XXX: We should have a test with a string, but what is correct behaviour?
|
bsd-3-clause
|
andante20/volatility
|
volatility/plugins/overlays/windows/win2003_sp1_x64_vtypes.py
|
58
|
326930
|
ntkrnlmp_types = {
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1015' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1015']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
'__unnamed_1026' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1026']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_KPRCB' : [ 0x2480, {
'MxCsr' : [ 0x0, ['unsigned long']],
'Number' : [ 0x4, ['unsigned char']],
'NestingLevel' : [ 0x5, ['unsigned char']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'UserRsp' : [ 0x20, ['unsigned long long']],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'SetMember' : [ 0x38, ['unsigned long long']],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'InitialApicId' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerPhysicalProcessor' : [ 0x63f, ['unsigned char']],
'ApicMask' : [ 0x640, ['unsigned long']],
'CFlushSize' : [ 0x644, ['unsigned char']],
'PrcbPad0x' : [ 0x645, ['array', 3, ['unsigned char']]],
'AcpiReserved' : [ 0x648, ['pointer64', ['void']]],
'PrcbPad00' : [ 0x650, ['array', 4, ['unsigned long long']]],
'LockQueue' : [ 0x670, ['array', 33, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x880, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x980, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PPPagedLookasideList' : [ 0xb80, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PacketBarrier' : [ 0xd80, ['unsigned long long']],
'DeferredReadyListHead' : [ 0xd88, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0xd90, ['long']],
'MmCopyOnWriteCount' : [ 0xd94, ['long']],
'MmTransitionCount' : [ 0xd98, ['long']],
'MmCacheTransitionCount' : [ 0xd9c, ['long']],
'MmDemandZeroCount' : [ 0xda0, ['long']],
'MmPageReadCount' : [ 0xda4, ['long']],
'MmPageReadIoCount' : [ 0xda8, ['long']],
'MmCacheReadCount' : [ 0xdac, ['long']],
'MmCacheIoCount' : [ 0xdb0, ['long']],
'MmDirtyPagesWriteCount' : [ 0xdb4, ['long']],
'MmDirtyWriteIoCount' : [ 0xdb8, ['long']],
'MmMappedPagesWriteCount' : [ 0xdbc, ['long']],
'MmMappedWriteIoCount' : [ 0xdc0, ['long']],
'LookasideIrpFloat' : [ 0xdc4, ['long']],
'KeSystemCalls' : [ 0xdc8, ['unsigned long']],
'IoReadOperationCount' : [ 0xdcc, ['long']],
'IoWriteOperationCount' : [ 0xdd0, ['long']],
'IoOtherOperationCount' : [ 0xdd4, ['long']],
'IoReadTransferCount' : [ 0xdd8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0xde0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0xde8, ['_LARGE_INTEGER']],
'KeContextSwitches' : [ 0xdf0, ['unsigned long']],
'PrcbPad2' : [ 0xdf4, ['array', 12, ['unsigned char']]],
'TargetSet' : [ 0xe00, ['unsigned long long']],
'IpiFrozen' : [ 0xe08, ['unsigned long']],
'PrcbPad3' : [ 0xe0c, ['array', 116, ['unsigned char']]],
'RequestMailbox' : [ 0xe80, ['array', 64, ['_REQUEST_MAILBOX']]],
'SenderSummary' : [ 0x1e80, ['unsigned long long']],
'PrcbPad4' : [ 0x1e88, ['array', 120, ['unsigned char']]],
'DpcData' : [ 0x1f00, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x1f40, ['pointer64', ['void']]],
'SavedRsp' : [ 0x1f48, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x1f50, ['long']],
'DpcRequestRate' : [ 0x1f54, ['unsigned long']],
'MinimumDpcRate' : [ 0x1f58, ['unsigned long']],
'DpcInterruptRequested' : [ 0x1f5c, ['unsigned char']],
'DpcThreadRequested' : [ 0x1f5d, ['unsigned char']],
'DpcRoutineActive' : [ 0x1f5e, ['unsigned char']],
'DpcThreadActive' : [ 0x1f5f, ['unsigned char']],
'TimerHand' : [ 0x1f60, ['unsigned long long']],
'TimerRequest' : [ 0x1f60, ['unsigned long long']],
'TickOffset' : [ 0x1f68, ['long']],
'MasterOffset' : [ 0x1f6c, ['long']],
'DpcLastCount' : [ 0x1f70, ['unsigned long']],
'ThreadDpcEnable' : [ 0x1f74, ['unsigned char']],
'QuantumEnd' : [ 0x1f75, ['unsigned char']],
'PrcbPad50' : [ 0x1f76, ['unsigned char']],
'IdleSchedule' : [ 0x1f77, ['unsigned char']],
'DpcSetEventRequest' : [ 0x1f78, ['long']],
'PrcbPad40' : [ 0x1f7c, ['long']],
'DpcThread' : [ 0x1f80, ['pointer64', ['void']]],
'DpcEvent' : [ 0x1f88, ['_KEVENT']],
'CallDpc' : [ 0x1fa0, ['_KDPC']],
'PrcbPad7' : [ 0x1fe0, ['array', 4, ['unsigned long long']]],
'WaitListHead' : [ 0x2000, ['_LIST_ENTRY']],
'ReadySummary' : [ 0x2010, ['unsigned long']],
'QueueIndex' : [ 0x2014, ['unsigned long']],
'DispatcherReadyListHead' : [ 0x2018, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x2218, ['unsigned long']],
'KernelTime' : [ 0x221c, ['unsigned long']],
'UserTime' : [ 0x2220, ['unsigned long']],
'DpcTime' : [ 0x2224, ['unsigned long']],
'InterruptTime' : [ 0x2228, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x222c, ['unsigned long']],
'SkipTick' : [ 0x2230, ['unsigned char']],
'DebuggerSavedIRQL' : [ 0x2231, ['unsigned char']],
'PollSlot' : [ 0x2232, ['unsigned char']],
'PrcbPad8' : [ 0x2233, ['array', 13, ['unsigned char']]],
'ParentNode' : [ 0x2240, ['pointer64', ['_KNODE']]],
'MultiThreadProcessorSet' : [ 0x2248, ['unsigned long long']],
'MultiThreadSetMaster' : [ 0x2250, ['pointer64', ['_KPRCB']]],
'Sleeping' : [ 0x2258, ['long']],
'PrcbPad90' : [ 0x225c, ['array', 1, ['unsigned long']]],
'DebugDpcTime' : [ 0x2260, ['unsigned long']],
'PageColor' : [ 0x2264, ['unsigned long']],
'NodeColor' : [ 0x2268, ['unsigned long']],
'NodeShiftedColor' : [ 0x226c, ['unsigned long']],
'SecondaryColorMask' : [ 0x2270, ['unsigned long']],
'PrcbPad9' : [ 0x2274, ['array', 12, ['unsigned char']]],
'CcFastReadNoWait' : [ 0x2280, ['unsigned long']],
'CcFastReadWait' : [ 0x2284, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x2288, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x228c, ['unsigned long']],
'CcCopyReadWait' : [ 0x2290, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x2294, ['unsigned long']],
'KeAlignmentFixupCount' : [ 0x2298, ['unsigned long']],
'KeDcacheFlushCount' : [ 0x229c, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x22a0, ['unsigned long']],
'KeFirstLevelTbFills' : [ 0x22a4, ['unsigned long']],
'KeFloatingEmulationCount' : [ 0x22a8, ['unsigned long']],
'KeIcacheFlushCount' : [ 0x22ac, ['unsigned long']],
'KeSecondLevelTbFills' : [ 0x22b0, ['unsigned long']],
'VendorString' : [ 0x22b4, ['array', 13, ['unsigned char']]],
'PrcbPad10' : [ 0x22c1, ['array', 2, ['unsigned char']]],
'FeatureBits' : [ 0x22c4, ['unsigned long']],
'UpdateSignature' : [ 0x22c8, ['_LARGE_INTEGER']],
'PowerState' : [ 0x22d0, ['_PROCESSOR_POWER_STATE']],
'Cache' : [ 0x2440, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x247c, ['unsigned long']],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_KDPC' : [ 0x40, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned char']],
'Expedite' : [ 0x3, ['unsigned char']],
'DpcListEntry' : [ 0x8, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x200, {
'XmmSaveArea' : [ 0x0, ['_XMM_SAVE_AREA32']],
'Fill' : [ 0x0, ['array', 432, ['unsigned char']]],
'Current' : [ 0x1b0, ['_KERNEL_STACK_SEGMENT']],
'Previous' : [ 0x1d8, ['_KERNEL_STACK_SEGMENT']],
} ],
'_KTHREAD' : [ 0x320, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListHead' : [ 0x18, ['_LIST_ENTRY']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'KernelStack' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'ApcState' : [ 0x48, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x48, ['array', 43, ['unsigned char']]],
'ApcQueueable' : [ 0x73, ['unsigned char']],
'NextProcessor' : [ 0x74, ['unsigned char']],
'DeferredProcessor' : [ 0x75, ['unsigned char']],
'AdjustReason' : [ 0x76, ['unsigned char']],
'AdjustIncrement' : [ 0x77, ['unsigned char']],
'ApcQueueLock' : [ 0x78, ['unsigned long long']],
'WaitStatus' : [ 0x80, ['long long']],
'WaitBlockList' : [ 0x88, ['pointer64', ['_KWAIT_BLOCK']]],
'GateObject' : [ 0x88, ['pointer64', ['_KGATE']]],
'Alertable' : [ 0x90, ['unsigned char']],
'WaitNext' : [ 0x91, ['unsigned char']],
'WaitReason' : [ 0x92, ['unsigned char']],
'Priority' : [ 0x93, ['unsigned char']],
'EnableStackSwap' : [ 0x94, ['unsigned char']],
'SwapBusy' : [ 0x95, ['unsigned char']],
'Alerted' : [ 0x96, ['array', 2, ['unsigned char']]],
'WaitListEntry' : [ 0x98, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x98, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xa8, ['pointer64', ['_KQUEUE']]],
'Teb' : [ 0xb0, ['pointer64', ['void']]],
'Timer' : [ 0xb8, ['_KTIMER']],
'TimerFill' : [ 0xb8, ['array', 60, ['unsigned char']]],
'AutoAlignment' : [ 0xf4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0xf4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'ReservedFlags' : [ 0xf4, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='long')]],
'ThreadFlags' : [ 0xf4, ['long']],
'WaitBlock' : [ 0xf8, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill0' : [ 0xf8, ['array', 43, ['unsigned char']]],
'SystemAffinityActive' : [ 0x123, ['unsigned char']],
'WaitBlockFill1' : [ 0xf8, ['array', 91, ['unsigned char']]],
'PreviousMode' : [ 0x153, ['unsigned char']],
'WaitBlockFill2' : [ 0xf8, ['array', 139, ['unsigned char']]],
'ResourceIndex' : [ 0x183, ['unsigned char']],
'WaitBlockFill3' : [ 0xf8, ['array', 187, ['unsigned char']]],
'LargeStack' : [ 0x1b3, ['unsigned char']],
'WaitBlockFill4' : [ 0xf8, ['array', 44, ['unsigned char']]],
'ContextSwitches' : [ 0x124, ['unsigned long']],
'WaitBlockFill5' : [ 0xf8, ['array', 92, ['unsigned char']]],
'State' : [ 0x154, ['unsigned char']],
'NpxState' : [ 0x155, ['unsigned char']],
'WaitIrql' : [ 0x156, ['unsigned char']],
'WaitMode' : [ 0x157, ['unsigned char']],
'WaitBlockFill6' : [ 0xf8, ['array', 140, ['unsigned char']]],
'WaitTime' : [ 0x184, ['unsigned long']],
'WaitBlockFill7' : [ 0xf8, ['array', 188, ['unsigned char']]],
'KernelApcDisable' : [ 0x1b4, ['short']],
'SpecialApcDisable' : [ 0x1b6, ['short']],
'CombinedApcDisable' : [ 0x1b4, ['unsigned long']],
'QueueListEntry' : [ 0x1b8, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x1c8, ['pointer64', ['_KTRAP_FRAME']]],
'CallbackStack' : [ 0x1d0, ['pointer64', ['void']]],
'ServiceTable' : [ 0x1d8, ['pointer64', ['void']]],
'KernelLimit' : [ 0x1e0, ['unsigned long']],
'ApcStateIndex' : [ 0x1e4, ['unsigned char']],
'IdealProcessor' : [ 0x1e5, ['unsigned char']],
'Preempted' : [ 0x1e6, ['unsigned char']],
'ProcessReadyQueue' : [ 0x1e7, ['unsigned char']],
'Win32kTable' : [ 0x1e8, ['pointer64', ['void']]],
'Win32kLimit' : [ 0x1f0, ['unsigned long']],
'KernelStackResident' : [ 0x1f4, ['unsigned char']],
'BasePriority' : [ 0x1f5, ['unsigned char']],
'PriorityDecrement' : [ 0x1f6, ['unsigned char']],
'Saturation' : [ 0x1f7, ['unsigned char']],
'UserAffinity' : [ 0x1f8, ['unsigned long long']],
'Process' : [ 0x200, ['pointer64', ['_KPROCESS']]],
'Affinity' : [ 0x208, ['unsigned long long']],
'ApcStatePointer' : [ 0x210, ['array', 2, ['pointer64', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x220, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x220, ['array', 43, ['unsigned char']]],
'FreezeCount' : [ 0x24b, ['unsigned char']],
'SuspendCount' : [ 0x24c, ['unsigned char']],
'UserIdealProcessor' : [ 0x24d, ['unsigned char']],
'CalloutActive' : [ 0x24e, ['unsigned char']],
'CodePatchInProgress' : [ 0x24f, ['unsigned char']],
'Win32Thread' : [ 0x250, ['pointer64', ['void']]],
'StackBase' : [ 0x258, ['pointer64', ['void']]],
'SuspendApc' : [ 0x260, ['_KAPC']],
'SuspendApcFill0' : [ 0x260, ['array', 1, ['unsigned char']]],
'Quantum' : [ 0x261, ['unsigned char']],
'SuspendApcFill1' : [ 0x260, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x263, ['unsigned char']],
'SuspendApcFill2' : [ 0x260, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x264, ['unsigned long']],
'SuspendApcFill3' : [ 0x260, ['array', 64, ['unsigned char']]],
'TlsArray' : [ 0x2a0, ['pointer64', ['void']]],
'SuspendApcFill4' : [ 0x260, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x2a8, ['pointer64', ['void']]],
'SuspendApcFill5' : [ 0x260, ['array', 83, ['unsigned char']]],
'PowerState' : [ 0x2b3, ['unsigned char']],
'UserTime' : [ 0x2b4, ['unsigned long']],
'SuspendSemaphore' : [ 0x2b8, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x2b8, ['array', 28, ['unsigned char']]],
'SListFaultCount' : [ 0x2d4, ['unsigned long']],
'ThreadListEntry' : [ 0x2d8, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x2e8, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x2f0, ['long long']],
'WriteOperationCount' : [ 0x2f8, ['long long']],
'OtherOperationCount' : [ 0x300, ['long long']],
'ReadTransferCount' : [ 0x308, ['long long']],
'WriteTransferCount' : [ 0x310, ['long long']],
'OtherTransferCount' : [ 0x318, ['long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x28, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
'ActualLimit' : [ 0x20, ['unsigned long long']],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x40, {
'WakeGate' : [ 0x0, ['_KGATE']],
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x18, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x20, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x28, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x30, ['long']],
'Flags' : [ 0x34, ['long']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x100, {
'Locks' : [ 0x0, ['array', 32, ['pointer64', ['_EX_PUSH_LOCK']]]],
} ],
'_ETHREAD' : [ 0x428, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x320, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x328, ['_LARGE_INTEGER']],
'LpcReplyChain' : [ 0x328, ['_LIST_ENTRY']],
'KeyedWaitChain' : [ 0x328, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x338, ['long']],
'OfsChain' : [ 0x338, ['pointer64', ['void']]],
'PostBlockList' : [ 0x340, ['_LIST_ENTRY']],
'TerminationPort' : [ 0x350, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x350, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x350, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x358, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x360, ['_LIST_ENTRY']],
'Cid' : [ 0x370, ['_CLIENT_ID']],
'LpcReplySemaphore' : [ 0x380, ['_KSEMAPHORE']],
'KeyedWaitSemaphore' : [ 0x380, ['_KSEMAPHORE']],
'LpcReplyMessage' : [ 0x3a0, ['pointer64', ['void']]],
'LpcWaitingOnPort' : [ 0x3a0, ['pointer64', ['void']]],
'ImpersonationInfo' : [ 0x3a8, ['pointer64', ['_PS_IMPERSONATION_INFORMATION']]],
'IrpList' : [ 0x3b0, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x3c0, ['unsigned long long']],
'DeviceToVerify' : [ 0x3c8, ['pointer64', ['_DEVICE_OBJECT']]],
'ThreadsProcess' : [ 0x3d0, ['pointer64', ['_EPROCESS']]],
'StartAddress' : [ 0x3d8, ['pointer64', ['void']]],
'Win32StartAddress' : [ 0x3e0, ['pointer64', ['void']]],
'LpcReceivedMessageId' : [ 0x3e0, ['unsigned long']],
'ThreadListEntry' : [ 0x3e8, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x3f8, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x400, ['_EX_PUSH_LOCK']],
'LpcReplyMessageId' : [ 0x408, ['unsigned long']],
'ReadClusterSize' : [ 0x40c, ['unsigned long']],
'GrantedAccess' : [ 0x410, ['unsigned long']],
'CrossThreadFlags' : [ 0x414, ['unsigned long']],
'Terminated' : [ 0x414, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeadThread' : [ 0x414, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x414, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x414, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x414, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x414, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x414, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x414, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x414, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x418, ['unsigned long']],
'ActiveExWorker' : [ 0x418, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x418, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x418, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x418, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x41c, ['unsigned long']],
'LpcReceivedMsgIdValid' : [ 0x41c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'LpcExitThreadCalled' : [ 0x41c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'AddressSpaceOwner' : [ 0x41c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x41c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x41c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemWorkingSetExclusive' : [ 0x41c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemWorkingSetShared' : [ 0x41c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x41c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x41d, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ApcNeeded' : [ 0x41d, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ForwardClusterOnly' : [ 0x420, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x421, ['unsigned char']],
'ActiveFaultCount' : [ 0x422, ['unsigned char']],
} ],
'_EPROCESS' : [ 0x3e0, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0xb8, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0xc0, ['_LARGE_INTEGER']],
'ExitTime' : [ 0xc8, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0xd0, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0xd8, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0xe0, ['_LIST_ENTRY']],
'QuotaUsage' : [ 0xf0, ['array', 3, ['unsigned long long']]],
'QuotaPeak' : [ 0x108, ['array', 3, ['unsigned long long']]],
'CommitCharge' : [ 0x120, ['unsigned long long']],
'PeakVirtualSize' : [ 0x128, ['unsigned long long']],
'VirtualSize' : [ 0x130, ['unsigned long long']],
'SessionProcessLinks' : [ 0x138, ['_LIST_ENTRY']],
'DebugPort' : [ 0x148, ['pointer64', ['void']]],
'ExceptionPort' : [ 0x150, ['pointer64', ['void']]],
'ObjectTable' : [ 0x158, ['pointer64', ['_HANDLE_TABLE']]],
'Token' : [ 0x160, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x168, ['unsigned long long']],
'AddressCreationLock' : [ 0x170, ['_KGUARDED_MUTEX']],
'HyperSpaceLock' : [ 0x1a8, ['unsigned long long']],
'ForkInProgress' : [ 0x1b0, ['pointer64', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x1b8, ['unsigned long long']],
'PhysicalVadRoot' : [ 0x1c0, ['pointer64', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x1c8, ['pointer64', ['void']]],
'NumberOfPrivatePages' : [ 0x1d0, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x1d8, ['unsigned long long']],
'Win32Process' : [ 0x1e0, ['pointer64', ['void']]],
'Job' : [ 0x1e8, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x1f0, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x1f8, ['pointer64', ['void']]],
'QuotaBlock' : [ 0x200, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'WorkingSetWatch' : [ 0x208, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x210, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x218, ['pointer64', ['void']]],
'LdtInformation' : [ 0x220, ['pointer64', ['void']]],
'VadFreeHint' : [ 0x228, ['pointer64', ['void']]],
'VdmObjects' : [ 0x230, ['pointer64', ['void']]],
'DeviceMap' : [ 0x238, ['pointer64', ['void']]],
'Spare0' : [ 0x240, ['array', 3, ['pointer64', ['void']]]],
'PageDirectoryPte' : [ 0x258, ['_HARDWARE_PTE']],
'Filler' : [ 0x258, ['unsigned long long']],
'Session' : [ 0x260, ['pointer64', ['void']]],
'ImageFileName' : [ 0x268, ['array', 16, ['unsigned char']]],
'JobLinks' : [ 0x278, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x288, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x290, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x2a0, ['pointer64', ['void']]],
'Wow64Process' : [ 0x2a8, ['pointer64', ['_WOW64_PROCESS']]],
'ActiveThreads' : [ 0x2b0, ['unsigned long']],
'GrantedAccess' : [ 0x2b4, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x2b8, ['unsigned long']],
'LastThreadExitStatus' : [ 0x2bc, ['long']],
'Peb' : [ 0x2c0, ['pointer64', ['_PEB']]],
'PrefetchTrace' : [ 0x2c8, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x2d0, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x2d8, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x2e0, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x2e8, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x2f0, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x2f8, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x300, ['unsigned long long']],
'CommitChargePeak' : [ 0x308, ['unsigned long long']],
'AweInfo' : [ 0x310, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x318, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x320, ['_MMSUPPORT']],
'Spares' : [ 0x378, ['array', 2, ['unsigned long']]],
'ModifiedPageCount' : [ 0x380, ['unsigned long']],
'JobStatus' : [ 0x384, ['unsigned long']],
'Flags' : [ 0x388, ['unsigned long']],
'CreateReported' : [ 0x388, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x388, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x388, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x388, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x388, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x388, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x388, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x388, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x388, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x388, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x388, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x388, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x388, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'SessionCreationUnderway' : [ 0x388, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x388, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x388, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x388, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x388, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x388, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x388, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x388, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x388, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x388, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x388, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'SmapAllowed' : [ 0x388, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'CreateFailed' : [ 0x388, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x388, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'Spare1' : [ 0x388, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Spare2' : [ 0x388, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x38c, ['long']],
'NextPageColor' : [ 0x390, ['unsigned short']],
'SubSystemMinorVersion' : [ 0x392, ['unsigned char']],
'SubSystemMajorVersion' : [ 0x393, ['unsigned char']],
'SubSystemVersion' : [ 0x392, ['unsigned short']],
'PriorityClass' : [ 0x394, ['unsigned char']],
'VadRoot' : [ 0x398, ['_MM_AVL_TABLE']],
'Cookie' : [ 0x3d8, ['unsigned long']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Type' : [ 0x10, ['pointer64', ['_OBJECT_TYPE']]],
'NameInfoOffset' : [ 0x18, ['unsigned char']],
'HandleInfoOffset' : [ 0x19, ['unsigned char']],
'QuotaInfoOffset' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'ExclusiveProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'QueryReferences' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_TYPE' : [ 0x2c0, {
'Mutex' : [ 0x0, ['_ERESOURCE']],
'TypeList' : [ 0x68, ['_LIST_ENTRY']],
'Name' : [ 0x78, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x88, ['pointer64', ['void']]],
'Index' : [ 0x90, ['unsigned long']],
'TotalNumberOfObjects' : [ 0x94, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x98, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x9c, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0xa0, ['unsigned long']],
'TypeInfo' : [ 0xa8, ['_OBJECT_TYPE_INITIALIZER']],
'Key' : [ 0x118, ['unsigned long']],
'ObjectLocks' : [ 0x120, ['array', 4, ['_ERESOURCE']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_KGUARDED_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KGATE']],
'KernelApcDisable' : [ 0x30, ['short']],
'SpecialApcDisable' : [ 0x32, ['short']],
'CombinedApcDisable' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_1161' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'HardLarge' : [ 0x0, ['_MMPTE_HARDWARE_LARGEPAGE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_1161']],
} ],
'__unnamed_116c' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'ReadStatus' : [ 0x0, ['long']],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_116e' : [ 0x8, {
'Blink' : [ 0x0, ['unsigned long long']],
'ShareCount' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1171' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_1173' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_1171']],
} ],
'__unnamed_117b' : [ 0x8, {
'EntireFrame' : [ 0x0, ['unsigned long long']],
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 57, native_type='unsigned long long')]],
'InPageError' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'VerifierAllocation' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 60, native_type='unsigned long long')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 63, native_type='unsigned long long')]],
'MustBeCached' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMPFN' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_116c']],
'PteAddress' : [ 0x8, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x10, ['__unnamed_116e']],
'u3' : [ 0x18, ['__unnamed_1173']],
'UsedPageTableEntries' : [ 0x1c, ['unsigned long']],
'OriginalPte' : [ 0x20, ['_MMPTE']],
'AweReferenceCount' : [ 0x20, ['long']],
'u4' : [ 0x28, ['__unnamed_117b']],
} ],
'__unnamed_1182' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMVAD']]],
} ],
'__unnamed_1185' : [ 0x8, {
'LongFlags' : [ 0x0, ['unsigned long long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_118a' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x50, {
'u1' : [ 0x0, ['__unnamed_1182']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1185']],
'ControlArea' : [ 0x30, ['pointer64', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x38, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x48, ['__unnamed_118a']],
} ],
'_MM_AVL_TABLE' : [ 0x40, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
'NodeFreeHint' : [ 0x38, ['pointer64', ['void']]],
} ],
'_MMPTE_FLUSH_LIST' : [ 0xa8, {
'Count' : [ 0x0, ['unsigned long']],
'FlushVa' : [ 0x8, ['array', 20, ['pointer64', ['void']]]],
} ],
'__unnamed_119c' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'u' : [ 0x8, ['__unnamed_119c']],
'StartingSector' : [ 0xc, ['unsigned long']],
'NumberOfFullSectors' : [ 0x10, ['unsigned long']],
'SubsectionBase' : [ 0x18, ['pointer64', ['_MMPTE']]],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'PtesInSubsection' : [ 0x24, ['unsigned long']],
'NextSubsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
} ],
'_MMPAGING_FILE' : [ 0x78, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'CurrentUsage' : [ 0x20, ['unsigned long long']],
'PeakUsage' : [ 0x28, ['unsigned long long']],
'HighestPage' : [ 0x30, ['unsigned long long']],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x40, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x50, ['_UNICODE_STRING']],
'Bitmap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'PageFileNumber' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'ReferenceCount' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'BootPartition' : [ 0x68, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Reserved' : [ 0x68, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'FileHandle' : [ 0x70, ['pointer64', ['void']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1216' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_1216']],
} ],
'__unnamed_121d' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_121d']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_SHARED_CACHE_MAP' : [ 0x1b0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObject' : [ 0x60, ['pointer64', ['_FILE_OBJECT']]],
'ActiveVacb' : [ 0x68, ['pointer64', ['_VACB']]],
'NeedToZero' : [ 0x70, ['pointer64', ['void']]],
'ActivePage' : [ 0x78, ['unsigned long']],
'NeedToZeroPage' : [ 0x7c, ['unsigned long']],
'ActiveVacbSpinLock' : [ 0x80, ['unsigned long long']],
'VacbActiveCount' : [ 0x88, ['unsigned long']],
'DirtyPages' : [ 0x8c, ['unsigned long']],
'SharedCacheMapLinks' : [ 0x90, ['_LIST_ENTRY']],
'Flags' : [ 0xa0, ['unsigned long']],
'Status' : [ 0xa4, ['long']],
'Mbcb' : [ 0xa8, ['pointer64', ['_MBCB']]],
'Section' : [ 0xb0, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xc0, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc8, ['unsigned long']],
'BeyondLastFlush' : [ 0xd0, ['long long']],
'Callbacks' : [ 0xd8, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xe0, ['pointer64', ['void']]],
'PrivateList' : [ 0xe8, ['_LIST_ENTRY']],
'LogHandle' : [ 0xf8, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0x100, ['pointer64', ['void']]],
'DirtyPageThreshold' : [ 0x108, ['unsigned long']],
'LazyWritePassCount' : [ 0x10c, ['unsigned long']],
'UninitializeEvent' : [ 0x110, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'NeedToZeroVacb' : [ 0x118, ['pointer64', ['_VACB']]],
'BcbSpinLock' : [ 0x120, ['unsigned long long']],
'Reserved' : [ 0x128, ['pointer64', ['void']]],
'Event' : [ 0x130, ['_KEVENT']],
'VacbPushLock' : [ 0x148, ['_EX_PUSH_LOCK']],
'PrivateCacheMap' : [ 0x150, ['_PRIVATE_CACHE_MAP']],
} ],
'_FILE_OBJECT' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
} ],
'__unnamed_1247' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x28, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_1247']],
'LruList' : [ 0x18, ['_LIST_ENTRY']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'__unnamed_125c' : [ 0x10, {
'FreeListsInUseUlong' : [ 0x0, ['array', 4, ['unsigned long']]],
'FreeListsInUseBytes' : [ 0x0, ['array', 16, ['unsigned char']]],
} ],
'__unnamed_125e' : [ 0x2, {
'FreeListsInUseTerminate' : [ 0x0, ['unsigned short']],
'DecommitCount' : [ 0x0, ['unsigned short']],
} ],
'_HEAP' : [ 0xae8, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'ForceFlags' : [ 0x18, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x1c, ['unsigned long']],
'SegmentReserve' : [ 0x20, ['unsigned long long']],
'SegmentCommit' : [ 0x28, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0x30, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0x38, ['unsigned long long']],
'TotalFreeSize' : [ 0x40, ['unsigned long long']],
'MaximumAllocationSize' : [ 0x48, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0x50, ['unsigned short']],
'HeaderValidateLength' : [ 0x52, ['unsigned short']],
'HeaderValidateCopy' : [ 0x58, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0x60, ['unsigned short']],
'MaximumTagIndex' : [ 0x62, ['unsigned short']],
'TagEntries' : [ 0x68, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRSegments' : [ 0x70, ['pointer64', ['_HEAP_UCR_SEGMENT']]],
'UnusedUnCommittedRanges' : [ 0x78, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AlignRound' : [ 0x80, ['unsigned long long']],
'AlignMask' : [ 0x88, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x90, ['_LIST_ENTRY']],
'Segments' : [ 0xa0, ['array', 64, ['pointer64', ['_HEAP_SEGMENT']]]],
'u' : [ 0x2a0, ['__unnamed_125c']],
'u2' : [ 0x2b0, ['__unnamed_125e']],
'AllocatorBackTraceIndex' : [ 0x2b2, ['unsigned short']],
'NonDedicatedListLength' : [ 0x2b4, ['unsigned long']],
'LargeBlocksIndex' : [ 0x2b8, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x2c0, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x2c8, ['array', 128, ['_LIST_ENTRY']]],
'LockVariable' : [ 0xac8, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0xad0, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0xad8, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0xae0, ['unsigned short']],
'FrontEndHeapType' : [ 0xae2, ['unsigned char']],
'LastSegmentIndex' : [ 0xae3, ['unsigned char']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'PreviousSize' : [ 0xa, ['unsigned short']],
'SmallTagIndex' : [ 0xc, ['unsigned char']],
'Flags' : [ 0xd, ['unsigned char']],
'UnusedBytes' : [ 0xe, ['unsigned char']],
'SegmentIndex' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x68, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'Heap' : [ 0x18, ['pointer64', ['_HEAP']]],
'LargestUnCommittedRange' : [ 0x20, ['unsigned long long']],
'BaseAddress' : [ 0x28, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x30, ['unsigned long']],
'FirstEntry' : [ 0x38, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x48, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x4c, ['unsigned long']],
'UnCommittedRanges' : [ 0x50, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'LastEntryInSegment' : [ 0x60, ['pointer64', ['_HEAP_ENTRY']]],
} ],
'_HEAP_SUBSEGMENT' : [ 0x30, {
'Bucket' : [ 0x0, ['pointer64', ['void']]],
'UserBlocks' : [ 0x8, ['pointer64', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x10, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x18, ['unsigned short']],
'FreeThreshold' : [ 0x1a, ['unsigned short']],
'BlockCount' : [ 0x1c, ['unsigned short']],
'SizeIndex' : [ 0x1e, ['unsigned char']],
'AffinityIndex' : [ 0x1f, ['unsigned char']],
'Alignment' : [ 0x18, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x28, ['unsigned long']],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_TOKEN' : [ 0xd0, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'AuditPolicy' : [ 0x38, ['_SEP_AUDIT_POLICY']],
'ModifiedId' : [ 0x40, ['_LUID']],
'SessionId' : [ 0x48, ['unsigned long']],
'UserAndGroupCount' : [ 0x4c, ['unsigned long']],
'RestrictedSidCount' : [ 0x50, ['unsigned long']],
'PrivilegeCount' : [ 0x54, ['unsigned long']],
'VariableLength' : [ 0x58, ['unsigned long']],
'DynamicCharged' : [ 0x5c, ['unsigned long']],
'DynamicAvailable' : [ 0x60, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x64, ['unsigned long']],
'UserAndGroups' : [ 0x68, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x70, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0x78, ['pointer64', ['void']]],
'Privileges' : [ 0x80, ['pointer64', ['_LUID_AND_ATTRIBUTES']]],
'DynamicPart' : [ 0x88, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0x90, ['pointer64', ['_ACL']]],
'TokenType' : [ 0x98, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0x9c, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xa0, ['unsigned char']],
'TokenInUse' : [ 0xa1, ['unsigned char']],
'ProxyData' : [ 0xa8, ['pointer64', ['_SECURITY_TOKEN_PROXY_DATA']]],
'AuditData' : [ 0xb0, ['pointer64', ['_SECURITY_TOKEN_AUDIT_DATA']]],
'LogonSession' : [ 0xb8, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xc0, ['_LUID']],
'VariablePart' : [ 0xc8, ['unsigned long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'ReferenceCount' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'pDeviceMap' : [ 0x18, ['pointer64', ['_DEVICE_MAP']]],
} ],
'_TEB' : [ 0x17d8, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes1' : [ 0x2d0, ['array', 28, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 14, ['pointer64', ['void']]]],
'SubProcessTag' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'InDbgPrint' : [ 0x1744, ['unsigned char']],
'FreeStackOnTermination' : [ 0x1745, ['unsigned char']],
'HasFiberData' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SparePointer1' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'SoftPatchPtr2' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'SafeThunkCall' : [ 0x17d0, ['unsigned char']],
'BooleanSpare' : [ 0x17d1, ['array', 3, ['unsigned char']]],
} ],
'_HEAP_UCR_SEGMENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_HEAP_UCR_SEGMENT']]],
'ReservedSize' : [ 0x8, ['unsigned long long']],
'CommittedSize' : [ 0x10, ['unsigned long long']],
'filler' : [ 0x18, ['unsigned long']],
} ],
'_HMAP_TABLE' : [ 0x4000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'SharedWaiters' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerThreads' : [ 0x30, ['array', 2, ['_OWNER_ENTRY']]],
'ContentionCount' : [ 0x50, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x54, ['unsigned short']],
'NumberOfExclusiveWaiters' : [ 0x56, ['unsigned short']],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'LinkTargetRemaining' : [ 0x18, ['_UNICODE_STRING']],
'LinkTargetObject' : [ 0x28, ['pointer64', ['void']]],
'DosDeviceDriveIndex' : [ 0x30, ['unsigned long']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x20, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Type' : [ 0x0, ['unsigned char']],
'Absolute' : [ 0x1, ['unsigned char']],
'NpxIrql' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'Hand' : [ 0x2, ['unsigned char']],
'Inserted' : [ 0x3, ['unsigned char']],
'DebugActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0x98, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'LoadedImports' : [ 0x80, ['pointer64', ['void']]],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x90, ['pointer64', ['void']]],
} ],
'_HEAP_UNCOMMMTTED_RANGE' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'Address' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long long']],
'filler' : [ 0x18, ['unsigned long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x1e0, {
'Nodes' : [ 0x0, ['array', 2, ['unsigned long']]],
'Resources' : [ 0x8, ['array', 2, ['unsigned long']]],
'Threads' : [ 0x10, ['array', 2, ['unsigned long']]],
'TimeAcquire' : [ 0x18, ['long long']],
'TimeRelease' : [ 0x20, ['long long']],
'BytesAllocated' : [ 0x28, ['unsigned long long']],
'ResourceDatabase' : [ 0x30, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabase' : [ 0x38, ['pointer64', ['_LIST_ENTRY']]],
'AllocationFailures' : [ 0x40, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x44, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x48, ['unsigned long']],
'NodesSearched' : [ 0x4c, ['unsigned long']],
'MaxNodesSearched' : [ 0x50, ['unsigned long']],
'SequenceNumber' : [ 0x54, ['unsigned long']],
'RecursionDepthLimit' : [ 0x58, ['unsigned long']],
'SearchedNodesLimit' : [ 0x5c, ['unsigned long']],
'DepthLimitHits' : [ 0x60, ['unsigned long']],
'SearchLimitHits' : [ 0x64, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x68, ['unsigned long']],
'OutOfOrderReleases' : [ 0x6c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x70, ['unsigned long']],
'TotalReleases' : [ 0x74, ['unsigned long']],
'RootNodesDeleted' : [ 0x78, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x7c, ['unsigned long']],
'PoolTrimCounter' : [ 0x80, ['unsigned long']],
'FreeResourceList' : [ 0x88, ['_LIST_ENTRY']],
'FreeThreadList' : [ 0x98, ['_LIST_ENTRY']],
'FreeNodeList' : [ 0xa8, ['_LIST_ENTRY']],
'FreeResourceCount' : [ 0xb8, ['unsigned long']],
'FreeThreadCount' : [ 0xbc, ['unsigned long']],
'FreeNodeCount' : [ 0xc0, ['unsigned long']],
'Instigator' : [ 0xc8, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0xd0, ['unsigned long']],
'Participant' : [ 0xd8, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'CacheReductionInProgress' : [ 0x1d8, ['unsigned long']],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_SECTION_OBJECT' : [ 0x30, {
'StartingVa' : [ 0x0, ['pointer64', ['void']]],
'EndingVa' : [ 0x8, ['pointer64', ['void']]],
'Parent' : [ 0x10, ['pointer64', ['void']]],
'LeftChild' : [ 0x18, ['pointer64', ['void']]],
'RightChild' : [ 0x20, ['pointer64', ['void']]],
'Segment' : [ 0x28, ['pointer64', ['_SEGMENT_OBJECT']]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_SEGMENT_OBJECT' : [ 0x48, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x18, ['unsigned long']],
'ImageCommitment' : [ 0x1c, ['unsigned long']],
'ControlArea' : [ 0x20, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
'LargeControlArea' : [ 0x30, ['pointer64', ['_LARGE_CONTROL_AREA']]],
'MmSectionFlags' : [ 0x38, ['pointer64', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x40, ['pointer64', ['_MMSUBSECTION_FLAGS']]],
} ],
'__unnamed_1371' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'_CONTROL_AREA' : [ 0x48, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x1c, ['unsigned long']],
'NumberOfMappedViews' : [ 0x20, ['unsigned long']],
'NumberOfSystemCacheViews' : [ 0x24, ['unsigned long']],
'NumberOfUserReferences' : [ 0x28, ['unsigned long']],
'u' : [ 0x2c, ['__unnamed_1371']],
'FilePointer' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x38, ['pointer64', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x40, ['unsigned short']],
'FlushInProgressCount' : [ 0x42, ['unsigned short']],
'WritableUserReferences' : [ 0x44, ['unsigned long']],
} ],
'_HANDLE_TABLE' : [ 0x70, {
'TableCode' : [ 0x0, ['unsigned long long']],
'QuotaProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x10, ['pointer64', ['void']]],
'HandleTableLock' : [ 0x18, ['array', 4, ['_EX_PUSH_LOCK']]],
'HandleTableList' : [ 0x38, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x48, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x50, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x58, ['long']],
'FirstFree' : [ 0x5c, ['unsigned long']],
'LastFree' : [ 0x60, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x64, ['unsigned long']],
'HandleCount' : [ 0x68, ['long']],
'Flags' : [ 0x6c, ['unsigned long']],
'StrictFIFO' : [ 0x6c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
} ],
'_POOL_HEADER' : [ 0x10, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'BlockSize' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'PoolType' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'ProcessBilled' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'AllocatorBackTraceIndex' : [ 0x8, ['unsigned short']],
'PoolTagHash' : [ 0xa, ['unsigned short']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_XMM_SAVE_AREA32' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Object' : [ 0x18, ['pointer64', ['void']]],
'NextWaitBlock' : [ 0x20, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x28, ['unsigned short']],
'WaitType' : [ 0x2a, ['unsigned char']],
'SpareByte' : [ 0x2b, ['unsigned char']],
'SpareLong' : [ 0x2c, ['long']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_MMSUPPORT' : [ 0x58, {
'WorkingSetExpansionLinks' : [ 0x0, ['_LIST_ENTRY']],
'LastTrimTime' : [ 0x10, ['_LARGE_INTEGER']],
'Flags' : [ 0x18, ['_MMSUPPORT_FLAGS']],
'PageFaultCount' : [ 0x1c, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x20, ['unsigned long']],
'GrowthSinceLastEstimate' : [ 0x24, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0x28, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x2c, ['unsigned long']],
'VmWorkingSetList' : [ 0x30, ['pointer64', ['_MMWSL']]],
'Claim' : [ 0x38, ['unsigned long']],
'NextEstimationSlot' : [ 0x3c, ['unsigned long']],
'NextAgingSlot' : [ 0x40, ['unsigned long']],
'EstimatedAvailable' : [ 0x44, ['unsigned long']],
'WorkingSetSize' : [ 0x48, ['unsigned long']],
'WorkingSetMutex' : [ 0x50, ['_EX_PUSH_LOCK']],
} ],
'_EX_WORK_QUEUE' : [ 0x58, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x40, ['unsigned long']],
'WorkItemsProcessed' : [ 0x44, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x48, ['unsigned long']],
'QueueDepthLastPass' : [ 0x4c, ['unsigned long']],
'Info' : [ 0x50, ['EX_QUEUE_WORKER_INFO']],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SubsectionStatic' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 20, native_type='unsigned long')]],
'SectorEndOffset' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['unsigned short']]],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_EPROCESS_QUOTA_BLOCK' : [ 0x78, {
'QuotaEntry' : [ 0x0, ['array', 3, ['_EPROCESS_QUOTA_ENTRY']]],
'QuotaList' : [ 0x60, ['_LIST_ENTRY']],
'ReferenceCount' : [ 0x70, ['unsigned long']],
'ProcessCount' : [ 0x74, ['unsigned long']],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_EVENT_COUNTER' : [ 0x30, {
'ListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'RefCount' : [ 0x10, ['unsigned long']],
'Event' : [ 0x18, ['_KEVENT']],
} ],
'_EJOB' : [ 0x220, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xb8, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0xc0, ['unsigned long']],
'TotalProcesses' : [ 0xc4, ['unsigned long']],
'ActiveProcesses' : [ 0xc8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xcc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xd0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xd8, ['_LARGE_INTEGER']],
'LimitFlags' : [ 0xe0, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0xe8, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xf0, ['unsigned long long']],
'ActiveProcessLimit' : [ 0xf8, ['unsigned long']],
'Affinity' : [ 0x100, ['unsigned long long']],
'PriorityClass' : [ 0x108, ['unsigned char']],
'UIRestrictionsClass' : [ 0x10c, ['unsigned long']],
'SecurityLimitFlags' : [ 0x110, ['unsigned long']],
'Token' : [ 0x118, ['pointer64', ['void']]],
'Filter' : [ 0x120, ['pointer64', ['_PS_JOB_TOKEN_FILTER']]],
'EndOfJobTimeAction' : [ 0x128, ['unsigned long']],
'CompletionPort' : [ 0x130, ['pointer64', ['void']]],
'CompletionKey' : [ 0x138, ['pointer64', ['void']]],
'SessionId' : [ 0x140, ['unsigned long']],
'SchedulingClass' : [ 0x144, ['unsigned long']],
'ReadOperationCount' : [ 0x148, ['unsigned long long']],
'WriteOperationCount' : [ 0x150, ['unsigned long long']],
'OtherOperationCount' : [ 0x158, ['unsigned long long']],
'ReadTransferCount' : [ 0x160, ['unsigned long long']],
'WriteTransferCount' : [ 0x168, ['unsigned long long']],
'OtherTransferCount' : [ 0x170, ['unsigned long long']],
'IoInfo' : [ 0x178, ['_IO_COUNTERS']],
'ProcessMemoryLimit' : [ 0x1a8, ['unsigned long long']],
'JobMemoryLimit' : [ 0x1b0, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x1b8, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x1c0, ['unsigned long long']],
'CurrentJobMemoryUsed' : [ 0x1c8, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x1d0, ['_KGUARDED_MUTEX']],
'JobSetLinks' : [ 0x208, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x218, ['unsigned long']],
'JobFlags' : [ 0x21c, ['unsigned long']],
} ],
'_LARGE_CONTROL_AREA' : [ 0x68, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x1c, ['unsigned long']],
'NumberOfMappedViews' : [ 0x20, ['unsigned long']],
'NumberOfSystemCacheViews' : [ 0x24, ['unsigned long']],
'NumberOfUserReferences' : [ 0x28, ['unsigned long']],
'u' : [ 0x2c, ['__unnamed_1371']],
'FilePointer' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x38, ['pointer64', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x40, ['unsigned short']],
'FlushInProgressCount' : [ 0x42, ['unsigned short']],
'WritableUserReferences' : [ 0x44, ['unsigned long']],
'StartingFrame' : [ 0x48, ['unsigned long long']],
'UserGlobalList' : [ 0x50, ['_LIST_ENTRY']],
'SessionId' : [ 0x60, ['unsigned long']],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_PS_JOB_TOKEN_FILTER' : [ 0x38, {
'CapturedSidCount' : [ 0x0, ['unsigned long']],
'CapturedSids' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'CapturedSidsLength' : [ 0x10, ['unsigned long']],
'CapturedGroupCount' : [ 0x14, ['unsigned long']],
'CapturedGroups' : [ 0x18, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'CapturedGroupsLength' : [ 0x20, ['unsigned long']],
'CapturedPrivilegeCount' : [ 0x24, ['unsigned long']],
'CapturedPrivileges' : [ 0x28, ['pointer64', ['_LUID_AND_ATTRIBUTES']]],
'CapturedPrivilegesLength' : [ 0x30, ['unsigned long']],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x80, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'Reserved' : [ 0x78, ['array', 2, ['unsigned long']]],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Writable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x20, {
'BlockAddress' : [ 0x0, ['unsigned long long']],
'BinAddress' : [ 0x8, ['unsigned long long']],
'CmView' : [ 0x10, ['pointer64', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'ImageMappedInSystemSpace' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'filler' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_DEFERRED_WRITE' : [ 0x50, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
'LimitModifiedPages' : [ 0x48, ['unsigned char']],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x30, {
'Name' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x10, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x18, ['unsigned long']],
'CmHiveFlags' : [ 0x1c, ['unsigned long']],
'CmHive2' : [ 0x20, ['pointer64', ['_CMHIVE']]],
'ThreadFinished' : [ 0x28, ['unsigned char']],
'ThreadStarted' : [ 0x29, ['unsigned char']],
'Allocate' : [ 0x2a, ['unsigned char']],
} ],
'_MMVAD_FLAGS' : [ 0x8, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 51, native_type='unsigned long long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 61, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 61, end_bit = 63, native_type='unsigned long long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_PS_IMPERSONATION_INFORMATION' : [ 0x10, {
'Token' : [ 0x0, ['pointer64', ['void']]],
'CopyOnOpen' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
'ImpersonationLevel' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
} ],
'__unnamed_142e' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
} ],
'__unnamed_1430' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_1434' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x1c0, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'Level' : [ 0x20, ['unsigned long']],
'Notify' : [ 0x28, ['pointer64', ['_PO_DEVICE_NOTIFY']]],
'State' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x38, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0x88, ['unsigned long']],
'CompletionStatus' : [ 0x8c, ['long']],
'PendingIrp' : [ 0x90, ['pointer64', ['_IRP']]],
'Flags' : [ 0x98, ['unsigned long']],
'UserFlags' : [ 0x9c, ['unsigned long']],
'Problem' : [ 0xa0, ['unsigned long']],
'PhysicalDeviceObject' : [ 0xa8, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceList' : [ 0xb0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0xb8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'InstancePath' : [ 0xc0, ['_UNICODE_STRING']],
'ServiceName' : [ 0xd0, ['_UNICODE_STRING']],
'DuplicatePDO' : [ 0xe0, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0xe8, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0xf0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0xf4, ['unsigned long']],
'ChildInterfaceType' : [ 0xf8, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0xfc, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x100, ['unsigned short']],
'RemovalPolicy' : [ 0x102, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x103, ['unsigned char']],
'TargetDeviceNotify' : [ 0x108, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x118, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x128, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x138, ['unsigned short']],
'QueryTranslatorMask' : [ 0x13a, ['unsigned short']],
'NoArbiterMask' : [ 0x13c, ['unsigned short']],
'QueryArbiterMask' : [ 0x13e, ['unsigned short']],
'OverUsed1' : [ 0x140, ['__unnamed_142e']],
'OverUsed2' : [ 0x148, ['__unnamed_1430']],
'BootResources' : [ 0x150, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x158, ['unsigned long']],
'DockInfo' : [ 0x160, ['__unnamed_1434']],
'DisableableDepends' : [ 0x180, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x188, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x198, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x1a8, ['unsigned long']],
'PreviousParent' : [ 0x1b0, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x1b8, ['unsigned long']],
} ],
'__unnamed_1439' : [ 0x68, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
'Resource' : [ 0x0, ['_ERESOURCE']],
} ],
'_HEAP_LOCK' : [ 0x68, {
'Lock' : [ 0x0, ['__unnamed_1439']],
} ],
'_PEB64' : [ 0x358, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'SparePtr2' : [ 0x48, ['unsigned long long']],
'EnvironmentUpdateCount' : [ 0x50, ['unsigned long']],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'FreeList' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'ReadOnlySharedMemoryHeap' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
} ],
'_KPCR' : [ 0x2600, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'PerfGlobalGroupMask' : [ 0x10, ['pointer64', ['void']]],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_MMCOLOR_TABLES' : [ 0x18, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long long']],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_KPROCESS' : [ 0xb8, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['array', 2, ['unsigned long long']]],
'IopmOffset' : [ 0x38, ['unsigned short']],
'ActiveProcessors' : [ 0x40, ['unsigned long long']],
'KernelTime' : [ 0x48, ['unsigned long']],
'UserTime' : [ 0x4c, ['unsigned long']],
'ReadyListHead' : [ 0x50, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x60, ['_SINGLE_LIST_ENTRY']],
'Reserved1' : [ 0x68, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x70, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x80, ['unsigned long long']],
'Affinity' : [ 0x88, ['unsigned long long']],
'AutoAlignment' : [ 0x90, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x90, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x90, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ReservedFlags' : [ 0x90, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x90, ['long']],
'BasePriority' : [ 0x94, ['unsigned char']],
'QuantumReset' : [ 0x95, ['unsigned char']],
'State' : [ 0x96, ['unsigned char']],
'ThreadSeed' : [ 0x97, ['unsigned char']],
'PowerState' : [ 0x98, ['unsigned char']],
'IdealNode' : [ 0x99, ['unsigned char']],
'Visited' : [ 0x9a, ['unsigned char']],
'Flags' : [ 0x9b, ['_KEXECUTE_OPTIONS']],
'ExecuteOptions' : [ 0x9b, ['unsigned char']],
'StackCount' : [ 0xa0, ['unsigned long long']],
'ProcessListEntry' : [ 0xa8, ['_LIST_ENTRY']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1469' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1d80, {
'GlobalVirtualAddress' : [ 0x0, ['pointer64', ['_MM_SESSION_SPACE']]],
'ReferenceCount' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1469']],
'SessionId' : [ 0x10, ['unsigned long']],
'ProcessList' : [ 0x18, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x28, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x30, ['unsigned long long']],
'NonPagablePages' : [ 0x38, ['unsigned long long']],
'CommittedPages' : [ 0x40, ['unsigned long long']],
'PagedPoolStart' : [ 0x48, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x50, ['pointer64', ['void']]],
'PagedPoolBasePde' : [ 0x58, ['pointer64', ['_MMPTE']]],
'Color' : [ 0x60, ['unsigned long']],
'ResidentProcessCount' : [ 0x64, ['long']],
'SessionPoolAllocationFailures' : [ 0x68, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x78, ['_LIST_ENTRY']],
'LocaleId' : [ 0x88, ['unsigned long']],
'AttachCount' : [ 0x8c, ['unsigned long']],
'AttachEvent' : [ 0x90, ['_KEVENT']],
'LastProcess' : [ 0xa8, ['pointer64', ['_EPROCESS']]],
'ProcessReferenceToSession' : [ 0xb0, ['long']],
'WsListEntry' : [ 0xb8, ['_LIST_ENTRY']],
'Lookaside' : [ 0x100, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb80, ['_MMSESSION']],
'PagedPoolMutex' : [ 0xbe8, ['_KGUARDED_MUTEX']],
'PagedPoolInfo' : [ 0xc20, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xc60, ['_MMSUPPORT']],
'Wsle' : [ 0xcb8, ['pointer64', ['_MMWSLE']]],
'Win32KDriverUnload' : [ 0xcc0, ['pointer64', ['void']]],
'PagedPool' : [ 0xcc8, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1d10, ['_MMPTE']],
'SpecialPoolFirstPte' : [ 0x1d18, ['pointer64', ['_MMPTE']]],
'SpecialPoolLastPte' : [ 0x1d20, ['pointer64', ['_MMPTE']]],
'NextPdeForSpecialPoolExpansion' : [ 0x1d28, ['pointer64', ['_MMPTE']]],
'LastPdeForSpecialPoolExpansion' : [ 0x1d30, ['pointer64', ['_MMPTE']]],
'SpecialPagesInUse' : [ 0x1d38, ['unsigned long long']],
'ImageLoadingCount' : [ 0x1d40, ['long']],
} ],
'_PEB' : [ 0x358, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'SparePtr2' : [ 0x48, ['pointer64', ['void']]],
'EnvironmentUpdateCount' : [ 0x50, ['unsigned long']],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'FreeList' : [ 0x68, ['pointer64', ['_PEB_FREE_BLOCK']]],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'ReadOnlySharedMemoryHeap' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['pointer64', ['void']]]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'PreviousSize' : [ 0xa, ['unsigned short']],
'SmallTagIndex' : [ 0xc, ['unsigned char']],
'Flags' : [ 0xd, ['unsigned char']],
'UnusedBytes' : [ 0xe, ['unsigned char']],
'SegmentIndex' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'__unnamed_1499' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
'LastByte' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0xa8, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'WriteOffset' : [ 0x10, ['_LARGE_INTEGER']],
'u' : [ 0x18, ['__unnamed_1499']],
'Irp' : [ 0x28, ['pointer64', ['_IRP']]],
'LastPageToWrite' : [ 0x30, ['unsigned long long']],
'PagingListHead' : [ 0x38, ['pointer64', ['_MMMOD_WRITER_LISTHEAD']]],
'CurrentList' : [ 0x40, ['pointer64', ['_LIST_ENTRY']]],
'PagingFile' : [ 0x48, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x50, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x58, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0x60, ['pointer64', ['_ERESOURCE']]],
'IssueTime' : [ 0x68, ['_LARGE_INTEGER']],
'Mdl' : [ 0x70, ['_MDL']],
'Page' : [ 0xa0, ['array', 1, ['unsigned long long']]],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_SECURITY_TOKEN_AUDIT_DATA' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'GrantMask' : [ 0x4, ['unsigned long']],
'DenyMask' : [ 0x8, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_TEB32' : [ 0xfbc, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes1' : [ 0x1ac, ['array', 40, ['unsigned char']]],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 14, ['unsigned long']]],
'SubProcessTag' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'InDbgPrint' : [ 0xf74, ['unsigned char']],
'FreeStackOnTermination' : [ 0xf75, ['unsigned char']],
'HasFiberData' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SparePointer1' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'SoftPatchPtr2' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'ImpersonationLocale' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'SafeThunkCall' : [ 0xfb8, ['unsigned char']],
'BooleanSpare' : [ 0xfb9, ['array', 3, ['unsigned char']]],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_EPROCESS_QUOTA_ENTRY' : [ 0x20, {
'Usage' : [ 0x0, ['unsigned long long']],
'Limit' : [ 0x8, ['unsigned long long']],
'Peak' : [ 0x10, ['unsigned long long']],
'Return' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_14be' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_14be']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x170, {
'IdleFunction' : [ 0x0, ['pointer64', ['void']]],
'Idle0KernelTimeLimit' : [ 0x8, ['unsigned long']],
'Idle0LastTime' : [ 0xc, ['unsigned long']],
'IdleHandlers' : [ 0x10, ['pointer64', ['void']]],
'IdleState' : [ 0x18, ['pointer64', ['void']]],
'IdleHandlersCount' : [ 0x20, ['unsigned long']],
'LastCheck' : [ 0x28, ['unsigned long long']],
'IdleTimes' : [ 0x30, ['PROCESSOR_IDLE_TIMES']],
'IdleTime1' : [ 0x50, ['unsigned long']],
'PromotionCheck' : [ 0x54, ['unsigned long']],
'IdleTime2' : [ 0x58, ['unsigned long']],
'CurrentThrottle' : [ 0x5c, ['unsigned char']],
'ThermalThrottleLimit' : [ 0x5d, ['unsigned char']],
'CurrentThrottleIndex' : [ 0x5e, ['unsigned char']],
'ThermalThrottleIndex' : [ 0x5f, ['unsigned char']],
'LastKernelUserTime' : [ 0x60, ['unsigned long']],
'PerfIdleTime' : [ 0x64, ['unsigned long']],
'DebugDelta' : [ 0x68, ['unsigned long long']],
'DebugCount' : [ 0x70, ['unsigned long']],
'LastSysTime' : [ 0x74, ['unsigned long']],
'TotalIdleStateTime' : [ 0x78, ['array', 3, ['unsigned long long']]],
'TotalIdleTransitions' : [ 0x90, ['array', 3, ['unsigned long']]],
'PreviousC3StateTime' : [ 0xa0, ['unsigned long long']],
'KneeThrottleIndex' : [ 0xa8, ['unsigned char']],
'ThrottleLimitIndex' : [ 0xa9, ['unsigned char']],
'PerfStatesCount' : [ 0xaa, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xab, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0xac, ['unsigned char']],
'LastBusyPercentage' : [ 0xad, ['unsigned char']],
'LastC3Percentage' : [ 0xae, ['unsigned char']],
'LastAdjustedBusyPercentage' : [ 0xaf, ['unsigned char']],
'PromotionCount' : [ 0xb0, ['unsigned long']],
'DemotionCount' : [ 0xb4, ['unsigned long']],
'ErrorCount' : [ 0xb8, ['unsigned long']],
'RetryCount' : [ 0xbc, ['unsigned long']],
'Flags' : [ 0xc0, ['unsigned long']],
'PerfCounterFrequency' : [ 0xc8, ['_LARGE_INTEGER']],
'PerfTickCount' : [ 0xd0, ['unsigned long']],
'PerfTimer' : [ 0xd8, ['_KTIMER']],
'PerfDpc' : [ 0x118, ['_KDPC']],
'PerfStates' : [ 0x158, ['pointer64', ['PROCESSOR_PERF_STATE']]],
'PerfSetThrottle' : [ 0x160, ['pointer64', ['void']]],
'LastC3KernelUserTime' : [ 0x168, ['unsigned long']],
'Spare1' : [ 0x16c, ['array', 1, ['unsigned long']]],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'Modified' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned short')]],
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 11, native_type='unsigned short')]],
'RemovalRequested' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned short')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 14, native_type='unsigned short')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned short')]],
'ParityError' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
} ],
'_IO_COUNTERS' : [ 0x30, {
'ReadOperationCount' : [ 0x0, ['unsigned long long']],
'WriteOperationCount' : [ 0x8, ['unsigned long long']],
'OtherOperationCount' : [ 0x10, ['unsigned long long']],
'ReadTransferCount' : [ 0x18, ['unsigned long long']],
'WriteTransferCount' : [ 0x20, ['unsigned long long']],
'OtherTransferCount' : [ 0x28, ['unsigned long long']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x80, {
'IdleCount' : [ 0x0, ['long']],
'ConservationIdleTime' : [ 0x4, ['unsigned long']],
'PerformanceIdleTime' : [ 0x8, ['unsigned long']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x18, ['_LIST_ENTRY']],
'DeviceType' : [ 0x28, ['unsigned char']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'NotifySourceList' : [ 0x30, ['_LIST_ENTRY']],
'NotifyTargetList' : [ 0x40, ['_LIST_ENTRY']],
'PowerChannelSummary' : [ 0x50, ['_POWER_CHANNEL_SUMMARY']],
'Volume' : [ 0x70, ['_LIST_ENTRY']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'SessionSpace' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'BeingTrimmed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'SessionLeader' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Available0' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'GrowWsleHash' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'AcquiredUnsafe' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Available' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'PROCESSOR_PERF_STATE' : [ 0x20, {
'PercentFrequency' : [ 0x0, ['unsigned char']],
'MinCapacity' : [ 0x1, ['unsigned char']],
'Power' : [ 0x2, ['unsigned short']],
'IncreaseLevel' : [ 0x4, ['unsigned char']],
'DecreaseLevel' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'IncreaseTime' : [ 0x8, ['unsigned long']],
'DecreaseTime' : [ 0xc, ['unsigned long']],
'IncreaseCount' : [ 0x10, ['unsigned long']],
'DecreaseCount' : [ 0x14, ['unsigned long']],
'PerformanceTime' : [ 0x18, ['unsigned long long']],
} ],
'PROCESSOR_IDLE_TIMES' : [ 0x20, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
'IdleHandlerReserved' : [ 0x10, ['array', 4, ['unsigned long']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MMMOD_WRITER_LISTHEAD' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Event' : [ 0x10, ['_KEVENT']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_POP_THERMAL_ZONE' : [ 0x120, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x10, ['unsigned char']],
'Flags' : [ 0x11, ['unsigned char']],
'Mode' : [ 0x12, ['unsigned char']],
'PendingMode' : [ 0x13, ['unsigned char']],
'ActivePoint' : [ 0x14, ['unsigned char']],
'PendingActivePoint' : [ 0x15, ['unsigned char']],
'Throttle' : [ 0x18, ['long']],
'LastTime' : [ 0x20, ['unsigned long long']],
'SampleRate' : [ 0x28, ['unsigned long']],
'LastTemp' : [ 0x2c, ['unsigned long']],
'PassiveTimer' : [ 0x30, ['_KTIMER']],
'PassiveDpc' : [ 0x70, ['_KDPC']],
'OverThrottled' : [ 0xb0, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0xc0, ['pointer64', ['_IRP']]],
'Info' : [ 0xc8, ['_THERMAL_INFORMATION']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_SECURITY_TOKEN_PROXY_DATA' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'ProxyClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ProxyFull', 1: 'ProxyService', 2: 'ProxyTree', 3: 'ProxyDirectory'})]],
'PathInfo' : [ 0x8, ['_UNICODE_STRING']],
'ContainerMask' : [ 0x18, ['unsigned long']],
'ObjectMask' : [ 0x1c, ['unsigned long']],
} ],
'_PROCESSOR_POWER_POLICY' : [ 0x4c, {
'Revision' : [ 0x0, ['unsigned long']],
'DynamicThrottle' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'DisableCStates' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'PolicyCount' : [ 0xc, ['unsigned long']],
'Policy' : [ 0x10, ['array', 3, ['_PROCESSOR_POWER_POLICY_INFO']]],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'OwnerCount' : [ 0x8, ['long']],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_RTL_ATOM_TABLE' : [ 0x70, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x8, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x30, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x60, ['unsigned long']],
'Buckets' : [ 0x68, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_TEB64' : [ 0x17d8, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes1' : [ 0x2d0, ['array', 28, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 14, ['unsigned long long']]],
'SubProcessTag' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'InDbgPrint' : [ 0x1744, ['unsigned char']],
'FreeStackOnTermination' : [ 0x1745, ['unsigned char']],
'HasFiberData' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SparePointer1' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'SoftPatchPtr2' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'SafeThunkCall' : [ 0x17d0, ['unsigned char']],
'BooleanSpare' : [ 0x17d1, ['array', 3, ['unsigned char']]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_CMHIVE' : [ 0xab8, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x578, ['array', 3, ['pointer64', ['void']]]],
'NotifyList' : [ 0x590, ['_LIST_ENTRY']],
'HiveList' : [ 0x5a0, ['_LIST_ENTRY']],
'HiveLock' : [ 0x5b0, ['_EX_PUSH_LOCK']],
'ViewLock' : [ 0x5b8, ['pointer64', ['_KGUARDED_MUTEX']]],
'WriterLock' : [ 0x5c0, ['_EX_PUSH_LOCK']],
'FlusherLock' : [ 0x5c8, ['_EX_PUSH_LOCK']],
'SecurityLock' : [ 0x5d0, ['_EX_PUSH_LOCK']],
'LRUViewListHead' : [ 0x5d8, ['_LIST_ENTRY']],
'PinViewListHead' : [ 0x5e8, ['_LIST_ENTRY']],
'FileObject' : [ 0x5f8, ['pointer64', ['_FILE_OBJECT']]],
'FileFullPath' : [ 0x600, ['_UNICODE_STRING']],
'FileUserName' : [ 0x610, ['_UNICODE_STRING']],
'MappedViews' : [ 0x620, ['unsigned short']],
'PinnedViews' : [ 0x622, ['unsigned short']],
'UseCount' : [ 0x624, ['unsigned long']],
'SecurityCount' : [ 0x628, ['unsigned long']],
'SecurityCacheSize' : [ 0x62c, ['unsigned long']],
'SecurityHitHint' : [ 0x630, ['long']],
'SecurityCache' : [ 0x638, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x640, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEvent' : [ 0xa40, ['pointer64', ['_KEVENT']]],
'RootKcb' : [ 0xa48, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0xa50, ['unsigned char']],
'UnloadWorkItem' : [ 0xa58, ['pointer64', ['_WORK_QUEUE_ITEM']]],
'GrowOnlyMode' : [ 0xa60, ['unsigned char']],
'GrowOffset' : [ 0xa64, ['unsigned long']],
'KcbConvertListHead' : [ 0xa68, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0xa78, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0xa88, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0xa90, ['unsigned long']],
'TrustClassEntry' : [ 0xa98, ['_LIST_ENTRY']],
'FlushCount' : [ 0xaa8, ['unsigned long']],
'CreatorOwner' : [ 0xab0, ['pointer64', ['_KTHREAD']]],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
'_HHIVE' : [ 0x578, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileSetSize' : [ 0x28, ['pointer64', ['void']]],
'FileWrite' : [ 0x30, ['pointer64', ['void']]],
'FileRead' : [ 0x38, ['pointer64', ['void']]],
'FileFlush' : [ 0x40, ['pointer64', ['void']]],
'BaseBlock' : [ 0x48, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x50, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x60, ['unsigned long']],
'DirtyAlloc' : [ 0x64, ['unsigned long']],
'BaseBlockAlloc' : [ 0x68, ['unsigned long']],
'Cluster' : [ 0x6c, ['unsigned long']],
'Flat' : [ 0x70, ['unsigned char']],
'ReadOnly' : [ 0x71, ['unsigned char']],
'Log' : [ 0x72, ['unsigned char']],
'DirtyFlag' : [ 0x73, ['unsigned char']],
'HiveFlags' : [ 0x74, ['unsigned long']],
'LogSize' : [ 0x78, ['unsigned long']],
'RefreshCount' : [ 0x7c, ['unsigned long']],
'StorageTypeCount' : [ 0x80, ['unsigned long']],
'Version' : [ 0x84, ['unsigned long']],
'Storage' : [ 0x88, ['array', 2, ['_DUAL']]],
} ],
'_PAGEFAULT_HISTORY' : [ 0x28, {
'CurrentIndex' : [ 0x0, ['unsigned long']],
'MaxIndex' : [ 0x4, ['unsigned long']],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x10, ['pointer64', ['void']]],
'WatchInfo' : [ 0x18, ['array', 1, ['_PROCESS_WS_WATCH_INFORMATION']]],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x18, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned char']],
'NameLength' : [ 0xf, ['unsigned char']],
'Name' : [ 0x10, ['array', 1, ['unsigned short']]],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x48, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ParseContext' : [ 0x10, ['pointer64', ['void']]],
'ProbeMode' : [ 0x18, ['unsigned char']],
'PagedPoolCharge' : [ 0x1c, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x20, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x24, ['unsigned long']],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'SecurityQos' : [ 0x30, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x38, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'__unnamed_1587' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_1587']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x8, {
'PolicyElements' : [ 0x0, ['_SEP_AUDIT_POLICY_CATEGORIES']],
'PolicyOverlay' : [ 0x0, ['_SEP_AUDIT_POLICY_OVERLAY']],
'Overlay' : [ 0x0, ['unsigned long long']],
} ],
'_PEB32' : [ 0x230, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'SparePtr2' : [ 0x24, ['unsigned long']],
'EnvironmentUpdateCount' : [ 0x28, ['unsigned long']],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x34, ['unsigned long']],
'FreeList' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'ReadOnlySharedMemoryHeap' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
} ],
'_MBCB' : [ 0xb8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'BitmapRange1' : [ 0x28, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x58, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x88, ['_BITMAP_RANGE']],
} ],
'_POWER_CHANNEL_SUMMARY' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'TotalCount' : [ 0x4, ['unsigned long']],
'D0Count' : [ 0x8, ['unsigned long']],
'NotifyList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_VIEW_OF_FILE' : [ 0x40, {
'LRUViewList' : [ 0x0, ['_LIST_ENTRY']],
'PinViewList' : [ 0x10, ['_LIST_ENTRY']],
'FileOffset' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'ViewAddress' : [ 0x28, ['pointer64', ['unsigned long long']]],
'Bcb' : [ 0x30, ['pointer64', ['void']]],
'UseCount' : [ 0x38, ['unsigned long']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
'_KUSER_SHARED_DATA' : [ 0x378, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['unsigned short']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TraceLogging' : [ 0x2f0, ['unsigned long']],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'Cookie' : [ 0x330, ['unsigned long']],
'Wow64SharedInformation' : [ 0x334, ['array', 16, ['unsigned long']]],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x70, {
'Length' : [ 0x0, ['unsigned short']],
'UseDefaultObject' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x3, ['unsigned char']],
'InvalidAttributes' : [ 0x4, ['unsigned long']],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x18, ['unsigned long']],
'SecurityRequired' : [ 0x1c, ['unsigned char']],
'MaintainHandleCount' : [ 0x1d, ['unsigned char']],
'MaintainTypeList' : [ 0x1e, ['unsigned char']],
'PoolType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x24, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
} ],
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'__unnamed_15db' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_15e1' : [ 0x8, {
'Banked' : [ 0x0, ['pointer64', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x68, {
'u1' : [ 0x0, ['__unnamed_1182']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1185']],
'ControlArea' : [ 0x30, ['pointer64', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x38, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x48, ['__unnamed_118a']],
'u3' : [ 0x50, ['__unnamed_15db']],
'u4' : [ 0x60, ['__unnamed_15e1']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
} ],
'_POOL_DESCRIPTOR' : [ 0x1048, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PoolIndex' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x8, ['unsigned long']],
'RunningDeAllocs' : [ 0xc, ['unsigned long']],
'TotalPages' : [ 0x10, ['unsigned long']],
'TotalBigPages' : [ 0x14, ['unsigned long']],
'Threshold' : [ 0x18, ['unsigned long']],
'LockAddress' : [ 0x20, ['pointer64', ['void']]],
'PendingFrees' : [ 0x28, ['pointer64', ['void']]],
'PendingFreeDepth' : [ 0x30, ['long']],
'TotalBytes' : [ 0x38, ['unsigned long long']],
'Spare0' : [ 0x40, ['unsigned long long']],
'ListHeads' : [ 0x48, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WOW64_PROCESS' : [ 0x8, {
'Wow64' : [ 0x0, ['pointer64', ['void']]],
} ],
'_PEB_LDR_DATA' : [ 0x48, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x40, {
'PagedPoolAllocationMap' : [ 0x0, ['pointer64', ['_RTL_BITMAP']]],
'EndOfPagedPoolBitmap' : [ 0x8, ['pointer64', ['_RTL_BITMAP']]],
'FirstPteForPagedPool' : [ 0x10, ['pointer64', ['_MMPTE']]],
'LastPteForPagedPool' : [ 0x18, ['pointer64', ['_MMPTE']]],
'NextPdeForPagedPoolExpansion' : [ 0x20, ['pointer64', ['_MMPTE']]],
'PagedPoolHint' : [ 0x28, ['unsigned long']],
'PagedPoolCommit' : [ 0x30, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x38, ['unsigned long long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['unsigned short']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_MMSESSION' : [ 0x68, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x38, ['pointer64', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewStart' : [ 0x40, ['pointer64', ['unsigned char']]],
'SystemSpaceViewTable' : [ 0x48, ['pointer64', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x50, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x54, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x58, ['unsigned long']],
'BitmapFailures' : [ 0x5c, ['unsigned long']],
'SystemSpaceBitMap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY_OVERLAY' : [ 0x8, {
'PolicyBits' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'SetBit' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 37, native_type='unsigned long long')]],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['unsigned long']],
'NumberOfPages' : [ 0xc, ['unsigned long']],
'QuotaObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PROCESS_WS_WATCH_INFORMATION' : [ 0x10, {
'FaultingPc' : [ 0x0, ['pointer64', ['void']]],
'FaultingVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'Active' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x48, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x48, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x48, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XMM_SAVE_AREA32']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_MMPTE_HARDWARE_LARGEPAGE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PAT' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 21, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 40, native_type='unsigned long long')]],
'reserved2' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_PCI_PDO_EXTENSION' : [ 0x120, {
'Next' : [ 0x0, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
'Slot' : [ 0x38, ['_PCI_SLOT_NUMBER']],
'PhysicalDeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'ParentFdoExtension' : [ 0x48, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'SecondaryExtension' : [ 0x50, ['_SINGLE_LIST_ENTRY']],
'BusInterfaceReferenceCount' : [ 0x58, ['unsigned long']],
'AgpInterfaceReferenceCount' : [ 0x5c, ['unsigned long']],
'VendorId' : [ 0x60, ['unsigned short']],
'DeviceId' : [ 0x62, ['unsigned short']],
'SubsystemVendorId' : [ 0x64, ['unsigned short']],
'SubsystemId' : [ 0x66, ['unsigned short']],
'RevisionId' : [ 0x68, ['unsigned char']],
'ProgIf' : [ 0x69, ['unsigned char']],
'SubClass' : [ 0x6a, ['unsigned char']],
'BaseClass' : [ 0x6b, ['unsigned char']],
'AdditionalResourceCount' : [ 0x6c, ['unsigned char']],
'AdjustedInterruptLine' : [ 0x6d, ['unsigned char']],
'InterruptPin' : [ 0x6e, ['unsigned char']],
'RawInterruptLine' : [ 0x6f, ['unsigned char']],
'CapabilitiesPtr' : [ 0x70, ['unsigned char']],
'SavedLatencyTimer' : [ 0x71, ['unsigned char']],
'SavedCacheLineSize' : [ 0x72, ['unsigned char']],
'HeaderType' : [ 0x73, ['unsigned char']],
'NotPresent' : [ 0x74, ['unsigned char']],
'ReportedMissing' : [ 0x75, ['unsigned char']],
'ExpectedWritebackFailure' : [ 0x76, ['unsigned char']],
'NoTouchPmeEnable' : [ 0x77, ['unsigned char']],
'LegacyDriver' : [ 0x78, ['unsigned char']],
'UpdateHardware' : [ 0x79, ['unsigned char']],
'MovedDevice' : [ 0x7a, ['unsigned char']],
'DisablePowerDown' : [ 0x7b, ['unsigned char']],
'NeedsHotPlugConfiguration' : [ 0x7c, ['unsigned char']],
'IDEInNativeMode' : [ 0x7d, ['unsigned char']],
'BIOSAllowsIDESwitchToNativeMode' : [ 0x7e, ['unsigned char']],
'IoSpaceUnderNativeIdeControl' : [ 0x7f, ['unsigned char']],
'OnDebugPath' : [ 0x80, ['unsigned char']],
'IoSpaceNotRequired' : [ 0x81, ['unsigned char']],
'PowerState' : [ 0x88, ['PCI_POWER_STATE']],
'Dependent' : [ 0xd8, ['PCI_HEADER_TYPE_DEPENDENT']],
'HackFlags' : [ 0xe0, ['unsigned long long']],
'Resources' : [ 0xe8, ['pointer64', ['PCI_FUNCTION_RESOURCES']]],
'BridgeFdoExtension' : [ 0xf0, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'NextBridge' : [ 0xf8, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'NextHashEntry' : [ 0x100, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'Lock' : [ 0x108, ['_PCI_LOCK']],
'PowerCapabilities' : [ 0x118, ['_PCI_PMC']],
'TargetAgpCapabilityId' : [ 0x11a, ['unsigned char']],
'CommandEnables' : [ 0x11c, ['unsigned short']],
'InitialCommand' : [ 0x11e, ['unsigned short']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'__unnamed_1650' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1652' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_1650']],
'Merged' : [ 0x10, ['__unnamed_1652']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x28, ['unsigned char']],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_DEVICE_MAP' : [ 0x38, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'ReferenceCount' : [ 0x10, ['unsigned long']],
'DriveMap' : [ 0x14, ['unsigned long']],
'DriveType' : [ 0x18, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x38, {
'BasePhysicalPage' : [ 0x0, ['unsigned long long']],
'BasedPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BankSize' : [ 0x10, ['unsigned long']],
'BankShift' : [ 0x14, ['unsigned long']],
'BankedRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'CurrentMappedPte' : [ 0x28, ['pointer64', ['_MMPTE']]],
'BankTemplate' : [ 0x30, ['array', 1, ['_MMPTE']]],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStamp' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'LastBranchControl' : [ 0x108, ['unsigned long long']],
'LastBranchMSR' : [ 0x110, ['unsigned long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill1' : [ 0x172, ['array', 3, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['array', 1, ['unsigned short']]],
'CodePatchCycle' : [ 0x18c, ['long']],
} ],
'__unnamed_1680' : [ 0x5, {
'Acquired' : [ 0x0, ['unsigned char']],
'CacheLineSize' : [ 0x1, ['unsigned char']],
'LatencyTimer' : [ 0x2, ['unsigned char']],
'EnablePERR' : [ 0x3, ['unsigned char']],
'EnableSERR' : [ 0x4, ['unsigned char']],
} ],
'_PCI_FDO_EXTENSION' : [ 0x130, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
'PhysicalDeviceObject' : [ 0x38, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalDeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDeviceObject' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'ChildListLock' : [ 0x50, ['_KEVENT']],
'ChildPdoList' : [ 0x68, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'BusRootFdoExtension' : [ 0x70, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'ParentFdoExtension' : [ 0x78, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'ChildBridgePdoList' : [ 0x80, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'PciBusInterface' : [ 0x88, ['pointer64', ['_PCI_BUS_INTERFACE_STANDARD']]],
'MaxSubordinateBus' : [ 0x90, ['unsigned char']],
'BusHandler' : [ 0x98, ['pointer64', ['_BUS_HANDLER']]],
'BaseBus' : [ 0xa0, ['unsigned char']],
'Fake' : [ 0xa1, ['unsigned char']],
'ChildDelete' : [ 0xa2, ['unsigned char']],
'Scanned' : [ 0xa3, ['unsigned char']],
'ArbitersInitialized' : [ 0xa4, ['unsigned char']],
'BrokenVideoHackApplied' : [ 0xa5, ['unsigned char']],
'Hibernated' : [ 0xa6, ['unsigned char']],
'PowerState' : [ 0xa8, ['PCI_POWER_STATE']],
'SecondaryExtension' : [ 0xf8, ['_SINGLE_LIST_ENTRY']],
'ChildWaitWakeCount' : [ 0x100, ['unsigned long']],
'PreservedConfig' : [ 0x108, ['pointer64', ['_PCI_COMMON_CONFIG']]],
'Lock' : [ 0x110, ['_PCI_LOCK']],
'HotPlugParameters' : [ 0x120, ['__unnamed_1680']],
'BusHackFlags' : [ 0x128, ['unsigned long']],
} ],
'__unnamed_1684' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1686' : [ 0x10, {
'Level' : [ 0x0, ['unsigned long']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1688' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_168a' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_168c' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_168e' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1690' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_1684']],
'Port' : [ 0x0, ['__unnamed_1684']],
'Interrupt' : [ 0x0, ['__unnamed_1686']],
'Memory' : [ 0x0, ['__unnamed_1684']],
'Dma' : [ 0x0, ['__unnamed_1688']],
'DevicePrivate' : [ 0x0, ['__unnamed_168a']],
'BusNumber' : [ 0x0, ['__unnamed_168c']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_168e']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_1690']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'RequestSummary' : [ 0x0, ['long long']],
'RequestPacket' : [ 0x8, ['_KREQUEST_PACKET']],
'Virtual' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0xa8, {
'RefCount' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DelayedCloseIndex' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 22, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'KeyHash' : [ 0x8, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x18, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x20, ['unsigned long']],
'ParentKcb' : [ 0x28, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x30, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x38, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x40, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x50, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x50, ['unsigned long']],
'SubKeyCount' : [ 0x50, ['unsigned long']],
'KeyBodyListHead' : [ 0x58, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x58, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x68, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'DelayCloseEntry' : [ 0x88, ['pointer64', ['void']]],
'KcbLastWriteTime' : [ 0x90, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x98, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x9a, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x9c, ['unsigned long']],
'RealKeyName' : [ 0xa0, ['pointer64', ['unsigned char']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_PCI_BUS_INTERFACE_STANDARD' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ReadConfig' : [ 0x20, ['pointer64', ['void']]],
'WriteConfig' : [ 0x28, ['pointer64', ['void']]],
'PinToLine' : [ 0x30, ['pointer64', ['void']]],
'LineToPin' : [ 0x38, ['pointer64', ['void']]],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'Level' : [ 0x20, ['unsigned long']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Period' : [ 0x38, ['long']],
} ],
'_SEP_AUDIT_POLICY_CATEGORIES' : [ 0x8, {
'System' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'Logon' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'ObjectAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'PrivilegeUse' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'DetailedTracking' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'PolicyChange' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'AccountManagement' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 28, native_type='unsigned long')]],
'DirectoryServiceAccess' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
'AccountLogon' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_16d3' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_16d8' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_16da' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_16d8']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_16e2' : [ 0x50, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_16e4' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_16e2']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_16d3']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_16da']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_16e4']],
} ],
'_PCI_LOCK' : [ 0x10, {
'Atom' : [ 0x0, ['unsigned long long']],
'OldIrql' : [ 0x8, ['unsigned char']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_16f2' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_16f2']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'__unnamed_16f8' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x10, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyInitiatePowerActionAPI', 4: 'PolicySetPowerStateAPI', 5: 'PolicyImmediateDozeS4', 6: 'PolicySystemIdle'})]],
'Flags' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'Battery' : [ 0x8, ['__unnamed_16f8']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
} ],
'_ETIMER' : [ 0x108, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x40, ['_KAPC']],
'TimerDpc' : [ 0x98, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Lock' : [ 0xe8, ['unsigned long long']],
'Period' : [ 0xf0, ['long']],
'ApcAssociated' : [ 0xf4, ['unsigned char']],
'WakeTimer' : [ 0xf5, ['unsigned char']],
'WakeTimerListEntry' : [ 0xf8, ['_LIST_ENTRY']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_PCI_PMC' : [ 0x2, {
'Version' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'PMEClock' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Rsvd1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'DeviceSpecificInitialization' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Support' : [ 0x1, ['_PM_SUPPORT']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'__unnamed_1718' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_1718']],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'__unnamed_1722' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_1722']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x3f0, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_KSPECIAL_REGISTERS' : [ 0xd8, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockQueuedSpinLock', 7: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['unsigned long']],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PEB_FREE_BLOCK' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_PEB_FREE_BLOCK']]],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x48, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'WakeNeeded' : [ 0x18, ['unsigned char']],
'OrderLevel' : [ 0x19, ['unsigned char']],
'DeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'Node' : [ 0x28, ['pointer64', ['void']]],
'DeviceName' : [ 0x30, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x38, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x40, ['unsigned long']],
'ActiveChild' : [ 0x44, ['unsigned long']],
} ],
'_MMPFNLIST' : [ 0x20, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_174c' : [ 0x4, {
'Spare' : [ 0x0, ['array', 4, ['unsigned char']]],
} ],
'__unnamed_174e' : [ 0x4, {
'PrimaryBus' : [ 0x0, ['unsigned char']],
'SecondaryBus' : [ 0x1, ['unsigned char']],
'SubordinateBus' : [ 0x2, ['unsigned char']],
'SubtractiveDecode' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsaBitSet' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'VgaBitSet' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'WeChangedBusNumbers' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsaBitRequired' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
} ],
'PCI_HEADER_TYPE_DEPENDENT' : [ 0x4, {
'type0' : [ 0x0, ['__unnamed_174c']],
'type1' : [ 0x0, ['__unnamed_174e']],
'type2' : [ 0x0, ['__unnamed_174e']],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_KINTERRUPT' : [ 0x80, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'ServiceContext' : [ 0x20, ['pointer64', ['void']]],
'SpinLock' : [ 0x28, ['unsigned long long']],
'TickCount' : [ 0x30, ['unsigned long']],
'ActualLock' : [ 0x38, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x40, ['pointer64', ['void']]],
'Vector' : [ 0x48, ['unsigned long']],
'Irql' : [ 0x4c, ['unsigned char']],
'SynchronizeIrql' : [ 0x4d, ['unsigned char']],
'FloatingSave' : [ 0x4e, ['unsigned char']],
'Connected' : [ 0x4f, ['unsigned char']],
'Number' : [ 0x50, ['unsigned char']],
'ShareVector' : [ 0x51, ['unsigned char']],
'Mode' : [ 0x54, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'ServiceCount' : [ 0x58, ['unsigned long']],
'DispatchCount' : [ 0x5c, ['unsigned long']],
'TrapFrame' : [ 0x60, ['pointer64', ['_KTRAP_FRAME']]],
'Reserved' : [ 0x68, ['pointer64', ['void']]],
'DispatchCode' : [ 0x70, ['array', 4, ['unsigned long']]],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'_PCI_ARBITER_INSTANCE' : [ 0x190, {
'Header' : [ 0x0, ['PCI_SECONDARY_EXTENSION']],
'Interface' : [ 0x18, ['pointer64', ['_PCI_INTERFACE']]],
'BusFdoExtension' : [ 0x20, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'InstanceName' : [ 0x28, ['array', 24, ['unsigned short']]],
'CommonInstance' : [ 0x58, ['_ARBITER_INSTANCE']],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
'_PCI_MJ_DISPATCH_TABLE' : [ 0x40, {
'PnpIrpMaximumMinorFunction' : [ 0x0, ['unsigned long']],
'PnpIrpDispatchTable' : [ 0x8, ['pointer64', ['_PCI_MN_DISPATCH_TABLE']]],
'PowerIrpMaximumMinorFunction' : [ 0x10, ['unsigned long']],
'PowerIrpDispatchTable' : [ 0x18, ['pointer64', ['_PCI_MN_DISPATCH_TABLE']]],
'SystemControlIrpDispatchStyle' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'SystemControlIrpDispatchFunction' : [ 0x28, ['pointer64', ['void']]],
'OtherIrpDispatchStyle' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'OtherIrpDispatchFunction' : [ 0x38, ['pointer64', ['void']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'LockedInWs' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'LockedInMemory' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_178e' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_1792' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_178e']],
'Bits' : [ 0x4, ['__unnamed_1792']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x140, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'SessionId' : [ 0x138, ['unsigned long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x20, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
} ],
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_MMWSL' : [ 0x80, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer64', ['_MMWSLE']]],
'LastInitializedWsle' : [ 0x18, ['unsigned long']],
'NonDirectCount' : [ 0x1c, ['unsigned long']],
'HashTable' : [ 0x20, ['pointer64', ['_MMWSLE_HASH']]],
'HashTableSize' : [ 0x28, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x2c, ['unsigned long']],
'HashTableStart' : [ 0x30, ['pointer64', ['void']]],
'HighestPermittedHashAddress' : [ 0x38, ['pointer64', ['void']]],
'NumberOfImageWaiters' : [ 0x40, ['unsigned long']],
'VadBitMapHint' : [ 0x44, ['unsigned long']],
'HighestUserAddress' : [ 0x48, ['pointer64', ['void']]],
'MaximumUserPageTablePages' : [ 0x50, ['unsigned long']],
'MaximumUserPageDirectoryPages' : [ 0x54, ['unsigned long']],
'CommittedPageTables' : [ 0x58, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectories' : [ 0x60, ['unsigned long']],
'CommittedPageDirectories' : [ 0x68, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectoryParents' : [ 0x70, ['unsigned long']],
'CommittedPageDirectoryParents' : [ 0x78, ['array', 1, ['unsigned long long']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'PCI_FUNCTION_RESOURCES' : [ 0x170, {
'Limit' : [ 0x0, ['array', 7, ['_IO_RESOURCE_DESCRIPTOR']]],
'Current' : [ 0xe0, ['array', 7, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'__unnamed_17c3' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_17c7' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x68, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'NonExtendedPtes' : [ 0xc, ['unsigned long']],
'Spare0' : [ 0x10, ['unsigned long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'SegmentPteTemplate' : [ 0x20, ['_MMPTE']],
'NumberOfCommittedPages' : [ 0x28, ['unsigned long long']],
'ExtendInfo' : [ 0x30, ['pointer64', ['_MMEXTEND_INFO']]],
'SegmentFlags' : [ 0x38, ['_SEGMENT_FLAGS']],
'BasedAddress' : [ 0x40, ['pointer64', ['void']]],
'u1' : [ 0x48, ['__unnamed_17c3']],
'u2' : [ 0x50, ['__unnamed_17c7']],
'PrototypePte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ThePtes' : [ 0x60, ['array', 1, ['_MMPTE']]],
} ],
'_PCI_COMMON_EXTENSION' : [ 0x38, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
} ],
'_MI_VERIFIER_DRIVER_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
'StartAddress' : [ 0x28, ['pointer64', ['void']]],
'EndAddress' : [ 0x30, ['pointer64', ['void']]],
'Flags' : [ 0x38, ['unsigned long']],
'Signature' : [ 0x40, ['unsigned long long']],
'PoolPageHeaders' : [ 0x50, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x60, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x70, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x74, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x78, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x7c, ['unsigned long']],
'PagedBytes' : [ 0x80, ['unsigned long long']],
'NonPagedBytes' : [ 0x88, ['unsigned long long']],
'PeakPagedBytes' : [ 0x90, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x98, ['unsigned long long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x60, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'ReadAheadOffset' : [ 0x30, ['array', 2, ['_LARGE_INTEGER']]],
'ReadAheadLength' : [ 0x40, ['array', 2, ['unsigned long']]],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long long']],
'PrivateLinks' : [ 0x50, ['_LIST_ENTRY']],
} ],
'_RTL_HANDLE_TABLE' : [ 0x30, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x18, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x20, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x28, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_POP_IDLE_HANDLER' : [ 0x28, {
'Latency' : [ 0x0, ['unsigned long']],
'TimeCheck' : [ 0x4, ['unsigned long']],
'DemoteLimit' : [ 0x8, ['unsigned long']],
'PromoteLimit' : [ 0xc, ['unsigned long']],
'PromoteCount' : [ 0x10, ['unsigned long']],
'Demote' : [ 0x14, ['unsigned char']],
'Promote' : [ 0x15, ['unsigned char']],
'PromotePercent' : [ 0x16, ['unsigned char']],
'DemotePercent' : [ 0x17, ['unsigned char']],
'State' : [ 0x18, ['unsigned char']],
'Spare' : [ 0x19, ['array', 3, ['unsigned char']]],
'IdleFunction' : [ 0x20, ['pointer64', ['void']]],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'spare2' : [ 0x11, ['array', 4, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_DEVOBJ_EXTENSION' : [ 0x50, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_MMVIEW' : [ 0x10, {
'Entry' : [ 0x0, ['unsigned long long']],
'ControlArea' : [ 0x8, ['pointer64', ['_CONTROL_AREA']]],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'PCI_SECONDARY_EXTENSION' : [ 0x18, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'Destructor' : [ 0x10, ['pointer64', ['void']]],
} ],
'__unnamed_17f4' : [ 0x30, {
'type0' : [ 0x0, ['_PCI_HEADER_TYPE_0']],
'type1' : [ 0x0, ['_PCI_HEADER_TYPE_1']],
'type2' : [ 0x0, ['_PCI_HEADER_TYPE_2']],
} ],
'_PCI_COMMON_CONFIG' : [ 0x100, {
'VendorID' : [ 0x0, ['unsigned short']],
'DeviceID' : [ 0x2, ['unsigned short']],
'Command' : [ 0x4, ['unsigned short']],
'Status' : [ 0x6, ['unsigned short']],
'RevisionID' : [ 0x8, ['unsigned char']],
'ProgIf' : [ 0x9, ['unsigned char']],
'SubClass' : [ 0xa, ['unsigned char']],
'BaseClass' : [ 0xb, ['unsigned char']],
'CacheLineSize' : [ 0xc, ['unsigned char']],
'LatencyTimer' : [ 0xd, ['unsigned char']],
'HeaderType' : [ 0xe, ['unsigned char']],
'BIST' : [ 0xf, ['unsigned char']],
'u' : [ 0x10, ['__unnamed_17f4']],
'DeviceSpecific' : [ 0x40, ['array', 192, ['unsigned char']]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'GpValue' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'Spare1' : [ 0x33, ['unsigned char']],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'Reserved' : [ 0x3c, ['array', 1, ['unsigned long']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['unsigned long']],
'NonPagedAllocs' : [ 0x4, ['unsigned long']],
'NonPagedFrees' : [ 0x8, ['unsigned long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_KNODE' : [ 0x40, {
'DeadStackList' : [ 0x0, ['_SLIST_HEADER']],
'PfnDereferenceSListHead' : [ 0x10, ['_SLIST_HEADER']],
'Alignment' : [ 0x10, ['unsigned long long']],
'ProcessorMask' : [ 0x18, ['unsigned long long']],
'Color' : [ 0x20, ['unsigned char']],
'Seed' : [ 0x21, ['unsigned char']],
'NodeNumber' : [ 0x22, ['unsigned char']],
'Flags' : [ 0x23, ['_flags']],
'MmShiftedColor' : [ 0x24, ['unsigned long']],
'FreeCount' : [ 0x28, ['array', 2, ['unsigned long long']]],
'PfnDeferredList' : [ 0x38, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_SEGMENT_FLAGS' : [ 0x8, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x30, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_PCI_INTERFACE' : [ 0x28, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'MinSize' : [ 0x8, ['unsigned short']],
'MinVersion' : [ 0xa, ['unsigned short']],
'MaxVersion' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned short']],
'ReferenceCount' : [ 0x10, ['long']],
'Signature' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'Constructor' : [ 0x18, ['pointer64', ['void']]],
'Initializer' : [ 0x20, ['pointer64', ['void']]],
} ],
'_POP_POWER_ACTION' : [ 0x50, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'IrpMinor' : [ 0x14, ['unsigned char']],
'SystemState' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x20, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x28, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x30, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'LastWakeState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WakeTime' : [ 0x40, ['unsigned long long']],
'SleepTime' : [ 0x48, ['unsigned long long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_MMVAD_SHORT' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_1182']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1185']],
} ],
'__unnamed_183c' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_183c']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x88, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'Data' : [ 0x40, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_KGUARDED_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x30, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'Descriptor' : [ 0x1c, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PROCESSOR_POWER_POLICY_INFO' : [ 0x14, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemoteLimit' : [ 0x4, ['unsigned long']],
'PromoteLimit' : [ 0x8, ['unsigned long']],
'DemotePercent' : [ 0xc, ['unsigned char']],
'PromotePercent' : [ 0xd, ['unsigned char']],
'Spare' : [ 0xe, ['array', 2, ['unsigned char']]],
'AllowDemotion' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AllowPromotion' : [ 0x10, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x10, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_ARBITER_INSTANCE' : [ 0x138, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x18, ['long']],
'Allocation' : [ 0x20, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x30, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x40, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x50, ['long']],
'Interface' : [ 0x58, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x60, ['unsigned long']],
'AllocationStack' : [ 0x68, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x70, ['pointer64', ['void']]],
'PackResource' : [ 0x78, ['pointer64', ['void']]],
'UnpackResource' : [ 0x80, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x88, ['pointer64', ['void']]],
'TestAllocation' : [ 0x90, ['pointer64', ['void']]],
'RetestAllocation' : [ 0x98, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa0, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xa8, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb0, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xb8, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc0, ['pointer64', ['void']]],
'AddReserved' : [ 0xc8, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd0, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xd8, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe0, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xe8, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf0, ['pointer64', ['void']]],
'AddAllocation' : [ 0xf8, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x100, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x108, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x110, ['unsigned char']],
'Extension' : [ 0x118, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x120, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x128, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x130, ['pointer64', ['void']]],
} ],
'_BUS_HANDLER' : [ 0xb8, {
'Version' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ConfigurationType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'Cmos', 1: 'EisaConfiguration', 2: 'Pos', 3: 'CbusConfiguration', 4: 'PCIConfiguration', 5: 'VMEConfiguration', 6: 'NuBusConfiguration', 7: 'PCMCIAConfiguration', 8: 'MPIConfiguration', 9: 'MPSAConfiguration', 10: 'PNPISAConfiguration', 11: 'SgiInternalConfiguration', 12: 'MaximumBusDataType', -1: 'ConfigurationSpaceUndefined'})]],
'BusNumber' : [ 0xc, ['unsigned long']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'ParentHandler' : [ 0x18, ['pointer64', ['_BUS_HANDLER']]],
'BusData' : [ 0x20, ['pointer64', ['void']]],
'DeviceControlExtensionSize' : [ 0x28, ['unsigned long']],
'BusAddresses' : [ 0x30, ['pointer64', ['_SUPPORTED_RANGES']]],
'Reserved' : [ 0x38, ['array', 4, ['unsigned long']]],
'GetBusData' : [ 0x48, ['pointer64', ['void']]],
'SetBusData' : [ 0x50, ['pointer64', ['void']]],
'AdjustResourceList' : [ 0x58, ['pointer64', ['void']]],
'AssignSlotResources' : [ 0x60, ['pointer64', ['void']]],
'GetInterruptVector' : [ 0x68, ['pointer64', ['void']]],
'TranslateBusAddress' : [ 0x70, ['pointer64', ['void']]],
'Spare1' : [ 0x78, ['pointer64', ['void']]],
'Spare2' : [ 0x80, ['pointer64', ['void']]],
'Spare3' : [ 0x88, ['pointer64', ['void']]],
'Spare4' : [ 0x90, ['pointer64', ['void']]],
'Spare5' : [ 0x98, ['pointer64', ['void']]],
'Spare6' : [ 0xa0, ['pointer64', ['void']]],
'Spare7' : [ 0xa8, ['pointer64', ['void']]],
'Spare8' : [ 0xb0, ['pointer64', ['void']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_PCI_MN_DISPATCH_TABLE' : [ 0x10, {
'DispatchStyle' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'DispatchFunction' : [ 0x8, ['pointer64', ['void']]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0xba8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Event' : [ 0x8, ['_KEVENT']],
'SpinLock' : [ 0x20, ['unsigned long long']],
'Thread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'GetNewDeviceList' : [ 0x30, ['unsigned char']],
'Order' : [ 0x38, ['_PO_DEVICE_NOTIFY_ORDER']],
'Status' : [ 0x448, ['long']],
'FailedDevice' : [ 0x450, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x458, ['unsigned char']],
'Cancelled' : [ 0x459, ['unsigned char']],
'IgnoreErrors' : [ 0x45a, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x45b, ['unsigned char']],
'WaitAny' : [ 0x45c, ['unsigned char']],
'WaitAll' : [ 0x45d, ['unsigned char']],
'PresentIrpQueue' : [ 0x460, ['_LIST_ENTRY']],
'Head' : [ 0x470, ['_POP_DEVICE_POWER_IRP']],
'PowerIrpState' : [ 0x4c8, ['array', 20, ['_POP_DEVICE_POWER_IRP']]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_MMWSLE_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long']],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['unsigned short']]],
} ],
'_CM_KEY_BODY' : [ 0x30, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long long']],
'GrantedAccess' : [ 0x8, ['unsigned long']],
'GrantedAccessIndex' : [ 0x8, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xa, ['unsigned short']],
'NextFreeTableEntry' : [ 0x8, ['long']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x20, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer64', ['_HEAP_SUBSEGMENT']]],
'HeapHandle' : [ 0x8, ['pointer64', ['void']]],
'SizeIndex' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long long']],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'PCI_POWER_STATE' : [ 0x50, {
'CurrentSystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentDeviceState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemWakeLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWakeLevel' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemStateMapping' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'WaitWakeIrp' : [ 0x30, ['pointer64', ['_IRP']]],
'SavedCancelRoutine' : [ 0x38, ['pointer64', ['void']]],
'Paging' : [ 0x40, ['long']],
'Hibernate' : [ 0x44, ['long']],
'CrashDump' : [ 0x48, ['long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_POOL_HACKER' : [ 0x30, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x10, ['array', 8, ['unsigned long']]],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'__unnamed_18e1' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_18e5' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_18e9' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_18eb' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_18ef' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_18f1' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_18f3' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
} ],
'__unnamed_18f5' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_18f7' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_18f9' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_18fd' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_18ff' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1901' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1903' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1905' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1907' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1909' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_190d' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1911' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1915' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations'})]],
} ],
'__unnamed_1917' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_191b' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_191d' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_191f' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1921' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1925' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber'})]],
} ],
'__unnamed_1929' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_192d' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_192f' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1933' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1937' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_1939' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_193b' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_193d' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_193f' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_18e1']],
'CreatePipe' : [ 0x0, ['__unnamed_18e5']],
'CreateMailslot' : [ 0x0, ['__unnamed_18e9']],
'Read' : [ 0x0, ['__unnamed_18eb']],
'Write' : [ 0x0, ['__unnamed_18eb']],
'QueryDirectory' : [ 0x0, ['__unnamed_18ef']],
'NotifyDirectory' : [ 0x0, ['__unnamed_18f1']],
'QueryFile' : [ 0x0, ['__unnamed_18f3']],
'SetFile' : [ 0x0, ['__unnamed_18f5']],
'QueryEa' : [ 0x0, ['__unnamed_18f7']],
'SetEa' : [ 0x0, ['__unnamed_18f9']],
'QueryVolume' : [ 0x0, ['__unnamed_18fd']],
'SetVolume' : [ 0x0, ['__unnamed_18fd']],
'FileSystemControl' : [ 0x0, ['__unnamed_18ff']],
'LockControl' : [ 0x0, ['__unnamed_1901']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1903']],
'QuerySecurity' : [ 0x0, ['__unnamed_1905']],
'SetSecurity' : [ 0x0, ['__unnamed_1907']],
'MountVolume' : [ 0x0, ['__unnamed_1909']],
'VerifyVolume' : [ 0x0, ['__unnamed_1909']],
'Scsi' : [ 0x0, ['__unnamed_190d']],
'QueryQuota' : [ 0x0, ['__unnamed_1911']],
'SetQuota' : [ 0x0, ['__unnamed_18f9']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1915']],
'QueryInterface' : [ 0x0, ['__unnamed_1917']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_191b']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_191d']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_191f']],
'SetLock' : [ 0x0, ['__unnamed_1921']],
'QueryId' : [ 0x0, ['__unnamed_1925']],
'QueryDeviceText' : [ 0x0, ['__unnamed_1929']],
'UsageNotification' : [ 0x0, ['__unnamed_192d']],
'WaitWake' : [ 0x0, ['__unnamed_192f']],
'PowerSequence' : [ 0x0, ['__unnamed_1933']],
'Power' : [ 0x0, ['__unnamed_1937']],
'StartDevice' : [ 0x0, ['__unnamed_1939']],
'WMI' : [ 0x0, ['__unnamed_193b']],
'Others' : [ 0x0, ['__unnamed_193d']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_193f']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_1946' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1948' : [ 0x8, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_194a' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_194c' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_194e' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1950' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1946']],
'Memory' : [ 0x0, ['__unnamed_1946']],
'Interrupt' : [ 0x0, ['__unnamed_1948']],
'Dma' : [ 0x0, ['__unnamed_194a']],
'Generic' : [ 0x0, ['__unnamed_1946']],
'DevicePrivate' : [ 0x0, ['__unnamed_168a']],
'BusNumber' : [ 0x0, ['__unnamed_194c']],
'ConfigData' : [ 0x0, ['__unnamed_194e']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1950']],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'__unnamed_1959' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_195b' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1959']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_195d' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_195f' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_195d']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_195b']],
'u2' : [ 0x4, ['__unnamed_195f']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x70, ['array', 99, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 894, ['unsigned long']]],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x260, ['unsigned long']],
'FreeBins' : [ 0x268, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0x150, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'VerifyOnWake' : [ 0x3, ['unsigned char']],
'Reset' : [ 0x4, ['unsigned char']],
'HiberFlags' : [ 0x5, ['unsigned char']],
'LinkFile' : [ 0x6, ['unsigned char']],
'LinkFileHandle' : [ 0x8, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['unsigned long long']],
'MapFrozen' : [ 0x18, ['unsigned char']],
'MemoryMap' : [ 0x20, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x30, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x40, ['unsigned long']],
'NextCloneRange' : [ 0x48, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x50, ['unsigned long long']],
'LoaderMdl' : [ 0x58, ['pointer64', ['_MDL']]],
'Clones' : [ 0x60, ['pointer64', ['_MDL']]],
'NextClone' : [ 0x68, ['pointer64', ['unsigned char']]],
'NoClones' : [ 0x70, ['unsigned long long']],
'Spares' : [ 0x78, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x80, ['unsigned long long']],
'IoPage' : [ 0x88, ['pointer64', ['void']]],
'CurrentMcb' : [ 0x90, ['pointer64', ['void']]],
'DumpStack' : [ 0x98, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0xa0, ['pointer64', ['_KPROCESSOR_STATE']]],
'NoRanges' : [ 0xa8, ['unsigned long']],
'HiberVa' : [ 0xb0, ['unsigned long long']],
'HiberPte' : [ 0xb8, ['_LARGE_INTEGER']],
'Status' : [ 0xc0, ['long']],
'MemoryImage' : [ 0xc8, ['pointer64', ['PO_MEMORY_IMAGE']]],
'TableHead' : [ 0xd0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'CompressionWorkspace' : [ 0xd8, ['pointer64', ['unsigned char']]],
'CompressedWriteBuffer' : [ 0xe0, ['pointer64', ['unsigned char']]],
'PerformanceStats' : [ 0xe8, ['pointer64', ['unsigned long']]],
'CompressionBlock' : [ 0xf0, ['pointer64', ['void']]],
'DmaIO' : [ 0xf8, ['pointer64', ['void']]],
'TemporaryHeap' : [ 0x100, ['pointer64', ['void']]],
'PerfInfo' : [ 0x108, ['_PO_HIBER_PERF']],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'StartVpn' : [ 0x0, ['unsigned long long']],
'EndVpn' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x110, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0xa0, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0xa8, ['pointer64', ['void']]],
'PointersLength' : [ 0xb0, ['unsigned long']],
'ModulePrefix' : [ 0xb8, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0xc0, ['_LIST_ENTRY']],
'InitMsg' : [ 0xd0, ['_STRING']],
'ProgMsg' : [ 0xe0, ['_STRING']],
'DoneMsg' : [ 0xf0, ['_STRING']],
'FileObject' : [ 0x100, ['pointer64', ['void']]],
'UsageType' : [ 0x108, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x28, {
'Code' : [ 0x0, ['unsigned long']],
'Parameter1' : [ 0x8, ['unsigned long long']],
'Parameter2' : [ 0x10, ['unsigned long long']],
'Parameter3' : [ 0x18, ['unsigned long long']],
'Parameter4' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_199a' : [ 0x4, {
'DeviceNumber' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'FunctionNumber' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_199c' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_199a']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_PCI_SLOT_NUMBER' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_199c']],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_SUPPORTED_RANGES' : [ 0xc0, {
'Version' : [ 0x0, ['unsigned short']],
'Sorted' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'NoIO' : [ 0x4, ['unsigned long']],
'IO' : [ 0x8, ['_SUPPORTED_RANGE']],
'NoMemory' : [ 0x30, ['unsigned long']],
'Memory' : [ 0x38, ['_SUPPORTED_RANGE']],
'NoPrefetchMemory' : [ 0x60, ['unsigned long']],
'PrefetchMemory' : [ 0x68, ['_SUPPORTED_RANGE']],
'NoDma' : [ 0x90, ['unsigned long']],
'Dma' : [ 0x98, ['_SUPPORTED_RANGE']],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_DRIVER_EXTENSION' : [ 0x38, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PM_SUPPORT' : [ 0x1, {
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'D1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'D2' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'PMED0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PMED1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'PMED2' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'PMED3Hot' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'PMED3Cold' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_19cb' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'__unnamed_19cd' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'__unnamed_19d1' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'__unnamed_19d3' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'__unnamed_19d5' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_19d7' : [ 0x20, {
'TestAllocation' : [ 0x0, ['__unnamed_19cb']],
'RetestAllocation' : [ 0x0, ['__unnamed_19cb']],
'BootAllocation' : [ 0x0, ['__unnamed_19cd']],
'QueryAllocatedResources' : [ 0x0, ['__unnamed_19d1']],
'QueryConflict' : [ 0x0, ['__unnamed_19d3']],
'QueryArbitrate' : [ 0x0, ['__unnamed_19cd']],
'AddReserved' : [ 0x0, ['__unnamed_19d5']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_19d7']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0xc0, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'ImageType' : [ 0x1c, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'HiberPte' : [ 0x48, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x50, ['unsigned long']],
'FreeMapCheck' : [ 0x54, ['unsigned long']],
'WakeCheck' : [ 0x58, ['unsigned long']],
'TotalPages' : [ 0x60, ['unsigned long long']],
'FirstTablePage' : [ 0x68, ['unsigned long long']],
'LastFilePage' : [ 0x70, ['unsigned long long']],
'PerfInfo' : [ 0x78, ['_PO_HIBER_PERF']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x48, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'StartCount' : [ 0x18, ['unsigned long long']],
'ElapsedTime' : [ 0x20, ['unsigned long']],
'IoTime' : [ 0x24, ['unsigned long']],
'CopyTime' : [ 0x28, ['unsigned long']],
'InitTime' : [ 0x2c, ['unsigned long']],
'PagesWritten' : [ 0x30, ['unsigned long']],
'PagesProcessed' : [ 0x34, ['unsigned long']],
'BytesCopied' : [ 0x38, ['unsigned long']],
'DumpCount' : [ 0x3c, ['unsigned long']],
'FileRuns' : [ 0x40, ['unsigned long']],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Spare' : [ 0x28, ['array', 2, ['unsigned long']]],
} ],
'__unnamed_19fb' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_19fd' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_19ff' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a01' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceIds' : [ 0x8, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a03' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1a05' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1a07' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a09' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1a0b' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a0d' : [ 0x18, {
'DeviceClass' : [ 0x0, ['__unnamed_19fb']],
'TargetDevice' : [ 0x0, ['__unnamed_19fd']],
'InstallDevice' : [ 0x0, ['__unnamed_19ff']],
'CustomNotification' : [ 0x0, ['__unnamed_1a01']],
'ProfileNotification' : [ 0x0, ['__unnamed_1a03']],
'PowerNotification' : [ 0x0, ['__unnamed_1a05']],
'VetoNotification' : [ 0x0, ['__unnamed_1a07']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1a09']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1a0b']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x48, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'PowerEvent', 7: 'VetoEvent', 8: 'BlockedDriverEvent', 9: 'InvalidIDEvent', 10: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_1a0d']],
} ],
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_PO_MEMORY_RANGE_ARRAY' : [ 0x20, {
'Range' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_RANGE']],
'Link' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_LINK']],
} ],
'__unnamed_1a24' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1a26' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1a28' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_1a24']],
'Gpt' : [ 0x0, ['__unnamed_1a26']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0xa0, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_1a28']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_PCI_HEADER_TYPE_0' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 6, ['unsigned long']]],
'CIS' : [ 0x18, ['unsigned long']],
'SubVendorID' : [ 0x1c, ['unsigned short']],
'SubSystemID' : [ 0x1e, ['unsigned short']],
'ROMBaseAddress' : [ 0x20, ['unsigned long']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'Reserved2' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'MinimumGrant' : [ 0x2e, ['unsigned char']],
'MaximumLatency' : [ 0x2f, ['unsigned char']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x410, {
'DevNodeSequence' : [ 0x0, ['unsigned long']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 8, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_PO_MEMORY_RANGE_ARRAY_RANGE' : [ 0x20, {
'PageNo' : [ 0x0, ['unsigned long long']],
'StartPage' : [ 0x8, ['unsigned long long']],
'EndPage' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x80, {
'LevelReady' : [ 0x0, ['_KEVENT']],
'DeviceCount' : [ 0x18, ['unsigned long']],
'ActiveCount' : [ 0x1c, ['unsigned long']],
'WaitSleep' : [ 0x20, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x30, ['_LIST_ENTRY']],
'Pending' : [ 0x40, ['_LIST_ENTRY']],
'Complete' : [ 0x50, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x60, ['_LIST_ENTRY']],
'WaitS0' : [ 0x70, ['_LIST_ENTRY']],
} ],
'__unnamed_1a58' : [ 0x8, {
'Base' : [ 0x0, ['unsigned long']],
'Limit' : [ 0x4, ['unsigned long']],
} ],
'_PCI_HEADER_TYPE_2' : [ 0x30, {
'SocketRegistersBaseAddress' : [ 0x0, ['unsigned long']],
'CapabilitiesPtr' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'SecondaryStatus' : [ 0x6, ['unsigned short']],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'Range' : [ 0xc, ['array', 4, ['__unnamed_1a58']]],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['unsigned short']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_PO_MEMORY_RANGE_ARRAY_LINK' : [ 0x18, {
'Next' : [ 0x0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'NextTable' : [ 0x8, ['unsigned long long']],
'CheckSum' : [ 0x10, ['unsigned long']],
'EntryCount' : [ 0x14, ['unsigned long']],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_POP_DEVICE_POWER_IRP' : [ 0x58, {
'Free' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Irp' : [ 0x8, ['pointer64', ['_IRP']]],
'Notify' : [ 0x10, ['pointer64', ['_PO_DEVICE_NOTIFY']]],
'Pending' : [ 0x18, ['_LIST_ENTRY']],
'Complete' : [ 0x28, ['_LIST_ENTRY']],
'Abort' : [ 0x38, ['_LIST_ENTRY']],
'Failed' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_PCI_HEADER_TYPE_1' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 2, ['unsigned long']]],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'IOBase' : [ 0xc, ['unsigned char']],
'IOLimit' : [ 0xd, ['unsigned char']],
'SecondaryStatus' : [ 0xe, ['unsigned short']],
'MemoryBase' : [ 0x10, ['unsigned short']],
'MemoryLimit' : [ 0x12, ['unsigned short']],
'PrefetchBase' : [ 0x14, ['unsigned short']],
'PrefetchLimit' : [ 0x16, ['unsigned short']],
'PrefetchBaseUpper32' : [ 0x18, ['unsigned long']],
'PrefetchLimitUpper32' : [ 0x1c, ['unsigned long']],
'IOBaseUpper16' : [ 0x20, ['unsigned short']],
'IOLimitUpper16' : [ 0x22, ['unsigned short']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'ROMBaseAddress' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_SUPPORTED_RANGE' : [ 0x28, {
'Next' : [ 0x0, ['pointer64', ['_SUPPORTED_RANGE']]],
'SystemAddressSpace' : [ 0x8, ['unsigned long']],
'SystemBase' : [ 0x10, ['long long']],
'Base' : [ 0x18, ['long long']],
'Limit' : [ 0x20, ['long long']],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['unsigned long']],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x38, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long']],
'Alignment' : [ 0x14, ['unsigned long']],
'Priority' : [ 0x18, ['long']],
'Flags' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x28, ['array', 3, ['unsigned long']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'__unnamed_1ae3' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_1ae5' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_1ae9' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1aeb' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_1ae3']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_1ae5']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_1ae9']],
'Others' : [ 0x0, ['__unnamed_1aeb']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
}
|
gpl-2.0
|
seanballais/sage-aes
|
botos/modules/voting/views.py
|
1
|
12161
|
# botos/modules/voting/views.py
# Copyright (C) 2016 Sean Francis N. Ballais
#
# This module is part of Botos and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
""" Views for voting.
"""
import json
import collections
from flask import render_template
from flask import flash
from flask import redirect
from flask import Markup
from flask_login import login_user
from flask_login import logout_user
from flask_login import current_user
from wtforms import RadioField
from wtforms.validators import DataRequired
from botos.modules.app_data import controllers
from botos import app
from botos import login_manager
from botos.modules.activity_log import ActivityLogObservable
from botos.modules.app_data.controllers import Settings
from botos.modules.app_data.models import User
from botos.modules.admin.utility import Utility
from botos.modules.voting.forms import LoginForm
from botos.modules.voting.forms import VotingForm
# Set up the logger
logger = ActivityLogObservable.ActivityLogObservable('voting_' + __name__)
def generate_voting_form():
    """
    Generate a voting form based on the information from the database.

    :return: Return a voting form with attributes based on the database.
    """
    logger.add_log(20,
                   'Generating voting form.'
                   )
    # 1-based index of the elective position; used to build DOM ids for
    # the option images (see generate_option_images).
    level_num = 1
    for position in Utility.get_position_list():
        # position is indexed as position[0] = id, position[1] = display name.
        candidate_list = []
        candidate_num = 0
        for candidate in controllers.Candidate.get_candidate_with_position(position[0]):
            logger.add_log(20,
                           'Generating candidate {0}.'.format(candidate.id)
                           )
            # Each choice is a (value, label-markup) pair for RadioField.
            candidate_list.append((
                candidate.id,
                generate_option_images(level_num,
                                       candidate_num,
                                       candidate,
                                       position[0],
                                       controllers.CandidateParty.get_candidate_party_by_id(candidate.party).name
                                       )
            ))
            candidate_num += 1
        # NOTE(review): setattr mutates the shared VotingForm *class*, so these
        # fields persist and get re-overwritten on every call/request — confirm
        # this is intended and safe under concurrent requests.
        setattr(VotingForm,
                '{0}'.format(position[0]),
                RadioField(label=position[1],
                           validators=[DataRequired()],
                           choices=candidate_list,
                           render_kw={
                               'id': "{0}".format(position[1]),
                           })
                )
        level_num += 1
    return VotingForm()
def generate_option_images(level_num,
                           candidate_num,
                           candidate,
                           candidate_position,
                           candidate_party_name
                           ):
    """
    Generate option images for the candidates.

    :param level_num: Level of the candidate position.
    :param candidate_num: The nth candidate in the loop.
    :param candidate: Candidate dictionary.
    :param candidate_position: The position of the candidate.
    :param candidate_party_name: Party name of the candidate.
    :return: Return a markup of the option images.
    """
    # A clickable image anchor (id is "<level>-<candidate>", class carries the
    # position for the JS click handlers) followed by a name/party caption.
    option_template = (
        "<a href=\"javascript:void(0);\" id=\"{0}-{1}\" "
        "class=\"radio-picture {3}\" style=\"background: url('{2}') no"
        "-repeat scroll 0 0 white;\"> </a><br/>"
        "<h3 class='candidate-name'>{4} {5}<br><small>{6}</small></h3>"
    )
    rendered = option_template.format(level_num,
                                      candidate_num,
                                      candidate.profile_url,
                                      candidate_position,
                                      candidate.first_name,
                                      candidate.last_name,
                                      candidate_party_name)
    return Markup(rendered)
def generate_candidate_script_code(candidate_position):
    """
    Generate a JS script that will be used to add selection feedback.

    :param candidate_position: ID of the position of the candidate.
    :return: Return a markup string.
    """
    position_class = str(candidate_position)
    # The snippet toggles the "selected-glow" class and the hidden radio input
    # when one of the candidate images for this position is clicked.
    script_template = (
        '<script type="text/javascript">\n'
        '\t\t\t$("a.%(pos)s").click(function() {\n'
        '\t\t\t\tvar input_clicked = $(this).parent().siblings("input");\n'
        '\t\t\t\tif (input_clicked.is(":checked")) {\n'
        'console.log("Clicked before.");'
        'console.log(input_clicked);'
        '\t\t\t\t\t$("a.%(pos)s").removeClass("selected-glow");\n'
        '\t\t\t\t\tinput_clicked.prop("checked", false);\n'
        '\t\t\t\t} else {\n'
        'console.log("Oh really?");'
        'console.log(input_clicked);'
        '\t\t\t\t\tinput_clicked.prop("checked", true);\n'
        '\t\t\t\t\t$("a.%(pos)s").removeClass("selected-glow");\n'
        '\t\t\t\t\t$(this).addClass("selected-glow");\n'
        '\t\t\t\t}\n'
        '\t\t\t})\n'
        '\t\t</script>\n'
    )
    return Markup(script_template % {'pos': position_class})
def generate_js_script():
    """
    Generate a JS script that will allow selection feedback on candidates.

    :return: Return a JS script.
    """
    # One click-handler snippet per elective position.
    return [
        generate_candidate_script_code(position[0])
        for position in Utility.get_position_list()
    ]
@login_manager.user_loader
def load_user(user_id):
    """
    Load the user. Callback for Flask-Login.

    :param user_id: ID of the user.
    :return: A User object.
    """
    logger.add_log(10,
                   'Getting user by ID of {0}.'.format(user_id)
                   )
    # Primary-key lookup; query.get returns None for an unknown id.
    return User.query.get(user_id)
@app.route('/login',
           methods=[
               'POST'
           ])
def login():
    """
    Login the voters before voting.

    :return: Reloads if invalid user credentials, loads the voting page otherwise.
    """
    login_form = LoginForm()
    username = login_form.username.data
    password = login_form.password.data
    logger.add_log(20,
                   'Attempting to log in user {0}.'.format(username)
                   )
    if login_form.validate_on_submit():
        reg_user = controllers.User.get_user(username)
        if reg_user is not None and reg_user.is_password_correct(password) and reg_user.is_active():
            login_user(reg_user,
                       remember=True
                       )
            logger.add_log(20,
                           'User {0} logged in successfully.'.format(username)
                           )
            flash('Logged in successfully.')
            logger.add_log(10,
                           'Current user role: {0}'.format(current_user.role)
                           )
            if current_user.role == 'admin' or current_user.role == 'viewer':
                return redirect('/admin')
            # NOTE(review): a successfully logged-in *voter* falls through to
            # the final redirect('/') below (app_index then shows the voting
            # page), but the "Username or password not entered" log line is
            # emitted misleadingly on that path — confirm intent.
        else:
            logger.add_log(20,
                           'Invalid credentials entered for user {0}.'.format(username)
                           )
            flash('Username or password is invalid.',
                  'error'
                  )
            return redirect('/')
    logger.add_log(20,
                   'Username or password not entered.'
                   )
    return redirect('/')
@app.route('/logout',
           methods=[
               'POST'
           ])
def logout_voter():
    """
    Logout the voter from the application.

    :return: Redirect to the login page.
    """
    # Read the name before logging out, while current_user is still bound.
    voter_name = current_user.username
    logger.add_log(20,
                   'Logging out user {0}.'.format(voter_name)
                   )
    logout_user()
    return redirect('/')
@app.route('/send_vote',
           methods=[
               'POST'
           ])
def send_vote():
    """
    Send the vote to the database.

    :return: Redirect the user to the thank you page.
    """
    form = generate_voting_form()
    for field in form:
        # Skip the CSRF token and unanswered positions. NOTE(review): the
        # comparison is against the *string* 'None' — presumably the radio
        # field stringifies an unselected value; confirm this is not masking
        # a None-vs-'None' bug.
        if field.type != 'CSRFTokenField' and field.data != 'None':
            logger.add_log(20,
                           'Passing in voter data. Voting for candidate {0}'.format(field.data)
                           )
            controllers.VoteStore.increment_vote(field.data,
                                                 current_user.section_id
                                                 )
    # Deactivate the voter so the same account cannot vote twice
    # (vote_thank_you relies on this inactive state).
    controllers.User.set_active(current_user.username,
                                False
                                )
    return redirect('/thank_you')
@app.route('/get_votes',
           methods=[
               'POST',
               'GET'
           ])
def get_votes():
    """
    Get the current votes in the system.

    :return: Return a JSON string containing the latest votes of each candidate.
    """
    # Ordered so positions and candidates serialize in database order.
    vote_data = collections.OrderedDict()
    for position in Utility.get_position_list():
        results = collections.OrderedDict()
        candidates = Utility.get_candidate_of_position_list(position[0])
        for index, candidate in enumerate(candidates):
            display_name = "{0} {1} ({2})".format(candidate['first_name'],
                                                  candidate['last_name'],
                                                  position[1]
                                                  )
            results[index] = {
                'votes': controllers.VoteStore.get_candidate_total_votes(candidate['id']),
                'name': display_name,
                'profile_url': "{0}".format(candidate['profile_url'])
            }
        vote_data[position[1]] = results
    return json.dumps(vote_data)
@app.route('/thank_you')
def vote_thank_you():
    """
    Display the thank you page.

    :return: Render the thank you page.
    """
    # An inactive current_user means the voter has just cast a ballot
    # (send_vote() deactivates the account), so show the thank-you page.
    if not current_user.is_active():
        logger.add_log(20,
                       'Voter {0} finished voting. Accessing thank you page.'.format(current_user.id)
                       )
        return render_template('{0}/thank-you.html'.format(Settings.get_property_value('current_template')))
    logger.add_log(20,
                   'Someone attempted to visit the thank you. Not sure if it was a voter, admin, or someone anonymous.'
                   )
    return redirect('/')
@app.route('/')
def app_index():
    """
    Index page of the whole app. This page will show different looks depending on the current user state.

    :return: Render the appropriate template depending on the user status.
    """
    login_form = LoginForm()
    logger.add_log(20,
                   'Accessing index page.'
                   )
    if current_user.is_authenticated:
        logger.add_log(20,
                       'Current user is authenticated. Displaying voting page.')
        if current_user.role != 'voter':
            # Admins and viewers belong in the admin panel, not the ballot.
            logger.add_log(20,
                           'Logged in user is an admin. Redirecting to the admin panel.'
                           )
            return redirect('/admin')
        elif current_user.is_active():
            # Active voter: render the ballot with the generated form and the
            # per-position JS click handlers.
            logger.add_log(20,
                           'Logged in user is a voter. Displaying the voting page.'
                           )
            return render_template('{0}/voting.html'.format(Settings.get_property_value('current_template')),
                                   voting_form=generate_voting_form(),
                                   link_handler=generate_js_script()
                                   )
    # Anonymous visitor or a voter who has already voted: show the login page.
    logger.add_log(20,
                   'Current visitor is anonymous or inactive. Might need to say "Who you? You ain\'t my nigga."'
                   )
    # TODO: Make the index template.
    return render_template('{0}/index.html'.format(Settings.get_property_value('current_template')),
                           form=login_form
                           )
|
gpl-3.0
|
bitcommoditiz/Cocoaz-CCz
|
share/qt/extract_strings_qt.py
|
2945
|
1844
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
    """
    Parse 'po' format produced by xgettext.

    Return a list of (msgid, msgstr) tuples, where each element of a tuple
    is the list of quoted source lines making up that field.
    """
    entries = []
    current_id = []
    current_str = []
    reading_id = False
    reading_str = False
    for raw_line in text.split('\n'):
        stripped = raw_line.rstrip('\r')
        if stripped.startswith('msgid '):
            # A new message begins; flush the previous one, if complete.
            if reading_str:
                entries.append((current_id, current_str))
                reading_str = False
            reading_id = True
            current_id = [stripped[6:]]
        elif stripped.startswith('msgstr '):
            reading_id = False
            reading_str = True
            current_str = [stripped[7:]]
        elif stripped.startswith('"'):
            # Continuation line belongs to whichever field is open.
            if reading_id:
                current_id.append(stripped)
            if reading_str:
                current_str.append(stripped)
    if reading_str:
        entries.append((current_id, current_str))
    return entries
# Collect every C++ source and header that may contain _("...") literals.
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')

# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()

# NOTE(review): on Python 3 `out` is bytes while parse_po() splits on str —
# this script presumably targets Python 2; confirm before upgrading.
messages = parse_po(out)

f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
# Sort by msgid so regeneration produces a stable, diffable file.
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
    # Skip the header entry (empty msgid).
    if msgid != EMPTY:
        f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
|
mit
|
psolstice/zcoin
|
qa/rpc-tests/wallet-dump.py
|
81
|
4788
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (start_nodes, start_node, assert_equal, bitcoind_processes)
def read_dump(file_name, addrs, hd_master_addr_old):
    """
    Read the given dump, count the addrs that match, count change and reserve.
    Also check that the old hd_master is inactive
    """
    with open(file_name, encoding='utf8') as inputfile:
        found_addr = 0       # dumped keys whose addr/keypath match an entry in addrs
        found_addr_chg = 0   # dumped change keys
        found_addr_rsv = 0   # dumped keypool (reserve) keys
        hd_master_addr_ret = None
        for line in inputfile:
            # only read non comment lines
            if line[0] != "#" and len(line) > 10:
                # split out some data
                key_label, comment = line.split("#")
                # key = key_label.split(" ")[0]
                keytype = key_label.split(" ")[2]
                if len(comment) > 1:
                    addr_keypath = comment.split(" addr=")[1]
                    addr = addr_keypath.split(" ")[0]
                    keypath = None
                    if keytype == "inactivehdmaster=1":
                        # ensure the old master is still available
                        assert(hd_master_addr_old == addr)
                    elif keytype == "hdmaster=1":
                        # ensure we have generated a new hd master key
                        assert(hd_master_addr_old != addr)
                        hd_master_addr_ret = addr
                    else:
                        keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
                    # count key types
                    # NOTE(review): the change=1/reserve=1 branches fire on the
                    # first addrObj regardless of its contents, so those keys
                    # go uncounted when addrs is empty — fine for this test,
                    # but confirm before reusing elsewhere.
                    for addrObj in addrs:
                        if addrObj['address'] == addr and addrObj['hdkeypath'] == keypath and keytype == "label=":
                            found_addr += 1
                            break
                        elif keytype == "change=1":
                            found_addr_chg += 1
                            break
                        elif keytype == "reserve=1":
                            found_addr_rsv += 1
                            break
        return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
class WalletDumpTest(BitcoinTestFramework):
    """Functional test for the dumpwallet RPC, unencrypted and encrypted."""

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = False
        self.num_nodes = 1
        # -keypool=90 fixes the expected reserve-key count asserted below.
        self.extra_args = [["-keypool=90"]]

    def setup_network(self, split=False):
        # Use 1 minute timeout because the initial getnewaddress RPC can take
        # longer than the default 30 seconds due to an expensive
        # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
        # the test often takes even longer.
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args, timewait=60)

    def run_test(self):
        """Dump the wallet before and after encryption and verify contents."""
        tmpdir = self.options.tmpdir
        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0, test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr = self.nodes[0].validateaddress(addr)  # required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        # dump unencrypted wallet
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
        assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks where mined
        assert_equal(found_addr_rsv, 90 + 1)  # keypool size (TODO: fix off-by-one)
        # encrypt wallet, restart, unlock and dump
        self.nodes[0].encryptwallet('test')
        # encryptwallet shuts the node down; wait, restart and unlock it.
        bitcoind_processes[0].wait()
        self.nodes[0] = start_node(0, self.options.tmpdir, self.extra_args[0])
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        assert_equal(found_addr_chg, 90 + 1 + 50)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90 + 1)  # keypool size (TODO: fix off-by-one)
if __name__ == '__main__':
WalletDumpTest().main ()
|
mit
|
danlurie/C-PAC
|
CPAC/utils/datasource.py
|
3
|
9911
|
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
def create_func_datasource(rest_dict, wf_name='func_datasource'):
    """
    Build a nipype workflow that iterates over the scans in rest_dict and
    exposes the matching functional file for each scan.

    :param rest_dict: mapping of scan name -> functional file entry.
    :param wf_name: name for the generated workflow.
    :return: the configured nipype Workflow.
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['subject', 'scan'],
        mandatory_inputs=True),
        name='inputnode')
    # Fan out: one workflow run per scan key.
    # NOTE(review): passing dict.keys() as iterables assumes Python 2's list
    # semantics — wrap in list() when porting to Python 3.
    inputnode.iterables = [('scan', rest_dict.keys())]

    # Function node that simply indexes rest_dict with the current scan
    # (see get_rest below).
    selectrest = pe.Node(util.Function(input_names=['scan', 'rest_dict'],
                                       output_names=['rest'],
                                       function=get_rest),
                         name='selectrest')
    selectrest.inputs.rest_dict = rest_dict

    outputnode = pe.Node(util.IdentityInterface(fields=['subject',
                                                        'rest',
                                                        'scan']),
                         name='outputspec')

    wf.connect(inputnode, 'scan', selectrest, 'scan')
    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(selectrest, 'rest', outputnode, 'rest')
    wf.connect(inputnode, 'scan', outputnode, 'scan')
    return wf
def get_rest(scan, rest_dict):
    """Return the entry mapped to *scan* in *rest_dict*."""
    selected = rest_dict[scan]
    return selected
def create_anat_datasource(wf_name='anat_datasource'):
    """
    Build a pass-through nipype workflow exposing 'subject' and 'anat'.

    :param wf_name: name for the generated workflow.
    :return: the configured nipype Workflow.
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    workflow = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(
        fields=['subject', 'anat'],
        mandatory_inputs=True),
        name='inputnode')

    output_node = pe.Node(util.IdentityInterface(fields=['subject',
                                                         'anat']),
                          name='outputspec')

    # Forward both fields unchanged from input to output.
    for field in ('subject', 'anat'):
        workflow.connect(input_node, field, output_node, field)

    return workflow
def create_roi_mask_dataflow(dir_path, mask_type, wf_name='datasource_roi_mask'):
    """
    Build a nipype workflow that iterates over the ROI/mask files named by
    dir_path (a single NIFTI file or a .txt list of NIFTI files).

    :param dir_path: path to a .nii/.nii.gz mask or a .txt list of masks.
    :param mask_type: 'roi', 'voxel' or 'centrality'; selects the
        configuration-tab name used in error messages.
    :param wf_name: name for the generated workflow.
    :return: the configured nipype Workflow.
    """
    import nipype.interfaces.io as nio  # NOTE(review): appears unused here
    import os

    wf = pe.Workflow(name=wf_name)

    # Human-readable tab name for the error messages below.
    # NOTE(review): an unrecognized mask_type leaves `tab` undefined and
    # would raise NameError later — confirm callers only pass these three.
    if mask_type == 'roi':
        tab = 'ROI Average TSE'
    elif mask_type == 'voxel':
        tab = 'ROI Voxelwise TSE'
    elif mask_type == 'centrality':
        tab = 'Network Centrality'

    # Accept either a single NIFTI path or a text file listing NIFTI paths.
    if '.nii' in dir_path:
        masks = []
        masks.append(dir_path)
    elif '.txt' in dir_path:
        masks = open(dir_path, 'r').readlines()
    else:
        print '\n\n[!] CPAC says: Your ROI/mask specification file (under ' \
              '%s options) either needs to be a NIFTI file (.nii or ' \
              '.nii.gz) of an ROI/mask or a text file (.txt) containing a ' \
              'list of NIFTI files of ROI/mask files.\nPlease change this ' \
              'in your pipeline configuration file and try again.\n\n' % tab
        raise Exception

    mask_dict = {}

    # Map a unique base name (file name minus NIFTI extension) to each mask
    # path, validating existence, extension and uniqueness along the way.
    for mask_file in masks:
        mask_file = mask_file.rstrip('\r\n')
        if not os.path.exists(mask_file):
            err = '\n\n[!] CPAC says: One of your ROI/mask specification ' \
                  'files (under %s options) does not have a correct path ' \
                  'or does not exist.\nTip: If all the paths are okay, ' \
                  'then ensure there are no whitespaces or blank lines in ' \
                  'your ROI specification file.\n\n' % mask_type
            raise Exception(err)
        # Skip blank and commented-out lines.
        if mask_file.strip() == '' or mask_file.startswith('#'):
            continue
        base_file = os.path.basename(mask_file)
        base_name = ''
        if base_file.endswith('.nii'):
            base_name = os.path.splitext(base_file)[0]
        elif(base_file.endswith('.nii.gz')):
            base_name = os.path.splitext(os.path.splitext(base_file)[0])[0]
        else:
            err = "\n\n[!] CPAC says: One of your ROI/mask specification " \
                  "files (under %s options) does not have '.nii' or " \
                  "'.nii.gz' as an extension.\n\nMask file: %s\n\n" \
                  % (tab, mask_file)
            raise Exception(err)
        if not (base_name in mask_dict):
            mask_dict[base_name] = mask_file
        else:
            err = "\n\n[!] CPAC says: You have two or more ROI/mask files " \
                  "with the same name - please make sure these files are named " \
                  "differently.\n\nDuplicate name: %s\n\nNote: This can be " \
                  "changed in the ROI/mask file you specified under the %s " \
                  "options.\n\n" % (mask_file, tab)
            raise Exception(err)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['mask'],
        mandatory_inputs=True),
        name='inputspec')
    # One workflow run per mask.
    inputnode.iterables = [('mask', mask_dict.keys())]

    # Reuses get_rest to index mask_dict with the current mask name.
    selectmask = pe.Node(util.Function(input_names=['scan', 'rest_dict'],
                                       output_names=['out_file'],
                                       function=get_rest),
                         name='select_mask')
    selectmask.inputs.rest_dict = mask_dict

    outputnode = pe.Node(util.IdentityInterface(fields=['out_file']),
                         name='outputspec')

    wf.connect(inputnode, 'mask',
               selectmask, 'scan')
    wf.connect(selectmask, 'out_file',
               outputnode, 'out_file')
    return wf
def create_spatial_map_dataflow(dirPath, wf_name='datasource_maps'):
    """
    Build a nipype workflow that iterates over the spatial map files listed
    in the text file dirPath.

    :param dirPath: path to a text file listing spatial map NIFTI files.
    :param wf_name: name for the generated workflow.
    :return: the configured nipype Workflow.
    """
    import nipype.interfaces.io as nio  # NOTE(review): appears unused here
    import os

    wf = pe.Workflow(name=wf_name)

    spatial_maps = open(dirPath, 'r').readlines()

    spatial_map_dict = {}

    for spatial_map_file in spatial_maps:
        spatial_map_file = spatial_map_file.rstrip('\r\n')
        if not os.path.exists(spatial_map_file):
            print "\n\n" + "ERROR: One of your spatial map files (under Spatial" + \
                  " Regression options) does not have a correct path or does not exist." + \
                  "\n" + "Tip: If all the paths are okay, then ensure there are no" + \
                  " whitespaces or blank lines in your spatial map specification file." + \
                  "\n\n" + "Error name: datasource_0001" + "\n\n"
            raise Exception
        base_file = os.path.basename(spatial_map_file)
        base_name = ''
        try:
            if base_file.endswith('.nii'):
                base_name = os.path.splitext(base_file)[0]
            elif(base_file.endswith('.nii.gz')):
                base_name = os.path.splitext(os.path.splitext(base_file)[0])[0]
            else:
                raise Exception("File extension not in .nii and .nii.gz File: %s" % spatial_map_file)
        except Exception, e:
            # NOTE(review): the bad-extension error is printed but swallowed,
            # so the file continues with base_name == '' — confirm intent.
            print('error in spatial_map_dataflow: ', e)
        if not (base_name in spatial_map_dict):
            spatial_map_dict[base_name] = spatial_map_file
        else:
            raise ValueError('Files with same name not allowed %s %s' % (spatial_map_file, spatial_map_dict[base_name]))

    inputnode = pe.Node(util.IdentityInterface(
        fields=['spatial_map'],
        mandatory_inputs=True),
        name='inputspec')
    # One workflow run per spatial map.
    inputnode.iterables = [('spatial_map', spatial_map_dict.keys())]

    # Reuses get_rest to index spatial_map_dict with the current map name.
    select_spatial_map = pe.Node(util.Function(input_names=['scan', 'rest_dict'],
                                               output_names=['out_file'],
                                               function=get_rest),
                                 name='select_spatial_map')
    select_spatial_map.inputs.rest_dict = spatial_map_dict

    wf.connect(inputnode, 'spatial_map',
               select_spatial_map, 'scan')
    return wf
def create_grp_analysis_dataflow(wf_name='gp_dataflow'):
    """
    Build a nipype workflow that selects the group-analysis model files
    (.mat/.con/.fts/.grp) via CPAC.utils.select_model_files.

    :param wf_name: name for the generated workflow.
    :return: the configured nipype Workflow.
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from CPAC.utils import select_model_files

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['ftest',
                                                       'grp_model',
                                                       'model_name'],
                                               mandatory_inputs=True),
                        name='inputspec')

    # Function node resolving the four FSL model files for the given model.
    selectmodel = pe.Node(util.Function(input_names=['model',
                                                     'ftest',
                                                     'model_name'],
                                        output_names=['fts_file',
                                                      'con_file',
                                                      'grp_file',
                                                      'mat_file'],
                                        function=select_model_files),
                          name='selectnode')

    wf.connect(inputnode, 'ftest',
               selectmodel, 'ftest')
    wf.connect(inputnode, 'grp_model',
               selectmodel, 'model')
    wf.connect(inputnode, 'model_name', selectmodel, 'model_name')

    outputnode = pe.Node(util.IdentityInterface(fields=['fts',
                                                        'grp',
                                                        'mat',
                                                        'con'],
                                                mandatory_inputs=True),
                         name='outputspec')

    wf.connect(selectmodel, 'mat_file',
               outputnode, 'mat')
    wf.connect(selectmodel, 'grp_file',
               outputnode, 'grp')
    wf.connect(selectmodel, 'fts_file',
               outputnode, 'fts')
    wf.connect(selectmodel, 'con_file',
               outputnode, 'con')
    return wf
|
bsd-3-clause
|
olivierdalang/QGIS
|
tests/src/python/test_qgsnullsymbolrenderer.py
|
45
|
3186
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
test_qgsnullsymbolrenderer.py
-----------------------------
Date : April 2016
Copyright : (C) 2016 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'April 2016'
__copyright__ = '(C) 2016, Nyall Dawson'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import QSize
from qgis.core import (QgsVectorLayer,
QgsProject,
QgsRectangle,
QgsMultiRenderChecker,
QgsNullSymbolRenderer)
from qgis.testing import start_app, unittest
from qgis.testing.mocked import get_iface
from utilities import unitTestDataPath
# Convenience instances in case you may need them
# not used in this test
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsNullSymbolRenderer(unittest.TestCase):
    """Rendering tests for QgsNullSymbolRenderer, with and without a
    feature selection."""

    def setUp(self):
        # Load the polys.shp fixture and attach a null symbol renderer.
        self.iface = get_iface()
        myShpFile = os.path.join(TEST_DATA_DIR, 'polys.shp')
        self.layer = QgsVectorLayer(myShpFile, 'Polys', 'ogr')
        QgsProject.instance().addMapLayer(self.layer)
        self.renderer = QgsNullSymbolRenderer()
        self.layer.setRenderer(self.renderer)
        rendered_layers = [self.layer]
        self.mapsettings = self.iface.mapCanvas().mapSettings()
        self.mapsettings.setOutputSize(QSize(400, 400))
        self.mapsettings.setOutputDpi(96)
        self.mapsettings.setExtent(QgsRectangle(-163, 22, -70, 52))
        self.mapsettings.setLayers(rendered_layers)

    def tearDown(self):
        QgsProject.instance().removeAllMapLayers()

    def testRender(self):
        # test no features are rendered
        renderchecker = QgsMultiRenderChecker()
        renderchecker.setMapSettings(self.mapsettings)
        renderchecker.setControlPathPrefix('null_renderer')
        renderchecker.setControlName('expected_nullrenderer_render')
        result = renderchecker.runTest('nullrenderer_render')
        assert result

    def testSelected(self):
        # select a feature and render
        self.layer.select([1, 2, 3])
        renderchecker = QgsMultiRenderChecker()
        renderchecker.setMapSettings(self.mapsettings)
        renderchecker.setControlPathPrefix('null_renderer')
        renderchecker.setControlName('expected_nullrenderer_selected')
        result = renderchecker.runTest('nullrenderer_selected')
        assert result
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
|
mhils/mitmproxy
|
test/pathod/language/test_http.py
|
11
|
10165
|
import io
import pytest
from pathod import language
from pathod.language import http, base
from .. import tservers
def parse_request(s):
    """Parse a pathoc spec string and return its first request."""
    requests = language.parse_pathoc(s)
    return next(requests)
def test_make_error_response():
    """Serving a generated error response must not raise."""
    d = io.BytesIO()
    s = http.make_error_response("foo")
    language.serve(s, d, {})
class TestRequest:
    """Parsing, rendering and freezing of pathoc request specs."""

    def test_nonascii(self):
        with pytest.raises(Exception, match="ASCII"):
            parse_request("get:\xf0")

    def test_err(self):
        # A bare method with no path is a parse error.
        with pytest.raises(language.ParseException):
            parse_request('GET')

    def test_simple(self):
        r = parse_request('GET:"/foo"')
        assert r.method.string() == b"GET"
        assert r.path.string() == b"/foo"
        r = parse_request('GET:/foo')
        assert r.path.string() == b"/foo"
        # @1k generates a 1024-byte path.
        r = parse_request('GET:@1k')
        assert len(r.path.string()) == 1024

    def test_multiple(self):
        r = list(language.parse_pathoc("GET:/ PUT:/"))
        assert r[0].method.string() == b"GET"
        assert r[1].method.string() == b"PUT"
        assert len(r) == 2

        # Requests can also be split over multiple lines.
        l = """
GET
"/foo"
ir,@1

PUT

"/foo
bar"

ir,@1
"""
        r = list(language.parse_pathoc(l))
        assert len(r) == 2
        assert r[0].method.string() == b"GET"
        assert r[1].method.string() == b"PUT"

        l = """
get:"http://localhost:9999/p/200":ir,@1
get:"http://localhost:9999/p/200":ir,@2
"""
        r = list(language.parse_pathoc(l))
        assert len(r) == 2
        assert r[0].method.string() == b"GET"
        assert r[1].method.string() == b"GET"

    def test_nested_response(self):
        l = "get:/p:s'200'"
        r = list(language.parse_pathoc(l))
        assert len(r) == 1
        assert len(r[0].tokens) == 3
        assert isinstance(r[0].tokens[2], http.NestedResponse)
        assert r[0].values({})

    def test_render(self):
        s = io.BytesIO()
        r = parse_request("GET:'/foo'")
        assert language.serve(
            r,
            s,
            language.Settings(request_host="foo.com")
        )

    def test_multiline(self):
        l = """
GET
"/foo"
ir,@1
"""
        r = parse_request(l)
        assert r.method.string() == b"GET"
        assert r.path.string() == b"/foo"
        assert r.actions

        l = """
GET

"/foo
bar"

ir,@1
"""
        r = parse_request(l)
        assert r.method.string() == b"GET"
        assert r.path.string().endswith(b"bar")
        assert r.actions

    def test_spec(self):
        # spec() must round-trip through the parser.
        def rt(s):
            s = parse_request(s).spec()
            assert parse_request(s).spec() == s
        rt("get:/foo")
        rt("get:/foo:da")

    def test_freeze(self):
        r = parse_request("GET:/:b@100").freeze(language.Settings())
        assert len(r.spec()) > 100

    def test_path_generator(self):
        r = parse_request("GET:@100").freeze(language.Settings())
        assert len(r.spec()) > 100

    def test_websocket(self):
        r = parse_request('ws:/path/')
        res = r.resolve(language.Settings())
        assert res.method.string().lower() == b"get"
        assert res.tok(http.Path).value.val == b"/path/"
        assert res.tok(http.Method).value.val.lower() == b"get"
        assert http.get_header(b"Upgrade", res.headers).value.val == b"websocket"

        r = parse_request('ws:put:/path/')
        res = r.resolve(language.Settings())
        assert r.method.string().lower() == b"put"
        assert res.tok(http.Path).value.val == b"/path/"
        assert res.tok(http.Method).value.val.lower() == b"put"
        assert http.get_header(b"Upgrade", res.headers).value.val == b"websocket"
class TestResponse:
    """Parsing, rendering and length accounting of pathod response specs."""

    def dummy_response(self):
        return next(language.parse_pathod("400'msg'"))

    def test_response(self):
        r = next(language.parse_pathod("400:m'msg'"))
        assert r.status_code.string() == b"400"
        assert r.reason.string() == b"msg"

        r = next(language.parse_pathod("400:m'msg':b@100b"))
        assert r.reason.string() == b"msg"
        assert r.body.values({})
        assert str(r)

        # A bare status code gets no explicit reason; preamble supplies "OK".
        r = next(language.parse_pathod("200"))
        assert r.status_code.string() == b"200"
        assert not r.reason
        assert b"OK" in [i[:] for i in r.preamble({})]

    def test_render(self):
        s = io.BytesIO()
        r = next(language.parse_pathod("400:m'msg'"))
        assert language.serve(r, s, {})

        # preview_safe() must strip pause actions from the spec.
        r = next(language.parse_pathod("400:p0,100:dr"))
        assert "p0" in r.spec()
        s = r.preview_safe()
        assert "p0" not in s.spec()

    def test_raw(self):
        # The :r (raw) flag suppresses the automatic Content-Length header.
        s = io.BytesIO()
        r = next(language.parse_pathod("400:b'foo'"))
        language.serve(r, s, {})
        v = s.getvalue()
        assert b"Content-Length" in v

        s = io.BytesIO()
        r = next(language.parse_pathod("400:b'foo':r"))
        language.serve(r, s, {})
        v = s.getvalue()
        assert b"Content-Length" not in v

    def test_length(self):
        # length() must equal the number of bytes actually served.
        def testlen(x):
            s = io.BytesIO()
            x = next(x)
            language.serve(x, s, language.Settings())
            assert x.length(language.Settings()) == len(s.getvalue())
        testlen(language.parse_pathod("400:m'msg':r"))
        testlen(language.parse_pathod("400:m'msg':h'foo'='bar':r"))
        testlen(language.parse_pathod("400:m'msg':h'foo'='bar':b@100b:r"))

    def test_maximum_length(self):
        # maximum_length() is an upper bound on the bytes actually served.
        def testlen(x):
            x = next(x)
            s = io.BytesIO()
            m = x.maximum_length({})
            language.serve(x, s, {})
            assert m >= len(s.getvalue())
        r = language.parse_pathod("400:m'msg':b@100:d0")
        testlen(r)
        r = language.parse_pathod("400:m'msg':b@100:d0:i0,'foo'")
        testlen(r)
        r = language.parse_pathod("400:m'msg':b@100:d0:i0,'foo'")
        testlen(r)

    def test_parse_err(self):
        with pytest.raises(language.ParseException):
            language.parse_pathod("400:msg,b:")
        try:
            language.parse_pathod("400'msg':b:")
        except language.ParseException as v:
            assert v.marked()
            assert str(v)

    def test_nonascii(self):
        with pytest.raises(Exception, match="ASCII"):
            language.parse_pathod("foo:b\xf0")

    def test_parse_header(self):
        r = next(language.parse_pathod('400:h"foo"="bar"'))
        assert http.get_header(b"foo", r.headers)

    def test_parse_pause_before(self):
        r = next(language.parse_pathod("400:p0,10"))
        assert r.actions[0].spec() == "p0,10"

    def test_parse_pause_after(self):
        r = next(language.parse_pathod("400:pa,10"))
        assert r.actions[0].spec() == "pa,10"

    def test_parse_pause_random(self):
        r = next(language.parse_pathod("400:pr,10"))
        assert r.actions[0].spec() == "pr,10"

    def test_parse_stress(self):
        # While larger values are known to work on linux, len() technically
        # returns an int and a python 2.7 int on windows has 32bit precision.
        # Therefore, we should keep the body length < 2147483647 bytes in our
        # tests.
        r = next(language.parse_pathod("400:b@1g"))
        assert r.length({})

    def test_spec(self):
        # spec() must round-trip through the parser.
        def rt(s):
            s = next(language.parse_pathod(s)).spec()
            assert next(language.parse_pathod(s)).spec() == s
        rt("400:b@100g")
        rt("400")
        rt("400:da")

    def test_websockets(self):
        r = next(language.parse_pathod("ws"))
        with pytest.raises(Exception, match="No websocket key"):
            r.resolve(language.Settings())
        res = r.resolve(language.Settings(websocket_key=b"foo"))
        assert res.status_code.string() == b"101"
def test_ctype_shortcut():
    """The c'...' shortcut expands to a Content-Type header, round-trips
    through spec(), and frozen generated values are stable."""
    e = http.ShortcutContentType.expr()
    v = e.parseString("c'foo'")[0]
    assert v.key.val == b"Content-Type"
    assert v.value.val == b"foo"

    s = v.spec()
    assert s == e.parseString(s)[0].spec()

    e = http.ShortcutContentType.expr()
    v = e.parseString("c@100")[0]
    v2 = v.freeze({})
    v3 = v2.freeze({})
    assert v2.value.val == v3.value.val
def test_location_shortcut():
    """The l'...' shortcut expands to a Location header, round-trips
    through spec(), and frozen generated values are stable."""
    e = http.ShortcutLocation.expr()
    v = e.parseString("l'foo'")[0]
    assert v.key.val == b"Location"
    assert v.value.val == b"foo"

    s = v.spec()
    assert s == e.parseString(s)[0].spec()

    e = http.ShortcutLocation.expr()
    v = e.parseString("l@100")[0]
    v2 = v.freeze({})
    v3 = v2.freeze({})
    assert v2.value.val == v3.value.val
def test_shortcuts():
    """Header shortcuts (c, l, ua) appear in parsed and rendered output."""
    assert next(language.parse_pathod(
        "400:c'foo'")).headers[0].key.val == b"Content-Type"
    assert next(language.parse_pathod(
        "400:l'foo'")).headers[0].key.val == b"Location"

    assert b"Android" in tservers.render(parse_request("get:/:ua"))
    assert b"User-Agent" in tservers.render(parse_request("get:/:ua"))
def test_user_agent():
    """The ua shortcut yields a canned user agent; u'...'/u@n give
    literal/generated values, and freezing is stable."""
    e = http.ShortcutUserAgent.expr()
    v = e.parseString("ua")[0]
    assert b"Android" in v.string()

    e = http.ShortcutUserAgent.expr()
    v = e.parseString("u'a'")[0]
    assert b"Android" not in v.string()

    v = e.parseString("u@100'")[0]
    assert len(str(v.freeze({}).value)) > 100
    v2 = v.freeze({})
    v3 = v2.freeze({})
    assert v2.value.val == v3.value.val
def test_nested_response():
    """Nested response specs (s'...') parse, reject bad codes, and freezing
    resolves the embedded generators."""
    e = http.NestedResponse.expr()
    v = e.parseString("s'200'")[0]
    assert v.value.val == b"200"
    with pytest.raises(language.ParseException):
        e.parseString("s'foo'")

    v = e.parseString('s"200:b@1"')[0]
    assert "@1" in v.spec()
    f = v.freeze({})
    assert "@1" not in f.spec()
def test_nested_response_freeze():
    """A nested response built from an escaped literal freezes and values
    without raising."""
    e = http.NestedResponse(
        base.TokValueLiteral(
            r"200:b\'foo\':i10,\'\\x27\'"
        )
    )
    assert e.freeze({})
    assert e.values({})
def test_unique_components():
    """Duplicate body clauses in one spec are rejected at parse time."""
    with pytest.raises(Exception, match="multiple body clauses"):
        language.parse_pathod("400:b@1:b@1")
|
mit
|
willthames/ansible
|
lib/ansible/plugins/lookup/nested.py
|
46
|
2098
|
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from jinja2.exceptions import UndefinedError
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
class LookupModule(LookupBase):
    """Lookup plugin backing `with_nested`: yields the flattened cartesian
    product of the supplied lists."""

    def _lookup_variables(self, terms, variables):
        """Template and listify each term, failing fast on undefined variables.

        :param terms: raw lookup terms, one entry per nested list.
        :param variables: accepted for interface compatibility; not used here
            (templating context comes from ``self._templar``).
        :returns: list of listified term lists.
        :raises AnsibleUndefinedVariable: if any nested variable is undefined.
        """
        results = []
        for term in terms:
            try:
                intermediate = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader, fail_on_undefined=True)
            except UndefinedError as e:
                raise AnsibleUndefinedVariable("One of the nested variables was undefined. The error was: %s" % e)
            results.append(intermediate)
        return results

    def run(self, terms, variables=None, **kwargs):
        """Return the flattened cartesian product of the nested lists.

        :param terms: the lists to combine.
        :param variables: ansible variables, forwarded to ``_lookup_variables``.
        :returns: a list of flattened product entries.
        :raises AnsibleError: if no nested lists were supplied.
        """
        terms = self._lookup_variables(terms, variables)

        # Reverse so that pop() below yields the lists in their original
        # order: the first list varies slowest in the product.
        remaining = terms[:]
        remaining.reverse()

        if not remaining:
            raise AnsibleError("with_nested requires at least one element in the nested list")

        result = remaining.pop()
        while remaining:
            result = self._combine(result, remaining.pop())

        # Each product entry may itself contain nested lists; flatten them
        # so callers receive flat element tuples.
        return [self._flatten(entry) for entry in result]
|
gpl-3.0
|
OpenCobolIDE/OpenCobolIDE
|
open_cobol_ide/extlibs/qdarkstyle/pyqt_style_rc.py
|
3
|
92074
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt4 (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = b"\
\x00\x00\x11\x43\
\x00\
\x00\x5f\xaa\x78\x9c\xc5\x1c\xdb\x72\xdb\xb6\xf2\x5d\x5f\x81\x3a\
\x2f\x49\x8e\x94\xe8\x12\x39\x36\xd3\x74\x46\xb6\xe5\x58\x73\x6c\
\xcb\x96\x94\xe4\x64\x3a\x9d\x0c\x25\x41\x16\x1b\x9a\x60\x49\xaa\
\xb6\xdb\xe9\xbf\x9f\x05\x08\x50\xb8\xf1\x26\x3b\x69\xd3\x71\x62\
\x72\xb1\x37\x2c\x16\xbb\x8b\x05\x5f\xbf\x6c\xa0\x97\x68\xb6\xc6\
\xe8\x62\x34\x43\xe7\xde\x02\x07\x31\x46\xcf\xe1\x97\x17\xf0\x82\
\xbe\x3b\x26\xe1\x43\xe4\xdd\xac\x13\xf4\x7c\xf1\x02\xfd\xdc\x6d\
\x77\x7a\x2d\xf8\xf1\xe6\x17\xf4\xf3\x31\xf1\xbd\x00\x9d\x6c\xfe\
\xd8\xe0\x38\x20\x0f\xbf\xf0\x11\x57\x38\xba\xf5\xe2\xd8\x23\x01\
\xf2\x62\xb4\xc6\x11\x9e\x3f\xa0\x9b\xc8\x0d\x12\xbc\x6c\xa2\x55\
\x84\x31\x22\x2b\xb4\x58\xbb\xd1\x0d\x6e\xa2\x84\x20\x37\x78\x40\
\x21\x8e\x62\x18\x40\xe6\x89\xeb\x05\x5e\x70\x83\x5c\xb4\x00\xca\
\x14\x1f\x00\x27\x6b\xc0\x14\x93\x55\x72\xe7\x46\x18\xe0\x97\xc8\
\x8d\x63\xb2\xf0\x5c\x40\x89\x96\x64\xb1\xb9\xc5\x41\xe2\x26\x94\
\xe4\xca\xf3\x71\x8c\x9e\x27\x20\xd2\xde\x94\x8f\xd8\x7b\xc1\xe8\
\x2c\xb1\xeb\x53\x84\xc0\x34\x7d\x2d\xde\xa2\x3b\x2f\x59\x93\x4d\
\x82\x22\x1c\x27\x91\xb7\xa0\x68\x9a\x00\xb4\xf0\x37\x4b\xca\x89\
\x78\xed\x7b\xb7\x1e\x27\x42\x87\x33\xa5\xc4\x14\x1f\xa0\xde\xc4\
\x20\x0a\x65\xb8\x89\x6e\xc9\xd2\x5b\xd1\xbf\x31\x93\x2f\xdc\xcc\
\x7d\x2f\x5e\x37\xd1\xd2\xa3\xd8\xe7\x9b\x04\x1e\xc6\xf4\x21\xd3\
\x75\x93\x4a\xf3\x9a\x44\x28\xc6\x3e\x63\x0e\x90\x78\x20\x00\x13\
\x7a\xcb\x23\x03\xa3\x84\x42\xaa\xdc\x84\xab\x2b\xa6\x4f\xee\xd6\
\xe4\x56\x95\xc7\x63\x5c\xad\x36\x51\x00\x84\x31\x1b\xb6\x24\xa0\
\x3e\x46\xf7\x77\xbc\x48\xe8\x13\x3a\x62\x45\x7c\x9f\xdc\x51\x19\
\x17\x24\x58\x7a\x54\xb4\xd8\x69\x08\x8b\x70\xe7\xe4\x4f\xcc\x84\
\x4a\xe7\x3f\x20\x09\xf0\x9c\x32\x42\xe7\x23\xdc\xce\x33\x7f\x15\
\xaf\x5d\xdf\x47\x73\xcc\x95\x07\xa4\xbd\x80\x62\xa3\x4f\x85\x5c\
\x11\x65\x22\x4e\xc0\x1a\x3c\xd7\x47\x21\x89\x18\x55\x5d\xde\x57\
\x29\x17\x67\x43\x34\x1d\x9f\xce\x3e\x0f\x26\x43\x34\x9a\xa2\xab\
\xc9\xf8\xd3\xe8\x64\x78\x82\xf6\x06\x53\xf8\x7d\xaf\x89\x3e\x8f\
\x66\x67\xe3\x8f\x33\x04\x10\x93\xc1\xe5\xec\x0b\x1a\x9f\xa2\xc1\
\xe5\x17\xf4\xdf\xd1\xe5\x49\x13\x0d\xff\x77\x35\x19\x4e\xa7\x68\
\x3c\xa1\xd8\x46\x17\x57\xe7\xa3\x21\x3c\x1e\x5d\x1e\x9f\x7f\x3c\
\x19\x5d\x7e\x40\x47\x30\xf4\x72\x0c\x86\x3f\x02\x8b\x07\xbc\xb3\
\x31\xa3\xc9\xb1\x8d\x86\x53\x8a\xef\x62\x38\x39\x3e\x83\x5f\x07\
\x47\xa3\xf3\xd1\xec\x4b\x93\xe2\x3a\x1d\xcd\x2e\x29\xe6\xd3\xf1\
\x04\x0d\xd0\xd5\x60\x32\x1b\x1d\x7f\x3c\x1f\x4c\xd0\xd5\xc7\xc9\
\xd5\x78\x3a\x04\x26\x4e\x00\xf3\xe5\xe8\xf2\x74\x02\x84\x86\x17\
\xc3\xcb\xd9\x2b\x20\x0c\xcf\xd0\xf0\x13\xfc\x82\xa6\x67\x83\xf3\
\x73\x4a\x8d\xa2\x1b\x7c\x04\x31\x26\x94\x51\x74\x3c\xbe\xfa\x32\
\x19\x7d\x38\x9b\xa1\xb3\xf1\xf9\xc9\x10\x1e\x1e\x0d\x81\xbf\xc1\
\xd1\xf9\x30\xa5\x06\xd2\x1d\x9f\x0f\x46\x17\x4d\x74\x32\xb8\x18\
\x7c\x18\xb2\x51\x63\x40\xc4\x84\xa4\x90\x29\x9b\xe8\xf3\xd9\x90\
\x3e\xa5\x54\x07\xf0\xff\xf1\x6c\x34\xbe\xa4\xf2\x1c\x8f\x2f\x67\
\x13\xf8\xb5\x09\xe2\x4e\x66\xd9\xe8\xcf\xa3\xe9\xb0\x89\x06\x93\
\xd1\x94\x6a\xe6\x74\x32\xbe\x60\x92\x52\xed\xc2\xa0\x31\xc3\x03\
\x43\x2f\x87\x29\x22\xaa\x79\x75\x82\x00\x84\xfe\xfe\x71\x3a\xcc\
\x70\xa2\x93\xe1\xe0\x1c\xd0\xc1\x6c\x5d\xea\x13\xfa\x0a\x1e\xbc\
\x6e\x5c\xcf\x08\xf1\x67\x5e\xd8\xf8\xbb\x81\xe0\xbf\x39\x89\x96\
\x38\x72\x50\x27\xbc\x07\x83\xf5\xbd\x25\x7a\xf6\x76\xff\xed\xe1\
\xdb\xe3\x77\xe9\x6b\x77\xf1\xed\x26\x22\x9b\x60\xd9\x5a\x10\x9f\
\x00\x60\x74\x33\x7f\x7e\xd8\x6e\xa2\x4e\xbb\x0b\x3f\x3a\x6f\x5f\
\xbc\x4b\x21\xf9\xeb\xbb\xb5\x97\xe0\xf4\x49\xe8\x2e\xe9\x72\x76\
\x50\x3f\xbc\x4f\x9f\x90\xd0\x5d\x78\xc9\x83\x83\xba\xed\xf6\xbb\
\xc6\x3f\x8d\xc6\xf5\x67\x6f\x79\x83\x13\xce\x0b\x47\xf1\x0c\xaf\
\x56\xed\x55\x27\x8f\x81\x67\xbd\x4e\x6f\xbf\x37\x4f\x5f\xc3\x22\
\xc6\xcc\x7f\xb4\x0c\xc0\x67\xbd\xa5\x8b\xf1\xa1\x0e\x57\x46\xc4\
\xf7\x42\x87\x6b\xe5\x9d\xa4\xa1\x96\x77\xeb\xde\x60\x07\x16\x5e\
\x80\xdf\x29\x9a\x6b\x83\xe6\x12\x70\xb6\x71\x08\xcb\x28\x48\xd0\
\xdc\x07\x6c\x5c\xde\x4d\x02\x0e\x1b\x46\x29\xd2\x3a\xa0\xa1\x5b\
\x67\x0d\x6b\x3d\x12\x93\x60\x11\x52\x62\x5e\x67\x59\x47\x95\x0a\
\x87\x97\xe5\xd8\xe8\xc8\xe3\x35\x5e\x7c\x3b\x22\xf7\x1c\x3a\xa6\
\x73\xa2\xce\x92\xe0\x7a\x2b\xab\x4d\x69\xb7\xb0\x9d\x78\xa0\x77\
\x92\x24\xe4\x16\xa6\x94\x0e\x97\xf1\x3b\xe0\x7f\xdd\xb9\x9f\xb1\
\x25\x70\x08\xf3\x52\x60\x1d\x0f\xdc\xe1\xc2\x4d\x48\xd4\x6c\x5c\
\x7f\x00\xe6\x43\xf5\x29\xc7\x71\xe7\x2d\x93\x35\x18\xeb\x81\xe0\
\x75\x8d\xa9\xa7\x14\x4f\xfe\x29\x1a\xcb\xf9\xf5\xf1\x2a\xb1\x71\
\xbb\x85\x77\x36\xc1\x82\x3e\xcd\x38\xe7\x53\xbf\x89\xfc\xe7\xce\
\xeb\x3f\xe2\xf8\xab\x07\xde\x3b\x7e\x1d\x2d\x5e\x33\xb8\x39\xb9\
\xff\x9a\x0d\x79\x15\x06\x37\x2f\x2a\xa0\x4e\xe7\xbf\x59\x06\xb5\
\x82\xad\x36\x2e\x85\x0a\x61\x23\x8d\x61\xaf\xb7\x8a\x6f\xa1\x59\
\x0c\x25\x68\x16\x43\x71\x9a\x4c\x45\x62\x29\x6c\x0d\xa6\x8e\xca\
\xbe\x32\x82\x25\x8a\xab\x3d\x23\xd5\xe6\xa3\xca\x6c\x54\x99\x8b\
\x6a\x33\x51\x65\x1e\xaa\xcc\xc2\x93\xcd\x41\xce\x0c\xd8\x65\x84\
\x7f\xe1\x84\x86\x20\x01\x44\x81\xd5\x27\x42\x19\x56\x32\x1d\x0a\
\x6c\xa1\xc2\x55\xc8\xa2\xe9\x53\x21\x65\x95\xd5\x67\xbf\x8e\xa9\
\x66\xfe\xaf\x64\x12\x35\x37\x59\x63\xd6\xc4\xc8\xca\x3e\xa7\x84\
\x23\x13\x70\x07\x07\x68\xe3\x6a\xe2\x2e\x3d\x72\xb4\x81\x8d\x22\
\xf8\x5e\xbb\x8e\x44\xa2\x7c\xe3\x51\xa0\xf3\x36\x99\x6e\x47\xdf\
\x64\xd2\x27\x3a\xb5\x9d\x36\x8e\x88\x22\xb0\xed\x1a\xe5\xb8\x33\
\x63\x2f\x05\x14\xeb\xa7\x14\x50\x5d\x14\xa6\x27\xb1\xcc\x4d\x65\
\xd1\x8c\x25\x93\xc3\x8d\xaa\xb9\x27\xe1\xc1\xdc\x02\x8a\x69\x97\
\x69\xb6\xa2\x5e\xbf\xa3\x56\x77\xd3\xa9\xbe\x20\x1e\x41\xb8\x78\
\x71\x3f\xd2\x99\xe8\x96\x63\x12\x6b\x5c\x5f\xe0\x60\x73\xe4\x16\
\x05\xce\x52\x76\x60\x0b\x9c\x39\x02\x87\x85\xce\x06\x1a\x47\x0e\
\xe5\x2d\x03\xf2\x63\x6d\x6d\x64\x49\x6e\x65\x22\xb6\x5b\x4b\xe5\
\xb4\xac\x30\x61\x30\x1d\x67\x2b\x73\x6e\x3c\x4b\x13\x2f\x32\x17\
\x47\xb9\xab\xc6\x4c\x3e\x35\xc9\x3f\x53\x74\x20\xe9\x22\xdb\x01\
\x04\x44\x5f\x83\xd8\xce\x8a\x9c\x3f\xa2\x1e\x4d\xb3\xc4\x3f\x14\
\x89\xd2\x80\x3e\xdb\x47\x4c\x66\xe5\x79\x41\xaf\x5f\xd2\xaa\x13\
\x8e\xfe\xc4\x6c\x0f\xa2\x25\x99\x68\x9b\x1d\xf2\xd1\x34\x4f\x56\
\x59\xd2\xe7\x3d\xcf\xb2\x1c\x00\x04\x52\x74\x05\xa0\xbf\xd5\xcd\
\x23\x63\x50\x32\x19\x9f\xbe\x9b\xfb\x1b\x6c\x91\xa8\xa3\x0b\x1a\
\xa5\x88\x74\x85\x89\x15\x87\x2a\xa6\x48\x0d\x50\x01\x2c\xfc\x16\
\xbe\x5f\xf8\x9b\xd8\xfb\x93\x16\x8f\x04\x8a\xf7\x88\xad\x3d\x50\
\x03\x28\x2f\x79\xf0\xa5\x77\x14\xd7\xf3\x18\x63\x74\x3d\x60\xaa\
\x62\x01\x04\x15\x37\x19\x0a\x44\x2f\x58\x7d\x41\x63\xcb\x51\x68\
\x6d\xdd\x02\x7a\x54\x6e\x55\x91\x48\x36\x6d\x3b\x50\xb3\xf9\xba\
\x62\xb2\xb5\x25\xab\x2b\xd7\xee\x52\xe5\xcb\x04\xd6\x60\xb7\x04\
\xe6\x8d\xd1\x9c\x39\x76\xdd\x18\x76\xb3\x84\x5d\xac\x20\x37\x50\
\xaa\x82\xbd\xba\xa2\xca\xb7\x9e\x02\x7a\xf5\x64\xa9\x23\xc9\x8e\
\x72\x94\x4a\xc1\x5c\x49\xcb\x8d\x22\x72\x87\x2c\xee\xb8\x84\x06\
\x1d\xfc\x95\x0d\x66\x88\xd3\x8d\x99\x17\xa4\x72\x62\xef\x37\x7d\
\xfa\xa7\xb4\xa4\x47\x39\x1c\xcc\x63\xf0\xd8\x8b\x64\x04\x6e\xf7\
\x93\x87\xef\x38\x26\xd7\x87\x2c\x8c\xe6\x60\x66\xa5\xaf\x78\xd3\
\xb7\x6f\x0a\xbd\x41\xef\xb0\x77\xa8\x54\xf8\xa8\xee\x36\xb1\xb4\
\x67\x71\x91\xd2\x70\x0f\x65\xfb\x35\xfb\x3d\x7f\x6b\x94\x6b\x6d\
\x33\x77\xae\x21\xc9\x12\x34\xf1\x40\x0e\x9f\xc4\xb3\x29\x20\xc2\
\x76\x42\x69\xc8\x46\x71\x9f\x43\x08\x37\x5c\x7a\x49\x7e\x28\xd4\
\xed\x75\xf7\xbb\x87\x79\xc5\x58\x2e\x35\x5b\xd9\x4e\xca\x7c\x69\
\xd8\x92\xa7\xad\xbc\x2d\x51\x64\x99\x48\x11\xa2\x26\x62\xbe\xfd\
\x25\x24\xa4\x85\x63\x31\x39\xdb\x04\x36\xf1\x12\xf0\x4d\x3c\xaf\
\xdc\xcc\xc1\x4c\x93\x88\xf8\x2d\x02\x76\x4a\x2d\x3a\x1d\xfe\x4e\
\x7f\x1d\x92\x98\x9d\xbf\x40\xdc\x46\x42\xb4\x80\xe0\x40\x14\x7c\
\x45\x44\xa4\x6f\xc3\xe2\x39\xdf\x87\xcd\x17\x8c\xc3\x4e\xc6\xa1\
\x30\xe4\xe9\x02\xe8\xf9\x83\x08\xbb\xca\x5c\x9a\x82\xd6\x0e\xfa\
\x8c\x58\x35\x25\x45\x8d\x74\x0d\xe2\xff\x05\xa2\xba\x7e\x43\x8d\
\x43\x3a\x7d\x55\xaf\x0e\xea\x01\xb9\x0e\x0b\xad\xf8\x3f\x4c\x6e\
\xe4\xfa\xf6\xb3\xee\xa0\x7b\xd8\xb5\x2f\x9d\x37\x66\x90\xb3\xb5\
\x46\x3e\x4c\xe5\xd3\x59\xbb\xc1\xd2\xc7\x26\xbf\x16\x0c\xfb\xed\
\xfe\x69\xff\x94\x33\x0f\x16\xc1\x43\x1d\xdd\xa2\x15\x66\x34\x6a\
\x30\x53\x2d\x96\xfb\x18\xf4\x84\x36\xda\x5c\x11\xfc\x6f\xdb\x19\
\x40\xb1\x4f\xd4\xbd\xae\x1c\x94\xb5\x8d\xa0\x2c\x7b\x62\x35\x4d\
\x86\xd6\x78\xaf\x5b\xb6\x26\x23\x40\x7e\x17\x19\xe9\x7a\xc8\x17\
\xd1\x94\xc8\x10\xda\x2a\x22\xc5\x5a\x57\x42\xcb\x2c\xf2\xdc\xbd\
\x0c\x2a\x4b\x3f\xea\xcc\xe8\x13\x48\x59\x79\x22\xcb\x66\x92\xcb\
\x89\xca\xc0\xaa\x0b\xba\x9d\xd6\x7f\x69\x36\x37\x61\x1a\x89\x48\
\xec\xab\xf2\x2d\xc9\x5d\x60\x80\x58\x12\xf0\x6c\x6f\x34\x8c\x25\
\xa4\xc2\xe7\xa1\xa7\xea\xd3\x00\x8a\x90\x4b\x43\x61\x26\x12\x88\
\xd9\x0a\x5c\x96\xec\x2b\x85\x06\x0d\x07\xac\x38\x5f\x7d\x41\xee\
\xe0\x80\xed\x1e\xb6\x9c\x59\xdd\xbf\x0a\x23\xa8\xe3\x60\x33\x5b\
\xd4\xc8\xc9\x9b\x8d\xe4\x7e\x2a\xba\x9e\x4d\xf8\xf4\x8e\x07\x76\
\xeb\x9d\xfd\xce\x93\x0a\x47\xcd\xfb\xe9\xc5\x4b\xeb\x39\x3b\xef\
\x1d\x42\x42\x8b\x5f\x35\x61\x98\xb3\xa9\x37\x93\x3f\x6e\x02\x8b\
\x67\xd0\xe6\x4f\x4d\xa0\xea\xde\x74\x3b\x99\xff\xda\x1c\x66\xfe\
\x54\xb0\x9f\xeb\x4d\x73\x5d\x42\xa9\x2f\xb5\xa3\xce\x3c\x69\x15\
\xc4\xd7\x33\x7c\x9f\x54\x4f\x60\xaa\xa5\x75\x4a\x81\xf5\xca\x77\
\xbd\xa0\x32\x95\x32\x32\xb5\xe2\x75\x4a\xfe\x0c\xbb\xf0\x9a\xa6\
\xb0\xb4\x38\xc2\x4a\x25\xf9\x4c\x94\x15\x54\xf3\xf2\xb7\x5c\xea\
\x53\xef\x2f\xfc\x21\xf2\xc2\xd2\x92\x41\x0c\x80\x37\x00\x68\x09\
\x57\xbb\x46\xb8\x2a\x72\xe3\xc6\xf5\x05\xe8\xf6\xb3\x17\x80\x35\
\x49\x35\xcf\x5a\x65\x79\xb3\x55\x88\xa7\x5c\x59\x16\x91\x9d\x12\
\x5a\x55\xbe\x74\x59\xb3\x9d\x5a\x53\xb7\x71\x95\xf5\xda\xe4\x2a\
\xff\x00\xfe\xec\xd7\x64\xae\x24\x5b\x53\x79\xcf\xce\x2e\x4c\x65\
\x65\xba\x2d\xc8\x9d\x2c\xd6\x51\xce\x60\xad\x6a\x72\xe3\xfa\x34\
\x72\x6f\xf1\xee\x19\xea\x3f\x02\xc5\xaf\x2b\xfa\x73\xba\x76\x43\
\xfc\x7e\xaf\xbd\xf7\x5b\x0d\x94\x4a\x94\xa3\x18\x73\xe2\xd2\x82\
\x96\xd2\x2d\x96\x37\x90\xf7\x5f\x31\x0f\x43\x08\x75\x4c\xa8\x78\
\xc0\xb3\xde\x61\xef\xa0\x77\xa0\x2b\x5f\x2d\xea\x48\xe6\xbb\x02\
\x37\xdc\xba\xe3\xb3\x36\x27\xfe\x52\x21\x66\xc9\x68\x4b\x97\xe0\
\xd9\x2d\x18\x68\x02\xe3\xe7\x6e\x94\xd5\xea\x0c\x84\xc2\xa9\x96\
\xa2\xfb\x54\x8c\x4e\x5e\x17\xd5\x59\x64\xa3\xaa\x20\xad\xce\x66\
\x01\x4a\x56\x0f\x7b\xf6\x47\xf2\x95\xbf\xfe\x0a\x5e\xfc\x2b\xaf\
\x45\x9b\x7b\xca\xb3\xfe\x41\xff\xb0\xef\xa6\x4e\x7f\x13\xaf\x95\
\x56\x83\x1d\xba\x0a\xb9\xad\x0a\x37\xa8\x45\x72\xb6\x45\x99\x5b\
\x4c\xcb\xad\xbb\x19\x0b\x41\x3b\x95\x55\x65\xd1\xeb\xaa\x4f\x22\
\x80\x52\x94\x2d\x11\x20\x2d\x6e\x65\x42\x68\xa7\x86\xe6\x8b\xca\
\xc5\xb3\xb2\x72\xa2\x60\x52\xd3\x07\xab\x8d\x1a\xa6\x50\x74\x1a\
\xca\x1d\xa6\x86\x47\x3b\x76\x2d\xc6\xa4\xe8\xa2\xd5\xc9\xd5\x46\
\xab\xf3\x36\xeb\x2f\x24\xb7\x73\x22\x75\x5b\xe6\xf7\xab\xaa\xa4\
\x9e\xba\x3a\x6b\xda\xa1\x54\x41\x7b\x9b\x9d\x29\x4a\x9a\xe1\x87\
\x08\x95\x22\x16\xcd\xb0\xf6\x07\xfb\x87\xfb\x87\xaa\x02\x44\x22\
\x21\x51\xe0\x4f\xb2\x3a\x69\xe8\x05\x12\xa0\xa8\x6f\x8b\xdf\x45\
\x20\x97\x21\x92\xa3\x3b\x1d\x17\x8b\xba\xc4\xc0\x08\xe3\xed\xef\
\xe5\x85\x7b\x9b\xd3\x50\x24\xc9\x5c\x8b\x62\x0f\x3d\xfb\x12\xd8\
\x06\x33\x45\x53\xff\xc6\xa5\x7f\x54\x3a\x28\xef\x28\xa4\x24\x52\
\xde\xb1\xbc\x5c\xc9\x34\x15\x3d\x38\xcb\x88\x84\x2d\x9a\x4f\x08\
\xeb\x36\x73\x14\xae\x8c\xe2\x34\x4e\x2e\x8f\x29\x55\x12\x59\x1e\
\xaa\x4e\x61\xb0\xba\xf7\x60\xef\x38\xaf\x4b\x37\x02\xee\xdd\x07\
\x13\x20\x6f\x31\xd1\xf9\x4b\x1d\x53\xa6\x34\xbd\x2a\x9a\xae\x6c\
\x1b\x90\xa6\x91\x2c\xb9\x2a\xed\x76\xc9\x4f\xfb\xf3\x50\x3a\xf4\
\x9a\x8c\xfd\x4d\xd6\x00\x69\x7b\x29\x9f\x23\xd5\xc8\x5c\x95\x33\
\x8c\x74\x6d\xa2\xbf\x73\x9c\x49\xfd\x7e\x95\x6a\xb9\x9d\xe5\x34\
\xc8\xe6\xb6\x74\x17\x02\x29\xf0\x5c\x8e\x01\x4a\x8e\x4e\x72\x6c\
\x57\xbe\x05\x60\x35\xdd\xf4\xd4\x48\x58\xaf\x8d\x11\x36\x0b\x3f\
\x90\x95\xb4\xfe\x6a\xe3\x24\x2b\x0b\x98\x0e\x77\x5b\x31\xc8\x1a\
\x44\x0b\x60\xc8\x6a\x55\x1a\xda\x15\xd5\xeb\x4a\xce\x42\xfe\x29\
\x60\x5d\xf1\xe0\x95\x6b\x4c\x2c\xbb\x31\x70\x6e\xd7\x87\x45\x58\
\x69\xf1\x14\xa8\x44\x5e\x99\xab\xd5\x23\x16\xfc\x8e\x6a\xd1\xd7\
\xff\x4e\x4b\xbc\x71\x7d\xee\xce\xb1\xaf\x6d\x8b\xed\x6c\x1d\xcb\
\xe9\x94\x38\xcd\x36\x61\xf3\x12\xb0\xec\xfc\xdb\x09\xdd\x00\x5b\
\x12\x31\x8b\xb3\xb0\x84\x2a\xdb\x73\xab\x0c\xef\xb6\x09\xf0\x8f\
\x10\xb6\x21\x48\x3a\x1e\x5a\xcb\xc8\xbd\x3b\x72\xe3\xf4\xc2\x0d\
\x7d\xb5\xed\x0d\xa3\x8d\x5f\x34\x2d\x12\x37\xf1\xd2\x1b\x76\xf3\
\x07\xd6\x53\xf6\xf2\xb5\xcd\xe5\xf4\x54\x6a\xd6\xe3\xf8\x62\xd9\
\x59\x66\xb4\xf0\x49\x8c\xb9\x0f\x40\xe5\x1d\x3b\x14\x5a\x32\x8b\
\xc2\xd6\x44\x1b\x89\x8a\xa6\x90\x0e\x61\xb0\x8f\xa2\xc6\xe3\xe7\
\x6a\x72\xb5\x38\x74\x45\x8a\x30\x65\xb3\xf1\x15\x9a\x0d\x8e\xa6\
\xe9\xcd\x35\xce\x40\xe2\xce\x1d\x1a\x35\x14\xe4\x77\x95\x82\x64\
\x11\xae\xe7\x54\x10\x72\x36\x2e\x39\xcf\x2a\x8c\xaa\xfb\x7a\x94\
\x42\xc3\x0c\x16\x88\xe4\x84\x66\x96\x38\xa4\xab\xd9\xa0\x90\xdd\
\xf9\xa9\xa4\x21\x31\x87\x79\x48\xa5\xde\xf6\x8f\x9e\x5a\x49\x8f\
\x12\x8f\x02\x14\x8b\x98\x9a\x74\x79\xb2\x97\x9a\xcc\xd1\x78\x36\
\x1b\x5f\xd8\xad\x26\x15\xe6\xd1\x86\x93\x36\x7a\x3c\xa1\xd5\xa8\
\x51\x66\xa1\x12\xad\x91\xa8\x2d\x3a\xea\xb7\x6d\xb6\xc3\xa7\xf3\
\x87\x99\x4f\xa9\xaa\x9e\x42\xf2\x32\x21\xeb\x19\xd0\xf9\xf0\x74\
\x66\x37\x1f\xca\xde\xa3\x8d\x87\x57\x46\x9e\xde\x7a\x72\xbc\x47\
\x2d\xdb\xc9\x8e\x7c\xad\xc6\xc3\x58\xff\x61\xa6\x53\xae\xa8\xc7\
\xcb\x5d\x2c\x61\x0d\xbb\xa1\x86\x93\xde\x22\xb7\x5a\x4e\x1a\x6e\
\x3c\xd6\x74\xa2\xec\xa4\xe0\x7b\xd8\x4e\x95\xc5\x67\x87\x29\xb7\
\x9c\x94\xf3\x1f\x66\x3a\x15\x14\xf5\x58\xb9\x4b\x24\xac\x61\x3a\
\x1c\x0d\x92\x2a\xdf\x4a\x87\xae\x83\x03\x96\x2c\x70\x64\x35\x3a\
\x73\x41\x12\xc0\x8f\xac\x04\x98\x44\x35\xf1\x9b\xad\x42\x55\xd8\
\x17\xc9\x4e\x2d\xfe\x8d\x14\xa9\x8a\x20\xd5\x29\x15\xf4\xb2\xb1\
\x9c\xe8\x84\x2c\xbe\xa5\x99\x8b\xed\xb8\xc1\x2c\xb5\x2b\x06\xf9\
\xa6\xdd\x3b\xed\xf1\x3e\x1a\xd6\x9c\x3a\x77\xa3\x56\x1a\x00\x53\
\x06\x2a\xc5\xfd\xd9\xb8\x80\x44\xb7\xae\x9f\x3f\x10\x38\x02\x5e\
\xa5\x92\xcd\x96\x75\x35\x54\x6f\x22\xe5\xd5\xca\x27\x6e\xd2\x52\
\x8f\x54\x0a\xaf\xce\x14\xd7\x16\x8b\x12\x87\x3c\x8e\xb2\xde\x8b\
\x3c\xbe\x72\x16\x11\xfb\x08\x85\xfb\xbc\xdb\xef\x37\xd1\xf6\x47\
\xa7\x5d\xa2\x80\xec\x72\x76\x01\x41\x35\x9d\xc9\xdc\x25\xd5\x48\
\x2b\xfb\x61\x39\xa9\x2d\x61\x49\x94\xa0\x81\xf4\xb9\x17\x27\x72\
\x2d\x77\xe7\xc2\x99\x8c\xd7\x99\x83\xce\x17\xeb\xec\x9e\x00\xd0\
\xd1\x5f\xd9\xbf\x37\x61\x35\x29\x69\x02\x25\xbb\xca\x10\x66\x18\
\xdd\xb8\x15\x7b\x73\x1f\x34\x14\x3b\x3f\xb9\xcb\xdf\x89\x17\xc4\
\x2d\x7a\x5b\x4a\xb1\xa7\x82\x66\x99\x5d\x08\x7d\x47\x3a\x3f\x51\
\x42\x8b\xb5\xe7\x2f\x01\x32\xfd\xed\x47\x90\x2d\xa0\xca\xcc\x97\
\x5e\xd7\x36\x06\xa5\x6f\xd4\xb1\xf2\xd0\xd2\x84\x3c\xc5\xf3\x35\
\xc5\x53\xc4\x1e\x09\x39\x6a\x3b\x8f\x36\xe6\xcc\x21\x0a\x6b\x55\
\x79\xa3\x68\x1e\xa5\xb8\xac\x2c\x5e\x5b\x7d\x8a\xe7\xa9\xa8\xc4\
\x16\x09\x24\x07\xbe\x8b\x2e\xf3\xf9\x2d\xd6\x28\xe7\xb6\x8e\x5e\
\x4d\x66\x85\x63\xe2\x17\x1e\xb5\x48\x46\xf6\x28\x56\x00\x94\xe7\
\xa6\x3b\xfb\x6f\x9b\xdd\xce\x41\xb3\xfb\x06\x3c\x62\xfb\x55\x8f\
\x93\x94\xbf\x8d\x63\xc6\x82\x0d\x0b\x47\x65\x0c\x95\xf3\x53\x7a\
\xc4\xc7\x2f\xde\x38\x30\x00\x70\x98\xfd\x11\x16\x6f\xdd\xdf\xef\
\xbb\x7d\xac\x96\x6c\x2d\xd7\x20\x1c\x15\x52\xa9\x68\xe6\x6d\xac\
\x32\x47\x79\x4d\x25\x0a\x05\xf3\x14\x30\x97\x55\x51\x6e\xde\x37\
\xca\xcd\xd9\x13\xc1\x63\xeb\x80\xb6\xb3\x5a\xd9\x3c\xd4\xd9\xe4\
\x8a\xd3\x7a\x40\xaa\xf1\xf2\x78\xad\xf5\x72\xb4\xa6\xb3\xf3\xfd\
\x75\xd6\x66\x5a\x2b\xd6\xd9\x36\x88\xcd\x4d\x13\x0a\xaf\x94\x5b\
\xbb\xa5\x8a\x82\x34\xa9\x4d\xd9\x96\x0e\xaa\x3c\xfd\x1a\x92\x70\
\x13\x5e\x90\x25\x7e\xbf\xd7\xd9\xfb\x0d\xfd\x4d\x2b\xe8\x24\xf0\
\x1f\xd8\x8d\x69\x76\x17\x8e\xc1\x5d\x51\x30\x56\x41\xd7\x5a\x3c\
\xd8\x55\x2d\x56\x76\x77\xbf\x61\x74\xe7\xa6\x03\x69\xed\x9d\x61\
\x16\x37\x4b\xe9\x48\x59\xaa\xad\x24\x9a\x18\x25\x3c\x76\x0d\x1e\
\x47\x01\xfb\xea\x5d\x2e\x83\x9d\x47\x32\xc8\x12\x05\x29\x13\xc9\
\xdc\x92\x94\x9c\xdc\x82\x9e\x8a\x83\xd9\x4a\x53\x6d\xe9\x50\x28\
\x9a\x3b\xd1\xbb\xa1\x32\x93\x7d\x9b\x08\xf1\xff\x72\x39\x55\xa3\
\xe0\x4a\x9d\x2a\xd5\xf9\x04\x85\x53\x1d\x6f\x4f\x31\xd1\x1c\xfb\
\xe4\x8e\x7e\xba\x72\x43\x89\xb2\xf9\xe3\x5f\x8c\x54\xa6\x10\x26\
\xe7\x04\xfb\xee\x03\x5e\xa6\xbf\xdf\xc2\xbc\x67\x5f\xb4\x53\xc4\
\xd0\xef\xc4\xd7\x6c\xd7\x4e\xdb\x7b\x68\x0b\x0f\x3f\x40\x6a\x75\
\xb9\xa5\xc4\x6b\x6f\x95\x20\x2f\x41\x2e\x9a\xc3\xcf\xf4\x33\x01\
\x86\x40\x31\x97\x88\x7e\x1a\xd2\x10\x49\x5f\x39\xf9\x52\xe4\xe6\
\x66\x65\x0b\xdf\x2c\x88\xed\xdb\xab\x1a\x39\x40\x20\x10\xf5\x68\
\xa9\xbf\x43\xff\xa1\x8e\x99\x2d\x0d\xfe\x71\x84\xf7\x6c\x69\xd3\
\x6f\x4c\x92\x05\xfb\x1e\x68\xfa\xdd\x4a\x7e\x88\x66\x38\x49\x4b\
\x5f\x9b\x21\xaa\x7c\x13\xb9\x66\x87\x82\x1d\x17\x0b\x98\xca\x8f\
\x1b\xb5\x96\x27\xdd\x7a\xf2\x2f\x95\x96\xb7\xb5\xf0\xa3\x06\xa9\
\xb3\x25\xb5\xa5\x03\xa9\x78\xe4\xe3\xea\x99\xe0\x4d\xe4\x2d\xa9\
\x1e\xed\xfd\x7e\x85\x69\xa2\x44\x0b\x3c\xc2\xb6\x2d\xdd\xde\x99\
\xdb\x36\x18\x54\xbf\x50\x22\xe5\xb0\xc6\x0b\x2d\x22\x13\x7e\x64\
\x97\x50\x4c\xa7\x9e\x45\x77\xee\x22\xf1\xfe\xc4\x05\xe1\x5f\x06\
\x90\x17\x3f\xa6\x00\xbb\x70\xd5\xb0\x68\xaf\x4a\x27\xa6\xb1\x6e\
\xad\x1b\x75\xdb\x88\x22\x8c\x8d\x9a\x3d\xc9\xbb\x5d\x60\x4a\x54\
\xf9\x82\x7c\xfd\x8e\x9e\x1c\xd6\x13\x7c\x9f\xb4\x5c\xdf\xbb\xc9\
\x7a\x52\x72\x2f\x43\x38\xdb\xfb\x35\xce\xca\x8b\xe2\x44\x31\x4e\
\x2b\x18\x75\xa4\x90\xb9\x68\x6d\xea\xd9\x19\x53\xb5\x4b\x18\x8e\
\x71\x3d\x45\x42\x63\x14\xaf\xac\x08\xa4\x8b\x96\xc5\xac\xcb\x80\
\x76\xe6\xb7\xa7\x1c\x55\xb9\x37\xaf\x29\xca\x98\x74\x01\xec\x48\
\xc4\xd7\xb6\xd4\x23\x09\xe9\x4e\x81\xcd\x86\x7a\x6f\x30\x0b\x85\
\x69\x29\x96\x6e\x86\xec\x9b\x20\x6c\xeb\x23\x51\x22\x7d\x1c\x84\
\xee\x66\x32\xd1\x6d\x9f\xca\x6e\x2e\x5e\xc6\x25\x5a\x81\x2a\xb7\
\x20\xc9\x9d\x2e\xcc\xa7\x1c\x93\x28\xc0\x91\xf0\xf8\x62\xe9\x3c\
\x66\x31\x97\xaf\x8c\x6c\xa3\x22\xf7\x62\x91\x16\xaf\xb8\xa2\x0b\
\x0d\xec\x9b\x07\xee\xbc\xe8\x34\xa9\xba\x18\xf5\x0f\xfe\x6d\x8d\
\xe6\xc6\x31\x4a\xf9\xd9\x64\x5f\xd3\x4b\x7a\x8a\xb2\xfd\xd0\x09\
\xb5\x30\x0f\x6c\xdc\x5b\x78\x7f\x61\x94\x3d\x07\xa0\x58\x84\x1a\
\xf4\x22\x86\xc3\x81\x6a\x08\x6e\x46\xb1\xfc\x8a\x49\xb2\x89\xb3\
\xef\x71\x69\xb1\x83\xde\xe7\x43\x9b\x4c\xc5\x59\x41\x7a\xeb\x25\
\xcd\x05\xdf\xef\xf5\xf6\x7e\x03\x67\x90\x3e\x63\xb1\x10\x7b\x94\
\x6f\x5f\x4a\x36\x31\x0d\x7d\x2f\x49\xb6\x99\xab\x25\x82\xb1\xdd\
\x7a\xd2\x87\x95\x1d\x3e\xc9\xf7\x9d\xca\xae\x91\x99\xa8\xb5\x0a\
\x84\x7c\xc9\xc0\x3a\x42\xcb\xbe\x95\xfb\x4e\x2c\x00\x8b\xc8\x0d\
\x0d\x14\xec\x37\x75\xca\xf7\x9e\x7e\xd9\xde\x23\x11\x70\xc0\xf3\
\x6d\x82\x6f\xf9\xaa\x69\xf7\x8f\x0e\x8e\x99\xec\xff\x07\x4f\xc5\
\xfa\x5e\
\x00\x00\x03\xac\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x03\x29\x49\x44\
\x41\x54\x58\x85\xed\x95\x4f\x68\x5c\x55\x14\xc6\x7f\xe7\x65\x88\
\x64\xda\xc6\xbd\xa9\x94\x48\x57\xb6\x91\x3a\x28\xae\xd3\x4d\xc5\
\x0a\x4d\x40\x66\x63\xda\x37\x2f\x25\xcd\x46\x07\xd1\x24\x8e\xae\
\xb2\x50\xa8\x49\xdd\x64\x99\xc2\xbc\x19\xd3\x6e\x9e\x20\x53\xc1\
\xe2\x9f\x85\x75\x1b\xfc\xd3\xa4\x15\x91\x52\x4a\x70\x4a\xd7\x25\
\x33\x24\xcd\xe0\xfb\x5c\xbc\x37\x4d\x90\xbc\x37\x1d\xe9\xce\xf9\
\x56\xf7\xcf\x77\xce\xfd\xee\x39\xe7\x9e\x0b\x3d\xf4\xf0\x7f\x87\
\x75\x43\x0e\x82\xa0\x7f\xab\xd1\x18\x97\xd9\x98\x41\x0e\x18\x8a\
\xb7\xea\x98\xfd\x2a\xa8\x65\xb3\xd9\x5a\x3e\x9f\xdf\x79\xea\x02\
\xaa\xe5\xf2\x5b\x98\x2d\x00\xc3\x06\xb7\x04\x37\x64\x56\x07\x70\
\xc2\x70\x08\xb3\x51\xc1\x08\x70\xd7\x60\xee\x9c\xe7\x7d\xf5\x54\
\x04\x04\x41\xd0\xb7\xd5\x6c\x2e\x00\xef\x1b\x7c\x6b\x61\x58\x3a\
\x7b\xfe\xfc\xda\x7e\x5c\xdf\xf7\x4f\x38\x70\x11\x38\x05\x2c\xde\
\xdb\xd8\x28\xcd\xcf\xcf\x87\x69\xfe\x33\x9d\x04\xc4\x87\xbf\x27\
\x69\xd6\x9d\x9c\xbc\x94\xc6\xf5\x3c\xef\x26\xf0\x7a\xd5\xf7\x67\
\x81\x8b\xc3\x47\x8e\x00\xcc\xa5\xd9\xa4\x46\x20\x0e\xfb\x97\x66\
\x36\x73\xae\x50\xf8\x1c\x60\x69\x69\xe9\x99\xc1\xc1\xc1\x69\x93\
\xde\x26\x0a\x39\x26\xad\xcb\xec\xea\xc3\xcd\xcd\xe5\x62\xb1\xf8\
\x08\xa0\x52\xa9\xcc\x99\xf4\x99\x03\xe3\x67\x3d\xaf\xd6\xb5\x80\
\x20\x08\xfa\xb7\x9b\xcd\x3f\x24\xfd\xe9\x4e\x4e\xbe\x01\x70\xe5\
\xf2\xe5\xc3\x61\x26\x73\x3d\xce\x75\x08\x38\x31\x3d\x1a\x9b\xad\
\xf7\xb5\x5a\xa7\x27\xa6\xa6\xea\x00\x15\xdf\xff\xde\xcc\x86\x07\
\xb2\xd9\x63\x49\x85\xe9\xec\xb7\x08\xb0\xd5\x68\x8c\x0b\x5e\x70\
\xa4\x8f\xda\x37\x0f\x33\x99\xeb\x32\x3b\xbe\x8f\x6d\x7b\x3c\xf2\
\x77\x26\xf3\x4d\x10\x04\xfd\x00\xe6\x38\x1f\x22\x1d\xdd\x6e\x36\
\xcf\x24\x9d\x93\x28\x40\x66\x63\xc0\x5a\xbb\xe0\x9e\x3d\x74\xe8\
\x82\x60\x04\x29\x39\x6d\xd1\xde\x4b\x5b\x8d\xc6\x05\x00\xd7\x75\
\x7f\xc3\xec\x36\xd0\xbd\x00\x83\x9c\x49\x3f\xed\x59\x9a\x20\x0a\
\x75\x3a\xa4\xd0\x22\x6e\x7b\xfe\xa3\xe0\x95\xae\x05\x60\xf6\x5c\
\xfb\x9d\xc7\x38\x96\xca\xdf\xb5\x73\x14\x71\xdb\xb8\x8f\xd9\x50\
\x12\x3d\xd5\xa1\xcc\xba\xea\x94\xfb\xea\x01\x43\x4a\x8c\x5c\xb2\
\x00\xe9\x81\x49\x87\xf7\xac\xfc\xce\x13\xa6\x40\x70\xfb\xf1\x34\
\xba\xfd\x83\xee\x05\x98\xfd\x8c\xd9\xe8\x9e\x95\x2b\xa9\xfc\x5d\
\x3b\xc7\xe0\xea\xae\x1e\x9d\x04\x56\xbb\x16\x20\xa8\x21\x1d\xf7\
\x7d\xff\x04\xc0\xc3\xcd\xcd\x65\xcc\xd6\x31\x53\xca\xe1\x02\x6e\
\x0e\x1c\x3c\xb8\x0c\xb0\x52\x2e\xe7\x0c\x5e\x44\xfa\xba\x6b\x01\
\xd9\x6c\xb6\x06\xdc\x8d\x7b\x3b\xc5\x62\xf1\x51\x5f\xab\x75\x1a\
\xb8\x15\x53\x76\xd3\xd1\xce\xb1\xb4\x86\xe3\xbc\x99\xcf\xe7\x77\
\x24\x59\x18\x7d\x5e\x77\xb6\x5b\xad\x6b\x5d\x0b\xc8\xe7\xf3\x3b\
\x38\xce\x2c\x70\x2a\xee\xed\x4c\x4c\x4d\xd5\x07\xb2\xd9\x57\x91\
\xde\x95\xb4\x0a\x34\x81\xa6\x60\xd5\xcc\xde\x19\x38\x70\xe0\x35\
\xd7\x75\xef\x03\x54\x7d\xbf\x04\x9c\x94\xd9\xcc\xf4\xf4\x74\x2b\
\xe9\x9c\x8e\x55\x5e\xf5\xfd\x05\xe0\x03\xa0\xe4\x7a\xde\x62\x27\
\xbe\x24\xab\xfa\x7e\xc9\xcc\x3e\x01\x16\x5d\xcf\x2b\xa5\xf1\x3b\
\x16\xd5\xbd\x8d\x8d\x92\xa4\x4b\xc0\x42\xd5\xf7\xbf\xab\x56\xab\
\x2f\x27\x71\x57\xca\xe5\xdc\x17\x95\xca\x0f\x66\xf6\x29\xd1\x77\
\xfc\x71\x27\xff\x4f\xfc\xce\x57\x7c\x7f\x2c\x34\x5b\x44\x3a\x1a\
\xb7\xd7\x1b\x82\xbf\x62\x27\xcf\x23\x8d\x12\x35\xa0\x3b\x32\x9b\
\x29\x14\x0a\x89\x85\xf7\x9f\x04\xc0\xe3\x1f\xf2\x8c\x60\x0c\xc8\
\x61\x16\xf5\x09\xa9\x6e\xf0\x8b\xa4\xda\x76\xab\x75\x2d\x2d\xe7\
\x3d\xf4\xd0\xc3\xbf\xf1\x0f\x78\xe5\x4e\xf2\x11\xe4\x69\x42\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\x4a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x14\x1a\x38\xc7\x37\xd0\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x01\xae\x49\x44\x41\x54\x78\xda\xed\x9b\x49\
\x92\xc3\x20\x0c\x45\x23\x5d\xdc\xf6\xc9\xd3\xbb\xae\x54\x06\x26\
\xe9\x7f\x09\x8c\xd6\x5d\x32\xef\x21\x68\x20\xf0\x78\xec\xd8\xb1\
\xe3\xce\x21\xcc\x8f\x9d\xe7\xf9\x6c\xfc\x3b\x59\x42\x40\x2b\x70\
\xa4\x10\xc9\x0a\xcd\x92\x21\xb3\x80\xa3\x44\xc8\x8c\xf0\x9e\x12\
\x64\x46\x70\x4f\x11\x32\x3b\xbc\x55\x82\xcc\x0e\x6e\x15\x21\x2b\
\xc1\x8f\x48\x90\xd5\xe0\x7b\x25\xe8\x5e\x0a\x2f\xd8\xfb\x3d\x55\
\x20\x56\xf8\xe3\x38\xfe\x73\x5c\xd7\x45\x11\xf5\xfa\xcd\xda\x77\
\x6b\x12\xd4\xbb\x61\xef\x8d\x43\xc3\x5b\x43\x11\xa5\x8f\x92\x30\
\x92\xb7\xc6\xa0\xa8\x71\xef\x2d\xc1\x92\xaf\xc4\x62\x1e\x02\xa5\
\xf1\xe7\x25\xa1\x94\xc7\x3a\xef\x88\x57\xef\xa3\x1a\xe9\x99\xf7\
\xdb\x84\xe8\x36\x09\x22\x2a\x01\xd9\xf3\x90\xff\x02\x9e\x12\x18\
\xf0\x5f\x87\x80\xc7\xa2\xc7\xda\x78\x24\xfc\xfb\x30\x80\x2c\x85\
\x2d\x95\xc0\xea\x79\xf8\x5e\x60\x44\x02\x1b\x1e\xbe\x19\xea\x91\
\x10\x01\xff\x31\x07\xa0\x36\x3d\x35\x38\x36\xfc\xeb\x3c\x40\xd9\
\x0e\x8f\xce\x09\x8c\xcd\x15\xed\x3c\xa0\x17\x86\xb5\xb3\xa4\x1e\
\x88\xb4\x42\xb1\xe0\xe9\x02\x5a\xe0\x98\xf0\x21\x02\x2c\xeb\x80\
\xe9\x05\xb4\xc2\x31\x25\x68\x36\x78\xb6\x04\x8d\x86\x67\x9c\x27\
\x84\x0a\x68\x81\x8f\x94\x00\xd9\x0d\x8e\xf6\x3c\x63\x51\x44\xd9\
\x0d\x8e\xc2\x44\x54\x82\x66\x1a\xf3\x11\x12\x34\x13\x7c\x84\x04\
\xb7\x43\x51\xc4\x18\xf6\xce\x07\x3d\x14\x45\x4c\x60\x8c\x4a\xd0\
\xac\xf0\x2c\x09\x52\x28\x97\x67\x34\xbc\xe7\x77\x7e\xfd\x48\x1a\
\x72\x26\x98\x21\x5f\x55\x80\xe5\xe6\x15\xaa\xb1\xa3\x79\x4b\x2c\
\x9a\xbd\xe7\xd1\xf9\xcd\x17\x24\xb2\x47\xad\x92\xf7\x15\x99\x8e\
\x64\xfb\x96\xd8\x8a\xb1\x2f\x4a\x0e\x24\xbf\xef\x55\xd9\xcc\x22\
\x68\x97\xa5\x33\x4a\x08\xb9\x2e\x9f\x45\x82\xf5\xd1\xc4\x7e\x32\
\x03\x68\xd8\x3d\x1f\x4d\x21\x65\x4c\xf5\x6c\xce\x43\x08\xf3\xe1\
\xe4\x8e\xbb\xc7\x1f\xfe\x88\x5a\xe2\xcd\xef\x1c\x49\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xac\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x07\x00\x00\x00\x3f\x08\x06\x00\x00\x00\x2c\x7b\xd2\x13\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xb3\x00\x79\x00\x79\xdc\xdd\
\x53\xfc\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x2e\x14\xfa\xd6\xc4\xae\x00\x00\x00\x39\x49\x44\
\x41\x54\x38\xcb\x63\x60\x20\x06\xc4\xc7\xc7\x33\xc4\xc7\xc7\xa3\
\x88\x31\x61\x53\x84\x53\x12\xaf\xce\x91\x28\xc9\x82\xc4\xfe\x8f\
\xc4\x66\x1c\x0d\xa1\x51\xc9\x51\xc9\x51\x49\x7c\x05\x06\xe3\x68\
\x08\x91\x2a\x49\x3e\x00\x00\x88\x4b\x04\xd3\x39\x2e\x90\x3f\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xb6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x11\x08\x06\x00\x00\x00\xc7\x78\x6c\x30\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\
\x0b\x2c\x0d\x1f\x43\xaa\xe1\x00\x00\x00\x36\x49\x44\x41\x54\x38\
\xcb\x63\x60\x20\x01\x2c\x5a\xb4\xe8\xff\xa2\x45\x8b\xfe\x93\xa2\
\x87\x89\x81\xc6\x60\xd4\x82\x11\x60\x01\x23\xa9\xc9\x74\xd0\xf9\
\x80\x85\x1c\x4d\x71\x71\x71\x8c\xa3\xa9\x68\xd4\x82\x61\x64\x01\
\x00\x31\xb5\x09\xec\x1f\x4b\xb4\x15\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x02\x42\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xb3\x00\x79\x00\x79\xdc\xdd\
\x53\xfc\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x17\x3b\x5f\x83\x74\x4d\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x01\xa6\x49\x44\x41\x54\x78\xda\xed\x9b\xdb\
\x0e\xc3\x20\x0c\x43\x9b\x68\xff\xdd\xf6\xcb\xb7\xb7\x69\x9a\x76\
\x49\x4b\xec\x98\x42\x5e\x37\x51\x7c\x70\x28\x85\xb0\x2c\x33\x66\
\xcc\x18\x39\x8c\xf9\xb0\x6d\xdb\xee\xc1\xff\xd9\x25\x00\x44\x05\
\x57\x02\x31\x55\xd1\x2c\x18\xd6\x8b\x70\x14\x08\xeb\x51\x7c\x26\
\x04\xeb\x51\x78\x26\x08\xeb\x5d\x7c\x2b\x04\xeb\x5d\x78\x2b\x08\
\xbb\x92\xf8\x33\x10\xec\x6a\xe2\x8f\x42\xb8\x55\x76\x72\x5d\xd7\
\x67\x27\xf7\x7d\x2f\x01\x6c\x55\xa3\xff\x2a\x1e\x05\x21\xe2\x02\
\x53\x11\x5f\x05\xc1\x2b\x6d\x7f\xe6\x77\x6a\x0a\x64\x8f\xfe\x11\
\x71\x99\x4e\xf8\xe5\x02\x53\x14\xcf\x84\xe0\xd5\xb6\xff\x25\x92\
\x91\x0e\x86\x1e\xfd\xa8\x78\xc6\xc4\xf8\xc9\x05\xae\x32\xf2\x55\
\x4e\x70\x25\xdb\x57\x40\x30\x84\xfd\x5b\xed\x8c\x4c\x87\xf7\x34\
\x70\x85\x91\xaf\x74\x82\xab\x89\x67\x43\x70\x45\xf1\x4c\x08\x96\
\x91\xff\xe8\x57\x58\x76\xfb\xaf\xf3\x80\x2b\x8e\x3c\xd3\x09\xae\
\x2e\x1e\x0d\xc1\x7b\x10\x8f\x84\xe0\xcc\x4e\x2a\xb6\x4f\x5d\x07\
\x28\xb6\xef\x6a\x39\xc9\x4e\x3b\x57\xcb\x49\xf6\x9c\xe3\xc8\x9c\
\xcc\x82\x80\x9c\x70\x53\xe6\x00\x24\x04\xf4\xdb\x26\xf5\x6b\x30\
\xbb\xb3\x08\xf1\xd0\xaf\xc1\x4c\x27\xb0\xd6\x19\xd4\x75\x40\x14\
\x02\x73\x91\x05\xd9\x11\x6a\x81\xc0\x5e\x61\x42\x37\x45\x8f\x8a\
\x41\x8b\xa7\x6f\x8a\x1e\x71\x42\xc5\xb7\x05\x1c\x40\x14\x42\x95\
\xf8\xaf\x29\x90\x99\x06\x2d\xeb\x81\xcb\x9c\x0c\x9d\x11\xc3\xaa\
\x17\xa0\x1e\x8e\x46\x9d\xc0\x3c\x22\xa7\x1f\x8f\xff\x13\xc7\xae\
\x14\x29\x29\x90\xf8\xe6\x04\x84\xf8\x7f\x05\x12\x65\x25\x32\xef\
\x10\x2a\xc4\x87\x01\x20\x21\xa0\x22\x5a\x25\xe6\xcb\xe0\x31\x0b\
\x25\x4f\x34\x3e\x6e\xa9\xac\x32\x08\x5a\xb1\xb4\x22\x84\x92\x72\
\x79\x15\x08\xad\x97\x26\xe6\x95\x19\x40\xc7\xc6\xbc\x34\x85\x84\
\xd1\xd5\xb5\xb9\x0c\x20\xcc\x8b\x93\x33\x46\x8f\x07\x53\x21\x72\
\xe7\x17\x36\x2b\x63\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x02\xd8\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\x55\x49\x44\
\x41\x54\x58\x85\xed\x95\x4d\x4f\x53\x51\x10\x86\x9f\xb9\x1a\x12\
\xef\x4f\x10\x0d\xc1\xb0\x12\x4d\xb0\xf1\x0f\xc0\x06\xe3\x06\x48\
\x4c\x77\xd0\x0f\x16\x6c\x8d\x01\x2c\xae\x58\x68\x82\x05\xff\xc2\
\x3d\xad\xec\xae\x89\x16\x57\x7e\x2c\xc4\xad\xf1\x8b\x68\x62\x0c\
\x21\xa4\xb1\x86\x3f\xd0\x86\x86\x26\x7d\x5d\xb4\x21\xc6\x70\x5b\
\x2e\xb0\xb3\xef\x76\xe6\xcc\x3c\x67\xce\x99\x19\xe8\xa9\xa7\xff\
\x5d\x16\xc7\x39\x0c\xc3\xbe\xfd\x6a\x75\x4a\x66\x93\x06\x09\xa0\
\xbf\x6d\xaa\x60\xf6\x59\x50\xf2\x7d\xbf\x94\x4c\x26\x0f\xce\x1c\
\xa0\x18\x04\x77\x30\xcb\x03\x83\x06\xdf\x04\x9b\x32\xab\x00\x78\
\xcd\x66\x3f\x66\xa3\x82\xeb\xc0\x8e\xc1\xe2\x4c\x26\xf3\xfc\x4c\
\x00\xc2\x30\x3c\xb7\x5f\xab\xe5\x81\x7b\x06\xaf\xac\xd9\xcc\x4d\
\xcf\xce\x6e\x1d\xe5\xeb\x9c\x1b\xf1\x60\x05\x18\x07\x56\x77\xcb\
\xe5\xdc\xf2\xf2\x72\xb3\x53\xfc\xf3\xdd\x00\xda\xc9\xef\x4a\x5a\
\x48\x65\xb3\x6b\x9d\x7c\x33\x99\xcc\x57\xe0\x56\xd1\xb9\x05\x60\
\x65\x70\x60\x00\x60\xb1\xd3\x99\x8e\x15\x68\x97\xfd\x99\x99\xcd\
\xcf\xa4\xd3\x4f\xba\xc1\xfe\xad\x42\xa1\xb0\x68\xd2\x63\x0f\xa6\
\xa6\x33\x99\x52\x6c\x80\x30\x0c\xfb\xea\xb5\xda\x0f\x49\x3f\x53\
\xd9\xec\xed\x38\xc9\x0f\x21\x9c\x7b\x63\x66\x83\x17\x7c\x7f\x38\
\xea\x63\x7a\x51\x87\xf7\xab\xd5\x29\xc1\x15\x4f\x5a\x3a\x49\x72\
\x00\xf3\xbc\xfb\x48\x43\xf5\x5a\x6d\x22\xca\x27\x12\x40\x66\x93\
\xc0\x56\xd4\x87\x3b\x8e\x52\xa9\xd4\x17\xcc\xbe\x03\xf1\x01\x0c\
\x12\x26\xbd\x3f\x69\xf2\x43\x49\xef\x04\x37\xa3\xcc\xd1\x5d\x60\
\x76\x51\x50\x39\x35\x00\xfc\xc6\xac\x3f\xca\x18\x59\x01\x00\x99\
\xc5\x9a\x94\x47\xc9\xc0\x90\x22\x67\x41\x34\x80\xb4\x67\xd2\xa5\
\xd3\x02\xa8\x75\xfb\xbd\x28\x7b\xa7\x27\xf8\x08\x8c\x9e\x1a\x40\
\x1a\x33\xf8\x10\x65\x8f\xee\x02\x28\x21\x5d\x73\xce\x8d\x9c\x34\
\xf9\x7a\x10\x24\x0c\xae\x22\xbd\x8c\x0d\xe0\xfb\x7e\x09\xd8\x69\
\xcf\xf6\xd8\x92\x64\xcd\xd6\xf2\xda\xae\x37\x1a\x1b\xb1\x01\x92\
\xc9\xe4\x01\x9e\xb7\x00\x8c\xb7\x67\x7b\x2c\x15\x9d\xcb\x01\x63\
\x32\x9b\x9f\x9b\x9b\x6b\xc4\x06\x00\x48\xa5\x52\x2f\x80\x55\x60\
\xe5\xb8\x10\x92\xac\x10\x04\x4b\x66\xf6\x10\xc8\xa7\xd3\xe9\xc8\
\xf2\x77\x05\x00\xd8\x2d\x97\x73\x92\xd6\x80\x7c\xd1\xb9\xd7\xc5\
\x62\xf1\x46\x94\xef\x7a\x10\x24\x9e\x16\x0a\x6f\xcd\xec\x11\xad\
\x75\xfc\xa0\x5b\xfc\x63\xf7\xf9\xba\x73\x93\x4d\xb3\x55\xa4\xa1\
\xf6\x78\xdd\x14\xfc\x6a\x07\xb9\x8c\x34\x0a\x0c\x03\xdb\x32\x9b\
\xef\x76\xf3\xd8\x00\x70\xb8\x21\x27\x04\x93\x40\x02\xb3\xd6\x9c\
\x90\x2a\x06\x9f\x24\x95\xea\x8d\xc6\x46\xa7\x37\xef\xa9\xa7\x9e\
\xfe\xd5\x1f\x3e\xd4\xef\x44\x0d\xbc\xff\x65\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\x9f\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x14\x1f\xf9\
\x23\xd9\x0b\x00\x00\x00\x23\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x0d\xe6\x7c\x80\xb1\x18\x91\x05\x52\x04\xe0\x42\x08\x15\x29\x02\
\x0c\x0c\x8c\xc8\x02\x08\x95\x68\x00\x00\xac\xac\x07\x90\x4e\x65\
\x34\xac\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\xd0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x4d\x49\x44\
\x41\x54\x58\x85\xed\xd7\x4d\x4e\xc2\x40\x18\xc6\xf1\xff\x5b\x08\
\x08\xea\x01\xd0\x2b\x88\x09\x5b\xcf\x21\xbb\xca\xd8\x1a\x49\xe0\
\x3e\x62\x42\x42\x69\x49\x97\x78\x0c\xd7\x84\x70\x07\x71\xef\x07\
\x02\x81\xd7\x85\xd4\x10\xc0\xdd\x10\x13\xed\xb3\x9b\xc9\x9b\x79\
\x7e\x93\x6e\x3a\xf0\xdf\x23\x9b\x6b\xcf\x98\x6b\xa0\x01\x94\x81\
\x03\x4b\x3d\x1f\xc0\x48\x44\x5a\x41\x18\x46\x80\xee\x02\x88\x67\
\x4c\x08\xd4\x80\x29\x30\x00\x5e\x2d\x01\x8e\x80\x0a\x90\x07\xba\
\xdd\x28\xba\x49\x10\xdf\x00\xcf\x18\x0f\x08\x04\x1e\xb3\x8b\x45\
\xb5\x1d\xc7\x63\x4b\xe5\x00\xd4\x5d\xb7\x34\x77\x9c\x3e\x22\x17\
\x02\x26\x88\xa2\x1e\x80\xb3\x36\xd3\x00\xa6\x4b\x91\x4b\xdb\xe5\
\x00\xed\x38\x1e\x4b\x36\x5b\x05\x66\x2a\xd2\x4c\xf6\xd7\x01\x67\
\xc0\x20\x0c\xc3\x67\xdb\xe5\x49\x82\x20\x78\x42\x64\x80\x6a\x79\
\x17\xa0\x80\xea\xfb\xbe\xca\xbf\xb3\x5c\xbe\x01\xc5\x5d\x80\x5f\
\x49\x0a\x48\x01\x29\x20\x05\xa4\x80\x14\x90\x02\x52\xc0\x3a\x60\
\x82\x48\xf1\xc7\x49\x6b\x8d\xce\x21\x30\xd9\x02\x28\x8c\x80\x4a\
\xdd\x75\x4b\xfb\xea\xae\xd5\x6a\xa7\xa8\x56\x80\xe1\x16\xc0\x11\
\xb9\x07\xf2\xf3\x4c\xe6\xc1\xf7\xfd\x93\x7d\x94\x67\x44\xfa\x40\
\x4e\x45\x5a\xc9\xfe\xe6\xc3\xa4\x03\x78\xc0\x6c\xf5\xf7\xfa\x62\
\xa5\x5d\xe4\x78\x75\xf3\x9c\x42\x27\x8c\xa2\x5b\x36\x1f\x26\xc9\
\xa8\x6f\xcc\x95\x8a\x34\x51\x3d\x07\x0a\x56\x00\x5f\xdf\x7c\x88\
\xea\x5d\xb7\xd7\x8b\x2d\x9d\xf9\x47\xf2\x09\x3e\x70\x64\x41\x95\
\x87\xdf\x69\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xc3\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdc\x0b\x07\x09\x2e\x37\xff\x44\xe8\xf0\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x00\x27\x49\x44\x41\x54\x78\xda\xed\xc1\x01\
\x0d\x00\x00\x00\xc2\xa0\xf7\x4f\x6d\x0e\x37\xa0\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x77\x03\x40\x40\
\x00\x01\xaf\x7a\x0e\xe8\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x01\xd0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x4d\x49\x44\
\x41\x54\x58\x85\xed\x97\x3b\x4e\xc3\x40\x14\x00\xe7\x45\x51\xc2\
\xf7\x00\x81\x2b\x00\x52\xee\x42\xca\x8d\xed\x58\x14\x70\x1f\x42\
\x65\x99\x8d\x29\xc3\x1d\x68\xa8\xa3\x28\x77\x20\xf4\x7c\x42\x3e\
\xf2\xa3\x70\x8c\x8c\x4c\xb9\x16\x12\x78\x2a\x5b\x5a\x79\x66\x25\
\x17\xef\xc1\x7f\x47\x8a\x2f\xaa\x2a\x36\x8e\xfd\x86\xc8\xa5\xc2\
\x29\xb0\xe3\xc8\xf3\x21\x30\x03\x86\xc6\xf7\xad\x88\x68\x29\x40\
\x55\x25\x89\xe3\x5b\x15\xe9\x03\x4b\x60\x82\xc8\xab\x13\xbd\xea\
\x01\xd0\x05\xda\x88\xc4\x7d\xcf\x0b\xf3\x88\x66\x7e\xc6\xc6\xb1\
\x2f\x99\xfc\xb1\xd1\x6c\xf6\x8c\x31\x73\x27\xf2\x2d\x49\x92\x74\
\xd2\xcd\x66\x8c\x6a\x60\xad\x7d\x00\x46\x00\x8d\xfc\x40\x43\xe4\
\x12\x58\xa6\x70\xee\x5a\x0e\x60\x8c\x99\x6f\xd2\xb4\x07\xac\x44\
\xf5\xea\xcb\x9b\x3f\x28\x9c\x00\x93\x20\x08\x9e\x5d\xcb\x73\xc2\
\x30\x7c\x02\x26\x64\xff\xd7\xf7\x00\x60\x17\x78\xaf\x4a\x5e\xe0\
\x0d\xd8\xfb\x29\xe0\x57\xa8\x03\xea\x80\x3a\xa0\x0e\xa8\x03\xea\
\x80\x3a\xa0\x0e\x28\x06\x2c\x28\x4c\x2a\x15\xb2\xbf\x75\x95\x02\
\x66\x40\x37\x49\x92\x4e\x55\x66\x6b\xed\x31\xd9\x78\x3e\x2d\x05\
\x08\xdc\x00\xed\x74\xbd\xbe\x8f\xa2\xe8\xa8\x12\x79\x9a\x8e\x81\
\x96\xc0\xb0\xe0\xcd\x50\x55\x19\x59\x1b\xa1\x1a\x00\x2b\xb2\xc5\
\xe4\xc5\x89\x5d\xf5\x90\xec\xe6\x2d\x85\xc8\xf3\xfd\x8b\x7c\x31\
\x29\xaf\x66\xd6\x9a\xed\xdc\x7e\x46\x36\x29\xbb\x60\x01\x4c\x51\
\xbd\xf6\x06\x83\x3b\x47\xdf\xfc\x23\x7c\x02\x90\xc4\x75\x30\xa3\
\x38\xd1\xd4\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xef\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x51\x00\x00\x00\x3a\x08\x06\x00\x00\x00\xc8\xbc\xb5\xaf\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\
\x0b\x2a\x32\xff\x7f\x20\x5a\x00\x00\x00\x6f\x49\x44\x41\x54\x78\
\xda\xed\xd0\xb1\x0d\x00\x30\x08\x03\x41\xc8\xa0\x0c\xc7\xa2\x49\
\xcf\x04\x28\xba\x2f\x5d\x59\x97\xb1\xb4\xee\xbe\x73\xab\xaa\xdc\
\xf8\xf5\x84\x20\x42\x84\x28\x88\x10\x21\x42\x14\x44\x88\x10\x21\
\x0a\x22\x44\x88\x10\x05\x11\x22\x44\x88\x82\x08\x11\x22\x44\x41\
\x84\x08\x51\x10\x21\x42\x84\x28\x88\x10\x21\x42\x14\x44\x88\x10\
\x21\x0a\x22\x44\x88\x10\x05\x11\x22\x44\x88\x82\x08\x11\x22\x44\
\x41\x84\x08\x51\x10\x21\x42\xfc\xaa\x07\x12\x55\x04\x74\x56\x9e\
\x9e\x54\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\x56\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x14\x2d\x80\x7a\x92\xdf\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x01\xba\x49\x44\x41\x54\x78\xda\xed\x9b\x5b\
\x92\x02\x21\x0c\x45\x4d\x16\xa6\x1b\xd0\xd5\x8e\x1b\xd0\x8d\xe9\
\x9f\x65\x39\xda\x3c\x92\x7b\x13\x68\xf2\x3d\x95\xe6\x1c\x1e\x43\
\x10\x0e\x87\x15\x2b\x56\xec\x39\x84\xf9\xb1\xbf\xe3\xf1\x51\xf3\
\x77\x97\xfb\x5d\xa6\x10\x50\x0b\x1c\x29\x44\xb2\x42\xb3\x64\xc8\
\x28\xe0\x28\x11\x32\x22\xbc\xa7\x04\x19\x11\xdc\x53\x84\x8c\x0e\
\x6f\x95\x20\xa3\x83\x5b\x45\xc8\x4c\xf0\x3d\x12\x64\x36\xf8\x56\
\x09\xba\xb6\xc2\x13\xf6\x7e\xcb\x28\x10\x2b\xfc\xf9\x76\x7b\xe5\
\xb8\x9e\x4e\x14\x51\xef\xdf\x2c\x7d\xb7\x24\x41\xbd\x1b\xf6\xd9\
\x38\x34\xbc\x35\x14\x31\xf4\x51\x12\x7a\xf2\x96\x18\x14\x35\xef\
\xbd\x25\x58\xf2\x6d\xb1\x98\xa7\xc0\xd6\xfc\xf3\x92\xb0\x95\xc7\
\xba\xee\x88\x57\xef\xa3\x1a\xe9\x99\xf7\xdb\x82\xe8\xb6\x08\x22\
\x46\x02\xb2\xe7\x21\xff\x05\x3c\x25\x30\xe0\xbf\x4e\x01\x8f\x4d\
\x8f\xb5\xf1\x48\xf8\xcf\x69\x00\xd9\x0a\x5b\x46\x02\xab\xe7\xe1\
\xb5\x40\x8f\x04\x36\x3c\xbc\x18\x6a\x91\x10\x01\xff\x6f\x0d\x40\
\x15\x3d\x25\x38\x36\xfc\xfb\x3a\x40\x29\x87\x7b\xd7\x04\x46\x71\
\x45\x3b\x0f\x68\x85\x61\x55\x96\xd4\x03\x91\x5a\x28\x16\x3c\x5d\
\x40\x0d\x1c\x13\x3e\x44\x80\x65\x1f\x30\xbc\x80\x5a\x38\xa6\x04\
\xcd\x06\xcf\x96\xa0\xd1\xf0\x8c\xf3\x84\x50\x01\x35\xf0\x91\x12\
\x20\xd5\x60\x6f\xcf\x33\x36\x45\x94\x6a\xb0\x17\x26\x62\x24\x68\
\xa6\x39\x1f\x21\x41\x33\xc1\x47\x48\x70\x3b\x14\x45\xcc\x61\xef\
\x7c\xd0\x43\x51\xc4\x02\xc6\x18\x09\x9a\x15\x9e\x25\xe1\x67\x82\
\xda\x69\xc0\xaa\xe7\xad\xdf\xf9\xf5\x23\x69\xc8\x99\x60\x86\x7c\
\x45\x01\x96\x9b\x57\xa8\xc6\xf6\xe6\xdd\x62\xd1\xec\x3d\x8f\xce\
\x6f\xbe\x20\x91\x3d\x4a\x23\x79\x5d\x91\xa9\x4d\xb6\x6e\x89\x4d\
\x1a\xeb\xa2\x64\x6b\xf2\x5d\x5f\x95\xcd\x2c\x82\x76\x59\x3a\xa3\
\x84\x90\xeb\xf2\x59\x24\x58\x1f\x4d\xac\x27\x33\xde\x0d\xdb\xed\
\xa3\x29\xa4\x8c\xa1\x9e\xcd\x79\x08\x61\x3e\x9c\x5c\xb1\xf7\x78\
\x02\x51\xa0\x5a\x91\x77\xd2\x02\x23\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x01\xec\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x69\x49\x44\
\x41\x54\x58\x85\xed\x97\x3b\x4e\xc3\x40\x10\x86\xbf\xb1\xa2\x84\
\xe7\x01\x02\x57\x00\xa4\xdc\x85\x94\x8e\xed\x44\x14\x70\x1f\x42\
\x65\x2d\x1b\x53\x86\x3b\xd0\x50\x47\x51\xee\x40\xe8\x79\x84\x3c\
\xe4\xa1\x70\x8c\x8c\x2c\x25\x05\x36\x05\xf8\xaf\x76\xb5\x23\x7f\
\x9f\xad\x95\x3c\x03\xff\x3d\x92\xdd\xa8\xaa\x58\x63\x7c\x47\xe4\
\x52\xe1\x14\xd8\x29\x88\xf3\x21\x30\x01\xfa\xae\xef\x5b\x11\xd1\
\x9c\x80\xaa\x4a\x64\xcc\xad\x8a\x74\x80\x39\x30\x42\xe4\xb5\x10\
\xbc\xea\x01\xd0\x02\x1a\x88\x98\x8e\xe7\xf5\x52\x89\x5a\x5a\x63\
\x8d\xf1\x25\x81\x3f\x3a\xb5\x5a\xdb\x75\xdd\x69\x21\xf0\x75\xa2\
\x28\x6a\xc6\xab\xd5\x10\xd5\xc0\x5a\xfb\x00\x0c\x00\x9c\xb4\xc0\
\x11\xb9\x04\xe6\x31\x9c\x17\x0d\x07\x70\x5d\x77\xba\x8a\xe3\x36\
\xb0\x10\xd5\xab\x2f\x6e\xba\x50\x38\x01\x46\x41\x10\x3c\x17\x0d\
\x4f\xd3\xeb\xf5\x9e\x80\x11\xc9\xfd\xfa\x2e\x00\xec\x02\xef\x65\
\xc1\x33\x79\x03\xf6\xd2\x4d\x6d\x43\x21\x00\xd6\x18\xdd\x56\xb3\
\x29\x5e\x10\xc8\xa6\x73\x67\xd3\xe1\x6f\xa4\x12\xa8\x04\x2a\x81\
\x4a\xa0\x12\xa8\x04\x2a\x81\xad\xfd\xc0\xb6\xff\xf9\x4f\x93\xfd\
\x02\x33\x32\x9d\x4a\x89\xd9\x5f\xb3\x72\x02\x13\xa0\x15\x45\x51\
\xb3\x2c\xb2\xb5\xf6\x98\xa4\x3d\x1f\xe7\x04\x04\x6e\x80\x46\xbc\
\x5c\xde\x87\x61\x78\x54\x0a\x3c\x8e\x87\x40\x5d\xa0\x9f\xe1\x26\
\x51\x55\x19\x58\x1b\xa2\x1a\x00\x0b\x92\xc1\xe4\xa5\x10\xba\xea\
\x21\xc9\x9b\xd7\x15\x42\xcf\xf7\x2f\xd2\xc1\x24\x3f\x9a\x59\xeb\
\xae\xfb\xf6\x33\x92\x4e\xb9\x88\xcc\x80\x31\xaa\xd7\x5e\xb7\x7b\
\x57\xd0\x33\xff\x48\x3e\x01\xac\x18\x7a\x56\x83\xd7\xe8\x6e\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1d\x00\xb0\
\xd5\x35\xa3\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x06\xfe\x9f\x67\x60\x60\x42\x30\xa1\x1c\x08\x93\x81\x81\x09\xc1\
\x64\x60\x60\x62\x60\x60\x34\x44\xe2\x20\x73\x19\x90\x8d\x40\x02\
\x00\x64\x40\x09\x75\x86\xb3\xad\x9c\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\x96\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x02\x62\x4b\x47\x44\x00\xd3\xb5\x57\xa0\x5c\x00\x00\
\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\
\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x0b\x07\x0c\
\x0d\x1b\x75\xfe\x31\x99\x00\x00\x00\x27\x49\x44\x41\x54\x08\xd7\
\x65\x8c\xb1\x0d\x00\x00\x08\x83\xe0\xff\xa3\x75\x70\xb1\xca\xd4\
\x90\x50\x78\x08\x55\x21\x14\xb6\x54\x70\xe6\x48\x8d\x87\xcc\x0f\
\x0d\xe0\xf0\x08\x02\x34\xe2\x2b\xa7\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1c\x1f\x24\
\xc6\x09\x17\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\xff\xcf\xc3\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x0e\
\xa3\x21\x9c\xc3\x68\x88\x61\x1a\x0a\x00\x00\x6d\x84\x09\x75\x37\
\x9e\xd9\x23\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x02\x04\x6d\
\x98\x1b\x69\x00\x00\x00\x29\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\xff\xcf\xa3\x08\x18\x32\x32\x30\x20\x0b\x32\x1a\
\x32\x30\x30\x42\x98\x10\x41\x46\x43\x14\x13\x50\xb5\xa3\x01\x00\
\xd6\x10\x07\xd2\x2f\x48\xdf\x4a\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x00\xbb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x3f\x00\x00\x00\x07\x08\x06\x00\x00\x00\xbf\x76\x95\x1f\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\
\x09\x35\x2b\x55\xca\x52\x6a\x00\x00\x00\x3b\x49\x44\x41\x54\x38\
\xcb\x63\x60\x18\x05\x23\x13\x30\x12\xa3\xa8\xbe\x7d\x2a\x25\x76\
\xfc\xa7\x97\x3b\xd1\xc1\xaa\xa5\x73\x18\xae\x5f\x39\x8f\x53\x9e\
\x69\x34\xe6\x09\x00\x4d\x1d\xc3\x21\x19\xf3\x0c\x0c\x0c\x78\x63\
\x7e\x14\x8c\x54\x00\x00\x69\x64\x0b\x05\xfd\x6b\x58\xca\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xe4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x36\x00\x00\x00\x0a\x08\x06\x00\x00\x00\xff\xfd\xad\x0b\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\x7f\x00\x87\x00\x95\xe6\xde\xa6\xaf\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\
\x09\x2a\x2b\x98\x90\x5c\xf4\x00\x00\x00\x64\x49\x44\x41\x54\x48\
\xc7\x63\xfc\xcf\x30\x3c\x01\x0b\xa5\x06\x34\xb4\x4f\x85\x87\xcd\
\xaa\xa5\x73\x18\xae\x5d\x39\xcf\x48\x2b\x35\x14\x79\xcc\xd8\xc8\
\x88\x24\x03\x7c\x89\xd0\x4f\x2d\x35\x84\xc0\xd9\x73\xe7\xe0\x6c\
\x26\x86\x91\x92\x14\x91\x7d\x4d\x54\x52\x0c\x4d\x26\xa8\x9f\x5a\
\x6a\x46\x93\xe2\x68\x52\x1c\x82\x49\x91\x91\xd2\x7a\x4c\x4b\xc7\
\x10\xc5\x08\x6c\xc5\x34\xb5\xd4\xd0\xd5\x63\x83\x15\x00\x00\x7a\
\x30\x4a\x09\x71\xea\x2d\x6e\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x00\xe0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x51\x00\x00\x00\x3a\x08\x06\x00\x00\x00\xc8\xbc\xb5\xaf\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\
\x0b\x29\x1c\x08\x84\x7e\x56\x00\x00\x00\x60\x49\x44\x41\x54\x78\
\xda\xed\xd9\xb1\x0d\x00\x20\x08\x00\x41\x71\x50\x86\x63\x51\xed\
\x8d\x85\x25\x89\x77\xa5\x15\xf9\x48\x45\x8c\xa6\xaa\x6a\x9d\x6f\
\x99\x19\x1d\x67\x9d\x03\x11\x45\x14\x11\x11\x45\x14\x51\x44\x44\
\x14\x51\x44\x11\x11\x51\x44\x11\x45\x44\x44\x11\x45\x14\x11\x11\
\x45\x14\xf1\x5b\xd1\x75\xb0\xdb\xdd\xd9\x4f\xb4\xce\x88\x28\x22\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x36\xce\x69\x07\x1e\xe9\
\x39\x55\x40\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\xf8\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\x75\x49\x44\
\x41\x54\x58\x85\xed\x96\xcd\x4e\x13\x51\x18\x86\x9f\xaf\x15\xd2\
\x32\x78\x03\x56\x4d\x69\x58\x89\xa6\x3f\xf1\x06\x20\x26\x1a\x37\
\x94\x84\xd9\xb6\x33\xc4\x0b\x30\x46\x10\x34\x51\x16\x2e\x48\xd1\
\xb8\x72\x43\xb4\x74\xd8\x92\x98\xe2\xca\xb8\x11\x37\x2c\x8c\xda\
\x36\x12\xc0\x10\x40\x03\x86\x0b\xc0\x54\xa3\x71\x3e\x17\xb4\xd1\
\x44\xa6\x65\x0a\x3b\xfb\x6c\xbf\xf7\x9c\xf7\x49\xe6\xcc\x99\x81\
\x36\x6d\xfe\x77\xc4\x4f\xd8\x34\xcd\xce\xee\x70\x78\x48\x44\xd2\
\x40\x4a\x21\x02\x80\xea\x0e\x22\xef\x05\x8a\x7b\xd5\x6a\x71\x7e\
\x7e\xfe\xc7\xb1\x0b\xd8\x99\xcc\xb0\x8a\xe4\x04\x7a\x80\x0f\xa2\
\xba\xa8\x22\x3b\xb5\x71\x04\xe8\x07\x2e\x00\x1b\x2a\x32\x56\x28\
\x14\x9e\x1d\x8b\x80\x69\x9a\xc1\x93\x86\x91\x53\xd5\x1b\x02\x2f\
\x08\x06\xc7\xf3\xf9\x7c\xe5\xa0\xac\x65\x59\x09\x81\x29\x54\x2f\
\xab\xea\x74\x34\x16\x1b\x9f\x9c\x9c\x74\x1b\xed\x7f\xa2\x99\x40\
\xad\xfc\x3a\x30\x9a\x77\x9c\x07\x8d\xb2\x85\x42\xa1\x0c\x5c\x19\
\xb1\xac\x51\x60\xea\xd3\xe6\x26\xc0\x58\xa3\x35\xc1\x46\x43\x3b\
\x93\x19\x06\x1e\x09\x8c\xce\x3a\xce\xc3\x66\xb2\x75\x4a\xe5\xf2\
\x52\x32\x91\xf8\x2e\x22\xf7\x12\xc9\x64\xa5\x5c\x2e\xaf\x79\x65\
\x3d\x1f\x81\x69\x9a\x9d\xdd\x5d\x5d\xab\xc0\xc7\x59\xc7\xb9\x7a\
\xd8\xf2\xbf\xb1\xb3\xd9\x97\x40\xcf\xd7\x6a\xb5\xcf\xeb\x60\x06\
\xbc\x16\x77\x87\xc3\x43\x40\x4c\x82\xc1\x89\x56\xca\x01\x02\xaa\
\xb7\x80\x5e\xc3\x30\x06\x3d\x33\x5e\x03\x11\x49\xa3\x5a\xf1\x3a\
\x70\x87\xe1\xe9\xdc\x5c\x09\x58\x46\xd5\xbf\x00\x90\x42\xe4\x75\
\xab\xe5\x75\x44\xf5\x95\xa8\x5e\xf4\x2d\xa0\x70\x4a\xfe\xbc\xe7\
\x2d\xe3\xc2\x17\x44\x22\xbe\x05\x00\x54\xd5\xd7\x4d\x79\x60\x41\
\x20\x20\xfb\x1e\xfe\x05\x76\x45\xf5\xf4\x51\x05\x54\x35\x82\xea\
\x6e\x2b\x02\x6f\x55\xa4\xff\xa8\x02\xc0\x80\xc0\x1b\xdf\x02\x02\
\x45\xe0\xbc\x65\x59\x89\x56\x9b\x6d\xdb\x4e\x01\xe7\x14\x9e\xfb\
\x16\xd8\xab\x56\x8b\xc0\x86\xc0\x54\x8b\xfd\x22\xae\x9b\x03\xd6\
\x3b\x42\xa1\x05\xaf\x90\xe7\x55\xbc\xb2\xb2\xf2\x2b\x15\x8f\x6f\
\x03\x77\x52\xc9\x64\xb5\x54\x2e\x2f\xf9\x69\xb7\xb3\xd9\x09\xe0\
\x9a\xc0\xc8\x93\x7c\x7e\xd5\xb7\x00\x40\xa9\x52\x59\x4b\xc4\xe3\
\x06\x70\x37\x95\x4c\x7e\x3b\xa4\x84\xd4\xca\xef\x8b\xc8\x74\xde\
\x71\x1e\x37\x0a\x37\xfd\x1a\x46\x63\xb1\xf1\xcf\x5b\x5b\xaa\xaa\
\x39\x2b\x9b\xbd\x14\x54\x1d\xaf\xdd\x70\xff\x60\xdb\x76\x4a\x5c\
\x37\xa7\x30\x20\x22\xb9\xb3\xd1\xe8\xed\xa6\xb6\xcd\x02\x75\x2c\
\xcb\x4a\x8b\xea\x34\xd0\x0b\x2c\x03\x8b\xc0\x76\x6d\x7c\x86\xfd\
\x1f\x92\x3e\x60\x5d\xe0\x66\xde\x71\x3c\x0f\x5e\x4b\x02\xb0\xff\
\x85\x34\x0c\x63\x50\x5c\x37\x8d\x48\x0a\xa8\xdf\x13\x3b\x0a\xef\
\x44\xb5\xd8\x11\x0a\x2d\xcc\xcc\xcc\xfc\xf4\xb3\x6f\x9b\x36\xff\
\x37\xbf\x01\x4a\x37\xdd\xdd\x8c\xf1\x82\x6a\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\x93\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x02\x62\x4b\x47\x44\x00\xd3\xb5\x57\xa0\x5c\x00\x00\
\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\
\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x0b\x07\x0c\
\x0c\x2b\x4a\x3c\x30\x74\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\
\x63\x60\x40\x05\xff\xff\xc3\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\
\xc5\x70\x0e\x23\x23\x9c\xc3\xc8\x88\x61\x1a\x0a\x00\x00\x9e\x14\
\x0a\x05\x2b\xca\xe5\x75\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x1b\x0e\x16\
\x4d\x5b\x6f\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\x73\x3e\x20\x0b\xa4\x08\x30\x32\x30\x20\x0b\xa6\
\x08\x30\x30\x30\x42\x98\x10\xc1\x14\x01\x14\x13\x50\xb5\xa3\x01\
\x00\xc6\xb9\x07\x90\x5d\x66\x1f\x83\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\x81\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x01\x03\x00\x00\x00\x25\x3d\x6d\x22\
\x00\x00\x00\x06\x50\x4c\x54\x45\x00\x00\x00\xae\xae\xae\x77\x6b\
\xd6\x2d\x00\x00\x00\x01\x74\x52\x4e\x53\x00\x40\xe6\xd8\x66\x00\
\x00\x00\x29\x49\x44\x41\x54\x78\x5e\x05\xc0\xb1\x0d\x00\x20\x08\
\x04\xc0\xc3\x58\xd8\xfe\x0a\xcc\xc2\x70\x8c\x6d\x28\x0e\x97\x47\
\x68\x86\x55\x71\xda\x1d\x6f\x25\xba\xcd\xd8\xfd\x35\x0a\x04\x1b\
\xd6\xd9\x1a\x92\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x00\xdc\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x40\x08\x06\x00\x00\x00\x13\x7d\xf7\x96\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xb3\x00\x79\x00\x79\xdc\xdd\
\x53\xfc\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x2d\x19\xaf\x4a\xeb\xd0\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x00\x40\x49\x44\x41\x54\x58\xc3\xed\xce\x31\
\x0a\x00\x20\x0c\x03\x40\xf5\xa3\x7d\x5b\x5f\xaa\x53\xc1\xc9\xc5\
\x45\xe4\x32\x05\x1a\x8e\xb6\x76\x99\x5e\x25\x22\x66\xf5\xcc\xec\
\xfb\xe8\x74\x1b\xb7\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\xf0\x36\xf0\x41\x16\x0b\x42\x08\x78\x15\x57\x44\xa2\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\xe3\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x60\x49\x44\
\x41\x54\x58\x85\xed\xd7\x3b\x4e\x42\x61\x10\x86\xe1\x77\x40\x45\
\xbc\x2c\x00\xdd\x82\x98\x90\x00\x46\x05\x57\x21\xa5\x17\x62\x2c\
\xb0\x43\x12\x5d\x83\x0d\xd8\x88\x15\xde\x62\x89\x7b\x30\x39\x60\
\x14\x49\x4e\x08\x7b\x10\x7b\x2f\x08\x08\x63\xa1\x87\x40\xa0\x3c\
\xc4\x44\xcf\xd7\xfd\x53\xfc\xdf\x53\xce\xc0\x7f\x8f\xf4\xbd\x54\
\x25\x92\x79\xd8\x16\x95\x04\x82\x1f\x98\xb4\xa9\xe7\x03\xa5\x0a\
\x92\x35\xf6\x43\x97\x88\xe8\x20\x40\x55\xd6\x8e\x4b\x17\xaa\x6c\
\x02\x0d\x01\x53\xd1\x57\x3b\xda\x05\x99\x51\x08\x00\x1e\x90\x73\
\x23\x19\xda\xb1\x10\x5d\x40\x24\x7d\x1f\x17\xe4\x0c\xb4\x88\x8c\
\xc5\x8c\x64\xb0\x66\x47\xb9\x95\x68\xa6\xec\x43\xdb\x79\x60\x45\
\x95\xad\x42\x6a\xe9\x0a\xc0\xd5\x55\xaa\x24\x80\x86\xfb\xd3\xb5\
\x6e\x77\x39\x80\x91\x0c\xd6\x3a\xad\x56\x0c\x68\x8a\xb0\x67\xcd\
\xbb\x00\x84\x05\x01\xf3\xf6\x20\xfc\x6c\x77\xb9\x95\xe2\x61\xe4\
\x09\x30\x01\xff\x20\x00\xbc\x0a\xef\xa3\x2a\xef\xc9\x1b\x30\x35\
\x0c\xf0\x2b\x71\x00\x0e\xc0\x01\x38\x00\x07\xe0\x00\x1c\x80\x03\
\xe8\x05\xd4\xa5\x67\x53\x19\x61\xa6\x81\xfa\x10\x80\x56\x15\x02\
\xd1\x4c\xd9\x37\xaa\xe6\xe5\xf4\xdd\x3c\x10\x10\xa8\x0c\x02\xd4\
\x75\x0a\x78\xd0\xf6\xcd\xea\x51\x61\x6e\x14\xe5\xe3\xb8\xf3\xc0\
\x44\x47\x34\x6b\xcd\xfb\x0e\x93\x68\xe6\x31\x07\x1a\x07\x9a\x80\
\x09\xfa\x62\x4f\xbd\xcc\xf2\x7d\x98\x4c\x28\xe4\x0a\xc9\xf0\xee\
\xc0\x61\x62\x21\x22\xe9\xd2\xc6\xcf\xde\xbe\x08\x78\xed\x01\x50\
\x17\xa8\xa8\xca\x89\x91\x0a\x5f\xdb\xf4\xe7\x1f\xc9\x17\xa4\x29\
\x70\x23\xfc\x8b\x13\x87\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x02\x56\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xdf\x04\x19\x10\x15\x00\xdc\xbe\xff\xeb\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x01\xba\x49\x44\x41\x54\x78\xda\xed\x9b\x5b\
\x92\x02\x21\x0c\x45\x4d\xd6\x37\x2e\x48\x17\xa0\x0b\xd2\xfd\xe9\
\x9f\x65\x39\xda\x3c\x92\x7b\x13\x68\xf2\x3d\x95\xe6\x1c\x1e\x43\
\x10\x0e\x87\x15\x2b\x56\xec\x39\x84\xf9\xb1\xdb\xe9\xf4\xa8\xf9\
\xbb\xe3\xf5\x2a\x53\x08\xa8\x05\x8e\x14\x22\x59\xa1\x59\x32\x64\
\x14\x70\x94\x08\x19\x11\xde\x53\x82\x8c\x08\xee\x29\x42\x46\x87\
\xb7\x4a\x90\xd1\xc1\xad\x22\x64\x26\xf8\x1e\x09\x32\x1b\x7c\xab\
\x04\x5d\x5b\xe1\x09\x7b\xbf\x65\x14\x88\x15\xfe\xef\x72\x79\xe5\
\xb8\x9f\xcf\x14\x51\xef\xdf\x2c\x7d\xb7\x24\x41\xbd\x1b\xf6\xd9\
\x38\x34\xbc\x35\x14\x31\xf4\x51\x12\x7a\xf2\x96\x18\x14\x35\xef\
\xbd\x25\x58\xf2\x6d\xb1\x98\xa7\xc0\xd6\xfc\xf3\x92\xb0\x95\xc7\
\xba\xee\x88\x57\xef\xa3\x1a\xe9\x99\xf7\xdb\x82\xe8\xb6\x08\x22\
\x46\x02\xb2\xe7\x21\xff\x05\x3c\x25\x30\xe0\xbf\x4e\x01\x8f\x4d\
\x8f\xb5\xf1\x48\xf8\xcf\x69\x00\xd9\x0a\x5b\x46\x02\xab\xe7\xe1\
\xb5\x40\x8f\x04\x36\x3c\xbc\x18\x6a\x91\x10\x01\xff\x6f\x0d\x40\
\x15\x3d\x25\x38\x36\xfc\xfb\x3a\x40\x29\x87\x7b\xd7\x04\x46\x71\
\x45\x3b\x0f\x68\x85\x61\x55\x96\xd4\x03\x91\x5a\x28\x16\x3c\x5d\
\x40\x0d\x1c\x13\x3e\x44\x80\x65\x1f\x30\xbc\x80\x5a\x38\xa6\x04\
\xcd\x06\xcf\x96\xa0\xd1\xf0\x8c\xf3\x84\x50\x01\x35\xf0\x91\x12\
\x20\xd5\x60\x6f\xcf\x33\x36\x45\x94\x6a\xb0\x17\x26\x62\x24\x68\
\xa6\x39\x1f\x21\x41\x33\xc1\x47\x48\x70\x3b\x14\x45\xcc\x61\xef\
\x7c\xd0\x43\x51\xc4\x02\xc6\x18\x09\x9a\x15\x9e\x25\xe1\x67\x82\
\xda\x69\xc0\xaa\xe7\xad\xdf\xf9\xf5\x23\x69\xc8\x99\x60\x86\x7c\
\x45\x01\x96\x9b\x57\xa8\xc6\xf6\xe6\xdd\x62\xd1\xec\x3d\x8f\xce\
\x6f\xbe\x20\x91\x3d\x4a\x23\x79\x5d\x91\xa9\x4d\xb6\x6e\x89\x4d\
\x1a\xeb\xa2\x64\x6b\xf2\x5d\x5f\x95\xcd\x2c\x82\x76\x59\x3a\xa3\
\x84\x90\xeb\xf2\x59\x24\x58\x1f\x4d\xac\x27\x33\xde\x0d\xdb\xed\
\xa3\x29\xa4\x8c\xa1\x9e\xcd\x79\x08\x61\x3e\x9c\x5c\xb1\xf7\x78\
\x02\x47\xb0\x5b\x07\x3a\x44\x3e\x01\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1f\x0d\xfc\
\x52\x2b\x9c\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\x73\x3e\xc0\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x4e\
\x8a\x00\x9c\x93\x22\x80\x61\x1a\x0a\x00\x00\x29\x95\x08\xaf\x88\
\xac\xba\x34\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x03\xcc\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x03\x49\x49\x44\
\x41\x54\x58\x85\xed\x96\xcd\x6b\x5c\x55\x18\xc6\x7f\xcf\x9d\x99\
\x98\xe9\x64\x16\xd2\x9d\xa9\x92\x0e\xa1\x0b\xd3\xd8\x76\xf0\x1f\
\x68\x11\x14\x2b\x34\x81\xde\x55\xca\xcc\xbd\xa5\x54\x5c\x04\x44\
\x6d\x3a\xd5\x4d\x16\x2e\xe2\x44\x57\xb3\x1b\xea\x78\xa7\x18\xb2\
\x08\xc8\x54\xb0\x88\x1b\xeb\xc6\x85\x68\xf3\x55\x53\xa4\xb4\x55\
\x9a\x52\x70\x25\x99\x30\xa5\x36\xb9\xaf\x8b\xf9\x68\xc1\xcc\x0c\
\x53\xba\x6b\x9e\xdd\x39\xe7\x39\xef\xfb\xbb\xef\x7d\xef\x39\x17\
\x76\xb5\xab\xe7\x5d\xea\xc5\xec\xba\x6e\xdf\x40\x3c\x3e\x2e\x69\
\x0c\x48\x1b\x0c\x02\x60\xb6\x8e\x74\x4d\x50\xa9\xd6\x6a\x95\x85\
\x85\x85\x7f\x9f\x39\x80\x9f\xc9\x9c\x34\x29\x2f\xd8\x0f\xac\xca\
\xec\xaa\x49\xeb\x8d\xe5\x41\xe0\x28\x30\x0a\xdc\x32\x69\x2a\x08\
\x82\x6f\x9e\x09\x80\xeb\xba\x91\x64\x22\x91\x37\xb3\x0f\x04\xdf\
\x13\x89\xe4\x4a\xa5\xd2\xf2\x4e\x5e\xcf\xf3\x0e\x0b\x66\x30\x7b\
\xd3\xcc\x66\x87\x52\xa9\xdc\xf4\xf4\x74\xd8\x29\x7e\xb4\x1b\x40\
\x23\xf9\xfb\xc0\xb9\x52\xb9\xfc\x79\x27\x6f\x10\x04\x4b\xc0\x5b\
\xa7\x3d\xef\x1c\x30\xf3\xe7\xed\xdb\x00\x53\x9d\xf6\x74\xac\x80\
\x9f\xc9\x9c\x44\x5a\x10\x7c\x54\x2a\x97\xbf\x00\x98\x9c\x9c\x7c\
\x61\x73\x63\xe3\x5d\x83\x09\xd5\x4b\x0e\x66\x2b\xe6\x38\x73\xc9\
\x64\xb2\x58\x28\x14\x1e\x02\xf8\xd9\xec\x14\xf0\x99\x49\xe3\x41\
\x10\x54\x7a\x06\x70\x5d\xb7\x6f\x60\xcf\x9e\x1b\xc0\x1f\x5f\x95\
\xcb\x6f\x03\x9c\x99\x98\xd8\xb7\x1d\x8b\x5d\xc1\x6c\x14\x08\x01\
\xa7\x61\x0f\x01\x47\xb0\xe2\x6c\x6d\x1d\xbf\x38\x37\xb7\xde\x80\
\xf8\x01\xd8\xbf\x59\xab\x8d\xb4\x6b\x4c\x67\xa7\x49\x80\x81\x78\
\x7c\x1c\x48\x29\x12\xb9\xd0\x7c\xf2\xed\x58\xec\x8a\x99\x1d\xdc\
\x61\xaf\xd3\xa0\x18\x0d\xa3\xd1\xef\x5c\xd7\xed\x03\x70\xcc\xce\
\x03\xc3\x89\x44\xe2\x44\xbb\x3c\x6d\x01\x24\x8d\x61\xb6\xdc\x6c\
\xb8\x6a\xb5\x7a\x16\xb3\x51\x75\xa8\x9a\x40\x06\xaf\x0d\xc4\xe3\
\x67\x01\xbe\xbc\x74\x69\x11\xb8\x8e\x59\xef\x00\x40\x1a\xe9\xa7\
\xd6\xc8\xec\x14\xf5\x52\x77\x96\x14\x02\xa7\x5a\x43\xb3\x1f\x65\
\xf6\x7a\xcf\x00\x06\x2f\xe9\xf1\x77\x8e\x60\xa4\x0b\x70\x13\xd4\
\x91\x34\xd2\x1c\x86\x70\x0f\x69\xb0\x67\x80\x7a\x2c\xeb\xe9\xa4\
\xdc\x31\x81\xe3\x88\x0e\x95\xeb\x04\x70\x5f\x66\xfb\x5a\x30\xf0\
\x7b\xa7\x40\x2d\x49\x61\x08\xd7\x5b\xfb\xcc\x06\x31\xbb\xff\x34\
\x00\xbf\x9a\x74\xf4\x89\xc0\x5f\x77\xf1\x37\x33\x3a\x32\x9b\x7b\
\x62\xe6\x98\xe0\x97\x9e\x01\x04\x15\xe0\xa0\xe7\x79\x87\x01\x92\
\xc9\x64\x51\xb0\x62\x60\x6d\x73\x83\x21\x2d\x6d\x3e\x78\x50\x04\
\xf0\x7d\x3f\x0d\xbc\x6a\xf0\x6d\xcf\x00\xd5\x5a\xad\x02\xdc\x12\
\xcc\x00\x14\x0a\x85\x87\xce\xd6\xd6\x71\x07\x56\x1b\x96\xc7\xaf\
\xa3\xde\xf9\x48\x5a\xde\x0e\xc3\x77\x1a\x87\x8e\x14\x86\x79\xe0\
\x66\xac\xbf\xff\x72\xbb\x3c\x91\x76\x0b\x6b\x6b\x6b\xdb\xe9\x43\
\x87\xee\x02\x9f\xa4\x8f\x1c\xa9\x2d\x2e\x2d\xfd\x7c\x6d\x75\x75\
\x63\xf8\xc0\x81\x52\x5f\x34\xfa\xb7\x49\x7b\x05\x2f\x02\x8f\x0c\
\x16\x1d\x98\xd9\xac\xd5\xde\x9b\x9f\x9f\xff\x07\xc0\xcf\x66\x2f\
\x00\x67\x04\xa7\x2f\x96\x4a\x37\xda\xe5\xe9\xda\xe5\x5e\x26\x93\
\x97\xf4\xa1\xa4\x5c\x29\x08\x66\xbb\xf9\x01\xf9\xd9\x6c\x0e\xf8\
\x54\xd2\x6c\x29\x08\x72\x9d\xcc\x5d\x6f\xc3\xa1\x54\x2a\xf7\xd7\
\x9d\x3b\x66\x66\x79\x2f\x9b\x7d\x23\x62\x96\x6b\x9c\x70\xff\x93\
\xef\xfb\x69\x85\x61\xde\xe0\x98\xa4\xfc\x2b\x43\x43\x1f\x77\xa5\
\xed\x66\x68\xca\xf3\xbc\x31\x99\xcd\x02\xc3\xd4\x3f\xb3\xab\xc0\
\xdd\xc6\xf2\xcb\xd4\x7f\x48\x46\x80\x9b\x8d\xdb\xb3\x6d\xe3\x3d\
\x15\x00\xd4\x6f\xc8\x44\x22\x71\x42\x61\x38\x86\x94\x06\x9a\xe7\
\xc4\xba\xc1\x6f\x32\xab\xc4\xfa\xfb\x2f\x17\x8b\xc5\x47\xbd\xc4\
\xdd\xd5\xae\x9e\x6f\xfd\x07\xb0\xd0\x3c\xea\x1c\xa0\xa5\x5f\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x3b\xdc\
\x3b\x0c\x9b\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\x73\x3e\x20\x0b\xa4\x08\x30\x32\x30\x20\x0b\xa6\
\x08\x30\x30\x30\x42\x98\x10\xc1\x14\x01\x14\x13\x50\xb5\xa3\x01\
\x00\xc6\xb9\x07\x90\x5d\x66\x1f\x83\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x1b\x29\xb3\
\x47\xee\x04\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\x73\x3e\xc0\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x4e\
\x8a\x00\x9c\x93\x22\x80\x61\x1a\x0a\x00\x00\x29\x95\x08\xaf\x88\
\xac\xba\x34\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\xed\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x6a\x49\x44\
\x41\x54\x58\x85\xed\x97\xcb\x4e\xc2\x40\x14\x86\xbf\x43\x08\x78\
\x7d\x00\xf4\x15\xd4\x84\x77\x91\x65\x69\x0b\x71\xa1\xef\x23\xae\
\x9a\x71\xa8\x4b\x7c\x07\x37\xae\x09\xe1\x1d\xc4\xbd\x17\xe4\x92\
\x1e\x17\xa5\xa6\x06\xd8\x98\x21\x18\xed\xbf\x9a\x76\x26\xfd\xbe\
\x4e\xa6\xcd\x39\xf0\xdf\x23\xf9\x0b\x55\x15\x6b\x4c\x50\x12\xb9\
\x54\x38\x05\x76\x1c\x71\x3e\x04\x86\x40\xc7\x0b\x02\x2b\x22\xba\
\x24\xa0\xaa\x12\x1b\x73\xab\x22\x4d\x60\x02\xf4\x11\x79\x75\x82\
\x57\x3d\x00\xea\x40\x15\x11\xd3\xf4\xfd\x76\x26\x51\xce\xd6\x58\
\x63\x02\x49\xe1\x8f\xa5\x72\xb9\xe1\x79\xde\xc8\x09\x7c\x91\x38\
\x8e\x6b\xc9\x7c\xde\x43\x35\xb4\xd6\x3e\x00\x5d\x80\x52\xb6\xa0\
\x24\x72\x09\x4c\x12\x38\x77\x0d\x07\xf0\x3c\x6f\x34\x4f\x92\x06\
\x30\x15\xd5\xab\x2f\x6e\x36\x50\x38\x01\xfa\x61\x18\x3e\xbb\x86\
\x67\x69\xb7\xdb\x4f\x40\x9f\xf4\x7c\x7d\x17\x00\x76\x81\xf7\x4d\
\xc1\x73\x79\x03\xf6\x56\x09\x6c\x25\x85\xc0\xd6\x05\xca\xeb\x26\
\xac\x31\xba\x6e\xee\x27\xf1\xc3\x50\x56\xdd\xdf\xfa\x0e\x14\x02\
\x85\x40\x21\xb0\xf6\x3f\xb0\xee\xbb\x75\x9d\xad\xef\x40\x21\xf0\
\xab\x04\xc6\xe4\x2a\x95\x0d\x66\x7f\xc1\x5a\x12\x18\x02\xf5\x38\
\x8e\x6b\x9b\x22\x5b\x6b\x8f\x49\xcb\xf3\xc1\x92\x80\xc0\x0d\x50\
\x4d\x66\xb3\xfb\x28\x8a\x8e\x36\x02\x4f\x92\x1e\x50\x11\xe8\xe4\
\xb8\x69\x54\x55\xba\xd6\x46\xa8\x86\xc0\x94\xb4\x31\x79\x71\x42\
\x57\x3d\x24\x7d\xf3\x8a\x42\xe4\x07\xc1\x45\xd6\x98\x2c\xb7\x66\
\xd6\x7a\x8b\xba\xfd\x8c\xb4\x52\x76\x91\x31\x30\x40\xf5\xda\x6f\
\xb5\xee\x1c\x3d\xf3\x8f\xe4\x13\xfb\x36\x7a\x56\x11\xde\xcf\xd8\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1f\x20\xb9\
\x8d\x77\xe9\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x06\xe6\x7c\x60\x60\x60\x42\x30\xa1\x1c\x08\x93\x81\x81\x09\xc1\
\x64\x60\x60\x62\x60\x48\x11\x40\xe2\x20\x73\x19\x90\x8d\x40\x02\
\x00\x23\xed\x08\xaf\x64\x9f\x0f\x15\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x02\xd4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\x51\x49\x44\
\x41\x54\x58\x85\xed\x96\x41\x4b\x54\x51\x14\xc7\x7f\xe7\x8d\xb8\
\xd0\x26\x30\x77\x69\x84\xe1\xaa\x29\x90\x41\xc7\x92\x5e\xa0\x1b\
\xa1\x8d\x0a\xf5\x19\x5a\x3b\x33\xda\xd8\x6a\x16\x41\x36\x83\xf3\
\xbe\x87\x41\x8d\xad\xc2\x4d\xf6\x14\xf4\x0d\x99\x48\x0e\x11\xe2\
\xaa\x11\xdb\x18\x34\xa8\x0b\xc3\x77\x5a\xcc\x48\x10\xf3\x74\xee\
\xe8\xae\xf9\x6f\xef\x39\xfc\x7f\xf7\xdc\x7b\xcf\x3d\xd0\x54\x53\
\xff\xbb\xc4\x24\x38\x92\x2e\xb6\x76\x86\x0f\x27\x54\x18\x07\x8d\
\x02\x5d\xd5\xa5\x12\xca\x67\x11\xc9\xef\x97\xdb\xf3\xc5\x74\xe4\
\xf8\xd2\x01\x6c\x67\xed\x31\x2a\x19\xa0\x07\xe4\x0b\xaa\x4b\x58\
\x94\x00\x44\xb5\x4b\xb1\x86\x41\xef\x22\xec\x08\x32\xed\x4e\xc6\
\xde\x5c\x0a\xc0\x93\xf9\xf9\xd0\x8f\xdd\x9b\x19\x94\x38\xf0\x5e\
\x95\xd4\x4a\x62\x70\xb3\x56\xec\x90\x53\xe8\x0b\xf9\x3a\x8b\x30\
\x0a\x64\x97\xcb\xb1\x14\x69\xf1\xeb\xdd\x64\x4d\xd9\x8e\x37\x67\
\xe7\xbc\x93\x87\xce\x5a\xb2\xee\x9c\x9c\x37\x65\xe7\xbc\x13\x3b\
\xe7\x65\xce\x8b\x3d\xb3\x02\xd5\xb2\xbf\x16\x24\xe9\xc6\x63\x73\
\xf5\x02\x54\x72\xbd\x69\x94\x57\x08\x13\xcb\x93\x83\x79\x63\x80\
\x48\xba\xd8\x7a\xed\xea\xc1\x57\x41\xbf\xb9\xf1\x7b\x8f\x4c\xcc\
\x4f\xf5\xc0\x29\x2c\x8a\x6a\xcf\xcf\xf2\x95\x48\xd0\xc5\xb4\x82\
\x92\x3b\xc3\x87\x13\xc0\x2d\x5f\x65\xa6\x11\x73\x00\xcb\x97\x67\
\x40\x6f\x47\xf8\x60\x2c\x30\x26\x68\xa1\xf2\xd4\xd8\x0c\xba\x70\
\xf5\xc8\x4d\x0c\x6c\xa8\xb2\x25\x60\x0e\x00\x1a\x15\xf4\x63\xa3\
\xe6\xa7\x12\xf8\x80\xd0\xdf\x00\x00\xd7\x15\x29\x5d\x14\x40\x61\
\x97\xbf\x0d\xcb\x08\x00\xc4\xac\x53\xd6\x34\x10\x11\x20\xb0\x17\
\x9c\x05\xb0\x87\x4f\xf7\x45\x01\x14\xed\x02\xf6\xcc\x01\x94\x4f\
\x0a\xc3\x17\x05\x00\x46\x80\x82\x31\x80\x88\xe4\x45\xb8\x33\xe4\
\x14\xfa\x1a\x75\xb6\x9d\xd5\x28\x70\x1b\xd1\x77\xc6\x00\xfb\xe5\
\xf6\x3c\xc2\x4e\xc8\xd7\xd9\x86\xdc\x55\x05\xb5\x32\xc0\xf6\x51\
\x5b\xcb\x82\x31\x40\x31\x1d\x39\x56\x65\x0a\x61\xd4\xce\x79\x53\
\xa6\xfe\x76\xce\x4b\x01\x23\xa2\x7e\x72\xfd\x69\xff\x6f\x63\x00\
\x80\x95\xf8\xe0\x5b\x20\x0b\xcc\xd6\x0d\xa1\x2a\xf6\xdc\xda\x0c\
\x22\x2f\x44\xc8\xb8\x89\xfb\x81\xe5\x87\x7a\xe6\x81\xb4\x5a\x76\
\xb8\xf0\x12\x61\x1a\x58\x14\xb5\x52\x6e\x62\x60\xa3\x56\xa8\xed\
\xac\x46\xab\x65\x1f\x11\x21\xe3\xfe\x8a\x3d\x3f\xef\x3b\x36\x18\
\x48\xbc\x71\x94\x2c\xd0\xab\xca\x96\x08\x4b\x08\xdf\x01\x50\x6e\
\x50\x79\x31\x11\x60\x5b\xd4\x4f\x9e\xb7\x73\x63\x00\xa8\xfc\x90\
\x1d\xe1\x83\x31\xaa\x23\x99\x20\xdd\x15\x7f\x2d\x89\xca\x3a\x96\
\xe6\x8f\xda\x5a\x16\xce\x3a\xf3\xa6\x9a\x6a\xea\x5f\xfd\x01\xd3\
\x1c\xd9\x7f\x5e\xb9\x33\xcd\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x02\x00\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x7d\x49\x44\
\x41\x54\x58\x85\xed\x97\x3b\x4e\x02\x51\x14\x86\xbf\x83\x28\x3e\
\x17\xa0\x6e\x41\x4d\x48\x78\x44\x9d\x71\x15\x5a\xfa\x8a\xb1\xd0\
\x0e\x48\x74\x0d\x36\x60\xa3\x56\x3e\x63\xa9\x7b\x30\x19\x34\x82\
\x24\x84\xb8\x07\xb5\xf7\x81\x82\x70\x2c\x74\x08\x04\xc3\x14\xce\
\x58\xe8\x7c\xdd\xbd\xe7\xe6\xfe\x5f\x6e\x73\xcf\x81\xff\x8e\xb4\
\xac\x54\xc5\xc8\xe4\x96\x44\x65\x0d\x61\x1c\xe8\x75\x29\xe7\x15\
\xe5\x16\x64\xd7\x4a\x46\x8f\x11\xd1\x76\x01\x55\x99\xd9\xce\x1f\
\xa9\xb2\x00\xbc\x09\x14\x15\x7d\x72\x23\x5d\x90\x41\x85\x30\x10\
\x02\x39\xb4\x12\xd1\x15\x5b\xa2\x21\x60\xa4\xaf\x97\x05\x39\x00\
\xbd\x44\x82\x73\x56\x22\x72\xef\x46\xb8\x8d\x99\x29\x0c\xa3\xb5\
\x33\x60\x4a\x95\xc5\x6c\x2a\x7e\x02\x10\x68\x58\xaa\xac\x01\x6f\
\x5d\xef\x81\x59\xb7\xc3\x01\xac\x44\xe4\xbe\x5e\xad\xce\x01\x15\
\x11\xd6\xed\xfd\x86\x00\xc2\x98\x40\xf1\x62\x23\xf6\xe0\x76\xb8\
\xcd\xe5\xa6\x71\x07\x14\x81\xf1\x76\x01\xe8\x53\x78\xf1\x2a\xbc\
\x89\x67\xa0\xdf\x5e\x04\x9d\x4e\x9b\xe9\x9c\x3a\x9d\xe9\x84\x95\
\x8c\x4b\xa7\x7a\xa0\x53\xf1\x37\xf0\x05\x7c\x01\x5f\xc0\x17\xf0\
\x05\x7c\x01\x5f\xc0\xb1\x1f\x70\xfa\xcf\x7f\x4a\xf3\x0b\x94\xa5\
\xa9\x53\xf1\x90\x01\xa0\xfc\x8d\x80\xde\x2a\x84\xcd\x4c\x61\xd8\
\xab\xe4\xc9\xf4\xd5\x28\x10\x16\x28\xb5\x0b\x68\x60\x0f\x08\xa1\
\xb5\xf3\xe9\xad\xec\x88\x17\xe1\xdd\x74\x9d\x01\x3d\x75\xd1\x5d\
\x7b\xbf\x65\x30\x31\x33\x37\xfb\xa0\xcb\x40\x05\x28\x82\x3e\xba\
\x13\x2f\x43\x7c\x0e\x26\x3d\x0a\xfb\xd9\x44\x6c\xb5\x6d\x30\xb1\
\x25\x8c\x74\x7e\xfe\xab\x6f\x9f\x00\xfa\xdc\x11\xa0\x2c\x50\x52\
\x95\x1d\x2b\x15\x3b\x75\xe9\xce\x3f\xc2\x07\xd1\xbc\x75\x94\xcf\
\xbc\x8d\xf9\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x03\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x03\x22\x49\x44\
\x41\x54\x58\x85\xed\x96\x4d\x6c\x54\x55\x14\xc7\x7f\xe7\x0d\xa9\
\x09\xcc\x90\x50\x76\xb6\xc6\x60\x60\xe3\xa3\x86\x34\xf4\xc3\xc6\
\x67\xa4\x1b\xa2\x98\x40\x13\x5d\xc9\x1a\x36\xda\x84\x7e\x59\x5c\
\xcd\xce\x3a\xa4\x33\x09\xcb\xae\x65\x83\x89\x19\x4c\x04\xc3\xc6\
\x3a\x98\xb4\x6f\x22\x62\x4b\x27\xc6\x34\xac\x9c\x06\x37\x94\x74\
\x98\x92\x50\x3a\xef\xef\xe2\x4d\xa7\x35\x99\xe9\xcc\x43\x76\xf4\
\xbf\xba\xe7\xbd\x73\xef\xf9\xdd\x73\xee\x17\xec\x69\x4f\xaf\xba\
\x2c\x8a\xb3\x9b\x2c\xb4\x1d\x4e\xac\x0f\xc9\x38\x07\xea\x06\x3a\
\xaa\xbf\x8a\x88\xdf\xcd\x2c\xfb\xa8\x74\x20\x5b\x48\xba\x1b\x2f\
\x1d\xc0\xcb\xcc\x7f\x82\x2c\x05\x1c\x01\xbb\x8f\x34\x8b\x43\x11\
\xc0\xa4\x0e\xe1\x9c\x02\x75\x61\x3c\x30\x6c\x22\x77\xa9\xf7\xfb\
\x97\x02\xf0\xe9\xf5\xeb\xb1\x7f\x56\xde\x4c\x21\x46\x80\x9f\x24\
\x26\x7f\x1d\xed\x5b\xa8\xe7\x3b\x90\xc9\x9f\x88\x05\x9a\xc2\x38\
\x0d\x5c\xb9\x53\xea\x9d\x24\x69\x41\xab\x93\xac\x2b\x2f\xe3\x4f\
\x7b\x69\xbf\xf2\x7e\x66\x7e\xac\xe5\x3e\x69\x7f\xdc\x4b\xfb\x15\
\x2f\xed\xa7\x9a\xf9\xee\x9a\x81\x6a\xda\xbf\x33\x6c\x2c\x37\xd2\
\x3b\x0d\xf0\xe1\xd5\xe5\xd7\x9e\x3c\x7f\x7c\xd1\xe0\x33\x59\xd0\
\x15\x0e\x62\x8b\x18\xd7\xe2\xb1\xf6\x99\x5b\xc3\xc7\x9e\x55\xc1\
\x27\x10\xdf\x60\x0c\xdd\xb9\xd4\x97\x8d\x0c\xe0\x26\x0b\x6d\xed\
\x07\xcb\x7f\x1a\xfa\x2b\x37\xd2\xff\x11\xc0\x07\x57\xe7\x3b\x2b\
\x9b\xce\x4d\x50\x17\x58\x00\x72\xaa\xc3\x84\x6d\x63\x31\x16\xd3\
\x99\xd9\xe1\xfe\x22\xc0\x7b\x99\xfc\x6d\x93\x8e\xac\x96\xe2\x6e\
\xa3\x85\xe9\x34\x02\x38\x9c\x58\x1f\x02\xde\x0a\x64\x97\xb7\x66\
\x5e\xd9\x74\x6e\x62\x3a\x1e\x7a\x68\x47\xdf\x5a\xbb\xab\xb2\xc9\
\x8f\x6e\xb2\xd0\x06\xe0\x04\xf6\x25\x70\xf4\x50\xa2\x7c\xb6\x51\
\x9c\x86\x00\xe1\x56\x63\x61\x6b\xc1\x95\x2b\xab\x17\x40\x5d\x68\
\x97\xb2\x09\x03\x7b\xa7\xfd\x60\xf9\x02\x40\x6e\xb4\xe7\x9e\xc4\
\x92\x41\x74\x00\x50\xb7\xa1\x5f\x6a\x66\x60\xe7\xc3\x54\xef\x2e\
\x41\x00\x9c\xdf\xb2\x0d\x7e\xc6\x38\xf9\x02\x00\xbc\x2e\xac\x58\
\xb3\x4c\xee\x7f\xd3\x5e\x5f\x06\x0e\xc8\xdd\x01\xb4\xc2\xf6\x81\
\x15\x09\x00\x2c\xda\x49\x59\x37\x80\x99\x11\x66\x25\x32\xc0\x43\
\x02\x3a\x6b\x96\xac\xd0\x6a\x09\x24\x96\xb6\x6d\x75\x00\x0f\xa3\
\x03\x88\xdf\x04\xa7\xb6\x3d\xf5\x6d\xab\x25\x30\xb3\x6b\x3b\x3e\
\x0d\x02\xf9\xc8\x00\x66\x96\x35\xe3\xf8\x40\x26\x7f\x02\x20\x1e\
\x6b\x9f\xc1\x58\xc4\xd0\x2e\xd1\x25\xe3\x8f\xd5\x52\x7c\x06\xc0\
\xcb\xcc\x75\x03\x6f\x63\xfa\x21\x32\xc0\xa3\xd2\x81\x2c\xc6\x83\
\x58\xa0\x29\x80\x5b\xc3\xc7\x9e\xc5\x62\x3a\x03\xdc\xaf\x46\xab\
\x95\xa3\xba\xf2\x11\x2c\x54\x54\xf9\xb8\x90\x74\x37\x90\x0c\x39\
\x29\x60\xf9\xe9\xfe\x7d\x37\x22\x03\x14\x92\xee\x86\xc4\x38\xc6\
\x69\x2f\xed\x8f\x03\xcc\x0e\xf7\x17\x57\xd7\xe2\x3d\xc0\x17\x52\
\x90\x07\xd6\x81\x75\xa4\xbc\x99\x3e\x7f\xbc\x16\xef\x9b\x1b\x19\
\x58\x01\xf0\xd2\xfe\x24\x30\x68\x0a\xc6\xee\x5e\x3c\xf9\xbc\x51\
\x9c\xa6\xf2\xd2\x7e\xaa\x7a\xb1\x8c\xb7\xd4\x41\x32\x6f\x7a\xfe\
\x72\x78\x81\xf9\x53\xcd\xdc\x9b\x6f\xb3\xa4\x1c\x2f\x91\xff\x1a\
\x63\x02\xb8\x6d\x72\x26\x73\xa3\x3d\xf7\xea\xc2\x66\xe6\xba\xab\
\x69\x1f\x34\x23\x95\x5b\xeb\xfd\xaa\xd9\x75\x1c\xe1\x41\xe2\x9f\
\x43\x5c\x01\x8e\x4a\x2c\x99\x31\x8b\xf1\x37\x00\xe2\x0d\xc2\x1d\
\xe3\x02\xcb\xa6\x60\x2c\x37\xfa\x6e\xc3\x85\xf7\x42\x00\x10\xde\
\x90\x87\x12\xe5\xb3\x54\x9f\x64\x86\x75\x86\xf1\x55\x34\xd9\x5d\
\x1c\x65\x9f\xee\xdf\x77\xe3\x7f\xd5\x7c\x4f\x7b\x7a\xe5\xf4\x2f\
\x95\x3f\x47\xac\x6d\xe5\x30\x73\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x02\x02\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x7f\x49\x44\
\x41\x54\x58\x85\xed\x97\xcb\x4a\x42\x51\x14\x86\xbf\x65\xa5\xd9\
\xe5\x01\xac\x57\xc8\x40\x28\xa3\xd2\x9e\x22\x87\xdd\x88\x06\x36\
\x33\xa1\x9e\xa1\x89\x36\xa9\x46\x5d\x69\x58\xef\x10\x1c\x8d\xb4\
\x40\xa2\x77\xc8\xe6\x5d\xac\x2c\x57\x83\x3a\xa2\x1c\xcf\x24\xb6\
\x18\x75\xfe\xd9\x5e\x1b\xf6\xf7\xb1\x60\x6f\xf6\x82\xff\x1e\x69\
\x5a\xa9\x4a\x2c\x5b\x58\x14\x95\x24\x42\x18\xe8\x35\xc4\x79\x41\
\xb9\x05\xd9\xb1\xd6\xc6\x8f\x10\x51\xa7\x80\xaa\xcc\x6c\x15\x0f\
\x55\x99\x07\x5e\x05\x4a\x8a\x3e\x9a\xa0\x0b\x32\xa0\x10\x01\x02\
\x20\x07\x56\x6a\x7c\xd9\x96\xa8\x0b\xc4\x32\x97\x4b\x82\xec\x83\
\xe6\x91\xee\x84\x95\x1a\x2b\x9b\x80\xdb\x89\x67\xaf\x43\xe8\xc7\
\x29\x30\xa5\xca\x42\x2e\x3d\x71\x0c\xe0\xab\x5b\xaa\x24\x81\xd7\
\xae\x77\xdf\xac\x69\x38\x80\x95\x1a\x2b\xd7\xaa\xd5\x04\xf0\x26\
\xc2\xaa\x5d\xaf\x0b\x20\x8c\x08\x94\xce\xd7\xa3\xf7\xa6\xe1\x76\
\xf2\x1b\xb1\x3b\xa0\x04\x84\x9d\x02\x10\x54\x78\x6e\x17\xbc\x21\
\x4f\x40\x5f\x2b\x81\x8e\xc4\x13\xe8\xb8\x40\xb7\xdb\x46\x3c\x53\
\x50\xb7\xbd\x9f\xc4\x5a\x9b\x90\x56\xf5\x8e\x77\xc0\x13\xf0\x04\
\x3c\x01\xd7\x77\xc0\xed\xde\x9a\x4e\xc7\x3b\xe0\x09\xfc\x2a\x81\
\x8a\x34\xfc\x54\xda\x98\x7e\xa0\xd2\x42\x40\x6f\x15\x22\xf1\xec\
\x75\xa8\x5d\xe4\xc9\xcc\xc5\x30\x10\x11\xb8\x71\x0a\xa8\x6f\x17\
\x08\xa0\x1f\x67\xd3\x9b\xb9\xa1\x76\xc0\x7b\xe8\x3a\x05\xfc\x35\
\xd1\x1d\xbb\xde\x34\x98\xc4\xb3\x57\x7b\xa0\x4b\xc0\x1b\x50\x02\
\x7d\x30\x83\x97\x41\xbe\x06\x13\xbf\xc2\x5e\x2e\x15\x5d\x71\x0c\
\x26\xb6\x44\x2c\x53\x9c\xfb\xfe\xb7\x8f\x02\x41\x33\x02\x54\x04\
\x6e\x54\x65\xdb\x4a\x47\x4f\x0c\x9d\xf9\x47\xf2\x09\xb5\xbd\x75\
\x94\xee\x91\xe8\xbe\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x00\x9e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x0f\xfd\
\x8f\xf8\x2e\x00\x00\x00\x22\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x0d\xfe\x9f\x87\xb1\x18\x91\x05\x18\x0d\xe1\x42\x48\x2a\x0c\x19\
\x18\x18\x91\x05\x10\x2a\xd1\x00\x00\xca\xb5\x07\xd2\x76\xbb\xb2\
\xc5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\xeb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x68\x49\x44\
\x41\x54\x58\x85\xed\x97\x4d\x4e\xc2\x40\x18\x86\x9f\xaf\x10\x14\
\xd4\x03\xa0\x57\x10\x13\xb6\x9e\x43\x76\xc8\x58\x8c\x26\x70\x1f\
\x31\x31\xa1\x74\x48\x97\x78\x0c\xd7\xc4\x78\x07\x71\xef\x0f\x02\
\x91\xcf\x85\x94\x20\xa0\x2c\x1c\x5c\x68\xdf\xdd\x4c\xdf\xf4\x79\
\xa6\x4d\xd3\x19\xf8\xef\x91\xf9\xb1\x6f\xcc\x09\x50\x03\x0a\xc0\
\xa6\x23\xce\x2b\x70\x27\x22\x8d\x20\x0c\x2d\xa0\xcb\x04\xc4\x37\
\x26\x04\x2a\xc0\x00\xe8\x02\x4f\x8e\x04\xb6\x81\x22\xb0\x01\xb4\
\x5a\xd6\x9e\xc6\x12\x53\x01\xdf\x18\x1f\x08\x04\x6e\xd2\x6f\x6f\
\xa5\xab\x28\xea\x39\x82\x03\x70\x5e\x2e\xe7\x47\x9e\xd7\x41\xe4\
\x50\xc0\x04\xd6\xb6\x01\xbc\x99\x4e\x0d\x18\x8c\x45\x8e\x5c\xc3\
\x01\xae\xa2\xa8\x27\xe9\x74\x09\x18\xaa\x48\x3d\x9e\x9f\x15\xd8\
\x07\xba\x61\x18\x3e\xb8\x86\xc7\x09\x82\xe0\x1e\x91\x2e\xaa\x85\
\x65\x02\x59\x54\x5f\xd6\x05\x9f\x66\x3c\x7e\x06\x72\xf1\x30\xbd\
\xaa\xef\x1b\xa3\xab\x3a\xdf\xa5\x65\xed\xfc\x97\xf6\x29\xde\x77\
\x17\x7f\x23\x89\x40\x22\x90\x08\x24\x02\x89\x40\x22\x90\x08\xac\
\xdc\x0f\xac\xfa\x9f\xff\x34\xb3\x4f\xa0\x8f\x48\xee\xcb\xa6\x33\
\xa2\xb7\x05\xf4\x17\x04\x14\xee\x80\xe2\x79\xb9\x9c\x5f\x17\xbb\
\x52\xa9\xec\xa1\x5a\x04\x6e\x17\x04\x3c\x91\x4b\x60\x63\x94\x4a\
\x5d\x57\xab\xd5\xdd\x75\xc0\x53\x22\x1d\x20\xa3\x22\x8d\x78\x7e\
\xfe\x60\xd2\x04\x7c\x60\x38\xd9\xbd\x3e\x3a\xa1\x8b\xec\x4c\x56\
\x9e\x51\x68\x86\xd6\x9e\x31\x7f\x30\x89\xab\x55\x63\x8e\x55\xa4\
\x8e\xea\x01\x90\x75\x22\xf0\xf1\xce\x6f\x51\xbd\x68\xb5\xdb\x91\
\xa3\x7b\xfe\x91\xbc\x03\x16\x71\x6a\x27\x44\x74\xfe\x4f\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x09\
\x09\x5f\x97\x13\
\x00\x71\
\x00\x73\x00\x73\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x0a\
\x09\x24\x4d\x25\
\x00\x71\
\x00\x64\x00\x61\x00\x72\x00\x6b\x00\x73\x00\x74\x00\x79\x00\x6c\x00\x65\
\x00\x09\
\x00\x28\xad\x23\
\x00\x73\
\x00\x74\x00\x79\x00\x6c\x00\x65\x00\x2e\x00\x71\x00\x73\x00\x73\
\x00\x02\
\x00\x00\x07\x83\
\x00\x72\
\x00\x63\
\x00\x11\
\x0a\xe5\x6c\x07\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x09\
\x06\x98\x83\x27\
\x00\x63\
\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x08\x8c\x6a\xa7\
\x00\x48\
\x00\x73\x00\x65\x00\x70\x00\x61\x00\x72\x00\x74\x00\x6f\x00\x6f\x00\x6c\x00\x62\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x1a\
\x01\x21\xeb\x47\
\x00\x73\
\x00\x74\x00\x79\x00\x6c\x00\x65\x00\x73\x00\x68\x00\x65\x00\x65\x00\x74\x00\x2d\x00\x62\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\
\x00\x2d\x00\x6d\x00\x6f\x00\x72\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x05\x95\xde\x27\
\x00\x75\
\x00\x6e\x00\x64\x00\x6f\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x13\
\x08\xc8\x96\xe7\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x2e\x00\x70\
\x00\x6e\x00\x67\
\x00\x15\
\x0f\xf3\xc0\x07\
\x00\x75\
\x00\x70\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\
\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1f\
\x0a\xae\x27\x47\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\
\x00\x64\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x0c\xe2\x68\x67\
\x00\x74\
\x00\x72\x00\x61\x00\x6e\x00\x73\x00\x70\x00\x61\x00\x72\x00\x65\x00\x6e\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x16\
\x01\x75\xcc\x87\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\
\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x0b\xc5\xd7\xc7\
\x00\x73\
\x00\x74\x00\x79\x00\x6c\x00\x65\x00\x73\x00\x68\x00\x65\x00\x65\x00\x74\x00\x2d\x00\x76\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x08\x90\x94\x67\
\x00\x63\
\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x70\x00\x72\x00\x65\x00\x73\x00\x73\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x14\
\x07\xec\xd1\xc7\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x0e\xde\xfa\xc7\
\x00\x6c\
\x00\x65\x00\x66\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x07\x8f\x9d\x27\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2d\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x0f\
\x02\x9f\x05\x87\
\x00\x72\
\x00\x69\x00\x67\x00\x68\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x04\xa2\xfc\xa7\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x08\xc4\x6a\xa7\
\x00\x56\
\x00\x73\x00\x65\x00\x70\x00\x61\x00\x72\x00\x74\x00\x6f\x00\x6f\x00\x6c\x00\x62\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x10\
\x01\x07\x4a\xa7\
\x00\x56\
\x00\x6d\x00\x6f\x00\x76\x00\x65\x00\x74\x00\x6f\x00\x6f\x00\x6c\x00\x62\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x19\
\x08\x3e\xcc\x07\
\x00\x73\
\x00\x74\x00\x79\x00\x6c\x00\x65\x00\x73\x00\x68\x00\x65\x00\x65\x00\x74\x00\x2d\x00\x62\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\
\x00\x2d\x00\x65\x00\x6e\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1c\
\x01\xe0\x4a\x07\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\x00\x64\
\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x06\x5e\x2c\x07\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x64\x00\x2d\x00\x6f\x00\x6e\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x06\x53\x25\xa7\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x06\x41\x40\x87\
\x00\x73\
\x00\x69\x00\x7a\x00\x65\x00\x67\x00\x72\x00\x69\x00\x70\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x01\x00\xca\xa7\
\x00\x48\
\x00\x6d\x00\x6f\x00\x76\x00\x65\x00\x74\x00\x6f\x00\x6f\x00\x6c\x00\x62\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1c\
\x08\x3f\xda\x67\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\
\x00\x64\x00\x5f\x00\x66\x00\x6f\x00\x63\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x01\xf4\x81\x47\
\x00\x63\
\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x68\x00\x6f\x00\x76\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x18\
\x03\x8e\xde\x67\
\x00\x72\
\x00\x69\x00\x67\x00\x68\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\
\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1a\
\x0e\xbc\xc3\x67\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\x00\x64\x00\x69\x00\x73\
\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x17\
\x0c\xab\x51\x07\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\
\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x0b\xda\x30\xa7\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x1a\
\x01\x87\xae\x67\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x69\x00\x6e\x00\x64\x00\x65\x00\x74\x00\x65\x00\x72\x00\x6d\
\x00\x69\x00\x6e\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x17\
\x0c\x65\xce\x07\
\x00\x6c\
\x00\x65\x00\x66\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\
\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x19\
\x0b\x59\x6e\x87\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\x00\x66\
\x00\x6f\x00\x63\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1a\
\x05\x11\xe0\xe7\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\
\x00\x66\x00\x6f\x00\x63\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x17\
\x0f\x1e\x9b\x47\
\x00\x72\
\x00\x61\x00\x64\x00\x69\x00\x6f\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\x00\x66\x00\x6f\x00\x63\
\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x20\
\x09\xd7\x1f\xa7\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x69\x00\x6e\x00\x64\x00\x65\x00\x74\x00\x65\x00\x72\x00\x6d\
\x00\x69\x00\x6e\x00\x61\x00\x74\x00\x65\x00\x5f\x00\x66\x00\x6f\x00\x63\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x06\xe6\xe6\x67\
\x00\x75\
\x00\x70\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1d\
\x09\x07\x81\x07\
\x00\x63\
\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x65\x00\x64\x00\x5f\
\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x18\x00\x02\x00\x00\x00\x01\x00\x00\x00\x2b\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x4a\x00\x02\x00\x00\x00\x27\x00\x00\x00\x04\
\x00\x00\x04\x50\x00\x00\x00\x00\x00\x01\x00\x00\x31\xfe\
\x00\x00\x03\x44\x00\x00\x00\x00\x00\x01\x00\x00\x2b\x70\
\x00\x00\x00\xbc\x00\x00\x00\x00\x00\x01\x00\x00\x17\xf5\
\x00\x00\x01\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x21\x0f\
\x00\x00\x05\xa4\x00\x00\x00\x00\x00\x01\x00\x00\x3c\xe1\
\x00\x00\x03\xa2\x00\x00\x00\x00\x00\x01\x00\x00\x2d\x3c\
\x00\x00\x04\xb4\x00\x00\x00\x00\x00\x01\x00\x00\x34\xc5\
\x00\x00\x02\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x29\x64\
\x00\x00\x04\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x37\x1f\
\x00\x00\x02\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x2a\x08\
\x00\x00\x06\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x42\x54\
\x00\x00\x00\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x18\xaf\
\x00\x00\x04\x32\x00\x00\x00\x00\x00\x01\x00\x00\x31\x79\
\x00\x00\x04\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x30\xcf\
\x00\x00\x03\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x30\x38\
\x00\x00\x00\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x14\xf7\
\x00\x00\x06\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x4a\x07\
\x00\x00\x02\xac\x00\x00\x00\x00\x00\x01\x00\x00\x28\xca\
\x00\x00\x02\x5c\x00\x00\x00\x00\x00\x01\x00\x00\x26\x30\
\x00\x00\x03\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x2c\x58\
\x00\x00\x04\x76\x00\x00\x00\x00\x00\x01\x00\x00\x32\xde\
\x00\x00\x00\x94\x00\x00\x00\x00\x00\x01\x00\x00\x17\x45\
\x00\x00\x02\x34\x00\x00\x00\x00\x00\x01\x00\x00\x23\xd6\
\x00\x00\x03\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x2a\xb1\
\x00\x00\x01\x10\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xf5\
\x00\x00\x07\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x4a\xa9\
\x00\x00\x06\xb8\x00\x00\x00\x00\x00\x01\x00\x00\x48\x01\
\x00\x00\x01\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x1e\x74\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x11\x47\
\x00\x00\x06\x12\x00\x00\x00\x00\x00\x01\x00\x00\x3f\x7c\
\x00\x00\x02\x06\x00\x00\x00\x00\x00\x01\x00\x00\x22\xe3\
\x00\x00\x05\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x3c\x3d\
\x00\x00\x05\xde\x00\x00\x00\x00\x00\x01\x00\x00\x3e\xd2\
\x00\x00\x05\x48\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x93\
\x00\x00\x01\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x20\x48\
\x00\x00\x05\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x37\xc3\
\x00\x00\x02\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x28\x20\
\x00\x00\x06\x84\x00\x00\x00\x00\x00\x01\x00\x00\x44\x58\
\x00\x00\x01\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xd1\
\x00\x00\x00\x32\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
    """Register the embedded resource data with the Qt resource system."""
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

def qCleanupResources():
    """Unregister the embedded resource data from the Qt resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

# Generated resource modules register themselves on import.
qInitResources()
|
gpl-3.0
|
lokirius/python-for-android
|
python3-alpha/python3-src/Lib/test/test_socket.py
|
47
|
73373
|
#!/usr/bin/env python3
import unittest
from test import support
import errno
import io
import socket
import select
import time
import traceback
import queue
import sys
import os
import array
import platform
import contextlib
from weakref import proxy
import signal
import math
try:
import fcntl
except ImportError:
fcntl = False
def try_address(host, port=0, family=socket.AF_INET):
    """Try to bind a socket on the given host:port and return True
    if that has been possible.

    Returns False when the address cannot be resolved or bound.
    """
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
    except socket.error:
        # Address family itself is unsupported on this platform.
        return False
    try:
        sock.bind((host, port))
    except (socket.error, socket.gaierror):
        return False
    else:
        return True
    finally:
        # Always release the descriptor -- the previous version leaked the
        # socket whenever bind() raised.
        sock.close()
def linux_version():
    """Return the kernel release as a tuple of ints, e.g. (2, 6, 33).

    Falls back to (0, 0, 0) when the release string cannot be parsed.
    """
    # platform.release() is something like '2.6.33.7-desktop-2mnb';
    # keep only the dotted numeric prefix before the first dash.
    numeric_prefix = platform.release().split('-')[0]
    try:
        return tuple(int(piece) for piece in numeric_prefix.split('.'))
    except ValueError:
        return 0, 0, 0
# Shared test constants: loopback host and a payload exercising a non-ASCII
# code point plus a carriage return.
HOST = support.HOST
MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf8') ## test unicode string and carriage return
# True only when the build claims IPv6 support *and* the IPv6 loopback can
# actually be bound (some hosts compile IPv6 in but leave it disabled).
SUPPORTS_IPV6 = socket.has_ipv6 and try_address('::1', family=socket.AF_INET6)
# Threading is optional in some CPython builds; threaded tests must check
# these for None before use.
try:
    import _thread as thread
    import threading
except ImportError:
    thread = None
    threading = None
class SocketTCPTest(unittest.TestCase):
    """Fixture providing a listening TCP server socket.

    After setUp(), ``self.serv`` is a bound, listening AF_INET stream
    socket and ``self.port`` holds the ephemeral port it was bound to.
    """

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # bind_port() selects a free port, binds the socket and returns it.
        self.port = support.bind_port(self.serv)
        self.serv.listen(1)

    def tearDown(self):
        self.serv.close()
        # Drop the reference so accidental reuse fails loudly.
        self.serv = None
class SocketUDPTest(unittest.TestCase):
    """Fixture providing a bound UDP server socket.

    After setUp(), ``self.serv`` is a bound AF_INET datagram socket and
    ``self.port`` holds the ephemeral port it was bound to (no listen()
    for datagram sockets).
    """

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # bind_port() selects a free port, binds the socket and returns it.
        self.port = support.bind_port(self.serv)

    def tearDown(self):
        self.serv.close()
        # Drop the reference so accidental reuse fails loudly.
        self.serv = None
class ThreadableTest:
    """Threadable Test class

    The ThreadableTest class makes it easy to create a threaded
    client/server pair from an existing unit test. To create a
    new threaded class from an existing unit test, use multiple
    inheritance:

        class NewClass (OldClass, ThreadableTest):
            pass

    This class defines two new fixture functions with obvious
    purposes for overriding:

        clientSetUp ()
        clientTearDown ()

    Any new test functions within the class must then define
    tests in pairs, where the test name is preceded with a
    '_' to indicate the client portion of the test. Ex:

        def testFoo(self):
            # Server portion

        def _testFoo(self):
            # Client portion

    Any exceptions raised by the clients during their tests
    are caught and transferred to the main thread to alert
    the testing framework.

    Note, the server setup function cannot call any blocking
    functions that rely on the client thread during setup,
    unless serverExplicitReady() is called just before
    the blocking call (such as in setting up a client/server
    connection and performing the accept() in setUp().
    """

    def __init__(self):
        # Swap the true setup function: unittest will invoke _setUp/_tearDown,
        # which wrap the subclass's real setUp/tearDown with thread plumbing.
        self.__setUp = self.setUp
        self.__tearDown = self.tearDown
        self.setUp = self._setUp
        self.tearDown = self._tearDown

    def serverExplicitReady(self):
        """This method allows the server to explicitly indicate that
        it wants the client thread to proceed. This is useful if the
        server is about to execute a blocking routine that is
        dependent upon the client thread during its setup routine."""
        self.server_ready.set()

    def _setUp(self):
        # Events coordinating server and client threads, plus a one-slot
        # queue used to transfer a client-side exception to the main thread.
        self.server_ready = threading.Event()
        self.client_ready = threading.Event()
        self.done = threading.Event()
        self.queue = queue.Queue(1)

        # Do some munging to start the client test: derive the client-side
        # method name ('_' + server test name) from this test's id().
        methodname = self.id()
        i = methodname.rfind('.')
        methodname = methodname[i+1:]
        test_method = getattr(self, '_' + methodname)
        self.client_thread = thread.start_new_thread(
            self.clientRun, (test_method,))

        self.__setUp()
        # If the server's setUp() did not call serverExplicitReady(),
        # release the client thread now that server setup is complete.
        if not self.server_ready.is_set():
            self.server_ready.set()
        self.client_ready.wait()

    def _tearDown(self):
        self.__tearDown()
        # Wait for the client thread to finish, then re-raise any exception
        # it captured so the test framework reports it.
        self.done.wait()
        if self.queue.qsize():
            exc = self.queue.get()
            raise exc

    def clientRun(self, test_func):
        # Runs in the client thread: wait for the server, announce readiness,
        # then execute the client half of the test.
        self.server_ready.wait()
        self.client_ready.set()
        # NOTE(review): clientSetUp() executes outside the try/finally below,
        # so an exception raised in it never sets self.done and _tearDown
        # would block -- confirm this is intended.
        self.clientSetUp()
        if not hasattr(test_func, '__call__'):
            raise TypeError("test_func must be a callable function")
        try:
            test_func()
        except BaseException as e:
            # Hand any failure (including KeyboardInterrupt/SystemExit) to
            # the main thread via the queue.
            self.queue.put(e)
        finally:
            self.clientTearDown()

    def clientSetUp(self):
        raise NotImplementedError("clientSetUp must be implemented.")

    def clientTearDown(self):
        # Signal completion to _tearDown() and terminate the client thread.
        self.done.set()
        thread.exit()
class ThreadedTCPSocketTest(SocketTCPTest, ThreadableTest):
    """TCP server fixture plus a client socket created in a helper thread."""
    def __init__(self, methodName='runTest'):
        # Explicit base-class init: ThreadableTest is not cooperative.
        SocketTCPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)
    def clientSetUp(self):
        self.cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    def clientTearDown(self):
        client, self.cli = self.cli, None
        client.close()
        ThreadableTest.clientTearDown(self)
class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest):
    """UDP server fixture plus a client socket created in a helper thread."""
    def __init__(self, methodName='runTest'):
        # Explicit base-class init: ThreadableTest is not cooperative.
        SocketUDPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)
    def clientSetUp(self):
        self.cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def clientTearDown(self):
        client, self.cli = self.cli, None
        client.close()
        ThreadableTest.clientTearDown(self)
class SocketConnectedTest(ThreadedTCPSocketTest):
    """Socket tests for client-server connection.

    self.cli_conn is a client socket connected to the server. The
    setUp() method guarantees that it is connected to the server.
    """
    def __init__(self, methodName='runTest'):
        ThreadedTCPSocketTest.__init__(self, methodName=methodName)
    def setUp(self):
        ThreadedTCPSocketTest.setUp(self)
        # Indicate explicitly we're ready for the client thread to
        # proceed and then perform the blocking call to accept
        self.serverExplicitReady()
        conn, addr = self.serv.accept()
        self.cli_conn = conn
    def tearDown(self):
        self.cli_conn.close()
        self.cli_conn = None
        ThreadedTCPSocketTest.tearDown(self)
    def clientSetUp(self):
        ThreadedTCPSocketTest.clientSetUp(self)
        # Connect to the listening server; serv_conn aliases the client
        # socket so paired _test* methods read naturally.
        self.cli.connect((HOST, self.port))
        self.serv_conn = self.cli
    def clientTearDown(self):
        self.serv_conn.close()
        self.serv_conn = None
        ThreadedTCPSocketTest.clientTearDown(self)
class SocketPairTest(unittest.TestCase, ThreadableTest):
    """Fixture built on socket.socketpair(): ``serv`` is the server end,
    ``cli`` the client end; the client needs no extra setup."""
    def __init__(self, methodName='runTest'):
        # Explicit base-class init: ThreadableTest is not cooperative.
        unittest.TestCase.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)
    def setUp(self):
        pair = socket.socketpair()
        self.serv, self.cli = pair
    def tearDown(self):
        self.serv.close()
        self.serv = None
    def clientSetUp(self):
        # The client socket already exists; nothing to do.
        pass
    def clientTearDown(self):
        client, self.cli = self.cli, None
        client.close()
        ThreadableTest.clientTearDown(self)
#######################################################################
## Begin Tests
class GeneralModuleTests(unittest.TestCase):
    """Tests for module-level socket functions, constants and basic
    socket-object behaviour that need no helper server thread."""
    def test_repr(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(s.close)
        self.assertTrue(repr(s).startswith("<socket.socket object"))
    def test_weakref(self):
        # Sockets must be weak-referenceable; once the socket is gone
        # the proxy must raise ReferenceError rather than resurrect it.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        p = proxy(s)
        self.assertEqual(p.fileno(), s.fileno())
        s.close()
        s = None
        try:
            p.fileno()
        except ReferenceError:
            pass
        else:
            self.fail('Socket proxy still exists')
    def testSocketError(self):
        # Testing socket module exceptions.  herror and gaierror must
        # both be catchable as socket.error.
        def raise_error(*args, **kwargs):
            raise socket.error
        def raise_herror(*args, **kwargs):
            raise socket.herror
        def raise_gaierror(*args, **kwargs):
            raise socket.gaierror
        self.assertRaises(socket.error, raise_error,
                              "Error raising socket exception.")
        self.assertRaises(socket.error, raise_herror,
                              "Error raising socket exception.")
        self.assertRaises(socket.error, raise_gaierror,
                              "Error raising socket exception.")
    def testSendtoErrors(self):
        # Testing that sendto() does not mask failures. See #10169.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.addCleanup(s.close)
        s.bind(('', 0))
        sockname = s.getsockname()
        # 2 args
        with self.assertRaises(TypeError) as cm:
            s.sendto('\u2620', sockname)
        self.assertEqual(str(cm.exception),
                         "'str' does not support the buffer interface")
        with self.assertRaises(TypeError) as cm:
            s.sendto(5j, sockname)
        self.assertEqual(str(cm.exception),
                         "'complex' does not support the buffer interface")
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo', None)
        self.assertIn('not NoneType',str(cm.exception))
        # 3 args
        with self.assertRaises(TypeError) as cm:
            s.sendto('\u2620', 0, sockname)
        self.assertEqual(str(cm.exception),
                         "'str' does not support the buffer interface")
        with self.assertRaises(TypeError) as cm:
            s.sendto(5j, 0, sockname)
        self.assertEqual(str(cm.exception),
                         "'complex' does not support the buffer interface")
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo', 0, None)
        self.assertIn('not NoneType', str(cm.exception))
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo', 'bar', sockname)
        self.assertIn('an integer is required', str(cm.exception))
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo', None, None)
        self.assertIn('an integer is required', str(cm.exception))
        # wrong number of args
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo')
        self.assertIn('(1 given)', str(cm.exception))
        with self.assertRaises(TypeError) as cm:
            s.sendto(b'foo', 0, sockname, 4)
        self.assertIn('(4 given)', str(cm.exception))
    def testCrucialConstants(self):
        # Testing for mission critical constants: a bare attribute
        # access raises AttributeError if any is missing.
        socket.AF_INET
        socket.SOCK_STREAM
        socket.SOCK_DGRAM
        socket.SOCK_RAW
        socket.SOCK_RDM
        socket.SOCK_SEQPACKET
        socket.SOL_SOCKET
        socket.SO_REUSEADDR
    def testHostnameRes(self):
        # Testing hostname resolution mechanisms
        hostname = socket.gethostname()
        try:
            ip = socket.gethostbyname(hostname)
        except socket.error:
            # Probably name lookup wasn't set up right; skip this test
            return
        self.assertTrue(ip.find('.') >= 0, "Error resolving host to ip.")
        try:
            hname, aliases, ipaddrs = socket.gethostbyaddr(ip)
        except socket.error:
            # Probably a similar problem as above; skip this test
            return
        all_host_names = [hostname, hname] + aliases
        fqhn = socket.getfqdn(ip)
        if not fqhn in all_host_names:
            self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names)))
    def testRefCountGetNameInfo(self):
        # Testing reference count for getnameinfo
        # getrefcount() is CPython-specific, hence the hasattr guard.
        if hasattr(sys, "getrefcount"):
            try:
                # On some versions, this loses a reference
                orig = sys.getrefcount(__name__)
                socket.getnameinfo(__name__,0)
            except TypeError:
                if sys.getrefcount(__name__) != orig:
                    self.fail("socket.getnameinfo loses a reference")
    def testInterpreterCrash(self):
        # Making sure getnameinfo doesn't crash the interpreter
        try:
            # On some versions, this crashes the interpreter.
            socket.getnameinfo(('x', 0, 0, 0), 0)
        except socket.error:
            pass
    def testNtoH(self):
        # This just checks that htons etc. are their own inverse,
        # when looking at the lower 16 or 32 bits.
        sizes = {socket.htonl: 32, socket.ntohl: 32,
                 socket.htons: 16, socket.ntohs: 16}
        for func, size in sizes.items():
            mask = (1<<size) - 1
            for i in (0, 1, 0xffff, ~0xffff, 2, 0x01234567, 0x76543210):
                self.assertEqual(i & mask, func(func(i&mask)) & mask)
            swapped = func(mask)
            self.assertEqual(swapped & mask, mask)
            self.assertRaises(OverflowError, func, 1<<34)
    def testNtoHErrors(self):
        # Negative values must be rejected with OverflowError.
        good_values = [ 1, 2, 3, 1, 2, 3 ]
        bad_values = [ -1, -2, -3, -1, -2, -3 ]
        for k in good_values:
            socket.ntohl(k)
            socket.ntohs(k)
            socket.htonl(k)
            socket.htons(k)
        for k in bad_values:
            self.assertRaises(OverflowError, socket.ntohl, k)
            self.assertRaises(OverflowError, socket.ntohs, k)
            self.assertRaises(OverflowError, socket.htonl, k)
            self.assertRaises(OverflowError, socket.htons, k)
    def testGetServBy(self):
        eq = self.assertEqual
        # Find one service that exists, then check all the related interfaces.
        # I've ordered this by protocols that have both a tcp and udp
        # protocol, at least for modern Linuxes.
        if (sys.platform.startswith('linux') or
            sys.platform.startswith('freebsd') or
            sys.platform.startswith('netbsd') or
            sys.platform == 'darwin'):
            # avoid the 'echo' service on this platform, as there is an
            # assumption breaking non-standard port/protocol entry
            services = ('daytime', 'qotd', 'domain')
        else:
            services = ('echo', 'daytime', 'domain')
        for service in services:
            try:
                port = socket.getservbyname(service, 'tcp')
                break
            except socket.error:
                pass
        else:
            # No known service resolved at all: fail loudly.
            raise socket.error
        # Try same call with optional protocol omitted
        port2 = socket.getservbyname(service)
        eq(port, port2)
        # Try udp, but don't barf if it doesn't exist
        try:
            udpport = socket.getservbyname(service, 'udp')
        except socket.error:
            udpport = None
        else:
            eq(udpport, port)
        # Now make sure the lookup by port returns the same service name
        eq(socket.getservbyport(port2), service)
        eq(socket.getservbyport(port, 'tcp'), service)
        if udpport is not None:
            eq(socket.getservbyport(udpport, 'udp'), service)
        # Make sure getservbyport does not accept out of range ports.
        self.assertRaises(OverflowError, socket.getservbyport, -1)
        self.assertRaises(OverflowError, socket.getservbyport, 65536)
    def testDefaultTimeout(self):
        # Testing default timeout
        # The default timeout should initially be None
        self.assertEqual(socket.getdefaulttimeout(), None)
        s = socket.socket()
        self.assertEqual(s.gettimeout(), None)
        s.close()
        # Set the default timeout to 10, and see if it propagates
        socket.setdefaulttimeout(10)
        self.assertEqual(socket.getdefaulttimeout(), 10)
        s = socket.socket()
        self.assertEqual(s.gettimeout(), 10)
        s.close()
        # Reset the default timeout to None, and see if it propagates
        socket.setdefaulttimeout(None)
        self.assertEqual(socket.getdefaulttimeout(), None)
        s = socket.socket()
        self.assertEqual(s.gettimeout(), None)
        s.close()
        # Check that setting it to an invalid value raises ValueError
        self.assertRaises(ValueError, socket.setdefaulttimeout, -1)
        # Check that setting it to an invalid type raises TypeError
        self.assertRaises(TypeError, socket.setdefaulttimeout, "spam")
    def testIPv4_inet_aton_fourbytes(self):
        if not hasattr(socket, 'inet_aton'):
            return  # No inet_aton, nothing to check
        # Test that issue1008086 and issue767150 are fixed.
        # It must return 4 bytes.
        self.assertEqual(b'\x00'*4, socket.inet_aton('0.0.0.0'))
        self.assertEqual(b'\xff'*4, socket.inet_aton('255.255.255.255'))
    def testIPv4toString(self):
        if not hasattr(socket, 'inet_pton'):
            return  # No inet_pton() on this platform
        # inet_aton and inet_pton(AF_INET, ...) must agree.
        from socket import inet_aton as f, inet_pton, AF_INET
        g = lambda a: inet_pton(AF_INET, a)
        self.assertEqual(b'\x00\x00\x00\x00', f('0.0.0.0'))
        self.assertEqual(b'\xff\x00\xff\x00', f('255.0.255.0'))
        self.assertEqual(b'\xaa\xaa\xaa\xaa', f('170.170.170.170'))
        self.assertEqual(b'\x01\x02\x03\x04', f('1.2.3.4'))
        self.assertEqual(b'\xff\xff\xff\xff', f('255.255.255.255'))
        self.assertEqual(b'\x00\x00\x00\x00', g('0.0.0.0'))
        self.assertEqual(b'\xff\x00\xff\x00', g('255.0.255.0'))
        self.assertEqual(b'\xaa\xaa\xaa\xaa', g('170.170.170.170'))
        self.assertEqual(b'\xff\xff\xff\xff', g('255.255.255.255'))
    def testIPv6toString(self):
        if not hasattr(socket, 'inet_pton'):
            return  # No inet_pton() on this platform
        try:
            from socket import inet_pton, AF_INET6, has_ipv6
            if not has_ipv6:
                return
        except ImportError:
            return
        f = lambda a: inet_pton(AF_INET6, a)
        self.assertEqual(b'\x00' * 16, f('::'))
        self.assertEqual(b'\x00' * 16, f('0::0'))
        self.assertEqual(b'\x00\x01' + b'\x00' * 14, f('1::'))
        self.assertEqual(
            b'\x45\xef\x76\xcb\x00\x1a\x56\xef\xaf\xeb\x0b\xac\x19\x24\xae\xae',
            f('45ef:76cb:1a:56ef:afeb:bac:1924:aeae')
        )
    def testStringToIPv4(self):
        if not hasattr(socket, 'inet_ntop'):
            return  # No inet_ntop() on this platform
        # inet_ntoa and inet_ntop(AF_INET, ...) must agree.
        from socket import inet_ntoa as f, inet_ntop, AF_INET
        g = lambda a: inet_ntop(AF_INET, a)
        self.assertEqual('1.0.1.0', f(b'\x01\x00\x01\x00'))
        self.assertEqual('170.85.170.85', f(b'\xaa\x55\xaa\x55'))
        self.assertEqual('255.255.255.255', f(b'\xff\xff\xff\xff'))
        self.assertEqual('1.2.3.4', f(b'\x01\x02\x03\x04'))
        self.assertEqual('1.0.1.0', g(b'\x01\x00\x01\x00'))
        self.assertEqual('170.85.170.85', g(b'\xaa\x55\xaa\x55'))
        self.assertEqual('255.255.255.255', g(b'\xff\xff\xff\xff'))
    def testStringToIPv6(self):
        if not hasattr(socket, 'inet_ntop'):
            return  # No inet_ntop() on this platform
        try:
            from socket import inet_ntop, AF_INET6, has_ipv6
            if not has_ipv6:
                return
        except ImportError:
            return
        f = lambda a: inet_ntop(AF_INET6, a)
        self.assertEqual('::', f(b'\x00' * 16))
        self.assertEqual('::1', f(b'\x00' * 15 + b'\x01'))
        self.assertEqual(
            'aef:b01:506:1001:ffff:9997:55:170',
            f(b'\x0a\xef\x0b\x01\x05\x06\x10\x01\xff\xff\x99\x97\x00\x55\x01\x70')
        )
    # XXX The following don't test module-level functionality...
    def testSockName(self):
        # Testing getsockname()
        port = support.find_unused_port()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(sock.close)
        sock.bind(("0.0.0.0", port))
        name = sock.getsockname()
        # XXX(nnorwitz): http://tinyurl.com/os5jz seems to indicate
        # it reasonable to get the host's addr in addition to 0.0.0.0.
        # At least for eCos.  This is required for the S/390 to pass.
        try:
            my_ip_addr = socket.gethostbyname(socket.gethostname())
        except socket.error:
            # Probably name lookup wasn't set up right; skip this test
            return
        self.assertIn(name[0], ("0.0.0.0", my_ip_addr), '%s invalid' % name[0])
        self.assertEqual(name[1], port)
    def testGetSockOpt(self):
        # Testing getsockopt()
        # We know a socket should start without reuse==0
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(sock.close)
        reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)
        self.assertFalse(reuse != 0, "initial mode is reuse")
    def testSetSockOpt(self):
        # Testing setsockopt()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(sock.close)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)
        self.assertFalse(reuse == 0, "failed to set reuse mode")
    def testSendAfterClose(self):
        # testing send() after close() with timeout
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(1)
        sock.close()
        self.assertRaises(socket.error, sock.send, b"spam")
    def testNewAttributes(self):
        # testing .family, .type and .protocol
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.assertEqual(sock.family, socket.AF_INET)
        self.assertEqual(sock.type, socket.SOCK_STREAM)
        self.assertEqual(sock.proto, 0)
        sock.close()
    def test_getsockaddrarg(self):
        # Ports outside 0..65535 must be rejected with OverflowError.
        host = '0.0.0.0'
        port = support.find_unused_port()
        big_port = port + 65536
        neg_port = port - 65536
        sock = socket.socket()
        try:
            self.assertRaises(OverflowError, sock.bind, (host, big_port))
            self.assertRaises(OverflowError, sock.bind, (host, neg_port))
            sock.bind((host, port))
        finally:
            sock.close()
    @unittest.skipUnless(os.name == "nt", "Windows specific")
    def test_sock_ioctl(self):
        self.assertTrue(hasattr(socket.socket, 'ioctl'))
        self.assertTrue(hasattr(socket, 'SIO_RCVALL'))
        self.assertTrue(hasattr(socket, 'RCVALL_ON'))
        self.assertTrue(hasattr(socket, 'RCVALL_OFF'))
        self.assertTrue(hasattr(socket, 'SIO_KEEPALIVE_VALS'))
        s = socket.socket()
        self.addCleanup(s.close)
        self.assertRaises(ValueError, s.ioctl, -1, None)
        s.ioctl(socket.SIO_KEEPALIVE_VALS, (1, 100, 100))
    def testGetaddrinfo(self):
        try:
            socket.getaddrinfo('localhost', 80)
        except socket.gaierror as err:
            if err.errno == socket.EAI_SERVICE:
                # see http://bugs.python.org/issue1282647
                self.skipTest("buggy libc version")
            raise
        # len of every sequence is supposed to be == 5
        for info in socket.getaddrinfo(HOST, None):
            self.assertEqual(len(info), 5)
        # host can be a domain name, a string representation of an
        # IPv4/v6 address or None
        socket.getaddrinfo('localhost', 80)
        socket.getaddrinfo('127.0.0.1', 80)
        socket.getaddrinfo(None, 80)
        if SUPPORTS_IPV6:
            socket.getaddrinfo('::1', 80)
        # port can be a string service name such as "http", a numeric
        # port number or None
        socket.getaddrinfo(HOST, "http")
        socket.getaddrinfo(HOST, 80)
        socket.getaddrinfo(HOST, None)
        # test family and socktype filters
        infos = socket.getaddrinfo(HOST, None, socket.AF_INET)
        for family, _, _, _, _ in infos:
            self.assertEqual(family, socket.AF_INET)
        infos = socket.getaddrinfo(HOST, None, 0, socket.SOCK_STREAM)
        for _, socktype, _, _, _ in infos:
            self.assertEqual(socktype, socket.SOCK_STREAM)
        # test proto and flags arguments
        socket.getaddrinfo(HOST, None, 0, 0, socket.SOL_TCP)
        socket.getaddrinfo(HOST, None, 0, 0, 0, socket.AI_PASSIVE)
        # a server willing to support both IPv4 and IPv6 will
        # usually do this
        socket.getaddrinfo(None, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0,
                           socket.AI_PASSIVE)
        # test keyword arguments
        a = socket.getaddrinfo(HOST, None)
        b = socket.getaddrinfo(host=HOST, port=None)
        self.assertEqual(a, b)
        a = socket.getaddrinfo(HOST, None, socket.AF_INET)
        b = socket.getaddrinfo(HOST, None, family=socket.AF_INET)
        self.assertEqual(a, b)
        a = socket.getaddrinfo(HOST, None, 0, socket.SOCK_STREAM)
        b = socket.getaddrinfo(HOST, None, type=socket.SOCK_STREAM)
        self.assertEqual(a, b)
        a = socket.getaddrinfo(HOST, None, 0, 0, socket.SOL_TCP)
        b = socket.getaddrinfo(HOST, None, proto=socket.SOL_TCP)
        self.assertEqual(a, b)
        a = socket.getaddrinfo(HOST, None, 0, 0, 0, socket.AI_PASSIVE)
        b = socket.getaddrinfo(HOST, None, flags=socket.AI_PASSIVE)
        self.assertEqual(a, b)
        a = socket.getaddrinfo(None, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0,
                               socket.AI_PASSIVE)
        b = socket.getaddrinfo(host=None, port=0, family=socket.AF_UNSPEC,
                               type=socket.SOCK_STREAM, proto=0,
                               flags=socket.AI_PASSIVE)
        self.assertEqual(a, b)
        # Issue #6697.
        self.assertRaises(UnicodeEncodeError, socket.getaddrinfo, 'localhost', '\uD800')
    def test_getnameinfo(self):
        # only IP addresses are allowed
        self.assertRaises(socket.error, socket.getnameinfo, ('mail.python.org',0), 0)
    @unittest.skipUnless(support.is_resource_enabled('network'),
                         'network is not enabled')
    def test_idna(self):
        support.requires('network')
        # these should all be successful
        socket.gethostbyname('испытание.python.org')
        socket.gethostbyname_ex('испытание.python.org')
        socket.getaddrinfo('испытание.python.org',0,socket.AF_UNSPEC,socket.SOCK_STREAM)
        # this may not work if the forward lookup chooses the IPv6 address, as that doesn't
        # have a reverse entry yet
        # socket.gethostbyaddr('испытание.python.org')
    def check_sendall_interrupted(self, with_timeout):
        # Shared driver for the two test_sendall_interrupted* tests:
        # verifies sendall() behaviour when a signal arrives mid-send.
        # socketpair() is not stricly required, but it makes things easier.
        if not hasattr(signal, 'alarm') or not hasattr(socket, 'socketpair'):
            self.skipTest("signal.alarm and socket.socketpair required for this test")
        # Our signal handlers clobber the C errno by calling a math function
        # with an invalid domain value.
        def ok_handler(*args):
            self.assertRaises(ValueError, math.acosh, 0)
        def raising_handler(*args):
            self.assertRaises(ValueError, math.acosh, 0)
            1 // 0
        c, s = socket.socketpair()
        old_alarm = signal.signal(signal.SIGALRM, raising_handler)
        try:
            if with_timeout:
                # Just above the one second minimum for signal.alarm
                c.settimeout(1.5)
            with self.assertRaises(ZeroDivisionError):
                signal.alarm(1)
                c.sendall(b"x" * (1024**2))
            if with_timeout:
                signal.signal(signal.SIGALRM, ok_handler)
                signal.alarm(1)
                self.assertRaises(socket.timeout, c.sendall, b"x" * (1024**2))
        finally:
            signal.signal(signal.SIGALRM, old_alarm)
            c.close()
            s.close()
    def test_sendall_interrupted(self):
        self.check_sendall_interrupted(False)
    def test_sendall_interrupted_with_timeout(self):
        self.check_sendall_interrupted(True)
    def test_dealloc_warn(self):
        # Deallocating an open socket must emit a ResourceWarning that
        # names the socket.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        r = repr(sock)
        with self.assertWarns(ResourceWarning) as cm:
            sock = None
            support.gc_collect()
        self.assertIn(r, str(cm.warning.args[0]))
        # An open socket file object gets dereferenced after the socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        f = sock.makefile('rb')
        r = repr(sock)
        sock = None
        support.gc_collect()
        with self.assertWarns(ResourceWarning):
            f = None
            support.gc_collect()
    def test_name_closed_socketio(self):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            fp = sock.makefile("rb")
            fp.close()
            self.assertEqual(repr(fp), "<_io.BufferedReader name=-1>")
    def testListenBacklog0(self):
        srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        srv.bind((HOST, 0))
        # backlog = 0
        srv.listen(0)
        srv.close()
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicTCPTest(SocketConnectedTest):
    """Paired server/client tests over a connected TCP stream.

    Each test* method (server side) is driven by its _test* twin, which
    runs in the client thread and sends MSG.
    """
    def __init__(self, methodName='runTest'):
        SocketConnectedTest.__init__(self, methodName=methodName)
    def testRecv(self):
        # Testing large receive over TCP
        msg = self.cli_conn.recv(1024)
        self.assertEqual(msg, MSG)
    def _testRecv(self):
        self.serv_conn.send(MSG)
    def testOverFlowRecv(self):
        # Testing receive in chunks over TCP
        seg1 = self.cli_conn.recv(len(MSG) - 3)
        seg2 = self.cli_conn.recv(1024)
        msg = seg1 + seg2
        self.assertEqual(msg, MSG)
    def _testOverFlowRecv(self):
        self.serv_conn.send(MSG)
    def testRecvFrom(self):
        # Testing large recvfrom() over TCP
        msg, addr = self.cli_conn.recvfrom(1024)
        self.assertEqual(msg, MSG)
    def _testRecvFrom(self):
        self.serv_conn.send(MSG)
    def testOverFlowRecvFrom(self):
        # Testing recvfrom() in chunks over TCP
        seg1, addr = self.cli_conn.recvfrom(len(MSG)-3)
        seg2, addr = self.cli_conn.recvfrom(1024)
        msg = seg1 + seg2
        self.assertEqual(msg, MSG)
    def _testOverFlowRecvFrom(self):
        self.serv_conn.send(MSG)
    def testSendAll(self):
        # Testing sendall() with a 2048 byte string over TCP
        msg = b''
        while 1:
            read = self.cli_conn.recv(1024)
            if not read:
                break
            msg += read
        self.assertEqual(msg, b'f' * 2048)
    def _testSendAll(self):
        big_chunk = b'f' * 2048
        self.serv_conn.sendall(big_chunk)
    def testFromFd(self):
        # Testing fromfd(): a socket rebuilt from a raw fd must deliver
        # the same stream data.
        fd = self.cli_conn.fileno()
        sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(sock.close)
        self.assertIsInstance(sock, socket.socket)
        msg = sock.recv(1024)
        self.assertEqual(msg, MSG)
    def _testFromFd(self):
        self.serv_conn.send(MSG)
    def testDup(self):
        # Testing dup()
        sock = self.cli_conn.dup()
        self.addCleanup(sock.close)
        msg = sock.recv(1024)
        self.assertEqual(msg, MSG)
    def _testDup(self):
        self.serv_conn.send(MSG)
    def testShutdown(self):
        # Testing shutdown()
        msg = self.cli_conn.recv(1024)
        self.assertEqual(msg, MSG)
        # wait for _testShutdown to finish: on OS X, when the server
        # closes the connection the client also becomes disconnected,
        # and the client's shutdown call will fail. (Issue #4397.)
        self.done.wait()
    def _testShutdown(self):
        self.serv_conn.send(MSG)
        self.serv_conn.shutdown(2)
    def testDetach(self):
        # Testing detach(): the fd survives the original socket object.
        fileno = self.cli_conn.fileno()
        f = self.cli_conn.detach()
        self.assertEqual(f, fileno)
        # cli_conn cannot be used anymore...
        self.assertRaises(socket.error, self.cli_conn.recv, 1024)
        self.cli_conn.close()
        # ...but we can create another socket using the (still open)
        # file descriptor
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, fileno=f)
        self.addCleanup(sock.close)
        msg = sock.recv(1024)
        self.assertEqual(msg, MSG)
    def _testDetach(self):
        self.serv_conn.send(MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicUDPTest(ThreadedUDPSocketTest):
    """Paired server/client tests over UDP; the client thread sends MSG
    to the server's bound port in each _test* twin."""
    def __init__(self, methodName='runTest'):
        ThreadedUDPSocketTest.__init__(self, methodName=methodName)
    def testSendtoAndRecv(self):
        # Testing sendto() and Recv() over UDP
        msg = self.serv.recv(len(MSG))
        self.assertEqual(msg, MSG)
    def _testSendtoAndRecv(self):
        self.cli.sendto(MSG, 0, (HOST, self.port))
    def testRecvFrom(self):
        # Testing recvfrom() over UDP
        msg, addr = self.serv.recvfrom(len(MSG))
        self.assertEqual(msg, MSG)
    def _testRecvFrom(self):
        self.cli.sendto(MSG, 0, (HOST, self.port))
    def testRecvFromNegative(self):
        # Negative lengths passed to recvfrom should give ValueError.
        self.assertRaises(ValueError, self.serv.recvfrom, -1)
    def _testRecvFromNegative(self):
        self.cli.sendto(MSG, 0, (HOST, self.port))
@unittest.skipUnless(thread, 'Threading required for this test.')
class TCPCloserTest(ThreadedTCPSocketTest):
    """Closing the server end must make the client readable with EOF."""
    def testClose(self):
        conn, addr = self.serv.accept()
        conn.close()
        # NOTE(review): self.cli is created by the client thread; reading
        # it here crosses threads — presumably safe because the client
        # connects before sleeping.  Verify against ThreadableTest timing.
        sd = self.cli
        read, write, err = select.select([sd], [], [], 1.0)
        self.assertEqual(read, [sd])
        # EOF on the client side shows up as an empty read.
        self.assertEqual(sd.recv(1), b'')
        # Calling close() many times should be safe.
        conn.close()
        conn.close()
    def _testClose(self):
        self.cli.connect((HOST, self.port))
        time.sleep(1.0)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicSocketPairTest(SocketPairTest):
    """Sanity checks for both ends of a socket.socketpair()."""
    def __init__(self, methodName='runTest'):
        SocketPairTest.__init__(self, methodName=methodName)
    def _check_defaults(self, sock):
        # Both ends must be plain stream sockets; the family is AF_UNIX
        # where available, AF_INET otherwise.
        self.assertIsInstance(sock, socket.socket)
        if hasattr(socket, 'AF_UNIX'):
            expected_family = socket.AF_UNIX
        else:
            expected_family = socket.AF_INET
        self.assertEqual(sock.family, expected_family)
        self.assertEqual(sock.type, socket.SOCK_STREAM)
        self.assertEqual(sock.proto, 0)
    def _testDefaults(self):
        self._check_defaults(self.cli)
    def testDefaults(self):
        self._check_defaults(self.serv)
    def testRecv(self):
        received = self.serv.recv(1024)
        self.assertEqual(received, MSG)
    def _testRecv(self):
        self.cli.send(MSG)
    def testSend(self):
        self.serv.send(MSG)
    def _testSend(self):
        received = self.cli.recv(1024)
        self.assertEqual(received, MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class NonBlockingTCPTests(ThreadedTCPSocketTest):
    """Tests for non-blocking socket mode (setblocking / SOCK_NONBLOCK)."""
    def __init__(self, methodName='runTest'):
        ThreadedTCPSocketTest.__init__(self, methodName=methodName)
    def testSetBlocking(self):
        # Testing whether set blocking works
        self.serv.setblocking(0)
        start = time.time()
        try:
            self.serv.accept()
        except socket.error:
            pass
        end = time.time()
        # accept() on a non-blocking socket must fail immediately rather
        # than hang; allow a generous 1-second budget.
        self.assertTrue((end - start) < 1.0, "Error setting non-blocking mode.")
    def _testSetBlocking(self):
        pass
    if hasattr(socket, "SOCK_NONBLOCK"):
        # SOCK_NONBLOCK is platform-specific (Linux); the pair below is
        # only defined where the constant exists.
        def testInitNonBlocking(self):
            v = linux_version()
            if v < (2, 6, 28):
                self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                              % ".".join(map(str, v)))
            # reinit server socket
            self.serv.close()
            self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM |
                                                      socket.SOCK_NONBLOCK)
            self.port = support.bind_port(self.serv)
            self.serv.listen(1)
            # actual testing
            start = time.time()
            try:
                self.serv.accept()
            except socket.error:
                pass
            end = time.time()
            self.assertTrue((end - start) < 1.0, "Error creating with non-blocking mode.")
        def _testInitNonBlocking(self):
            pass
    def testInheritFlags(self):
        # Issue #7995: when calling accept() on a listening socket with a
        # timeout, the resulting socket should not be non-blocking.
        self.serv.settimeout(10)
        try:
            conn, addr = self.serv.accept()
            # This recv() would fail immediately if conn inherited
            # non-blocking mode from the listening socket.
            message = conn.recv(len(MSG))
        finally:
            conn.close()
            self.serv.settimeout(None)
    def _testInheritFlags(self):
        time.sleep(0.1)
        self.cli.connect((HOST, self.port))
        time.sleep(0.5)
        self.cli.send(MSG)
    def testAccept(self):
        # Testing non-blocking accept: must raise before a client exists,
        # then succeed once select() reports the listener readable.
        self.serv.setblocking(0)
        try:
            conn, addr = self.serv.accept()
        except socket.error:
            pass
        else:
            self.fail("Error trying to do non-blocking accept.")
        read, write, err = select.select([self.serv], [], [])
        if self.serv in read:
            conn, addr = self.serv.accept()
            conn.close()
        else:
            self.fail("Error trying to do accept after select.")
    def _testAccept(self):
        time.sleep(0.1)
        self.cli.connect((HOST, self.port))
    def testConnect(self):
        # Testing non-blocking connect
        conn, addr = self.serv.accept()
        conn.close()
    def _testConnect(self):
        self.cli.settimeout(10)
        self.cli.connect((HOST, self.port))
    def testRecv(self):
        # Testing non-blocking recv: must raise before data arrives, then
        # deliver MSG once select() reports the connection readable.
        conn, addr = self.serv.accept()
        conn.setblocking(0)
        try:
            msg = conn.recv(len(MSG))
        except socket.error:
            pass
        else:
            self.fail("Error trying to do non-blocking recv.")
        read, write, err = select.select([conn], [], [])
        if conn in read:
            msg = conn.recv(len(MSG))
            conn.close()
            self.assertEqual(msg, MSG)
        else:
            self.fail("Error during select call to non-blocking socket.")
    def _testRecv(self):
        self.cli.connect((HOST, self.port))
        time.sleep(0.1)
        self.cli.send(MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class FileObjectClassTestCase(SocketConnectedTest):
    """Unit tests for the object returned by socket.makefile()

    self.read_file is the io object returned by makefile() on
    the client connection.  You can read from this file to
    get output from the server.

    self.write_file is the io object returned by makefile() on the
    server connection.  You can write to this file to send output
    to the client.
    """
    # Class attributes act as parameters; text-mode subclasses
    # presumably override read_mode/write_mode/read_msg/write_msg.
    bufsize = -1 # Use default buffer size
    encoding = 'utf8'
    errors = 'strict'
    newline = None
    read_mode = 'rb'
    read_msg = MSG
    write_mode = 'wb'
    write_msg = MSG
    def __init__(self, methodName='runTest'):
        SocketConnectedTest.__init__(self, methodName=methodName)
    def setUp(self):
        # Events for the few tests that need explicit cross-thread
        # sequencing (e.g. testReadAfterTimeout).
        self.evt1, self.evt2, self.serv_finished, self.cli_finished = [
            threading.Event() for i in range(4)]
        SocketConnectedTest.setUp(self)
        self.read_file = self.cli_conn.makefile(
            self.read_mode, self.bufsize,
            encoding = self.encoding,
            errors = self.errors,
            newline = self.newline)
    def tearDown(self):
        self.serv_finished.set()
        self.read_file.close()
        self.assertTrue(self.read_file.closed)
        self.read_file = None
        SocketConnectedTest.tearDown(self)
    def clientSetUp(self):
        SocketConnectedTest.clientSetUp(self)
        self.write_file = self.serv_conn.makefile(
            self.write_mode, self.bufsize,
            encoding = self.encoding,
            errors = self.errors,
            newline = self.newline)
    def clientTearDown(self):
        self.cli_finished.set()
        self.write_file.close()
        self.assertTrue(self.write_file.closed)
        self.write_file = None
        SocketConnectedTest.clientTearDown(self)
    def testReadAfterTimeout(self):
        # Issue #7322: A file object must disallow further reads
        # after a timeout has occurred.
        self.cli_conn.settimeout(1)
        self.read_file.read(3)
        # First read raises a timeout
        self.assertRaises(socket.timeout, self.read_file.read, 1)
        # Second read is disallowed
        with self.assertRaises(IOError) as ctx:
            self.read_file.read(1)
        self.assertIn("cannot read from timed out object", str(ctx.exception))
    def _testReadAfterTimeout(self):
        # Send only 3 bytes, then hold the connection open so the
        # server's next read times out.
        self.write_file.write(self.write_msg[0:3])
        self.write_file.flush()
        self.serv_finished.wait()
    def testSmallRead(self):
        # Performing small file read test
        first_seg = self.read_file.read(len(self.read_msg)-3)
        second_seg = self.read_file.read(3)
        msg = first_seg + second_seg
        self.assertEqual(msg, self.read_msg)
    def _testSmallRead(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()
    def testFullRead(self):
        # read until EOF
        msg = self.read_file.read()
        self.assertEqual(msg, self.read_msg)
    def _testFullRead(self):
        self.write_file.write(self.write_msg)
        # Closing the writer produces the EOF the server reads to.
        self.write_file.close()
    def testUnbufferedRead(self):
        # Performing unbuffered file read test, one byte/char at a time.
        buf = type(self.read_msg)()
        while 1:
            char = self.read_file.read(1)
            if not char:
                break
            buf += char
        self.assertEqual(buf, self.read_msg)
    def _testUnbufferedRead(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()
    def testReadline(self):
        # Performing file readline test
        line = self.read_file.readline()
        self.assertEqual(line, self.read_msg)
    def _testReadline(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()
    def testCloseAfterMakefile(self):
        # The file returned by makefile should keep the socket open.
        self.cli_conn.close()
        # read until EOF
        msg = self.read_file.read()
        self.assertEqual(msg, self.read_msg)
    def _testCloseAfterMakefile(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()
    def testMakefileAfterMakefileClose(self):
        # Closing the file object must leave the socket itself usable.
        self.read_file.close()
        msg = self.cli_conn.recv(len(MSG))
        if isinstance(self.read_msg, str):
            msg = msg.decode()
        self.assertEqual(msg, self.read_msg)
    def _testMakefileAfterMakefileClose(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()
    def testClosedAttr(self):
        self.assertTrue(not self.read_file.closed)
    def _testClosedAttr(self):
        self.assertTrue(not self.write_file.closed)
    def testAttributes(self):
        self.assertEqual(self.read_file.mode, self.read_mode)
        self.assertEqual(self.read_file.name, self.cli_conn.fileno())
    def _testAttributes(self):
        self.assertEqual(self.write_file.mode, self.write_mode)
        self.assertEqual(self.write_file.name, self.serv_conn.fileno())
    def testRealClose(self):
        self.read_file.close()
        self.assertRaises(ValueError, self.read_file.fileno)
        self.cli_conn.close()
        self.assertRaises(socket.error, self.cli_conn.getsockname)
    def _testRealClose(self):
        pass
class FileObjectInterruptedTestCase(unittest.TestCase):
    """Test that the file object correctly handles EINTR internally."""

    class MockSocket(object):
        # Minimal socket stand-in: recv_into() is driven by a scripted
        # sequence of callables, one per call.
        def __init__(self, recv_funcs=()):
            # A generator that returns callables that we'll call for each
            # call to recv().
            self._recv_step = iter(recv_funcs)

        def recv_into(self, buffer):
            # Run the next scripted step; it either returns bytes to
            # deliver or raises (e.g. the simulated EINTR below).
            data = next(self._recv_step)()
            assert len(buffer) >= len(data)
            buffer[:len(data)] = data
            return len(data)

        def _decref_socketios(self):
            # Called by SocketIO.close(); nothing to release on the mock.
            pass

        def _textiowrap_for_test(self, buffering=-1):
            # Build the same IO stack socket.makefile() would build on top
            # of this mock socket.
            raw = socket.SocketIO(self, "r")
            if buffering < 0:
                buffering = io.DEFAULT_BUFFER_SIZE
            if buffering == 0:
                return raw
            buffer = io.BufferedReader(raw, buffering)
            text = io.TextIOWrapper(buffer, None, None)
            text.mode = "rb"
            return text

    @staticmethod
    def _raise_eintr():
        # Scripted step simulating an interrupted system call.
        raise socket.error(errno.EINTR)

    def _textiowrap_mock_socket(self, mock, buffering=-1):
        # NOTE(review): apparently an unused duplicate of
        # MockSocket._textiowrap_for_test above -- candidate for removal.
        raw = socket.SocketIO(mock, "r")
        if buffering < 0:
            buffering = io.DEFAULT_BUFFER_SIZE
        if buffering == 0:
            return raw
        buffer = io.BufferedReader(raw, buffering)
        text = io.TextIOWrapper(buffer, None, None)
        text.mode = "rb"
        return text

    def _test_readline(self, size=-1, buffering=-1):
        # readline() must transparently retry after EINTR mid-line.
        mock_sock = self.MockSocket(recv_funcs=[
                lambda : b"This is the first line\nAnd the sec",
                self._raise_eintr,
                lambda : b"ond line is here\n",
                lambda : b"",
                lambda : b"",  # XXX(gps): io library does an extra EOF read
            ])
        fo = mock_sock._textiowrap_for_test(buffering=buffering)
        self.assertEqual(fo.readline(size), "This is the first line\n")
        self.assertEqual(fo.readline(size), "And the second line is here\n")

    def _test_read(self, size=-1, buffering=-1):
        # read() must transparently retry after EINTR mid-stream.
        mock_sock = self.MockSocket(recv_funcs=[
                lambda : b"This is the first line\nAnd the sec",
                self._raise_eintr,
                lambda : b"ond line is here\n",
                lambda : b"",
                lambda : b"",  # XXX(gps): io library does an extra EOF read
            ])
        expecting = (b"This is the first line\n"
                     b"And the second line is here\n")
        fo = mock_sock._textiowrap_for_test(buffering=buffering)
        if buffering == 0:
            data = b''
        else:
            # Buffered mode goes through TextIOWrapper, so we compare str.
            data = ''
            expecting = expecting.decode('utf8')
        while len(data) != len(expecting):
            part = fo.read(size)
            if not part:
                break
            data += part
        self.assertEqual(data, expecting)

    def test_default(self):
        self._test_readline()
        self._test_readline(size=100)
        self._test_read()
        self._test_read(size=100)

    def test_with_1k_buffer(self):
        self._test_readline(buffering=1024)
        self._test_readline(size=100, buffering=1024)
        self._test_read(buffering=1024)
        self._test_read(size=100, buffering=1024)

    def _test_readline_no_buffer(self, size=-1):
        # Unbuffered (raw SocketIO) reads deliver bytes, not str.
        mock_sock = self.MockSocket(recv_funcs=[
                lambda : b"a",
                lambda : b"\n",
                lambda : b"B",
                self._raise_eintr,
                lambda : b"b",
                lambda : b"",
            ])
        fo = mock_sock._textiowrap_for_test(buffering=0)
        self.assertEqual(fo.readline(size), b"a\n")
        self.assertEqual(fo.readline(size), b"Bb")

    def test_no_buffer(self):
        self._test_readline_no_buffer()
        self._test_readline_no_buffer(size=4)
        self._test_read(buffering=0)
        self._test_read(size=100, buffering=0)
class UnbufferedFileObjectClassTestCase(FileObjectClassTestCase):
    """Repeat the tests from FileObjectClassTestCase with bufsize==0.

    In this case (and in this case only), it should be possible to
    create a file object, read a line from it, create another file
    object, read another line from it, without loss of data in the
    first file object's buffer.  Note that http.client relies on this
    when reading multiple requests from the same socket."""

    bufsize = 0  # Use unbuffered mode

    def testUnbufferedReadline(self):
        # Read a line, create a new file object, read another line with it
        line = self.read_file.readline()  # first line
        self.assertEqual(line, b"A. " + self.write_msg)  # first line
        self.read_file = self.cli_conn.makefile('rb', 0)
        line = self.read_file.readline()  # second line
        self.assertEqual(line, b"B. " + self.write_msg)  # second line

    def _testUnbufferedReadline(self):
        self.write_file.write(b"A. " + self.write_msg)
        self.write_file.write(b"B. " + self.write_msg)
        self.write_file.flush()

    def testMakefileClose(self):
        # The file returned by makefile should keep the socket open...
        self.cli_conn.close()
        msg = self.cli_conn.recv(1024)
        self.assertEqual(msg, self.read_msg)
        # ...until the file is itself closed
        self.read_file.close()
        self.assertRaises(socket.error, self.cli_conn.recv, 1024)

    def _testMakefileClose(self):
        self.write_file.write(self.write_msg)
        self.write_file.flush()

    def testMakefileCloseSocketDestroy(self):
        # Closing the file object must drop exactly one reference to the
        # socket object.
        refcount_before = sys.getrefcount(self.cli_conn)
        self.read_file.close()
        refcount_after = sys.getrefcount(self.cli_conn)
        self.assertEqual(refcount_before - 1, refcount_after)

    def _testMakefileCloseSocketDestroy(self):
        pass

    # Non-blocking ops
    # NOTE: to set `read_file` as non-blocking, we must call
    # `cli_conn.setblocking` and vice-versa (see setUp / clientSetUp).

    def testSmallReadNonBlocking(self):
        self.cli_conn.setblocking(False)
        # No data has arrived yet: non-blocking reads return None.
        self.assertEqual(self.read_file.readinto(bytearray(10)), None)
        self.assertEqual(self.read_file.read(len(self.read_msg) - 3), None)
        self.evt1.set()
        self.evt2.wait(1.0)
        first_seg = self.read_file.read(len(self.read_msg) - 3)
        if first_seg is None:
            # Data not arrived (can happen under Windows), wait a bit
            time.sleep(0.5)
            first_seg = self.read_file.read(len(self.read_msg) - 3)
        buf = bytearray(10)
        n = self.read_file.readinto(buf)
        self.assertEqual(n, 3)
        msg = first_seg + buf[:n]
        self.assertEqual(msg, self.read_msg)
        # Stream drained again: back to None results.
        self.assertEqual(self.read_file.readinto(bytearray(16)), None)
        self.assertEqual(self.read_file.read(1), None)

    def _testSmallReadNonBlocking(self):
        self.evt1.wait(1.0)
        self.write_file.write(self.write_msg)
        self.write_file.flush()
        self.evt2.set()
        # Avoid closing the socket before the server test has finished,
        # otherwise system recv() will return 0 instead of EWOULDBLOCK.
        self.serv_finished.wait(5.0)

    def testWriteNonBlocking(self):
        self.cli_finished.wait(5.0)
        # The client thread can't skip directly - the SkipTest exception
        # would appear as a failure.
        if self.serv_skipped:
            self.skipTest(self.serv_skipped)

    def _testWriteNonBlocking(self):
        self.serv_skipped = None
        self.serv_conn.setblocking(False)
        # Try to saturate the socket buffer pipe with repeated large writes.
        BIG = b"x" * (1024 ** 2)
        LIMIT = 10
        # The first write() succeeds since a chunk of data can be buffered
        n = self.write_file.write(BIG)
        self.assertGreater(n, 0)
        for i in range(LIMIT):
            n = self.write_file.write(BIG)
            if n is None:
                # Succeeded
                break
            self.assertGreater(n, 0)
        else:
            # Let us know that this test didn't manage to establish
            # the expected conditions. This is not a failure in itself but,
            # if it happens repeatedly, the test should be fixed.
            self.serv_skipped = "failed to saturate the socket buffer"
class LineBufferedFileObjectClassTestCase(FileObjectClassTestCase):

    bufsize = 1  # Default-buffered for reading; line-buffered for writing


class SmallBufferedFileObjectClassTestCase(FileObjectClassTestCase):

    bufsize = 2  # Exercise the buffering code


class UnicodeReadFileObjectClassTestCase(FileObjectClassTestCase):
    """Tests for socket.makefile() in text mode (rather than binary)"""
    # Reading goes through text mode (UTF-8 decode); writing stays binary.
    read_mode = 'r'
    read_msg = MSG.decode('utf8')
    write_mode = 'wb'
    write_msg = MSG
    newline = ''


class UnicodeWriteFileObjectClassTestCase(FileObjectClassTestCase):
    """Tests for socket.makefile() in text mode (rather than binary)"""
    # Writing goes through text mode (UTF-8 encode); reading stays binary.
    read_mode = 'rb'
    read_msg = MSG
    write_mode = 'w'
    write_msg = MSG.decode('utf8')
    newline = ''


class UnicodeReadWriteFileObjectClassTestCase(FileObjectClassTestCase):
    """Tests for socket.makefile() in text mode (rather than binary)"""
    # Both directions in text mode.
    read_mode = 'r'
    read_msg = MSG.decode('utf8')
    write_mode = 'w'
    write_msg = MSG.decode('utf8')
    newline = ''
class NetworkConnectionTest(object):
    """Mixin whose client side connects via socket.create_connection()."""

    def clientSetUp(self):
        # We're inherited below by BasicTCPTest2, which also inherits
        # BasicTCPTest, which defines self.port referenced below.
        self.serv_conn = self.cli = socket.create_connection((HOST, self.port))
class BasicTCPTest2(NetworkConnectionTest, BasicTCPTest):
    """Tests that NetworkConnection does not break existing TCP functionality.
    """
    # Re-runs every BasicTCPTest case, but with the client socket created
    # by socket.create_connection() (see NetworkConnectionTest mixin).
class NetworkConnectionNoServer(unittest.TestCase):
    # Error behaviour of connect()/create_connection() when nothing is
    # listening at the target address.

    class MockSocket(socket.socket):
        # Socket subclass whose connect() always times out.
        def connect(self, *args):
            raise socket.timeout('timed out')

    @contextlib.contextmanager
    def mocked_socket_module(self):
        """Return a socket which times out on connect"""
        old_socket = socket.socket
        socket.socket = self.MockSocket
        try:
            yield
        finally:
            # Always restore the real socket class.
            socket.socket = old_socket

    def test_connect(self):
        port = support.find_unused_port()
        cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addCleanup(cli.close)
        with self.assertRaises(socket.error) as cm:
            cli.connect((HOST, port))
        self.assertEqual(cm.exception.errno, errno.ECONNREFUSED)

    def test_create_connection(self):
        # Issue #9792: errors raised by create_connection() should have
        # a proper errno attribute.
        port = support.find_unused_port()
        with self.assertRaises(socket.error) as cm:
            socket.create_connection((HOST, port))
        self.assertEqual(cm.exception.errno, errno.ECONNREFUSED)

    def test_create_connection_timeout(self):
        # Issue #9792: create_connection() should not recast timeout errors
        # as generic socket errors.
        with self.mocked_socket_module():
            with self.assertRaises(socket.timeout):
                socket.create_connection((HOST, 1234))
@unittest.skipUnless(thread, 'Threading required for this test.')
class NetworkConnectionAttributesTest(SocketTCPTest, ThreadableTest):
    # Attributes (family, source address, timeout) of sockets returned by
    # socket.create_connection().  The server side merely accepts and
    # closes each connection (_justAccept); all checks run client-side.

    def __init__(self, methodName='runTest'):
        SocketTCPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)

    def clientSetUp(self):
        self.source_port = support.find_unused_port()

    def clientTearDown(self):
        self.cli.close()
        self.cli = None
        ThreadableTest.clientTearDown(self)

    def _justAccept(self):
        conn, addr = self.serv.accept()
        conn.close()

    testFamily = _justAccept
    def _testFamily(self):
        self.cli = socket.create_connection((HOST, self.port), timeout=30)
        self.addCleanup(self.cli.close)
        # 2 == AF_INET
        self.assertEqual(self.cli.family, 2)

    testSourceAddress = _justAccept
    def _testSourceAddress(self):
        self.cli = socket.create_connection((HOST, self.port), timeout=30,
                source_address=('', self.source_port))
        self.addCleanup(self.cli.close)
        self.assertEqual(self.cli.getsockname()[1], self.source_port)
        # The port number being used is sufficient to show that the bind()
        # call happened.

    testTimeoutDefault = _justAccept
    def _testTimeoutDefault(self):
        # passing no explicit timeout uses socket's global default
        self.assertTrue(socket.getdefaulttimeout() is None)
        socket.setdefaulttimeout(42)
        try:
            self.cli = socket.create_connection((HOST, self.port))
            self.addCleanup(self.cli.close)
        finally:
            socket.setdefaulttimeout(None)
        self.assertEqual(self.cli.gettimeout(), 42)

    testTimeoutNone = _justAccept
    def _testTimeoutNone(self):
        # None timeout means the same as sock.settimeout(None)
        self.assertTrue(socket.getdefaulttimeout() is None)
        socket.setdefaulttimeout(30)
        try:
            self.cli = socket.create_connection((HOST, self.port), timeout=None)
            self.addCleanup(self.cli.close)
        finally:
            socket.setdefaulttimeout(None)
        self.assertEqual(self.cli.gettimeout(), None)

    testTimeoutValueNamed = _justAccept
    def _testTimeoutValueNamed(self):
        # NOTE(review): unlike its siblings this test registers no
        # addCleanup; clientTearDown closes self.cli anyway.
        self.cli = socket.create_connection((HOST, self.port), timeout=30)
        self.assertEqual(self.cli.gettimeout(), 30)

    testTimeoutValueNonamed = _justAccept
    def _testTimeoutValueNonamed(self):
        # Timeout passed positionally rather than by keyword.
        self.cli = socket.create_connection((HOST, self.port), 30)
        self.addCleanup(self.cli.close)
        self.assertEqual(self.cli.gettimeout(), 30)
@unittest.skipUnless(thread, 'Threading required for this test.')
class NetworkConnectionBehaviourTest(SocketTCPTest, ThreadableTest):
    # Blocking/timeout behaviour of create_connection() sockets: the
    # server deliberately sleeps 3 seconds before replying.

    def __init__(self, methodName='runTest'):
        SocketTCPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)

    def clientSetUp(self):
        pass

    def clientTearDown(self):
        self.cli.close()
        self.cli = None
        ThreadableTest.clientTearDown(self)

    def testInsideTimeout(self):
        conn, addr = self.serv.accept()
        self.addCleanup(conn.close)
        time.sleep(3)
        conn.send(b"done!")
    testOutsideTimeout = testInsideTimeout

    def _testInsideTimeout(self):
        # No timeout: recv() blocks until the delayed reply arrives.
        self.cli = sock = socket.create_connection((HOST, self.port))
        data = sock.recv(5)
        self.assertEqual(data, b"done!")

    def _testOutsideTimeout(self):
        # 1 s timeout expires before the server's 3 s sleep ends.
        self.cli = sock = socket.create_connection((HOST, self.port), timeout=1)
        self.assertRaises(socket.timeout, lambda: sock.recv(5))
class TCPTimeoutTest(SocketTCPTest):
    # Timeout behaviour of accept() on a listening TCP socket.

    def testTCPTimeout(self):
        def raise_timeout(*args, **kwargs):
            self.serv.settimeout(1.0)
            self.serv.accept()
        self.assertRaises(socket.timeout, raise_timeout,
                          "Error generating a timeout exception (TCP)")

    def testTimeoutZero(self):
        # Zero timeout == non-blocking: accept() with no pending
        # connection must raise socket.error, never socket.timeout.
        ok = False
        try:
            self.serv.settimeout(0.0)
            foo = self.serv.accept()
        except socket.timeout:
            self.fail("caught timeout instead of error (TCP)")
        except socket.error:
            ok = True
        except:
            self.fail("caught unexpected exception (TCP)")
        if not ok:
            self.fail("accept() returned success when we did not expect it")

    def testInterruptedTimeout(self):
        # XXX I don't know how to do this test on MSWindows or any other
        # platform that doesn't support signal.alarm() or os.kill(), though
        # the bug should have existed on all platforms.
        if not hasattr(signal, "alarm"):
            return                  # can only test on *nix
        self.serv.settimeout(5.0)   # must be longer than alarm
        class Alarm(Exception):
            pass
        def alarm_handler(signal, frame):
            raise Alarm
        old_alarm = signal.signal(signal.SIGALRM, alarm_handler)
        try:
            signal.alarm(2)    # POSIX allows alarm to be up to 1 second early
            try:
                foo = self.serv.accept()
            except socket.timeout:
                self.fail("caught timeout instead of Alarm")
            except Alarm:
                # Expected: the alarm fired while accept() was waiting.
                pass
            except:
                self.fail("caught other exception instead of Alarm:"
                          " %s(%s):\n%s" %
                          (sys.exc_info()[:2] + (traceback.format_exc(),)))
            else:
                self.fail("nothing caught")
            finally:
                signal.alarm(0)         # shut off alarm
        except Alarm:
            self.fail("got Alarm in wrong place")
        finally:
            # no alarm can be pending.  Safe to restore old handler.
            signal.signal(signal.SIGALRM, old_alarm)
class UDPTimeoutTest(SocketUDPTest):
    """Timeout behaviour of recv() on a UDP socket.

    Fix: this case previously derived from SocketTCPTest, so the "UDP"
    timeout tests actually ran against a listening TCP socket (they still
    passed, because an idle socket times out either way, but UDP was never
    exercised).  Deriving from SocketUDPTest -- the fixture BasicUDPTest
    already uses -- makes self.serv a real datagram socket, matching the
    upstream CPython fix.
    """

    def testUDPTimeout(self):
        # recv() on a socket with a 1 s timeout and no incoming datagram
        # must raise socket.timeout.
        def raise_timeout(*args, **kwargs):
            self.serv.settimeout(1.0)
            self.serv.recv(1024)
        self.assertRaises(socket.timeout, raise_timeout,
                          "Error generating a timeout exception (UDP)")

    def testTimeoutZero(self):
        # Zero timeout == non-blocking: recv() with no pending datagram
        # must raise socket.error (EAGAIN/EWOULDBLOCK), never socket.timeout.
        ok = False
        try:
            self.serv.settimeout(0.0)
            foo = self.serv.recv(1024)
        except socket.timeout:
            self.fail("caught timeout instead of error (UDP)")
        except socket.error:
            ok = True
        except:
            self.fail("caught unexpected exception (UDP)")
        if not ok:
            self.fail("recv() returned success when we did not expect it")
class TestExceptions(unittest.TestCase):
    """Sanity-check the socket exception hierarchy."""

    def testExceptionTree(self):
        # Every socket-specific exception derives from socket.error, which
        # itself derives from Exception.
        hierarchy = (
            (socket.error, Exception),
            (socket.herror, socket.error),
            (socket.gaierror, socket.error),
            (socket.timeout, socket.error),
        )
        for derived, base in hierarchy:
            self.assertTrue(issubclass(derived, base))
class TestLinuxAbstractNamespace(unittest.TestCase):
    # AF_UNIX sockets bound in the Linux-only abstract namespace
    # (addresses starting with a NUL byte; no filesystem entry created).

    UNIX_PATH_MAX = 108  # size of sockaddr_un.sun_path on Linux

    def testLinuxAbstractNamespace(self):
        address = b"\x00python-test-hello\x00\xff"
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s1:
            s1.bind(address)
            s1.listen(1)
            with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s2:
                s2.connect(s1.getsockname())
                with s1.accept()[0] as s3:
                    self.assertEqual(s1.getsockname(), address)
                    self.assertEqual(s2.getpeername(), address)

    def testMaxName(self):
        # The full sun_path may be used; no trailing NUL is required.
        address = b"\x00" + b"h" * (self.UNIX_PATH_MAX - 1)
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
            s.bind(address)
            self.assertEqual(s.getsockname(), address)

    def testNameOverflow(self):
        # One byte beyond sun_path must be rejected.
        address = "\x00" + "h" * self.UNIX_PATH_MAX
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
            self.assertRaises(socket.error, s.bind, address)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BufferIOTest(SocketConnectedTest):
    """
    Test the buffer versions of socket.recv() and socket.send().
    """
    # Each test receives into a pre-allocated writable buffer and checks
    # that exactly MSG was written at its start.

    def __init__(self, methodName='runTest'):
        SocketConnectedTest.__init__(self, methodName=methodName)

    def testRecvIntoArray(self):
        buf = bytearray(1024)
        nbytes = self.cli_conn.recv_into(buf)
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)

    def _testRecvIntoArray(self):
        buf = bytes(MSG)
        self.serv_conn.send(buf)

    def testRecvIntoBytearray(self):
        buf = bytearray(1024)
        nbytes = self.cli_conn.recv_into(buf)
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)
    # Same sender as the array variant.
    _testRecvIntoBytearray = _testRecvIntoArray

    def testRecvIntoMemoryview(self):
        buf = bytearray(1024)
        nbytes = self.cli_conn.recv_into(memoryview(buf))
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)
    _testRecvIntoMemoryview = _testRecvIntoArray

    def testRecvFromIntoArray(self):
        buf = bytearray(1024)
        nbytes, addr = self.cli_conn.recvfrom_into(buf)
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)

    def _testRecvFromIntoArray(self):
        buf = bytes(MSG)
        self.serv_conn.send(buf)

    def testRecvFromIntoBytearray(self):
        buf = bytearray(1024)
        nbytes, addr = self.cli_conn.recvfrom_into(buf)
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)
    _testRecvFromIntoBytearray = _testRecvFromIntoArray

    def testRecvFromIntoMemoryview(self):
        buf = bytearray(1024)
        nbytes, addr = self.cli_conn.recvfrom_into(memoryview(buf))
        self.assertEqual(nbytes, len(MSG))
        msg = buf[:len(MSG)]
        self.assertEqual(msg, MSG)
    _testRecvFromIntoMemoryview = _testRecvFromIntoArray
# Parameters for the TIPC (Transparent Inter-Process Communication) tests:
# the service type and the lower/upper bounds of the published name sequence.
TIPC_STYPE = 2000
TIPC_LOWER = 200
TIPC_UPPER = 210
def isTipcAvailable():
    """Check if the TIPC module is loaded

    The TIPC module is not loaded automatically on Ubuntu and probably
    other Linux distros.
    """
    if not hasattr(socket, "AF_TIPC"):
        return False
    if not os.path.isfile("/proc/modules"):
        return False
    # Scan the loaded-modules table for a "tipc " entry.
    with open("/proc/modules") as f:
        if any(line.startswith("tipc ") for line in f):
            return True
    if support.verbose:
        print("TIPC module is not loaded, please 'sudo modprobe tipc'")
    return False
class TIPCTest (unittest.TestCase):
    # Connectionless (SOCK_RDM) TIPC send/recv round-trip.
    def testRDM(self):
        srv = socket.socket(socket.AF_TIPC, socket.SOCK_RDM)
        cli = socket.socket(socket.AF_TIPC, socket.SOCK_RDM)

        srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # Publish a name sequence for the server...
        srvaddr = (socket.TIPC_ADDR_NAMESEQ, TIPC_STYPE,
                TIPC_LOWER, TIPC_UPPER)
        srv.bind(srvaddr)

        # ...and send to a single name inside that sequence.
        sendaddr = (socket.TIPC_ADDR_NAME, TIPC_STYPE,
                TIPC_LOWER + int((TIPC_UPPER - TIPC_LOWER) / 2), 0)
        cli.sendto(MSG, sendaddr)

        msg, recvaddr = srv.recvfrom(1024)

        self.assertEqual(cli.getsockname(), recvaddr)
        self.assertEqual(msg, MSG)
class TIPCThreadableTest (unittest.TestCase, ThreadableTest):
    # Connection-oriented (SOCK_STREAM) TIPC test; the client side runs in
    # a separate thread (see ThreadableTest).
    def __init__(self, methodName = 'runTest'):
        unittest.TestCase.__init__(self, methodName = methodName)
        ThreadableTest.__init__(self)

    def setUp(self):
        self.srv = socket.socket(socket.AF_TIPC, socket.SOCK_STREAM)
        self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        srvaddr = (socket.TIPC_ADDR_NAMESEQ, TIPC_STYPE,
                TIPC_LOWER, TIPC_UPPER)
        self.srv.bind(srvaddr)
        self.srv.listen(5)
        self.serverExplicitReady()
        self.conn, self.connaddr = self.srv.accept()

    def clientSetUp(self):
        # There is a hittable race between serverExplicitReady() and the
        # accept() call; sleep a little while to avoid it, otherwise
        # we could get an exception
        time.sleep(0.1)
        self.cli = socket.socket(socket.AF_TIPC, socket.SOCK_STREAM)
        addr = (socket.TIPC_ADDR_NAME, TIPC_STYPE,
                TIPC_LOWER + int((TIPC_UPPER - TIPC_LOWER) / 2), 0)
        self.cli.connect(addr)
        self.cliaddr = self.cli.getsockname()

    def testStream(self):
        msg = self.conn.recv(1024)
        self.assertEqual(msg, MSG)
        self.assertEqual(self.cliaddr, self.connaddr)

    def _testStream(self):
        self.cli.send(MSG)
        self.cli.close()
@unittest.skipUnless(thread, 'Threading required for this test.')
class ContextManagersTest(ThreadedTCPSocketTest):
    # Sockets as context managers: __exit__ must close the socket.

    def _testSocketClass(self):
        # base test
        with socket.socket() as sock:
            self.assertFalse(sock._closed)
        self.assertTrue(sock._closed)
        # close inside with block
        with socket.socket() as sock:
            sock.close()
        self.assertTrue(sock._closed)
        # exception inside with block
        with socket.socket() as sock:
            self.assertRaises(socket.error, sock.sendall, b'foo')
        self.assertTrue(sock._closed)

    def testCreateConnectionBase(self):
        # Server side: echo one message back to the client.
        conn, addr = self.serv.accept()
        self.addCleanup(conn.close)
        data = conn.recv(1024)
        conn.sendall(data)

    def _testCreateConnectionBase(self):
        address = self.serv.getsockname()
        with socket.create_connection(address) as sock:
            self.assertFalse(sock._closed)
            sock.sendall(b'foo')
            self.assertEqual(sock.recv(1024), b'foo')
        self.assertTrue(sock._closed)

    def testCreateConnectionClose(self):
        conn, addr = self.serv.accept()
        self.addCleanup(conn.close)
        data = conn.recv(1024)
        conn.sendall(data)

    def _testCreateConnectionClose(self):
        address = self.serv.getsockname()
        with socket.create_connection(address) as sock:
            sock.close()
        self.assertTrue(sock._closed)
        # Operations on the closed socket must now fail.
        self.assertRaises(socket.error, sock.sendall, b'foo')
@unittest.skipUnless(hasattr(socket, "SOCK_CLOEXEC"),
                     "SOCK_CLOEXEC not defined")
@unittest.skipUnless(fcntl, "module fcntl not available")
class CloexecConstantTest(unittest.TestCase):
    # SOCK_CLOEXEC (Linux >= 2.6.28) must set FD_CLOEXEC on the new fd.
    def test_SOCK_CLOEXEC(self):
        # linux_version() is a helper defined earlier in this module.
        v = linux_version()
        if v < (2, 6, 28):
            self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                          % ".".join(map(str, v)))
        with socket.socket(socket.AF_INET,
                           socket.SOCK_STREAM | socket.SOCK_CLOEXEC) as s:
            self.assertTrue(s.type & socket.SOCK_CLOEXEC)
            self.assertTrue(fcntl.fcntl(s, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
@unittest.skipUnless(hasattr(socket, "SOCK_NONBLOCK"),
                     "SOCK_NONBLOCK not defined")
class NonblockConstantTest(unittest.TestCase):
    # Interaction of SOCK_NONBLOCK (Linux >= 2.6.28) with
    # setblocking()/settimeout() and the global default timeout.

    def checkNonblock(self, s, nonblock=True, timeout=0.0):
        # A non-blocking socket reports SOCK_NONBLOCK in its type and the
        # given timeout; a blocking one reports neither.
        if nonblock:
            self.assertTrue(s.type & socket.SOCK_NONBLOCK)
            self.assertEqual(s.gettimeout(), timeout)
        else:
            self.assertFalse(s.type & socket.SOCK_NONBLOCK)
            self.assertEqual(s.gettimeout(), None)

    def test_SOCK_NONBLOCK(self):
        v = linux_version()
        if v < (2, 6, 28):
            self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                          % ".".join(map(str, v)))
        # a lot of it seems silly and redundant, but I wanted to test that
        # changing back and forth worked ok
        with socket.socket(socket.AF_INET,
                           socket.SOCK_STREAM | socket.SOCK_NONBLOCK) as s:
            self.checkNonblock(s)
            s.setblocking(1)
            self.checkNonblock(s, False)
            s.setblocking(0)
            self.checkNonblock(s)
            s.settimeout(None)
            self.checkNonblock(s, False)
            s.settimeout(2.0)
            self.checkNonblock(s, timeout=2.0)
            s.setblocking(1)
            self.checkNonblock(s, False)
        # defaulttimeout
        t = socket.getdefaulttimeout()
        socket.setdefaulttimeout(0.0)
        with socket.socket() as s:
            self.checkNonblock(s)
        socket.setdefaulttimeout(None)
        with socket.socket() as s:
            self.checkNonblock(s, False)
        socket.setdefaulttimeout(2.0)
        with socket.socket() as s:
            self.checkNonblock(s, timeout=2.0)
        socket.setdefaulttimeout(None)
        with socket.socket() as s:
            self.checkNonblock(s, False)
        # Restore the previous global default timeout.
        socket.setdefaulttimeout(t)
def test_main():
    # Assemble the test classes -- conditionally adding platform-specific
    # ones -- and run them with threading bookkeeping around the run.
    tests = [GeneralModuleTests, BasicTCPTest, TCPCloserTest, TCPTimeoutTest,
             TestExceptions, BufferIOTest, BasicTCPTest2, BasicUDPTest, UDPTimeoutTest ]

    tests.extend([
        NonBlockingTCPTests,
        FileObjectClassTestCase,
        FileObjectInterruptedTestCase,
        UnbufferedFileObjectClassTestCase,
        LineBufferedFileObjectClassTestCase,
        SmallBufferedFileObjectClassTestCase,
        UnicodeReadFileObjectClassTestCase,
        UnicodeWriteFileObjectClassTestCase,
        UnicodeReadWriteFileObjectClassTestCase,
        NetworkConnectionNoServer,
        NetworkConnectionAttributesTest,
        NetworkConnectionBehaviourTest,
        ContextManagersTest,
        CloexecConstantTest,
        NonblockConstantTest
    ])
    if hasattr(socket, "socketpair"):
        tests.append(BasicSocketPairTest)
    if sys.platform == 'linux2':
        tests.append(TestLinuxAbstractNamespace)
    if isTipcAvailable():
        # TIPC tests only when the kernel module is loaded.
        tests.append(TIPCTest)
        tests.append(TIPCThreadableTest)

    thread_info = support.threading_setup()
    support.run_unittest(*tests)
    # Ensure no stray test threads survive the run.
    support.threading_cleanup(*thread_info)
# Allow running this test module directly.
if __name__ == "__main__":
    test_main()
|
apache-2.0
|
NetDBNCKU/GAE-Conference-Web-App
|
django/contrib/gis/sitemaps/views.py
|
84
|
4413
|
from django.http import HttpResponse, Http404
from django.template import loader
from django.contrib.sites.models import get_current_site
from django.core import urlresolvers
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.contrib.gis.db.models.fields import GeometryField
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models import get_model
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
def index(request, sitemaps):
    """
    This view generates a sitemap index that uses the proper view
    for resolving geographic section sitemap URLs.

    ``sitemaps`` maps a section name to a Sitemap instance or class
    (callables are instantiated per request).
    """
    current_site = get_current_site(request)
    sites = []
    # Conditional expression instead of the error-prone ``and/or`` idiom.
    protocol = 'https' if request.is_secure() else 'http'
    for section, site in sitemaps.items():
        if callable(site):
            pages = site().paginator.num_pages
        else:
            pages = site.paginator.num_pages
        sitemap_url = urlresolvers.reverse('django.contrib.gis.sitemaps.views.sitemap', kwargs={'section': section})
        sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url))
        # Page 1 is the bare sitemap URL; further pages get an explicit ?p=N.
        if pages > 1:
            for page in range(2, pages+1):
                sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page))
    xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites})
    return HttpResponse(xml, content_type='application/xml')
def sitemap(request, sitemaps, section=None):
    """
    This view generates a sitemap with additional geographic
    elements defined by Google.
    """
    maps, urls = [], []
    if section is not None:
        if section not in sitemaps:
            raise Http404(_(u"No sitemap available for section: %r") % section)
        maps.append(sitemaps[section])
    else:
        # No section requested: render every registered sitemap.
        maps = sitemaps.values()

    # Page number comes from the ?p= query parameter (defaults to page 1).
    page = request.GET.get("p", 1)
    current_site = get_current_site(request)
    for site in maps:
        try:
            # Sitemap classes are callable; instantiate per request.
            if callable(site):
                urls.extend(site().get_urls(page=page, site=current_site))
            else:
                urls.extend(site.get_urls(page=page, site=current_site))
        except EmptyPage:
            raise Http404(_(u"Page %s empty") % page)
        except PageNotAnInteger:
            raise Http404(_(u"No page '%s'") % page)
    xml = smart_str(loader.render_to_string('gis/sitemaps/geo_sitemap.xml', {'urlset': urls}))
    return HttpResponse(xml, content_type='application/xml')
def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS):
    """
    This view generates KML for the given app label, model, and field name.
    The model's default manager must be GeoManager, and the field name
    must be that of a geographic field.

    Raises Http404 for an unknown model or a non-geographic field; renders
    KMZ instead of KML when ``compress`` is True.
    """
    placemarks = []
    klass = get_model(label, model)
    if not klass:
        raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))

    if field_name:
        try:
            info = klass._meta.get_field_by_name(field_name)
            if not isinstance(info[0], GeometryField):
                raise Exception
        except Exception:
            # Narrowed from a bare ``except:``: any field-lookup failure or
            # non-geometry field still maps to a 404, but SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            raise Http404('Invalid geometry field.')

    connection = connections[using]

    if connection.ops.postgis:
        # PostGIS will take care of transformation.
        placemarks = klass._default_manager.using(using).kml(field_name=field_name)
    else:
        # There's no KML method on Oracle or MySQL, so we use the `kml`
        # attribute of the lazy geometry instead.
        placemarks = []
        if connection.ops.oracle:
            qs = klass._default_manager.using(using).transform(4326, field_name=field_name)
        else:
            qs = klass._default_manager.using(using).all()
        for mod in qs:
            mod.kml = getattr(mod, field_name).kml
            placemarks.append(mod)

    # Getting the render function and rendering to the correct mimetype.
    if compress:
        render = render_to_kmz
    else:
        render = render_to_kml
    return render('gis/kml/placemarks.kml', {'places' : placemarks})
def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS):
    """
    This view returns KMZ for the given app label, model, and field name.

    Thin wrapper around kml() with compress=True.
    """
    return kml(request, label, model, field_name, compress=True, using=using)
|
bsd-3-clause
|
civisanalytics/ansible
|
lib/ansible/modules/monitoring/pingdom.py
|
19
|
4360
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Standard Ansible module metadata (consumed by tooling, not at runtime).
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
# Module documentation.  Fix: main() also accepts "started"/"stopped" as
# synonyms of "running"/"paused", so the documented choices now match the
# argument_spec.
DOCUMENTATION = '''
module: pingdom
short_description: Pause/unpause Pingdom alerts
description:
    - This module will let you pause/unpause Pingdom alerts
version_added: "1.2"
author:
    - "Dylan Silva (@thaumos)"
    - "Justin Johns"
requirements:
    - "This pingdom python library: https://github.com/mbabineau/pingdom-python"
options:
    state:
        description:
            - Define whether or not the check should be running or paused.
        required: true
        default: null
        choices: [ "running", "paused", "started", "stopped" ]
        aliases: []
    checkid:
        description:
            - Pingdom ID of the check.
        required: true
        default: null
        choices: []
        aliases: []
    uid:
        description:
            - Pingdom user ID.
        required: true
        default: null
        choices: []
        aliases: []
    passwd:
        description:
            - Pingdom user password.
        required: true
        default: null
        choices: []
        aliases: []
    key:
        description:
            - Pingdom API key.
        required: true
        default: null
        choices: []
        aliases: []
notes:
    - This module does not yet have support to add/remove checks.
'''
EXAMPLES = '''
# Pause the check with the ID of 12345.
- pingdom:
uid: [email protected]
passwd: password123
key: apipassword123
checkid: 12345
state: paused
# Unpause the check with the ID of 12345.
- pingdom:
uid: [email protected]
passwd: password123
key: apipassword123
checkid: 12345
state: running
'''
# Optional third-party dependency; its absence is reported by main() via
# fail_json rather than crashing at import time.
try:
    import pingdom
    HAS_PINGDOM = True
except ImportError:
    # Narrowed from a bare ``except:`` -- only a missing/broken import
    # should mark the library as unavailable.
    HAS_PINGDOM = False
def pause(checkid, uid, passwd, key):
    """Pause a Pingdom check; return (failed, name, status)."""
    conn = pingdom.PingdomConnection(uid, passwd, key)
    conn.modify_check(checkid, paused=True)
    check = conn.get_check(checkid)
    #if check.status != "paused":             # api output buggy - accept raw exception for now
    #    return (True, check.name, check.status)
    return (False, check.name, check.status)
def unpause(checkid, uid, passwd, key):
    """Resume a Pingdom check; return (failed, name, status)."""
    conn = pingdom.PingdomConnection(uid, passwd, key)
    conn.modify_check(checkid, paused=False)
    check = conn.get_check(checkid)
    #if check.status != "up":                 # api output buggy - accept raw exception for now
    #    return (True, check.name, check.status)
    return (False, check.name, check.status)
def main():
    # Ansible entry point: validate parameters, then pause or unpause the
    # given Pingdom check.
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(required=True, choices=['running', 'paused', 'started', 'stopped']),
            checkid=dict(required=True),
            uid=dict(required=True),
            passwd=dict(required=True, no_log=True),
            key=dict(required=True)
        )
    )

    if not HAS_PINGDOM:
        module.fail_json(msg="Missing required pingdom module (check docs)")

    checkid = module.params['checkid']
    state = module.params['state']
    uid = module.params['uid']
    passwd = module.params['passwd']
    key = module.params['key']

    # "stopped" is a synonym for "paused", "started" for "running".
    if (state == "paused" or state == "stopped"):
        (rc, name, result) = pause(checkid, uid, passwd, key)

    if (state == "running" or state == "started"):
        (rc, name, result) = unpause(checkid, uid, passwd, key)

    # pause()/unpause() return False as rc on success.
    if rc != 0:
        module.fail_json(checkid=checkid, name=name, status=result)

    module.exit_json(checkid=checkid, name=name, status=result)
# import module snippets
from ansible.module_utils.basic import *

# Entry point when executed by Ansible.
if __name__ == '__main__':
    main()
|
gpl-3.0
|
DrMeers/django
|
django/contrib/formtools/tests/wizard/namedwizardtests/forms.py
|
95
|
1760
|
import os
import tempfile
from django import forms
from django.core.files.storage import FileSystemStorage
from django.forms.formsets import formset_factory
from django.http import HttpResponse
from django.template import Template, Context
from django.contrib.auth.models import User
from django.contrib.formtools.wizard.views import NamedUrlWizardView
temp_storage_location = tempfile.mkdtemp(dir=os.environ.get('DJANGO_TEST_TEMP_DIR'))
temp_storage = FileSystemStorage(location=temp_storage_location)
class Page1(forms.Form):
    """Wizard step 1: basic details, including a ModelChoiceField over User."""
    name = forms.CharField(max_length=100)
    user = forms.ModelChoiceField(queryset=User.objects.all())
    thirsty = forms.NullBooleanField()
class Page2(forms.Form):
    """Wizard step 2: address plus a file upload (exercises file_storage)."""
    address1 = forms.CharField(max_length=100)
    address2 = forms.CharField(max_length=100)
    file1 = forms.FileField()
class Page3(forms.Form):
    """Wizard step 3: a single free-form field."""
    random_crap = forms.CharField(max_length=100)
# Step 4 is a formset of two extra Page3 forms.
Page4 = formset_factory(Page3, extra=2)
class ContactWizard(NamedUrlWizardView):
    """Named-URL wizard that renders all collected cleaned data when done."""
    file_storage = temp_storage

    def done(self, form_list, **kwargs):
        """Render an empty template against a context holding every step's data."""
        context = Context({
            'form_list': [form.cleaned_data for form in form_list],
            'form_dict': kwargs.get('form_dict'),
            'all_cleaned_data': self.get_all_cleaned_data(),
        })
        # Expose each step's cleaned data under the step name.
        for step in self.form_list:
            context[step] = self.get_cleaned_data_for_step(step)
        # Unknown step names must yield None rather than raise.
        context['this_will_fail'] = self.get_cleaned_data_for_step('this_will_fail')
        return HttpResponse(Template('').render(context))
class SessionContactWizard(ContactWizard):
    # Same wizard, persisting state in the Django session.
    storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieContactWizard(ContactWizard):
    # Same wizard, persisting state in a signed cookie.
    storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
|
bsd-3-clause
|
jcoady9/python-for-android
|
python-build/python-libs/gdata/src/gdata/tlslite/X509CertChain.py
|
238
|
6861
|
"""Class representing an X.509 certificate chain."""
from utils import cryptomath
class X509CertChain:
    """This class represents a chain of X.509 certificates.
    @type x509List: list
    @ivar x509List: A list of L{tlslite.X509.X509} instances,
    starting with the end-entity certificate and with every
    subsequent certificate certifying the previous.
    """
    def __init__(self, x509List=None):
        """Create a new X509CertChain.
        @type x509List: list
        @param x509List: A list of L{tlslite.X509.X509} instances,
        starting with the end-entity certificate and with every
        subsequent certificate certifying the previous.
        """
        # NOTE: a falsy x509List (None or []) yields an empty chain; the
        # list is stored as-is, not copied.
        if x509List:
            self.x509List = x509List
        else:
            self.x509List = []
    def getNumCerts(self):
        """Get the number of certificates in this chain.
        @rtype: int
        """
        return len(self.x509List)
    def getEndEntityPublicKey(self):
        """Get the public key from the end-entity certificate.
        @rtype: L{tlslite.utils.RSAKey.RSAKey}
        """
        # The end-entity certificate is always the first list element.
        if self.getNumCerts() == 0:
            raise AssertionError()
        return self.x509List[0].publicKey
    def getFingerprint(self):
        """Get the hex-encoded fingerprint of the end-entity certificate.
        @rtype: str
        @return: A hex-encoded fingerprint.
        """
        if self.getNumCerts() == 0:
            raise AssertionError()
        return self.x509List[0].getFingerprint()
    def getCommonName(self):
        """Get the Subject's Common Name from the end-entity certificate.
        The cryptlib_py module must be installed in order to use this
        function.
        @rtype: str or None
        @return: The CN component of the certificate's subject DN, if
        present.
        """
        if self.getNumCerts() == 0:
            raise AssertionError()
        return self.x509List[0].getCommonName()
    def validate(self, x509TrustList):
        """Check the validity of the certificate chain.
        This checks that every certificate in the chain validates with
        the subsequent one, until some certificate validates with (or
        is identical to) one of the passed-in root certificates.
        The cryptlib_py module must be installed in order to use this
        function.
        @type x509TrustList: list of L{tlslite.X509.X509}
        @param x509TrustList: A list of trusted root certificates. The
        certificate chain must extend to one of these certificates to
        be considered valid.
        """
        import cryptlib_py
        # Handles for cryptlib certificate objects; tracked individually so
        # the finally block can destroy whichever ones are still open.
        c1 = None
        c2 = None
        lastC = None
        rootC = None
        try:
            rootFingerprints = [c.getFingerprint() for c in x509TrustList]
            #Check that every certificate in the chain validates with the
            #next one
            for cert1, cert2 in zip(self.x509List, self.x509List[1:]):
                #If we come upon a root certificate, we're done.
                if cert1.getFingerprint() in rootFingerprints:
                    return True
                c1 = cryptlib_py.cryptImportCert(cert1.writeBytes(),
                                                 cryptlib_py.CRYPT_UNUSED)
                c2 = cryptlib_py.cryptImportCert(cert2.writeBytes(),
                                                 cryptlib_py.CRYPT_UNUSED)
                try:
                    cryptlib_py.cryptCheckCert(c1, c2)
                except:
                    # Any cryptlib failure means this link is invalid.
                    return False
                cryptlib_py.cryptDestroyCert(c1)
                c1 = None
                cryptlib_py.cryptDestroyCert(c2)
                c2 = None
            #If the last certificate is one of the root certificates, we're
            #done.
            if self.x509List[-1].getFingerprint() in rootFingerprints:
                return True
            #Otherwise, find a root certificate that the last certificate
            #chains to, and validate them.
            lastC = cryptlib_py.cryptImportCert(self.x509List[-1].writeBytes(),
                                                cryptlib_py.CRYPT_UNUSED)
            for rootCert in x509TrustList:
                rootC = cryptlib_py.cryptImportCert(rootCert.writeBytes(),
                                                    cryptlib_py.CRYPT_UNUSED)
                if self._checkChaining(lastC, rootC):
                    try:
                        cryptlib_py.cryptCheckCert(lastC, rootC)
                        return True
                    except:
                        return False
            return False
        finally:
            # Destroy any cryptlib handles still open, on every exit path.
            if not (c1 is None):
                cryptlib_py.cryptDestroyCert(c1)
            if not (c2 is None):
                cryptlib_py.cryptDestroyCert(c2)
            if not (lastC is None):
                cryptlib_py.cryptDestroyCert(lastC)
            if not (rootC is None):
                cryptlib_py.cryptDestroyCert(rootC)
    def _checkChaining(self, lastC, rootC):
        # Compare DN components of lastC's issuer against rootC, one
        # attribute at a time (Python 2 cryptlib bindings).
        # NOTE(review): presumably rootC's reads come from its subject DN by
        # default -- confirm against the cryptlib attribute-cursor semantics.
        import cryptlib_py
        import array
        def compareNames(name):
            # Read attribute 'name' from both certs; CRYPT_ERROR_NOTFOUND
            # maps to None so "absent == absent" compares equal.
            try:
                length = cryptlib_py.cryptGetAttributeString(lastC, name, None)
                lastName = array.array('B', [0] * length)
                cryptlib_py.cryptGetAttributeString(lastC, name, lastName)
                lastName = lastName.tostring()
            except cryptlib_py.CryptException, e:
                if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
                    lastName = None
            try:
                length = cryptlib_py.cryptGetAttributeString(rootC, name, None)
                rootName = array.array('B', [0] * length)
                cryptlib_py.cryptGetAttributeString(rootC, name, rootName)
                rootName = rootName.tostring()
            except cryptlib_py.CryptException, e:
                if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
                    rootName = None
            return lastName == rootName
        # Point lastC's attribute cursor at its issuer DN before comparing.
        cryptlib_py.cryptSetAttribute(lastC,
                                      cryptlib_py.CRYPT_CERTINFO_ISSUERNAME,
                                      cryptlib_py.CRYPT_UNUSED)
        if not compareNames(cryptlib_py.CRYPT_CERTINFO_COUNTRYNAME):
            return False
        if not compareNames(cryptlib_py.CRYPT_CERTINFO_LOCALITYNAME):
            return False
        if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONNAME):
            return False
        if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONALUNITNAME):
            return False
        if not compareNames(cryptlib_py.CRYPT_CERTINFO_COMMONNAME):
            return False
        return True
|
apache-2.0
|
edfungus/Crouton-Python-Example
|
env/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py
|
482
|
2055
|
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = 'Andrey Petrov ([email protected])'
__license__ = 'MIT'
__version__ = '1.10.4'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        # Fallback no-op handler for Python < 2.7.
        def emit(self, record):
            pass
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    # Lazy %-style args: the message is only formatted if DEBUG is enabled
    # (the original formatted it eagerly with the % operator).
    logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return handler
# ... Clean up.
# NullHandler was only needed transiently above; drop it from the namespace.
del NullHandler
import warnings
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
                      append=True)
def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    # Ignores 'category' and all its subclasses process-wide.
    warnings.simplefilter('ignore', category)
|
mit
|
valkjsaaa/sl4a
|
python/src/Lib/json/tests/test_fail.py
|
51
|
2884
|
from unittest import TestCase
import json
# Fri Dec 30 18:57:26 2005
# Invalid JSON documents from the json.org JSON_checker suite.  Entry N-1 in
# this list corresponds to fail<N>.json; the tests below use 1-based indices.
JSONDOCS = [
    # http://json.org/JSON_checker/test/fail1.json
    '"A JSON payload should be an object or array, not a string."',
    # http://json.org/JSON_checker/test/fail2.json
    '["Unclosed array"',
    # http://json.org/JSON_checker/test/fail3.json
    '{unquoted_key: "keys must be quoted}',
    # http://json.org/JSON_checker/test/fail4.json
    '["extra comma",]',
    # http://json.org/JSON_checker/test/fail5.json
    '["double extra comma",,]',
    # http://json.org/JSON_checker/test/fail6.json
    '[ , "<-- missing value"]',
    # http://json.org/JSON_checker/test/fail7.json
    '["Comma after the close"],',
    # http://json.org/JSON_checker/test/fail8.json
    '["Extra close"]]',
    # http://json.org/JSON_checker/test/fail9.json
    '{"Extra comma": true,}',
    # http://json.org/JSON_checker/test/fail10.json
    '{"Extra value after close": true} "misplaced quoted value"',
    # http://json.org/JSON_checker/test/fail11.json
    '{"Illegal expression": 1 + 2}',
    # http://json.org/JSON_checker/test/fail12.json
    '{"Illegal invocation": alert()}',
    # http://json.org/JSON_checker/test/fail13.json
    '{"Numbers cannot have leading zeroes": 013}',
    # http://json.org/JSON_checker/test/fail14.json
    '{"Numbers cannot be hex": 0x14}',
    # http://json.org/JSON_checker/test/fail15.json
    '["Illegal backslash escape: \\x15"]',
    # http://json.org/JSON_checker/test/fail16.json
    '["Illegal backslash escape: \\\'"]',
    # http://json.org/JSON_checker/test/fail17.json
    '["Illegal backslash escape: \\017"]',
    # http://json.org/JSON_checker/test/fail18.json
    '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
    # http://json.org/JSON_checker/test/fail19.json
    '{"Missing colon" null}',
    # http://json.org/JSON_checker/test/fail20.json
    '{"Double colon":: null}',
    # http://json.org/JSON_checker/test/fail21.json
    '{"Comma instead of colon", null}',
    # http://json.org/JSON_checker/test/fail22.json
    '["Colon instead of comma": false]',
    # http://json.org/JSON_checker/test/fail23.json
    '["Bad value", truth]',
    # http://json.org/JSON_checker/test/fail24.json
    "['single quote']",
    # http://code.google.com/p/simplejson/issues/detail?id=3
    u'["A\u001FZ control characters in string"]',
]
# 1-based indices of documents this json implementation deliberately accepts.
SKIPS = {
    1: "why not have a string payload?",
    18: "spec doesn't specify any nesting limitations",
}
class TestFail(TestCase):
    """Every JSONDOCS entry must be rejected by json.loads unless skipped."""
    def test_failures(self):
        for idx, doc in enumerate(JSONDOCS, 1):
            if idx in SKIPS:
                # Known-accepted documents: just make sure they parse.
                json.loads(doc)
                continue
            try:
                json.loads(doc)
            except ValueError:
                continue
            self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
|
apache-2.0
|
JackPrice/ansible-modules-extras
|
monitoring/airbrake_deployment.py
|
51
|
3918
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Bruce Pennypacker <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: airbrake_deployment
version_added: "1.2"
author: Bruce Pennypacker
short_description: Notify airbrake about app deployments
description:
- Notify airbrake about app deployments (see http://help.airbrake.io/kb/api-2/deploy-tracking)
options:
token:
description:
- API token.
required: true
environment:
description:
- The airbrake environment name, typically 'production', 'staging', etc.
required: true
user:
description:
- The username of the person doing the deployment
required: false
repo:
description:
- URL of the project repository
required: false
revision:
description:
- A hash, number, tag, or other identifier showing what revision was deployed
required: false
url:
description:
- Optional URL to submit the notification to. Use to send notifications to Airbrake-compliant tools like Errbit.
required: false
    default: "https://api.airbrake.io/deploys.txt"
version_added: "1.5"
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
# informational: requirements for nodes
requirements: [ urllib, urllib2 ]
'''
EXAMPLES = '''
- airbrake_deployment: token=AAAAAA
environment='staging'
user='ansible'
revision=4.2
'''
# ===========================================
# Module execution.
#
def main():
    """Ansible entry point: POST a deploy notification to Airbrake."""
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True),
            environment=dict(required=True),
            user=dict(required=False),
            repo=dict(required=False),
            revision=dict(required=False),
            url=dict(required=False, default='https://api.airbrake.io/deploys.txt'),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Map module options onto Airbrake's deploy-tracking form fields,
    # skipping any option the caller left unset.
    option_to_field = (
        ('environment', 'deploy[rails_env]'),
        ('user', 'deploy[local_username]'),
        ('repo', 'deploy[scm_repository]'),
        ('revision', 'deploy[scm_revision]'),
    )
    params = {}
    for option, field in option_to_field:
        if module.params[option]:
            params[field] = module.params[option]
    params["api_key"] = module.params["token"]

    url = module.params.get('url')

    # In check mode, report success without contacting the service.
    if module.check_mode:
        module.exit_json(changed=True)

    # POST the form-encoded payload (Python 2 urllib).
    data = urllib.urlencode(params)
    response, info = fetch_url(module, url, data=data)

    if info['status'] == 200:
        module.exit_json(changed=True)
    else:
        module.fail_json(msg="HTTP result code: %d connecting to %s" % (info['status'], url))
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
# NOTE(review): main() is called unconditionally (no __main__ guard), as
# older Ansible modules did.
main()
|
gpl-3.0
|
bradh/openchange
|
mapiproxy/services/web/rpcproxy/rpcproxy/RPCProxyApplication.py
|
12
|
4107
|
# RPCProxyApplication.py -- OpenChange RPC-over-HTTP implementation
#
# Copyright (C) 2012 Wolfgang Sourdeau <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
from errno import EEXIST
from os import umask, mkdir, rmdir, listdir, getpid
from os.path import join
from uuid import uuid4
from shutil import rmtree
import sys
from channels import RPCProxyInboundChannelHandler,\
RPCProxyOutboundChannelHandler
class RPCProxyApplication(object):
    """WSGI application dispatching RPC-over-HTTP requests (Python 2).

    Each RPC_IN_DATA / RPC_OUT_DATA request gets its own channel handler and
    its own logger tagged with the HTTP method and remote port.
    """
    def __init__(self, samba_host, log_level=logging.DEBUG):
        # we keep a reference to the rmtree function until our instance is
        # deleted
        self.rmtree = rmtree
        # Fixed directory for the inbound/outbound channel sockets.
        dirname = "/tmp/rpcproxy"
        try:
            mkdir(dirname)
        except:
            # Directory usually exists already (EEXIST); other mkdir errors
            # are silently ignored here too.
            pass
        self.sockets_dir = dirname
        print >>sys.stderr, "RPCProxy started"
        # has_socket_dir = False
        # umask(0077)
        # while not has_socket_dir:
        # # leafname = "rpcproxy-%s" % str(uuid4())
        # leafname = "rpcproxy" # % getpid()
        # dirname = "/tmp/%s" % leafname
        # try:
        # mkdir(dirname)
        # has_socket_dir = True
        # self.sockets_dir = dirname
        # except OSError, e:
        # if e.errno != EEXIST:
        # raise
        self.samba_host = samba_host
        self.log_level = log_level
    def __del__(self):
        # self.rmtree(self.sockets_dir)
        pass
    def __call__(self, environ, start_response):
        # WSGI entry point: route by HTTP method to _do_<METHOD> if defined.
        if "REQUEST_METHOD" in environ:
            method = environ["REQUEST_METHOD"]
            method_name = "_do_" + method
            if hasattr(self, method_name):
                # Log to the WSGI error stream when available.
                if "wsgi.errors" in environ:
                    log_stream = environ["wsgi.errors"]
                else:
                    log_stream = sys.stderr
                logHandler = logging.StreamHandler(log_stream)
                fmter = logging.Formatter("[%(process)d:%(name)s] %(levelname)s: %(message)s")
                logHandler.setFormatter(fmter)
                if "REMOTE_PORT" in environ:
                    rmt_port = environ["REMOTE_PORT"]
                else:
                    rmt_port = "<unknown>"
                # Per-request logger named "<METHOD>:<remote port>".
                logger = logging.Logger(method + ":" + rmt_port)
                logger.setLevel(self.log_level)
                logger.addHandler(logHandler)
                # logger.set_name(method)
                channel_method = getattr(self, method_name)
                channel = channel_method(logger)
                response = channel.sequence(environ, start_response)
            else:
                response = self._unsupported_method(environ, start_response)
        else:
            response = self._unsupported_method(environ, start_response)
        return response
    @staticmethod
    def _unsupported_method(environ, start_response):
        # 501 for any method without a _do_ handler.
        msg = "Unsupported method"
        start_response("501 Not Implemented", [("Content-Type", "text/plain"),
                                               ("Content-length",
                                                str(len(msg)))])
        return [msg]
    def _do_RPC_IN_DATA(self, logger):
        # Client-to-server half of the RPC-over-HTTP virtual connection.
        return RPCProxyInboundChannelHandler(self.sockets_dir, logger)
    def _do_RPC_OUT_DATA(self, logger):
        # Server-to-client half; needs the Samba host to connect to.
        return RPCProxyOutboundChannelHandler(self.sockets_dir,
                                              self.samba_host,
                                              logger)
|
gpl-3.0
|
jiachenning/odoo
|
doc/_themes/odoodoc/sphinx_monkeypatch.py
|
22
|
3300
|
# -*- coding: utf-8 -*-
import sphinx.roles
import sphinx.environment
from sphinx.writers.html import HTMLTranslator
from docutils.writers.html4css1 import HTMLTranslator as DocutilsTranslator
def patch():
    """Apply all Sphinx/docutils monkeypatches used by this theme."""
    # navify toctree (oh god)
    @monkey(sphinx.environment.BuildEnvironment)
    def resolve_toctree(old_resolve, self, *args, **kwargs):
        """ If navbar, bootstrapify TOC to yield a navbar
        """
        # 'navbar' is our extension kwarg; pop it so the original resolver
        # never sees it.
        navbar = kwargs.pop('navbar', None)
        toc = old_resolve(self, *args, **kwargs)
        if toc is None:
            return None
        navbarify(toc[0], navbar=navbar)
        return toc
    # monkeypatch visit_table to remove border and add .table
    HTMLTranslator.visit_table = visit_table
    # disable colspec crap
    HTMLTranslator.write_colspecs = lambda self: None
    # copy data- attributes straight from source to dest
    HTMLTranslator.starttag = starttag_data
def navbarify(node, navbar=None):
    """
    :param node: toctree node to navbarify
    :param navbar: Whether this toctree is a 'main' navbar, a 'side' navbar or
                   not a navbar at all
    """
    if navbar == 'side':
        # Side navbar: tag every nested bullet list as a bootstrap .nav.
        for n in node.traverse():
            if n.tagname == 'bullet_list':
                n['classes'].append('nav')
    elif navbar == 'main':
        # add classes to just toplevel
        node['classes'].extend(['nav', 'navbar-nav', 'navbar-right'])
        for list_item in node.children:
            # bullet_list
            # list_item
            # compact_paragraph
            # reference
            # bullet_list
            # list_item
            # compact_paragraph
            # reference
            # no bullet_list.list_item -> don't dropdownify
            # (children[1] is the nested bullet_list of this entry)
            if not list_item.children[1].children:
                return
            list_item['classes'].append('dropdown')
            # list_item.compact_paragraph.reference
            link = list_item.children[0].children[0]
            link['classes'].append('dropdown-toggle')
            link.attributes['data-toggle'] = 'dropdown'
            # list_item.bullet_list
            list_item.children[1]['classes'].append('dropdown-menu')
def visit_table(self, node):
    """
    * remove border
    * add table class
    """
    self._table_row_index = 0
    # Mirror docutils' compact-paragraph bookkeeping for table contents.
    self.context.append(self.compact_p)
    self.compact_p = True
    classes = {self.settings.table_style}
    node_classes = node.get('classes', [])
    # 'no-table' opts a table out of the bootstrap .table styling.
    if 'no-table' in node_classes: node_classes.remove('no-table')
    else: classes.add('table')
    self.body.append(self.starttag(node, 'table', CLASS=' '.join(classes).strip()))
def starttag_data(self, node, tagname, suffix='\n', empty=False, **attributes):
    """Replacement HTMLTranslator.starttag that forwards data-* attributes
    from the source node to the emitted tag (Python 2 dict API)."""
    attributes.update(
        (k, v) for k, v in node.attributes.iteritems()
        if k.startswith('data-')
    )
    # oh dear
    return DocutilsTranslator.starttag(
        self, node, tagname, suffix=suffix, empty=empty, **attributes)
class monkey(object):
    """Decorator factory that monkeypatches an attribute of *obj*.

    The decorated function replaces obj.<fn name> and receives the original
    implementation as its first argument, followed by the instance and the
    original call arguments.
    """
    def __init__(self, obj):
        self.obj = obj
    def __call__(self, fn):
        name = fn.__name__
        original = getattr(self.obj, name)
        def wrapper(self_, *args, **kwargs):
            # Forward the saved original so the patch can delegate to it.
            return fn(original, self_, *args, **kwargs)
        setattr(self.obj, name, wrapper)
|
agpl-3.0
|
SmartXiaoMing/clean_openwrt_sdk
|
staging_dir/host/lib/scons-2.1.0/SCons/Tool/PharLapCommon.py
|
21
|
5236
|
"""SCons.Tool.PharLapCommon
This module contains common code used by all Tools for the
Phar Lap ETS tool chain. Right now, this is linkloc and
386asm.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/PharLapCommon.py 5357 2011/09/09 21:31:03 bdeegan"
import os
import os.path
import SCons.Errors
import SCons.Util
import re
def getPharLapPath():
    """Reads the registry to find the installed path of the Phar Lap ETS
    development kit.
    Raises UserError if no installed version of Phar Lap can
    be found."""
    if not SCons.Util.can_read_reg:
        raise SCons.Errors.InternalError("No Windows registry module was found")
    try:
        k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
                                  'SOFTWARE\\Pharlap\\ETS')
        # NOTE(review): 'type' below shadows the builtin; harmless here.
        val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
        # The following is a hack...there is (not surprisingly)
        # an odd issue in the Phar Lap plug in that inserts
        # a bunch of junk data after the phar lap path in the
        # registry. We must trim it.
        idx=val.find('\0')
        if idx >= 0:
            val = val[:idx]
        return os.path.normpath(val)
    except SCons.Util.RegError:
        raise SCons.Errors.UserError("Cannot find Phar Lap ETS path in the registry. Is it installed properly?")
REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)')
def getPharLapVersion():
    """Returns the version of the installed ETS Tool Suite as a
    decimal number. This version comes from the ETS_VER #define in
    the embkern.h header. For example, '#define ETS_VER 1010' (which
    is what Phar Lap 10.1 defines) would cause this method to return
    1010. Phar Lap 9.1 does not have such a #define, but this method
    will return 910 as a default.
    Raises UserError if no installed version of Phar Lap can
    be found."""
    include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h"))
    if not os.path.exists(include_path):
        raise SCons.Errors.UserError("Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?")
    # Close the header file deterministically (the original leaked the
    # handle until garbage collection).
    f = open(include_path, 'r')
    try:
        contents = f.read()
    finally:
        f.close()
    mo = REGEX_ETS_VER.search(contents)
    if mo:
        return int(mo.group(1))
    # Default return for Phar Lap 9.1
    return 910
def addPathIfNotExists(env_dict, key, path, sep=os.pathsep):
    """This function will take 'key' out of the dictionary
    'env_dict', then add the path 'path' to that key if it is not
    already there. This treats the value of env_dict[key] as if it
    has a similar format to the PATH variable...a list of paths
    separated by tokens. The 'path' will get added to the list if it
    is not already there."""
    if key not in env_dict:
        # No existing entry: the path becomes the whole value.
        env_dict[key] = path
        return
    current = env_dict[key]
    was_list = SCons.Util.is_List(current)
    if was_list:
        paths = current
    else:
        paths = current.split(sep)
    # Case-insensitive membership test (Windows-style paths).
    normalized = list(map(os.path.normcase, paths))
    if os.path.normcase(path) not in normalized:
        paths = [path] + paths
        if was_list:
            env_dict[key] = paths
        else:
            env_dict[key] = sep.join(paths)
def addPharLapPaths(env):
    """This function adds the path to the Phar Lap binaries, includes,
    and libraries, if they are not already there."""
    ph_path = getPharLapPath()

    try:
        env_dict = env['ENV']
    except KeyError:
        env_dict = {}
        env['ENV'] = env_dict
    addPathIfNotExists(env_dict, 'PATH',
                       os.path.join(ph_path, 'bin'))
    addPathIfNotExists(env_dict, 'INCLUDE',
                       os.path.join(ph_path, 'include'))
    addPathIfNotExists(env_dict, 'LIB',
                       os.path.join(ph_path, 'lib'))
    addPathIfNotExists(env_dict, 'LIB',
                       os.path.join(ph_path, os.path.normpath('lib/vclib')))

    # Reuse ph_path instead of re-reading the registry a second time.
    env['PHARLAP_PATH'] = ph_path
    env['PHARLAP_VERSION'] = str(getPharLapVersion())
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
gpl-2.0
|
open-craft/XBlock
|
xblock/test/test_json_conversion.py
|
1
|
1905
|
"""
Tests asserting that ModelTypes convert to and from json when working
with ModelDatas
"""
# Allow inspection of private class members
# pylint: disable=W0212
from mock import Mock
from xblock.core import XBlock
from xblock.fields import Field, Scope
from xblock.field_data import DictFieldData
from xblock.test.tools import assert_equals, assert_is_instance
class TestJSONConversionField(Field):
    """Field whose json form is a tagged dict: {'$type': 'set', '$vals': [...]}."""
    def from_json(self, value):
        """Decode the tagged dict back into a python set."""
        assert_equals('set', value['$type'])
        return set(value['$vals'])
    def to_json(self, value):
        """Encode a set as a tagged dict with deterministically sorted values."""
        encoded = {'$type': 'set'}
        encoded['$vals'] = sorted(value)
        return encoded
class TestBlock(XBlock):
    """XBlock for testing json conversion"""
    # Two content-scoped fields: field_a gets explicit data in the tests,
    # field_b falls back to TestModel.default().
    field_a = TestJSONConversionField(scope=Scope.content)
    field_b = TestJSONConversionField(scope=Scope.content)
class TestModel(DictFieldData):
    """ModelData for testing json conversion"""
    def default(self, block, name):
        # Every missing field defaults to the json form of {0, 1}.
        return {'$type': 'set', '$vals': [0, 1]}
class TestJsonConversion(object):
    """
    Verify that all ModelType operations correctly convert
    the json that comes out of the ModelData to python objects
    """
    def setUp(self):
        # field_a has explicit json data; field_b will use TestModel.default().
        self.block = TestBlock(
            Mock(),
            TestModel({
                'field_a': {'$type': 'set', '$vals': [1, 2, 3]}
            }),
            Mock()
        )
    def test_get(self):
        # Test field with a value
        assert_is_instance(self.block.field_a, set)
        # Test ModelData default
        assert_is_instance(self.block.field_b, set)
    def test_set(self):
        # Duplicate 5 collapses; to_json sorts, so stored vals are [5, 6].
        self.block.field_b = set([5, 6, 5])
        self.block.save()
        assert_is_instance(self.block.field_b, set)
        assert_equals(
            {'$type': 'set', '$vals': [5, 6]},
            self.block._field_data.get(self.block, 'field_b')
        )
|
apache-2.0
|
mudbungie/NetExplorer
|
env/lib/python3.4/site-packages/bulbs/tests/model_tests.py
|
3
|
2922
|
import unittest
from .testcase import BulbsTestCase
from bulbs.model import Node, NodeProxy, Relationship, RelationshipProxy
from bulbs.property import Integer, String, DateTime, Bool
from bulbs.utils import current_datetime
class Knows(Relationship):
    """Edge model: 'knows' with an auto-set creation timestamp."""
    label = "knows"
    timestamp = DateTime(default=current_datetime)
# Lightbulb Person model doesn't have age so it breaks get_all() when in use
class Person(Node):
    """Vertex model used by both test cases below."""
    element_type = "person"
    name = String(nullable=False)
    age = Integer()
    is_adult = Bool()
class NodeTestCase(BulbsTestCase):
    """Exercises NodeProxy CRUD and index behaviour via the Person model."""
    def setUp(self):
        # Fresh proxy + index per test; creates one known vertex.
        indices = self.vertex_index_proxy(self.index_class,self.client)
        self.people = NodeProxy(Person,self.client)
        self.people.index = indices.get_or_create("person")
        self.james = self.people.create(name="James", age=34, is_adult=True)
    def test_properties(self):
        #assert type(self.james.eid) == int
        assert self.james.element_type == "person"
        assert self.james.name == "James"
        assert self.james.age == 34
        assert self.james.is_adult is True
    def test_get(self):
        person = self.people.get(self.james.eid)
        assert person == self.james
    def test_get_all(self):
        # Relies on vertices accumulated by earlier tests/runs (> 1).
        people = self.people.get_all()
        assert len(list(people)) > 1
    def test_index_name(self):
        index_name = self.people.index.index_name
        assert index_name == "person"
    # Will this work for autmatic indices?
    #def test_index_put_and_get(self):
    # must test put/get together b/c self.james gets reset every time
    # self.people.index.put(self.james.eid,age=self.james.age)
    # james = self.people.index.get_unique("age",'34')
    # assert self.james == james
    #Person.remove(self.james.eid,dict(age="34"))
class RelationshipTestCase(BulbsTestCase):
    """Exercises RelationshipProxy edge creation between two Person vertices."""
    def setUp(self):
        # Separate vertex and edge index proxies; one proxy per element type.
        indicesV = self.vertex_index_proxy(self.index_class,self.client)
        indicesE = self.edge_index_proxy(self.index_class,self.client)
        self.people = NodeProxy(Person,self.client)
        self.people.index = indicesV.get_or_create("people")
        self.knows = RelationshipProxy(Knows,self.client)
        self.knows.index = indicesE.get_or_create("knows")
        self.james = self.people.create(name="James", age=34)
        self.julie = self.people.create(name="Julie", age=28)
    def test_properties(self):
        # Edge runs james -> julie with label "knows".
        self.relationship = self.knows.create(self.james,self.julie)
        assert self.relationship._label == "knows"
        assert self.relationship.outV()._id == self.james.eid
        assert self.relationship.inV()._id == self.julie.eid
def suite():
    """Build a TestSuite containing both model test cases."""
    # Local renamed (was 'suite') so it no longer shadows this function.
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(NodeTestCase))
    test_suite.addTest(unittest.makeSuite(RelationshipTestCase))
    return test_suite
if __name__ == '__main__':
    # Run via the custom suite() so both test cases execute.
    unittest.main(defaultTest='suite')
|
mit
|
servalproject/nikola
|
tests/test_rst_extensions.py
|
1
|
8282
|
# coding: utf8
# Author: Rodrigo Bistolfi
# Date: 03/2013
""" Test cases for Nikola ReST extensions.
A base class ReSTExtensionTestCase provides the tests basic behaivor.
Subclasses must override the "sample" class attribute with the ReST markup.
The sample will be rendered as HTML using publish_parts() by setUp().
One method is provided for checking the resulting HTML:
* assertHTMLContains(element, attributes=None, text=None)
The HTML is parsed with lxml for checking against the data you provide. The
method takes an element argument, a string representing the *name* of an HTML
tag, like "script" or "iframe". We will try to find this tag in the document
and perform the tests on it. You can pass a dictionary to the attributes kwarg
representing the name and the value of the tag attributes. The text kwarg takes
a string argument, which will be tested against the contents of the HTML
element.
One last caveat: you need to url unquote your urls if you are going to test
attributes like "src" or "link", since the HTML rendered by docutils will be
always unquoted.
"""
from __future__ import unicode_literals
try:
from io import StringIO
except ImportError:
from StringIO import StringIO # NOQA
from docutils.core import publish_parts
from lxml import html
import mock
import unittest
import nikola.plugins.compile_rest
from nikola.utils import _reload
from base import BaseTestCase
class ReSTExtensionTestCase(BaseTestCase):
    """ Base class for testing ReST extensions """
    # Subclasses must override with the ReST markup to render.
    sample = None
    def setUp(self):
        """ Parse cls.sample into a HTML document tree """
        super(ReSTExtensionTestCase, self).setUp()
        self.setHtmlFromRst(self.sample)
    def setHtmlFromRst(self, rst):
        """ Create html output from rst string """
        # Keep both the raw body (for error messages) and the lxml tree.
        self.html = publish_parts(rst, writer_name="html")["body"]
        self.html_doc = html.parse(StringIO(self.html))
    def assertHTMLContains(self, element, attributes=None, text=None):
        """ Test if HTML document includes an element with the given
        attributes and text content
        """
        try:
            # Only the FIRST matching tag is inspected.
            tag = next(self.html_doc.iter(element))
        except StopIteration:
            raise Exception("<{}> not in {}".format(element, self.html))
        else:
            if attributes:
                # The given attributes must be a subset of the tag's.
                arg_attrs = set(attributes.items())
                tag_attrs = set(tag.items())
                self.assertTrue(arg_attrs.issubset(tag_attrs))
            if text:
                self.assertIn(text, tag.text)
class ReSTExtensionTestCaseTestCase(ReSTExtensionTestCase):
    """ Simple test for our base class :) """
    sample = '.. raw:: html\n\n <iframe src="foo" height="bar">spam</iframe>'
    def test_test(self):
        # Positive case: matching tag, attribute subset and text found.
        self.assertHTMLContains("iframe", attributes={"src": "foo"},
                                text="spam")
        # Negative case: a missing tag must raise (see assertHTMLContains).
        self.assertRaises(Exception, self.assertHTMLContains, "eggs", {})
class GistTestCase(ReSTExtensionTestCase):
    """ Test GitHubGist.
    We will replace get_raw_gist() and get_raw_gist_with_filename()
    monkeypatching the GitHubGist class for avoiding network dependency
    """
    gist_type = nikola.plugins.compile_rest.GitHubGist
    sample = '.. gist:: fake_id\n :file: spam.py'
    sample_without_filename = '.. gist:: fake_id2'
    def setUp(self):
        """ Patch GitHubGist for avoiding network dependency """
        # NOTE(review): patches class attributes without restoring them in a
        # tearDown, and does not call super().setUp() — tests below call
        # setHtmlFromRst() explicitly instead.
        self.gist_type.get_raw_gist_with_filename = lambda *_: 'raw_gist_file'
        self.gist_type.get_raw_gist = lambda *_: "raw_gist"
        _reload(nikola.plugins.compile_rest)
    def test_gist(self):
        """ Test the gist directive with filename """
        self.setHtmlFromRst(self.sample)
        output = 'https://gist.github.com/fake_id.js?file=spam.py'
        self.assertHTMLContains("script", attributes={"src": output})
        self.assertHTMLContains("pre", text="raw_gist_file")
    def test_gist_without_filename(self):
        """ Test the gist directive without filename """
        self.setHtmlFromRst(self.sample_without_filename)
        output = 'https://gist.github.com/fake_id2.js'
        self.assertHTMLContains("script", attributes={"src": output})
        self.assertHTMLContains("pre", text="raw_gist")
class GistIntegrationTestCase(ReSTExtensionTestCase):
    """ Test requests integration. The gist plugin uses requests to fetch gist
    contents and place it in a noscript tag.
    """
    # NOTE(review): unlike GistTestCase, nothing is mocked here — rendering
    # this sample performs a real network request to GitHub, so the test
    # fails offline.
    sample = '.. gist:: 1812835'
    def test_gist_integration(self):
        """ Fetch contents of the gist from GH and render in a noscript tag """
        text = ('Be alone, that is the secret of invention: be alone, that is'
                ' when ideas are born. -- Nikola Tesla')
        self.assertHTMLContains('pre', text=text)
class SlidesTestCase(ReSTExtensionTestCase):
    """ Slides test case """
    sample = '.. slides:: IMG.jpg\n'
    def test_slides(self):
        """ Test the slides js generation and img tag creation """
        # Only the generated <img> tag is asserted; the js output is implicit.
        self.assertHTMLContains("img", attributes={"src": "IMG.jpg"})
class SoundCloudTestCase(ReSTExtensionTestCase):
    """ SoundCloud test case """
    sample = '.. soundcloud:: SID\n :height: 400\n :width: 600'
    def test_soundcloud(self):
        """ Test SoundCloud iframe tag generation """
        # Directive options must be forwarded verbatim to the iframe tag.
        self.assertHTMLContains("iframe",
                                attributes={"src": ("https://w.soundcloud.com"
                                                    "/player/?url=http://"
                                                    "api.soundcloud.com/"
                                                    "tracks/SID"),
                                            "height": "400", "width": "600"})
class VimeoTestCase(ReSTExtensionTestCase):
    """Vimeo test.
    Set Vimeo.request_size to False for avoiding querying the Vimeo api
    over the network
    """
    sample = '.. vimeo:: VID\n :height: 400\n :width: 600'
    def setUp(self):
        """ Disable query of the vimeo api over the wire """
        # Must be set before super().setUp() renders self.sample.
        nikola.plugins.compile_rest.Vimeo.request_size = False
        super(VimeoTestCase, self).setUp()
        _reload(nikola.plugins.compile_rest)
    def test_vimeo(self):
        """ Test Vimeo iframe tag generation """
        self.assertHTMLContains("iframe",
                                attributes={"src": ("http://player.vimeo.com/"
                                                    "video/VID"),
                                            "height": "400", "width": "600"})
class YoutubeTestCase(ReSTExtensionTestCase):
    """ Youtube test case """
    sample = '.. youtube:: YID\n :height: 400\n :width: 600'
    def test_youtube(self):
        """ Test Youtube iframe tag generation """
        # The plugin is expected to emit the embed URL with rel/hd/wmode flags.
        self.assertHTMLContains("iframe",
                                attributes={"src": ("http://www.youtube.com/"
                                                    "embed/YID?rel=0&hd=1&"
                                                    "wmode=transparent"),
                                            "height": "400", "width": "600"})
class ListingTestCase(ReSTExtensionTestCase):
    """ Listing test case and CodeBlock alias tests """
    sample = '.. listing:: nikola.py python'
    sample2 = '.. code-block:: python\n\n import antigravity'
    sample3 = '.. sourcecode:: python\n\n import antigravity'
    # Shared mock standing in for codecs_open so no real file is read.
    opener_mock = mock.mock_open(read_data="import antigravity\n")
    opener_mock.return_value.readlines.return_value = "import antigravity\n"
    def setUp(self):
        """ Inject a mock open function for not generating a test site """
        # NOTE(review): overrides setUp without calling super(), so
        # self.sample is not rendered here; each test renders explicitly.
        self.f = StringIO("import antigravity\n")
        #_reload(nikola.plugins.compile_rest)
    def test_listing(self):
        """ Test that we can render a file object contents without errors """
        with mock.patch("nikola.plugins.compile_rest.listing.codecs_open", self.opener_mock, create=True):
            self.setHtmlFromRst(self.sample)
    def test_codeblock_alias(self):
        """ Test CodeBlock aliases """
        with mock.patch("nikola.plugins.compile_rest.listing.codecs_open", self.opener_mock, create=True):
            self.setHtmlFromRst(self.sample2)
            self.setHtmlFromRst(self.sample3)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
mit
|
Zenfone2-Dev/kernel-FlareM
|
Documentation/networking/cxacru-cf.py
|
14668
|
1626
|
#!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Convert the packed little-endian u32 list on stdin into the
# "index(hex)=value(dec)" pairs expected by the sysfs adsl_config attribute.
# NOTE(review): Python 2 script — sys.stdin.read() returns a byte string,
# which struct.unpack requires; under Python 3 stdin would need binary mode.
i = 0
while True:
    buf = sys.stdin.read(4)
    if len(buf) == 0:
        # Clean EOF on a 4-byte boundary: done.
        break
    elif len(buf) != 4:
        # Truncated trailing value: terminate the output line, report, fail.
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
        sys.exit(1)
    if i > 0:
        # Space-separate pairs after the first.
        sys.stdout.write(" ")
    # Index printed in hex, value in decimal, e.g. "a=123".
    sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
    i += 1
sys.stdout.write("\n")
|
gpl-2.0
|
diegocortassa/TACTIC
|
src/context/client/pyasm/application/xsi/xsi_builder.py
|
2
|
1975
|
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['XSIBuilder']
from xsi_environment import XSIEnvironment
from xsi import XSI, XSINodeNaming
from pyasm.application.common import SessionBuilder
class XSIBuilder(SessionBuilder):
    '''builds a xsi session file'''
    def import_file(self, node_name, path, instantiation='import', use_namespace=True):
        '''Import a file into the XSI session and select the created node.

        node_name: desired node name; its instance part decides namespacing.
        path: file to import.
        instantiation: 'reference' to import as a reference, otherwise a
            plain import.
        use_namespace: accepted for interface compatibility; not consulted
            by this implementation.
        Returns the node the application reports as created.
        '''
        naming = XSINodeNaming(node_name)
        # if there is no instance in the name, import without a namespace
        if not naming.has_instance():
            if instantiation == 'reference':
                created_node = self.app.import_reference(path)
            else:
                created_node = self.app.import_file(path)
        else:
            # import file into namespace
            if instantiation == 'reference':
                created_node = self.app.import_reference(path,node_name)
            else:
                created_node = self.app.import_file(path,node_name)
        # FIXME: created node name is not always node_name
        # select newly created node
        self.app.select(created_node)
        return created_node
    def import_anim(self, node_name, path, created_node=""):
        '''Import animation data from path onto node_name.
        created_node is accepted for interface compatibility but unused.'''
        self.app.import_anim(path, node_name)
    def set_attr(self, node_name, node):
        '''set attribute for the current app'''
        # node is an XML-like element; read the attribute description from it.
        attr = node.getAttribute("attr")
        value = node.getAttribute("value")
        attr_type = node.getAttribute("type")
        file_range = node.getAttribute("file_range")
        extra_data = {"file_range": file_range}
        self.app.set_attr(node_name,attr,value,attr_type, extra_data)
|
epl-1.0
|
synopat/pyload
|
module/network/HTTPRequest.py
|
5
|
11514
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: RaNaN
"""
import cStringIO
import pycurl
from codecs import getincrementaldecoder, lookup, BOM_UTF8
from urllib import quote, urlencode
from httplib import responses
from logging import getLogger
from module.plugins.Plugin import Abort
def myquote(url):
    """Percent-encode a URL, leaving URL-structural characters intact.

    Python 2: unicode input is UTF-8 encoded first, since quote() operates
    on byte strings.
    """
    return quote(url.encode('utf_8') if isinstance(url, unicode) else url, safe="%/:=&?~#+!$,;'@()*[]")
def myurlencode(data):
    """urlencode a mapping (or key/value pair iterable), UTF-8 encoding any
    unicode keys or values first (Python 2 urlencode needs byte strings)."""
    data = dict(data)
    return urlencode(dict((x.encode('utf_8') if isinstance(x, unicode) else x, \
        y.encode('utf_8') if isinstance(y, unicode) else y ) for x, y in data.iteritems()))
# HTTP status codes treated as errors by verifyHeader()/checkHeader().
# 404 is deliberately excluded (see verifyHeader comment); 418 is also
# skipped — presumably intentional, verify before changing.
bad_headers = range(400, 404) + range(405, 418) + range(500, 506)
# Reason phrases for non-RFC status codes (IIS, CloudFlare, Pantheon...)
# used to build BadHeader messages when httplib.responses has no entry.
unofficial_responses = {
    440: "Login Timeout - The client's session has expired and must log in again.",
    449: 'Retry With - The server cannot honour the request because the user has not provided the required information',
    451: 'Redirect - Unsupported Redirect Header',
    509: 'Bandwidth Limit Exceeded',
    520: 'Unknown Error',
    521: 'Web Server Is Down - The origin server has refused the connection from CloudFlare',
    522: 'Connection Timed Out - CloudFlare could not negotiate a TCP handshake with the origin server',
    523: 'Origin Is Unreachable - CloudFlare could not reach the origin server',
    524: 'A Timeout Occurred - CloudFlare did not receive a timely HTTP response',
    525: 'SSL Handshake Failed - CloudFlare could not negotiate a SSL/TLS handshake with the origin server',
    526: 'Invalid SSL Certificate - CloudFlare could not validate the SSL/TLS certificate that the origin server presented',
    527: 'Railgun Error - CloudFlare requests timeout or failed after the WAN connection has been established',
    530: 'Site Is Frozen - Used by the Pantheon web platform to indicate a site that has been frozen due to inactivity'}
class BadHeader(Exception):
    """Raised when the server answered with an error status code.

    Carries the numeric status code plus the raw response header and body
    so callers can inspect the failed response.
    """
    def __init__(self, code, header="", content=""):
        status = int(code)
        # Prefer the official reason phrase, fall back to the unofficial
        # table, then to a generic label.
        reason = responses.get(
            status, unofficial_responses.get(status, "unknown error code"))
        Exception.__init__(
            self, "Bad server response: %s %s" % (code, reason))
        self.code = status
        self.header = header
        self.content = content
class HTTPRequest():
    """Wrapper around a single pycurl handle.

    Responsibilities: configure the curl handle (redirects, timeouts,
    proxy/interface options), keep cookies in sync with an external cookie
    jar, perform requests via load(), raise BadHeader on error status
    codes, and decode response bodies using the charset from the
    Content-Type header.
    """

    def __init__(self, cookies=None, options=None):
        self.c = pycurl.Curl()
        self.rep = None  # response body buffer, only alive during a request
        self.cj = cookies  #cookiejar
        self.lastURL = None
        self.lastEffectiveURL = None
        self.abort = False  # set externally to cancel a running transfer
        self.code = 0  # last http code
        self.header = ""  # accumulated raw response header
        self.headers = []  #temporary request header
        self.initHandle()
        self.setInterface(options)
        self.c.setopt(pycurl.WRITEFUNCTION, self.write)
        self.c.setopt(pycurl.HEADERFUNCTION, self.writeHeader)
        self.log = getLogger("log")

    def initHandle(self):
        """ sets common options to curl handle """
        self.c.setopt(pycurl.FOLLOWLOCATION, 1)
        self.c.setopt(pycurl.MAXREDIRS, 5)
        self.c.setopt(pycurl.CONNECTTIMEOUT, 30)
        self.c.setopt(pycurl.NOSIGNAL, 1)
        self.c.setopt(pycurl.NOPROGRESS, 1)
        if hasattr(pycurl, "AUTOREFERER"):
            self.c.setopt(pycurl.AUTOREFERER, 1)
        self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
        # abort transfers slower than LOW_SPEED_LIMIT bytes/s for
        # LOW_SPEED_TIME seconds (may be overridden by options["timeout"])
        self.c.setopt(pycurl.LOW_SPEED_TIME, 30)
        self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
        #self.c.setopt(pycurl.VERBOSE, 1)
        self.c.setopt(pycurl.USERAGENT,
                      "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55.0")
        if pycurl.version_info()[7]:
            # libcurl was built with zlib: accept compressed responses
            self.c.setopt(pycurl.ENCODING, "gzip, deflate")
        self.c.setopt(pycurl.HTTPHEADER, ["Accept: */*",
                                          "Accept-Language: en-US,en",
                                          "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
                                          "Connection: keep-alive",
                                          "Keep-Alive: 300",
                                          "Expect:"])

    def setInterface(self, options):
        """ applies interface, proxy, ip-version, auth and timeout options """
        interface, proxy, ipv6 = options["interface"], options["proxies"], options["ipv6"]
        if interface and interface.lower() != "none":
            self.c.setopt(pycurl.INTERFACE, str(interface))
        if proxy:
            if proxy["type"] == "socks4":
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
            elif proxy["type"] == "socks5":
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
            else:
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)
            self.c.setopt(pycurl.PROXY, str(proxy["address"]))
            self.c.setopt(pycurl.PROXYPORT, proxy["port"])
            if proxy["username"]:
                self.c.setopt(pycurl.PROXYUSERPWD, str("%s:%s" % (proxy["username"], proxy["password"])))
        if ipv6:
            self.c.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
        else:
            self.c.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        if "auth" in options:
            self.c.setopt(pycurl.USERPWD, str(options["auth"]))
        if "timeout" in options:
            self.c.setopt(pycurl.LOW_SPEED_TIME, options["timeout"])

    def addCookies(self):
        """ put cookies from curl handle to cj """
        if self.cj:
            self.cj.addCookies(self.c.getinfo(pycurl.INFO_COOKIELIST))

    def getCookies(self):
        """ add cookies from cj to curl handle """
        if self.cj:
            for c in self.cj.getCookies():
                self.c.setopt(pycurl.COOKIELIST, c)

    def clearCookies(self):
        """ wipe all cookies from the curl handle """
        self.c.setopt(pycurl.COOKIELIST, "")

    def setRequestContext(self, url, get, post, referer, cookies, multipart=False):
        """ sets everything needed for the request """
        self.rep = cStringIO.StringIO()
        url = myquote(url)
        if get:
            get = urlencode(get)
            url = "%s?%s" % (url, get)
        self.c.setopt(pycurl.URL, url)
        self.c.lastUrl = url
        if post:
            self.c.setopt(pycurl.POST, 1)
            if not multipart:
                if type(post) == unicode:
                    post = str(post) #unicode not allowed
                elif type(post) == str:
                    pass
                else:
                    post = myurlencode(post)
                self.c.setopt(pycurl.POSTFIELDS, post)
            else:
                # multipart/form-data: pycurl takes a list of (name, value)
                post = [(x, y.encode('utf8') if type(y) == unicode else y ) for x, y in post.iteritems()]
                self.c.setopt(pycurl.HTTPPOST, post)
        else:
            self.c.setopt(pycurl.POST, 0)
        if referer and self.lastURL:
            self.c.setopt(pycurl.REFERER, str(self.lastURL))
        if cookies:
            # enable in-memory cookie engine, then seed it from the jar
            self.c.setopt(pycurl.COOKIEFILE, "")
            self.c.setopt(pycurl.COOKIEJAR, "")
            self.getCookies()

    def load(self, url, get={}, post={}, referer=True, cookies=True, just_header=False, multipart=False, decode=False):
        """ load and returns a given page

        just_header: perform a HEAD-like request and return the raw header.
        decode: decode the body using the charset from the response header.
        Raises BadHeader for status codes in bad_headers.
        """
        self.setRequestContext(url, get, post, referer, cookies, multipart)
        self.header = ""
        self.c.setopt(pycurl.HTTPHEADER, self.headers)
        if just_header:
            # fetch header only, then restore normal behaviour
            self.c.setopt(pycurl.FOLLOWLOCATION, 0)
            self.c.setopt(pycurl.NOBODY, 1)
            self.c.perform()
            rep = self.header
            self.c.setopt(pycurl.FOLLOWLOCATION, 1)
            self.c.setopt(pycurl.NOBODY, 0)
        else:
            self.c.perform()
            rep = self.getResponse()
        self.c.setopt(pycurl.POSTFIELDS, "")
        self.lastEffectiveURL = self.c.getinfo(pycurl.EFFECTIVE_URL)
        self.addCookies()
        try:
            self.code = self.verifyHeader()
        finally:
            # always release the response buffer, even on BadHeader
            self.rep.close()
            self.rep = None
        if decode:
            rep = self.decodeResponse(rep)
        return rep

    def verifyHeader(self):
        """ raise an exceptions on bad headers """
        code = int(self.c.getinfo(pycurl.RESPONSE_CODE))
        if code in bad_headers:
            #404 will NOT raise an exception
            raise BadHeader(code, self.header, self.getResponse())
        return code

    def checkHeader(self):
        """ check if header indicates failure"""
        return int(self.c.getinfo(pycurl.RESPONSE_CODE)) not in bad_headers

    def getResponse(self):
        """ retrieve response from string io """
        if self.rep is None:
            return ""
        else:
            return self.rep.getvalue()

    def decodeResponse(self, rep):
        """ decode with correct encoding, relies on header """
        header = self.header.splitlines()
        encoding = "utf8"  # default encoding
        for line in header:
            line = line.lower().replace(" ", "")
            # only honour charset on textual content types
            if not line.startswith("content-type:") or \
                    ("text" not in line and "application" not in line):
                continue
            none, delimiter, charset = line.rpartition("charset=")
            if delimiter:
                charset = charset.split(";")
                if charset:
                    encoding = charset[0]
        try:
            #self.log.debug("Decoded %s" % encoding )
            # strip a UTF-8 BOM by switching to the sig codec
            if lookup(encoding).name == 'utf-8' and rep.startswith(BOM_UTF8):
                encoding = 'utf-8-sig'
            decoder = getincrementaldecoder(encoding)("replace")
            rep = decoder.decode(rep, True)
            #TODO: html_unescape as default
        except LookupError:
            # fixed typo: message previously read "foung"
            self.log.debug("No Decoder found for %s" % encoding)
        except Exception:
            self.log.debug("Error when decoding string from %s." % encoding)
        return rep

    def write(self, buf):
        """ writes response """
        # hard 1 MB cap: dump oversized responses to disk and bail out
        if self.rep.tell() > 1000000 or self.abort:
            rep = self.getResponse()
            if self.abort:
                raise Abort()
            f = open("response.dump", "wb")
            f.write(rep)
            f.close()
            raise Exception("Loaded Url exceeded limit")
        self.rep.write(buf)

    def writeHeader(self, buf):
        """ writes header """
        self.header += buf

    def putHeader(self, name, value):
        """ add a custom request header for subsequent loads """
        self.headers.append("%s: %s" % (name, value))

    def clearHeaders(self):
        """ drop all custom request headers """
        self.headers = []

    def close(self):
        """ cleanup, unusable after this """
        if self.rep:
            self.rep.close()
            del self.rep
        if hasattr(self, "cj"):
            del self.cj
        if hasattr(self, "c"):
            self.c.close()
            del self.c
# Ad-hoc smoke test (Python 2 print statement).
if __name__ == "__main__":
    url = "http://pyload.org"
    c = HTTPRequest()
    print c.load(url)
|
gpl-3.0
|
ran5515/DeepDecision
|
tensorflow/contrib/framework/python/framework/tensor_util.py
|
48
|
10771
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
# Public API of this module.
__all__ = [
    'assert_same_float_dtype',
    'assert_scalar',
    'assert_scalar_int',
    'convert_to_tensor_or_sparse_tensor',
    'is_tensor',
    'reduce_sum_n',
    'remove_squeezable_dimensions',
    'with_shape',
    'with_same_shape']
# Temporary for backwards compatibility: these names simply re-export the
# core implementations.
is_tensor = tensor_util.is_tensor
assert_same_float_dtype = check_ops.assert_same_float_dtype
assert_scalar = check_ops.assert_scalar
convert_to_tensor_or_sparse_tensor = (
    sparse_tensor.convert_to_tensor_or_sparse_tensor)
def reduce_sum_n(tensors, name=None):
  """Reduce tensors to a scalar sum.

  This reduces each tensor in `tensors` to a scalar via `tf.reduce_sum`, then
  adds them via `tf.add_n`.

  Args:
    tensors: List of tensors, all of the same numeric type.
    name: Tensor name, and scope for all other ops.

  Returns:
    Scalar `Tensor` holding the total sum.

  Raises:
    ValueError: if `tensors` is missing or empty.
  """
  if not tensors:
    raise ValueError('No tensors provided.')
  with ops.name_scope(name, 'reduce_sum_n', tensors) as name_scope:
    tensors = [
        math_ops.reduce_sum(t, name='%s/sum' % t.op.name) for t in tensors]
    # add_n is skipped for a single tensor; it would be a no-op wrapper.
    if len(tensors) == 1:
      return tensors[0]
    return math_ops.add_n(tensors, name=name_scope)
def remove_squeezable_dimensions(predictions, labels, name=None):
  """Squeeze last dim if ranks of `predictions` and `labels` differ by 1.

  This will use static shape if available. Otherwise, it will add graph
  operations, which could result in a performance hit.

  Args:
    predictions: Predicted values, a `Tensor` of arbitrary dimensions.
    labels: Label values, a `Tensor` whose dimensions match `predictions`.
    name: Name of the op.

  Returns:
    Tuple of `predictions` and `labels`, possibly with last dim squeezed.
  """
  with ops.name_scope(name, 'remove_squeezable_dimensions',
                      [predictions, labels]):
    predictions = ops.convert_to_tensor(predictions)
    labels = ops.convert_to_tensor(labels)
    predictions_shape = predictions.get_shape()
    predictions_rank = predictions_shape.ndims
    labels_shape = labels.get_shape()
    labels_rank = labels_shape.ndims
    if (labels_rank is not None) and (predictions_rank is not None):
      # Use static rank.
      rank_diff = predictions_rank - labels_rank
      if rank_diff == -1:
        labels = array_ops.squeeze(labels, [-1])
      elif rank_diff == 1:
        predictions = array_ops.squeeze(predictions, [-1])
      return predictions, labels
    # Use dynamic rank: the squeeze decision must be deferred to graph
    # execution via cond(), guarded by static last-dim compatibility checks.
    rank_diff = array_ops.rank(predictions) - array_ops.rank(labels)
    if (predictions_rank is None) or (
        predictions_shape.dims[-1].is_compatible_with(1)):
      predictions = control_flow_ops.cond(
          math_ops.equal(1, rank_diff),
          lambda: array_ops.squeeze(predictions, [-1]),
          lambda: predictions)
    if (labels_rank is None) or (
        labels_shape.dims[-1].is_compatible_with(1)):
      labels = control_flow_ops.cond(
          math_ops.equal(-1, rank_diff),
          lambda: array_ops.squeeze(labels, [-1]),
          lambda: labels)
    return predictions, labels
def _all_equal(tensor0, tensor1):
  """Returns a scalar bool tensor: True iff every element of the two
  tensors compares equal."""
  with ops.name_scope('all_equal', values=[tensor0, tensor1]) as scope:
    return math_ops.reduce_all(
        math_ops.equal(tensor0, tensor1, name='equal'), name=scope)
def _is_rank(expected_rank, actual_tensor):
  """Returns whether actual_tensor's rank is expected_rank.

  Args:
    expected_rank: Integer defining the expected rank, or tensor of same.
    actual_tensor: Tensor to test.
  Returns:
    New tensor.
  """
  with ops.name_scope('is_rank', values=[actual_tensor]) as scope:
    expected = ops.convert_to_tensor(expected_rank, name='expected')
    # Rank is computed dynamically so this works for partially-known shapes.
    actual = array_ops.rank(actual_tensor, name='actual')
    return math_ops.equal(expected, actual, name=scope)
def _is_shape(expected_shape, actual_tensor, actual_shape=None):
  """Returns whether actual_tensor's shape is expected_shape.

  Args:
    expected_shape: Integer list defining the expected shape, or tensor of same.
    actual_tensor: Tensor to test.
    actual_shape: Shape of actual_tensor, if we already have it.
  Returns:
    New tensor.
  """
  with ops.name_scope('is_shape', values=[actual_tensor]) as scope:
    # Rank must match first, otherwise the elementwise shape comparison
    # below would broadcast incorrectly.
    is_rank = _is_rank(array_ops.size(expected_shape), actual_tensor)
    if actual_shape is None:
      actual_shape = array_ops.shape(actual_tensor, name='actual')
    shape_equal = _all_equal(
        ops.convert_to_tensor(expected_shape, name='expected'),
        actual_shape)
    return math_ops.logical_and(is_rank, shape_equal, name=scope)
def _assert_shape_op(expected_shape, actual_tensor):
  """Asserts actual_tensor's shape is expected_shape.

  Args:
    expected_shape: List of integers defining the expected shape, or tensor of
        same.
    actual_tensor: Tensor to test.
  Returns:
    New assert tensor.
  """
  with ops.name_scope('assert_shape', values=[actual_tensor]) as scope:
    actual_shape = array_ops.shape(actual_tensor, name='actual')
    is_shape = _is_shape(expected_shape, actual_tensor, actual_shape)
    # The Assert op fires at run time with both shapes in the error message.
    return control_flow_ops.Assert(
        is_shape, [
            'Wrong shape for %s [expected] [actual].' % actual_tensor.name,
            expected_shape,
            actual_shape
        ], name=scope)
def with_same_shape(expected_tensor, tensor):
  """Assert tensors are the same shape, from the same graph.

  Args:
    expected_tensor: Tensor with expected shape.
    tensor: Tensor of actual values.
  Returns:
    `tensor`, possibly wrapped with shape-assertion ops (see `with_shape`).
  """
  with ops.name_scope('%s/' % tensor.op.name, values=[expected_tensor, tensor]):
    tensor_shape = expected_tensor.get_shape()
    # Use the static shape when fully known, otherwise fall back to a
    # dynamic shape tensor evaluated at run time.
    expected_shape = (
        tensor_shape.as_list() if tensor_shape.is_fully_defined()
        else array_ops.shape(expected_tensor, name='expected_shape'))
    return with_shape(expected_shape, tensor)
def with_shape(expected_shape, tensor):
  """Asserts tensor has expected shape.

  If tensor shape and expected_shape, are fully defined, assert they match.
  Otherwise, add assert op that will validate the shape when tensor is
  evaluated, and set shape on tensor.

  Args:
    expected_shape: Expected shape to assert, as a 1D array of ints, or tensor
        of same.
    tensor: Tensor whose shape we're validating.
  Returns:
    tensor, perhaps with a dependent assert operation.
  Raises:
    ValueError: if tensor has an invalid shape.
  """
  if isinstance(tensor, sparse_tensor.SparseTensor):
    raise ValueError('SparseTensor not supported.')
  # Shape type must be 1D int32.
  if tensor_util.is_tensor(expected_shape):
    if expected_shape.dtype.base_dtype != dtypes.int32:
      raise ValueError(
          'Invalid dtype %s for shape %s expected of tensor %s.' % (
              expected_shape.dtype, expected_shape, tensor.name))
  if isinstance(expected_shape, (list, tuple)):
    if not expected_shape:
      expected_shape = np.asarray([], dtype=np.int32)
    else:
      # Downcast int64 shape lists to int32; leave other dtypes untouched.
      np_expected_shape = np.asarray(expected_shape)
      expected_shape = (
          np.asarray(expected_shape, dtype=np.int32)
          if np_expected_shape.dtype == np.int64 else np_expected_shape)
  if isinstance(expected_shape, np.ndarray):
    if expected_shape.ndim > 1:
      raise ValueError(
          'Invalid rank %s for shape %s expected of tensor %s.' % (
              expected_shape.ndim, expected_shape, tensor.name))
    if expected_shape.dtype != np.int32:
      raise ValueError(
          'Invalid dtype %s for shape %s expected of tensor %s.' % (
              expected_shape.dtype, expected_shape, tensor.name))
  actual_shape = tensor.get_shape()
  # Dynamic path: shape not fully known statically, or expected shape is
  # itself a tensor — validation must happen at run time.
  if (not actual_shape.is_fully_defined()
      or tensor_util.is_tensor(expected_shape)):
    with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
      if (not tensor_util.is_tensor(expected_shape)
          and (len(expected_shape) < 1)):
        # TODO(irving): Remove scalar special case
        return array_ops.reshape(tensor, [])
      with ops.control_dependencies([_assert_shape_op(expected_shape, tensor)]):
        result = array_ops.identity(tensor)
      if not tensor_util.is_tensor(expected_shape):
        result.set_shape(expected_shape)
      return result
  # Static path: both shapes known — compare immediately.
  if (not tensor_util.is_tensor(expected_shape) and
      not actual_shape.is_compatible_with(expected_shape)):
    if (len(expected_shape) < 1) and actual_shape.is_compatible_with([1]):
      # TODO(irving): Remove scalar special case.
      with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
        return array_ops.reshape(tensor, [])
    raise ValueError('Invalid shape for tensor %s, expected %s, got %s.' % (
        tensor.name, expected_shape, actual_shape))
  return tensor
def assert_scalar_int(tensor, name=None):
  """Assert `tensor` is 0-D, of type `tf.int32` or `tf.int64`.

  Args:
    tensor: `Tensor` to test.
    name: Name of the op and of the new `Tensor` if one is created.
  Returns:
    `tensor`, for chaining.
  Raises:
    ValueError: if `tensor` is not 0-D, of integer type.
  """
  with ops.name_scope(name, 'assert_scalar_int', [tensor]) as name_scope:
    tensor = ops.convert_to_tensor(tensor)
    data_type = tensor.dtype
    # Integer check is done here; the 0-D check is delegated below.
    if not data_type.base_dtype.is_integer:
      raise ValueError('Expected integer type for %s, received type: %s.'
                       % (tensor.name, data_type))
    return check_ops.assert_scalar(tensor, name=name_scope)
|
apache-2.0
|
datapythonista/pandas
|
pandas/core/window/indexers.py
|
2
|
12001
|
"""Indexer objects for computing start/end window bounds for rolling operations"""
from datetime import timedelta
from typing import (
Dict,
Optional,
Tuple,
Type,
)
import numpy as np
from pandas._libs.window.indexers import calculate_variable_window_bounds
from pandas.util._decorators import Appender
from pandas.core.dtypes.common import ensure_platform_int
from pandas.tseries.offsets import Nano
get_window_bounds_doc = """
Computes the bounds of a window.
Parameters
----------
num_values : int, default 0
number of values that will be aggregated over
window_size : int, default 0
the number of rows in a window
min_periods : int, default None
min_periods passed from the top level rolling API
center : bool, default None
center passed from the top level rolling API
closed : str, default None
closed passed from the top level rolling API
win_type : str, default None
win_type passed from the top level rolling API
Returns
-------
A tuple of ndarray[int64]s, indicating the boundaries of each
window
"""
class BaseIndexer:
    """Base class for window bounds calculations."""

    def __init__(
        self, index_array: Optional[np.ndarray] = None, window_size: int = 0, **kwargs
    ):
        """
        Parameters
        ----------
        index_array : np.ndarray or None
            values the window bounds may be derived from
        window_size : int
            nominal window length in rows
        **kwargs :
            keyword arguments that will be available when get_window_bounds is called
        """
        self.index_array = index_array
        self.window_size = window_size
        # Set user defined kwargs as attributes that can be used in get_window_bounds
        for key, value in kwargs.items():
            setattr(self, key, value)

    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Subclasses must implement the actual bounds computation.
        raise NotImplementedError
class FixedWindowIndexer(BaseIndexer):
    """Creates window boundaries that are of fixed length."""

    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Centered windows shift the right edge forward by half a window.
        half_span = (self.window_size - 1) // 2 if center else 0
        end = np.arange(1 + half_span, num_values + 1 + half_span, dtype="int64")
        start = end - self.window_size
        # 'closed' tweaks which endpoints belong to the window.
        if closed in ["left", "both"]:
            start -= 1
        if closed in ["left", "neither"]:
            end -= 1
        # Clamp both edges into the valid row range.
        return np.clip(start, 0, num_values), np.clip(end, 0, num_values)
class VariableWindowIndexer(BaseIndexer):
    """Creates window boundaries that are of variable length, namely for time series."""

    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Delegates entirely to the cython implementation; self.index_array
        # supplies the (time-like) values the variable bounds derive from.
        # error: Argument 4 to "calculate_variable_window_bounds" has incompatible
        # type "Optional[bool]"; expected "bool"
        # error: Argument 6 to "calculate_variable_window_bounds" has incompatible
        # type "Optional[ndarray]"; expected "ndarray"
        return calculate_variable_window_bounds(
            num_values,
            self.window_size,
            min_periods,
            center,  # type: ignore[arg-type]
            closed,
            self.index_array,  # type: ignore[arg-type]
        )
class VariableOffsetWindowIndexer(BaseIndexer):
    """Calculate window boundaries based on a non-fixed offset such as a BusinessDay"""

    def __init__(
        self,
        index_array: Optional[np.ndarray] = None,
        window_size: int = 0,
        index=None,
        offset=None,
        **kwargs,
    ):
        # index: the DatetimeIndex (or similar) the bounds are computed over.
        # offset: a DateOffset-like object defining the window span.
        super().__init__(index_array, window_size, **kwargs)
        self.index = index
        self.offset = offset

    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # if windows is variable, default is 'right', otherwise default is 'both'
        if closed is None:
            closed = "right" if self.index is not None else "both"
        right_closed = closed in ["right", "both"]
        left_closed = closed in ["left", "both"]
        # Detect a monotonically decreasing index by comparing the endpoints.
        # NOTE(review): num_values == 0 would index self.index[-1] here —
        # presumably callers guarantee at least one value; verify.
        if self.index[num_values - 1] < self.index[0]:
            index_growth_sign = -1
        else:
            index_growth_sign = 1
        start = np.empty(num_values, dtype="int64")
        start.fill(-1)
        end = np.empty(num_values, dtype="int64")
        end.fill(-1)
        start[0] = 0
        # right endpoint is closed
        if right_closed:
            end[0] = 1
        # right endpoint is open
        else:
            end[0] = 0
        # start is start of slice interval (including)
        # end is end of slice interval (not including)
        for i in range(1, num_values):
            end_bound = self.index[i]
            start_bound = self.index[i] - index_growth_sign * self.offset
            # left endpoint is closed
            if left_closed:
                start_bound -= Nano(1)
            # advance the start bound until we are
            # within the constraint
            start[i] = i
            for j in range(start[i - 1], i):
                if (self.index[j] - start_bound) * index_growth_sign > timedelta(0):
                    start[i] = j
                    break
            # end bound is previous end
            # or current index
            if (self.index[end[i - 1]] - end_bound) * index_growth_sign <= timedelta(0):
                end[i] = i + 1
            else:
                end[i] = end[i - 1]
            # right endpoint is open
            if not right_closed:
                end[i] -= 1
        return start, end
class ExpandingIndexer(BaseIndexer):
    """Calculate expanding window bounds, mimicking df.expanding()"""
    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Every window starts at row 0 and ends just past the current row,
        # i.e. start = [0, 0, ...] and end = [1, 2, ..., num_values].
        starts = np.zeros(num_values, dtype=np.int64)
        ends = np.arange(num_values, dtype=np.int64) + 1
        return starts, ends
class FixedForwardWindowIndexer(BaseIndexer):
    """
    Creates window boundaries for fixed-length windows that include the
    current row.
    Examples
    --------
    >>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
    >>> df
         B
    0  0.0
    1  1.0
    2  2.0
    3  NaN
    4  4.0
    >>> indexer = pd.api.indexers.FixedForwardWindowIndexer(window_size=2)
    >>> df.rolling(window=indexer, min_periods=1).sum()
         B
    0  1.0
    1  3.0
    2  2.0
    3  4.0
    4  4.0
    """
    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Forward-looking windows anchor on the current row, so centering and
        # a custom closed edge are not meaningful.
        if center:
            raise ValueError("Forward-looking windows can't have center=True")
        if closed is not None:
            raise ValueError(
                "Forward-looking windows don't support setting the closed argument"
            )
        start = np.arange(num_values, dtype="int64")
        # end = start + window_size, capped at num_values.  np.clip replaces
        # the old ``start[:-self.window_size]`` slice-and-concatenate, which
        # broke for window_size == 0 (``-0 == 0`` made the slice empty, so
        # start and end had mismatched lengths).
        end = np.clip(start + self.window_size, 0, num_values)
        return start, end
class GroupbyIndexer(BaseIndexer):
    """Calculate bounds to compute groupby rolling, mimicking df.groupby().rolling()"""
    def __init__(
        self,
        index_array: Optional[np.ndarray] = None,
        window_size: int = 0,
        groupby_indicies: Optional[Dict] = None,
        window_indexer: Type[BaseIndexer] = BaseIndexer,
        indexer_kwargs: Optional[Dict] = None,
        **kwargs,
    ):
        """
        Parameters
        ----------
        index_array : np.ndarray or None
            np.ndarray of the index of the original object that we are performing
            a chained groupby operation over. This index has been pre-sorted relative to
            the groups
        window_size : int
            window size during the windowing operation
        groupby_indicies : dict or None
            dict of {group label: [positional index of rows belonging to the group]}
        window_indexer : BaseIndexer
            BaseIndexer class determining the start and end bounds of each group
        indexer_kwargs : dict or None
            Custom kwargs to be passed to window_indexer
        **kwargs :
            keyword arguments that will be available when get_window_bounds is called
        """
        self.groupby_indicies = groupby_indicies or {}
        self.window_indexer = window_indexer
        self.indexer_kwargs = indexer_kwargs or {}
        # ``window_size`` from indexer_kwargs wins over the positional one;
        # it is popped so the per-group indexer below is not passed it twice.
        super().__init__(
            index_array, self.indexer_kwargs.pop("window_size", window_size), **kwargs
        )
    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # 1) For each group, get the indices that belong to the group
        # 2) Use the indices to calculate the start & end bounds of the window
        # 3) Append the window bounds in group order
        start_arrays = []
        end_arrays = []
        window_indicies_start = 0
        for key, indices in self.groupby_indicies.items():
            index_array: np.ndarray | None
            if self.index_array is not None:
                index_array = self.index_array.take(ensure_platform_int(indices))
            else:
                index_array = self.index_array
            # Compute bounds for this group in isolation with a fresh
            # per-group indexer instance.
            indexer = self.window_indexer(
                index_array=index_array,
                window_size=self.window_size,
                **self.indexer_kwargs,
            )
            start, end = indexer.get_window_bounds(
                len(indices), min_periods, center, closed
            )
            start = start.astype(np.int64)
            end = end.astype(np.int64)
            # Cannot use groupby_indicies as they might not be monotonic with the object
            # we're rolling over
            window_indicies = np.arange(
                window_indicies_start, window_indicies_start + len(indices)
            )
            window_indicies_start += len(indices)
            # Extend as we'll be slicing window like [start, end)
            window_indicies = np.append(
                window_indicies, [window_indicies[-1] + 1]
            ).astype(np.int64)
            # Translate group-local bounds into global positional bounds.
            start_arrays.append(window_indicies.take(ensure_platform_int(start)))
            end_arrays.append(window_indicies.take(ensure_platform_int(end)))
        start = np.concatenate(start_arrays)
        end = np.concatenate(end_arrays)
        return start, end
class ExponentialMovingWindowIndexer(BaseIndexer):
    """Calculate ewm window bounds (the entire window)"""
    @Appender(get_window_bounds_doc)
    def get_window_bounds(
        self,
        num_values: int = 0,
        min_periods: Optional[int] = None,
        center: Optional[bool] = None,
        closed: Optional[str] = None,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # A single window spanning all rows: [0, num_values).
        start = np.zeros(1, dtype=np.int64)
        end = np.full(1, num_values, dtype=np.int64)
        return start, end
|
bsd-3-clause
|
jotes/pontoon
|
pontoon/base/management/commands/calculate_stats.py
|
1
|
1652
|
import logging
from django.core.management.base import BaseCommand
from django.db.models import Count
from pontoon.base.models import (
Project,
TranslatedResource,
)
log = logging.getLogger(__name__)
class Command(BaseCommand):
    help = """
    Re-calculate statistics for all translated resources and corresponding
    objects.
    Note: while unlikely, it's possible that running this command may
    result in IntegrityErrors. That happens if at the same time when
    calculate_stats() is being executed for a TranslatedResource instance,
    translation is added, accepted or rejected for that very instance.
    To be completely sure errors don't occur, command needs to run in a
    maintenance mode.
    See bug 1470337 for more details.
    """
    def handle(self, *args, **options):
        # Enabled projects first (disabled=False sorts before True), then by
        # ascending resource count so small projects finish early.
        projects = Project.objects.annotate(resource_count=Count("resources")).order_by(
            "disabled", "resource_count"
        )
        total = len(projects)
        for position, project in enumerate(projects, start=1):
            log.info(
                u'Calculating stats for project "{project}" ({index}/{total})'.format(
                    index=position, total=total, project=project.name,
                )
            )
            # Recompute stats for every translated resource of the project.
            for translated_resource in TranslatedResource.objects.filter(
                resource__project=project
            ):
                translated_resource.calculate_stats()
        log.info("Calculating stats complete for all projects.")
|
bsd-3-clause
|
PetePriority/home-assistant
|
homeassistant/components/cover/group.py
|
7
|
10162
|
"""
This platform allows several cover to be grouped into one cover.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.group/
"""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.cover import (
DOMAIN, PLATFORM_SCHEMA, CoverDevice, ATTR_POSITION,
ATTR_CURRENT_POSITION, ATTR_TILT_POSITION, ATTR_CURRENT_TILT_POSITION,
SUPPORT_OPEN, SUPPORT_CLOSE, SUPPORT_STOP, SUPPORT_SET_POSITION,
SUPPORT_OPEN_TILT, SUPPORT_CLOSE_TILT,
SUPPORT_STOP_TILT, SUPPORT_SET_TILT_POSITION,
SERVICE_OPEN_COVER, SERVICE_CLOSE_COVER, SERVICE_SET_COVER_POSITION,
SERVICE_STOP_COVER, SERVICE_OPEN_COVER_TILT, SERVICE_CLOSE_COVER_TILT,
SERVICE_STOP_COVER_TILT, SERVICE_SET_COVER_TILT_POSITION)
from homeassistant.const import (
ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES, CONF_NAME, STATE_CLOSED)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_state_change
_LOGGER = logging.getLogger(__name__)
# Keys used to bucket member entities by the feature set they support.
KEY_OPEN_CLOSE = 'open_close'
KEY_STOP = 'stop'
KEY_POSITION = 'position'
DEFAULT_NAME = 'Cover Group'
# Platform config: a required list of cover entity ids plus an optional name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN),
})
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the Group Cover platform."""
    group = CoverGroup(config[CONF_NAME], config[CONF_ENTITIES])
    async_add_entities([group])
class CoverGroup(CoverDevice):
    """Representation of a CoverGroup."""
    def __init__(self, name, entities):
        """Initialize a CoverGroup entity."""
        self._name = name
        self._is_closed = False
        self._cover_position = 100
        self._tilt_position = None
        self._supported_features = 0
        self._assumed_state = True
        self._entities = entities
        # Member entity_ids bucketed by the cover feature they support;
        # kept in sync by update_supported_features() on every state change.
        self._covers = {KEY_OPEN_CLOSE: set(), KEY_STOP: set(),
                        KEY_POSITION: set()}
        self._tilts = {KEY_OPEN_CLOSE: set(), KEY_STOP: set(),
                       KEY_POSITION: set()}
    @callback
    def update_supported_features(self, entity_id, old_state, new_state,
                                  update_state=True):
        """Update dictionaries with supported features."""
        # A vanished entity (new_state is None) is dropped from every bucket.
        if not new_state:
            for values in self._covers.values():
                values.discard(entity_id)
            for values in self._tilts.values():
                values.discard(entity_id)
            if update_state:
                self.async_schedule_update_ha_state(True)
            return
        features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        # Re-bucket the entity: add when the feature bit is set, otherwise
        # discard in case an earlier state advertised the feature.
        if features & (SUPPORT_OPEN | SUPPORT_CLOSE):
            self._covers[KEY_OPEN_CLOSE].add(entity_id)
        else:
            self._covers[KEY_OPEN_CLOSE].discard(entity_id)
        if features & (SUPPORT_STOP):
            self._covers[KEY_STOP].add(entity_id)
        else:
            self._covers[KEY_STOP].discard(entity_id)
        if features & (SUPPORT_SET_POSITION):
            self._covers[KEY_POSITION].add(entity_id)
        else:
            self._covers[KEY_POSITION].discard(entity_id)
        if features & (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT):
            self._tilts[KEY_OPEN_CLOSE].add(entity_id)
        else:
            self._tilts[KEY_OPEN_CLOSE].discard(entity_id)
        if features & (SUPPORT_STOP_TILT):
            self._tilts[KEY_STOP].add(entity_id)
        else:
            self._tilts[KEY_STOP].discard(entity_id)
        if features & (SUPPORT_SET_TILT_POSITION):
            self._tilts[KEY_POSITION].add(entity_id)
        else:
            self._tilts[KEY_POSITION].discard(entity_id)
        if update_state:
            self.async_schedule_update_ha_state(True)
    async def async_added_to_hass(self):
        """Register listeners."""
        # Seed the feature buckets from current states, then subscribe to
        # future state changes of all member entities.
        for entity_id in self._entities:
            new_state = self.hass.states.get(entity_id)
            self.update_supported_features(entity_id, None, new_state,
                                           update_state=False)
        async_track_state_change(self.hass, self._entities,
                                 self.update_supported_features)
        await self.async_update()
    @property
    def name(self):
        """Return the name of the cover."""
        return self._name
    @property
    def assumed_state(self):
        """Enable buttons even if at end position."""
        return self._assumed_state
    @property
    def should_poll(self):
        """Disable polling for cover group."""
        return False
    @property
    def supported_features(self):
        """Flag supported features for the cover."""
        return self._supported_features
    @property
    def is_closed(self):
        """Return if all covers in group are closed."""
        return self._is_closed
    @property
    def current_cover_position(self):
        """Return current position for all covers."""
        return self._cover_position
    @property
    def current_cover_tilt_position(self):
        """Return current tilt position for all covers."""
        return self._tilt_position
    async def async_open_cover(self, **kwargs):
        """Move the covers up."""
        data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, data, blocking=True)
    async def async_close_cover(self, **kwargs):
        """Move the covers down."""
        data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True)
    async def async_stop_cover(self, **kwargs):
        """Fire the stop action."""
        data = {ATTR_ENTITY_ID: self._covers[KEY_STOP]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_STOP_COVER, data, blocking=True)
    async def async_set_cover_position(self, **kwargs):
        """Set covers position."""
        data = {ATTR_ENTITY_ID: self._covers[KEY_POSITION],
                ATTR_POSITION: kwargs[ATTR_POSITION]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_SET_COVER_POSITION, data, blocking=True)
    async def async_open_cover_tilt(self, **kwargs):
        """Tilt covers open."""
        data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True)
    async def async_close_cover_tilt(self, **kwargs):
        """Tilt covers closed."""
        data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER_TILT, data, blocking=True)
    async def async_stop_cover_tilt(self, **kwargs):
        """Stop cover tilt."""
        data = {ATTR_ENTITY_ID: self._tilts[KEY_STOP]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True)
    async def async_set_cover_tilt_position(self, **kwargs):
        """Set tilt position."""
        data = {ATTR_ENTITY_ID: self._tilts[KEY_POSITION],
                ATTR_TILT_POSITION: kwargs[ATTR_TILT_POSITION]}
        await self.hass.services.async_call(
            DOMAIN, SERVICE_SET_COVER_TILT_POSITION, data, blocking=True)
    async def async_update(self):
        """Update state and attributes."""
        self._assumed_state = False
        # Group counts as closed only if every member with a known state
        # reports STATE_CLOSED.
        self._is_closed = True
        for entity_id in self._entities:
            state = self.hass.states.get(entity_id)
            if not state:
                continue
            if state.state != STATE_CLOSED:
                self._is_closed = False
                break
        self._cover_position = None
        if self._covers[KEY_POSITION]:
            # position == -1 is a sentinel for "no member reported yet".
            position = -1
            self._cover_position = 0 if self.is_closed else 100
            for entity_id in self._covers[KEY_POSITION]:
                state = self.hass.states.get(entity_id)
                pos = state.attributes.get(ATTR_CURRENT_POSITION)
                if position == -1:
                    position = pos
                elif position != pos:
                    # Members disagree: fall back to the 0/100 estimate and
                    # mark the group state as assumed.
                    self._assumed_state = True
                    break
            else:
                if position != -1:
                    self._cover_position = position
        self._tilt_position = None
        if self._tilts[KEY_POSITION]:
            position = -1
            self._tilt_position = 100
            for entity_id in self._tilts[KEY_POSITION]:
                state = self.hass.states.get(entity_id)
                pos = state.attributes.get(ATTR_CURRENT_TILT_POSITION)
                if position == -1:
                    position = pos
                elif position != pos:
                    self._assumed_state = True
                    break
            else:
                if position != -1:
                    self._tilt_position = position
        # Advertise exactly the features at least one member supports.
        supported_features = 0
        supported_features |= SUPPORT_OPEN | SUPPORT_CLOSE \
            if self._covers[KEY_OPEN_CLOSE] else 0
        supported_features |= SUPPORT_STOP \
            if self._covers[KEY_STOP] else 0
        supported_features |= SUPPORT_SET_POSITION \
            if self._covers[KEY_POSITION] else 0
        supported_features |= SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT \
            if self._tilts[KEY_OPEN_CLOSE] else 0
        supported_features |= SUPPORT_STOP_TILT \
            if self._tilts[KEY_STOP] else 0
        supported_features |= SUPPORT_SET_TILT_POSITION \
            if self._tilts[KEY_POSITION] else 0
        self._supported_features = supported_features
        # Any member with an assumed state makes the whole group assumed.
        if not self._assumed_state:
            for entity_id in self._entities:
                state = self.hass.states.get(entity_id)
                if state and state.attributes.get(ATTR_ASSUMED_STATE):
                    self._assumed_state = True
                    break
|
apache-2.0
|
pirata-cat/mieli
|
identity/registration/backend/urls.py
|
1
|
3742
|
from identity.registration.backend.views import ActivationView
from identity.registration.backend.views import RegistrationView
from identity.registration.backend.views import ApproveView
from identity.registration.backend.forms import LoginForm
from django.contrib.auth import views as auth_views
from django.views.generic.base import TemplateView
from django.core.urlresolvers import reverse_lazy
from django.conf.urls import include
from django.conf.urls import url
# URL routes for the registration backend: activation, registration,
# auth (login/logout), password change/reset, and account approval.
urlpatterns = [
    url(r'^activate/complete/$',
        TemplateView.as_view(template_name='registration/activation_complete.html'),
        name='registration_activation_complete'),
    # Activation keys get matched by \w+ instead of the more specific
    # [a-fA-F0-9]{40} because a bad activation key should still get to the view;
    # that way it can return a sensible "invalid key" message instead of a
    # confusing 404.
    url(r'^activate/(?P<activation_key>\w+)/$',
        ActivationView.as_view(),
        name='registration_activate'),
    url(r'^register/$',
        RegistrationView.as_view(),
        name='registration_register'),
    url(r'^register/complete/$',
        TemplateView.as_view(template_name='registration/registration_complete.html'),
        name='registration_complete'),
    url(r'^register/closed/$',
        TemplateView.as_view(template_name='registration/registration_closed.html'),
        name='registration_disallowed'),
    # Login/logout use Django's function-based auth views with custom templates.
    url(r'^login/$',
        auth_views.login,
        {'template_name': 'registration/login.html', 'authentication_form': LoginForm},
        name='auth_login'),
    url(r'^logout/$',
        auth_views.logout,
        {'template_name': 'registration/logout.html'},
        name='auth_logout'),
    url(r'^password/change/$',
        auth_views.password_change,
        {'post_change_redirect': '/'},
        name='auth_password_change'),
    url(r'^password/change/done/$',
        auth_views.password_change_done,
        name='auth_password_change_done'),
    url(r'^password/reset/$',
        auth_views.password_reset,
        {'post_reset_redirect': reverse_lazy('auth_password_reset_done')},
        name='auth_password_reset'),
    url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$',
        auth_views.password_reset_confirm,
        {'post_reset_redirect': reverse_lazy('auth_password_reset_complete')},
        name='auth_password_reset_confirm'),
    url(r'^password/reset/complete/$',
        auth_views.password_reset_complete,
        name='auth_password_reset_complete'),
    url(r'^password/reset/done/$',
        auth_views.password_reset_done,
        name='auth_password_reset_done'),
    # Admin approval of a freshly registered user by primary key.
    url(r'^approve/(?P<user_pk>[0-9]+)/$',
        ApproveView.as_view(),
        name='auth_approve'),
]
|
agpl-3.0
|
sanmiguel/home-assistant
|
homeassistant/helpers/state.py
|
4
|
1685
|
"""
homeassistant.helpers.state
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Helpers that help with state related things.
"""
import logging
from homeassistant import State
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_ON, STATE_OFF, SERVICE_TURN_ON, SERVICE_TURN_OFF, ATTR_ENTITY_ID)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods, attribute-defined-outside-init
class TrackStates(object):
    """
    Records the time when the with-block is entered. Will add all states
    that have changed since the start time to the return list when with-block
    is exited.
    """
    def __init__(self, hass):
        self.hass = hass
        # Filled in on exit; handed to the caller by __enter__ so the caller
        # holds a reference to the (initially empty) list.
        self.states = []
    def __enter__(self):
        self.now = dt_util.utcnow()
        return self.states
    def __exit__(self, exc_type, exc_value, traceback):
        # Populate the list the caller already holds (extend, not rebind).
        self.states.extend(self.hass.states.get_since(self.now))
def reproduce_state(hass, states, blocking=False):
    """ Takes in a state and will try to have the entity reproduce it. """
    # Accept either a single State or an iterable of them.
    state_list = [states] if isinstance(states, State) else states
    service_map = {STATE_ON: SERVICE_TURN_ON, STATE_OFF: SERVICE_TURN_OFF}
    for state in state_list:
        # Skip entities that are currently unknown to the state machine.
        if hass.states.get(state.entity_id) is None:
            continue
        service = service_map.get(state.state)
        if service is None:
            _LOGGER.warning("Unable to reproduce state for %s", state)
            continue
        service_data = dict(state.attributes)
        service_data[ATTR_ENTITY_ID] = state.entity_id
        hass.services.call(state.domain, service, service_data, blocking)
|
mit
|
jallohm/django
|
django/contrib/admin/actions.py
|
395
|
3316
|
"""
Built-in, globally-available admin actions.
"""
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.utils import get_deleted_objects, model_ngettext
from django.core.exceptions import PermissionDenied
from django.db import router
from django.template.response import TemplateResponse
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _, ugettext_lazy
def delete_selected(modeladmin, request, queryset):
    """
    Default action which deletes the selected objects.
    This action first displays a confirmation page whichs shows all the
    deleteable objects, or, if the user has no permission one of the related
    childs (foreignkeys), a "permission denied" message.
    Next, it deletes all selected objects and redirects back to the change list.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    # Check that the user has delete permission for the actual model
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied
    using = router.db_for_write(modeladmin.model)
    # Populate deletable_objects, a data structure of all related objects that
    # will also be deleted.
    deletable_objects, model_count, perms_needed, protected = get_deleted_objects(
        queryset, opts, request.user, modeladmin.admin_site, using)
    # The user has already confirmed the deletion.
    # Do the deletion and return a None to display the change list view again.
    if request.POST.get('post'):
        # perms_needed is re-checked here in case permissions changed between
        # the confirmation page render and the POST.
        if perms_needed:
            raise PermissionDenied
        n = queryset.count()
        if n:
            # Log each deletion individually before the bulk delete.
            for obj in queryset:
                obj_display = force_text(obj)
                modeladmin.log_deletion(request, obj, obj_display)
            queryset.delete()
            modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
                "count": n, "items": model_ngettext(modeladmin.opts, n)
            }, messages.SUCCESS)
        # Return None to display the change list page again.
        return None
    # Pick singular/plural object name for the confirmation page title.
    if len(queryset) == 1:
        objects_name = force_text(opts.verbose_name)
    else:
        objects_name = force_text(opts.verbose_name_plural)
    if perms_needed or protected:
        title = _("Cannot delete %(name)s") % {"name": objects_name}
    else:
        title = _("Are you sure?")
    context = dict(
        modeladmin.admin_site.each_context(request),
        title=title,
        objects_name=objects_name,
        deletable_objects=[deletable_objects],
        model_count=dict(model_count).items(),
        queryset=queryset,
        perms_lacking=perms_needed,
        protected=protected,
        opts=opts,
        action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
    )
    request.current_app = modeladmin.admin_site.name
    # Display the confirmation page
    return TemplateResponse(request, modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.model_name),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ], context)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
bsd-3-clause
|
2014c2g1/c2g1
|
w2/static/Brython2.0.0-20140209-164925/Lib/posix.py
|
96
|
20839
|
"""This module provides access to operating system functionality that is
standardized by the C Standard and the POSIX standard (a thinly
disguised Unix interface). Refer to the library manual and
corresponding Unix manual entries for more information on calls."""
import datetime
# Windows-flavoured os/posix constants mirrored for the Brython runtime:
# access() modes (F_OK/R_OK/W_OK/X_OK), open() flags (O_*), and spawn
# modes (P_*).  Values match CPython on Windows.
F_OK = 0
O_APPEND = 8
O_BINARY = 32768
O_CREAT = 256
O_EXCL = 1024
O_NOINHERIT = 128
O_RANDOM = 16
O_RDONLY = 0
O_RDWR = 2
O_SEQUENTIAL = 32
O_SHORT_LIVED = 4096
O_TEMPORARY = 64
O_TEXT = 16384
O_TRUNC = 512
O_WRONLY = 1
P_DETACH = 4
P_NOWAIT = 1
P_NOWAITO = 3
P_OVERLAY = 2
P_WAIT = 0
R_OK = 4
TMP_MAX = 32767
W_OK = 2
X_OK = 1
# In-browser stand-ins: these posix entry points exist so imports succeed,
# but their bodies are no-ops (no real OS underneath).
class __loader__:
    pass
def _exit(*args,**kw):
    """_exit(status)
    Exit to the system with specified status, without normal exit processing."""
    pass
def _getdiskusage(*args,**kw):
    """_getdiskusage(path) -> (total, free)
    Return disk usage statistics about the given path as (total, free) tuple."""
    pass
def _getfileinformation(*args,**kw):
    # No-op stub (undocumented CPython/Windows internal).
    pass
def _getfinalpathname(*args,**kw):
    pass
def _getfullpathname(*args,**kw):
    pass
_have_functions = ['MS_WINDOWS']
def _isdir(*args,**kw):
    """Return true if the pathname refers to an existing directory."""
    pass
# More no-op stubs mirroring CPython's posix module docstrings.
def abort(*args,**kw):
    """abort() -> does not return!
    Abort the interpreter immediately.  This 'dumps core' or otherwise fails
    in the hardest way possible on the hosting operating system."""
    pass
def access(*args,**kw):
    """access(path, mode, *, dir_fd=None, effective_ids=False, follow_symlinks=True)
    Use the real uid/gid to test for access to a path.  Returns True if granted,
    False otherwise.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    If effective_ids is True, access will use the effective uid/gid instead of
    the real uid/gid.
    If follow_symlinks is False, and the last element of the path is a symbolic
    link, access will examine the symbolic link itself instead of the file the
    link points to.
    dir_fd, effective_ids, and follow_symlinks may not be implemented
    on your platform.  If they are unavailable, using them will raise a
    NotImplementedError.
    Note that most operations will use the effective uid/gid, therefore this
    routine can be used in a suid/sgid environment to test if the invoking user
    has the specified access to the path.
    The mode argument can be F_OK to test existence, or the inclusive-OR
    of R_OK, W_OK, and X_OK."""
    pass
def chdir(*args,**kw):
    """chdir(path)
    Change the current working directory to the specified path.
    path may always be specified as a string.
    On some platforms, path may also be specified as an open file descriptor.
    If this functionality is unavailable, using it raises an exception."""
    pass
def chmod(*args,**kw):
    """chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
    Change the access permissions of a file.
    path may always be specified as a string.
    On some platforms, path may also be specified as an open file descriptor.
    If this functionality is unavailable, using it raises an exception.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    If follow_symlinks is False, and the last element of the path is a symbolic
    link, chmod will modify the symbolic link itself instead of the file the
    link points to.
    It is an error to use dir_fd or follow_symlinks when specifying path as
    an open file descriptor.
    dir_fd and follow_symlinks may not be implemented on your platform.
    If they are unavailable, using them will raise a NotImplementedError."""
    pass
def close(*args,**kw):
    """close(fd)
    Close a file descriptor (for low level IO)."""
    pass
def closerange(*args,**kw):
    """closerange(fd_low, fd_high)
    Closes all file descriptors in [fd_low, fd_high), ignoring errors."""
    pass
def device_encoding(*args,**kw):
    """device_encoding(fd) -> str
    Return a string describing the encoding of the device
    if the output is a terminal; else return None."""
    pass
def dup(*args,**kw):
    """dup(fd) -> fd2
    Return a duplicate of a file descriptor."""
    pass
def dup2(*args,**kw):
    """dup2(old_fd, new_fd)
    Duplicate file descriptor."""
    pass
# Minimal environment and error alias, plus more no-op stubs.
environ = {'PYTHONUSERBASE': ' '}
error = OSError
def execv(*args,**kw):
    """execv(path, args)
    Execute an executable path with arguments, replacing current process.
    path: path of executable file
    args: tuple or list of strings"""
    pass
def execve(*args,**kw):
    """execve(path, args, env)
    Execute a path with arguments and environment, replacing current process.
    path: path of executable file
    args: tuple or list of arguments
    env: dictionary of strings mapping to strings
    On some platforms, you may specify an open file descriptor for path;
    execve will execute the program the file descriptor is open to.
    If this functionality is unavailable, using it raises NotImplementedError."""
    pass
def fstat(*args,**kw):
    """fstat(fd) -> stat result
    Like stat(), but for an open file descriptor.
    Equivalent to stat(fd=fd)."""
    pass
def fsync(*args,**kw):
    """fsync(fildes)
    force write of file with filedescriptor to disk."""
    pass
def get_terminal_size(*args,**kw):
    """Return the size of the terminal window as (columns, lines).
    The optional argument fd (default standard output) specifies
    which file descriptor should be queried.
    If the file descriptor is not connected to a terminal, an OSError
    is thrown.
    This function will only be defined if an implementation is
    available for this system.
    shutil.get_terminal_size is the high-level function which should
    normally be used, os.get_terminal_size is the low-level implementation."""
    pass
def getcwd(*args,**kw):
    """getcwd() -> path
    Return a unicode string representing the current working directory."""
    return __BRYTHON__.brython_path # XXX fix me
def getcwdb(*args,**kw):
    """getcwdb() -> path
    Return a bytes string representing the current working directory."""
    pass
def getlogin(*args,**kw):
    """getlogin() -> string
    Return the actual login name."""
    pass
def getpid(*args,**kw):
    """getpid() -> pid
    Return the current process id"""
    # Fixed value: there is no real process in the browser.
    return 0
def getppid(*args,**kw):
    """getppid() -> ppid
    Return the parent's process id.  If the parent process has already exited,
    Windows machines will still return its id; others systems will return the id
    of the 'init' process (1)."""
    pass
def isatty(*args,**kw):
    """isatty(fd) -> bool
    Return True if the file descriptor 'fd' is an open file descriptor
    connected to the slave end of a terminal."""
    pass
def kill(*args,**kw):
    """kill(pid, sig)
    Kill a process with a signal."""
    pass
def link(*args,**kw):
    """link(src, dst, *, src_dir_fd=None, dst_dir_fd=None, follow_symlinks=True)
    Create a hard link to a file.
    If either src_dir_fd or dst_dir_fd is not None, it should be a file
    descriptor open to a directory, and the respective path string (src or dst)
    should be relative; the path will then be relative to that directory.
    If follow_symlinks is False, and the last element of src is a symbolic
    link, link will create a link to the symbolic link itself instead of the
    file the link points to.
    src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your
    platform.  If they are unavailable, using them will raise a
    NotImplementedError."""
    pass
def listdir(*args,**kw):
    """listdir(path='.') -> list_of_filenames
    Return a list containing the names of the files in the directory.
    The list is in arbitrary order.  It does not include the special
    entries '.' and '..' even if they are present in the directory.
    path can be specified as either str or bytes.  If path is bytes,
    the filenames returned will also be bytes; in all other circumstances
    the filenames returned will be str.
    On some platforms, path may also be specified as an open file descriptor;
    the file descriptor must refer to a directory.
    If this functionality is unavailable, using it raises NotImplementedError."""
    pass
def lseek(*args,**kw):
    """lseek(fd, pos, how) -> newpos
    Set the current position of a file descriptor.
    Return the new cursor position in bytes, starting from the beginning."""
    pass
def lstat(*args,**kw):
    """lstat(path, *, dir_fd=None) -> stat result
    Like stat(), but do not follow symbolic links.
    Equivalent to stat(path, follow_symlinks=False)."""
    # Returns a default stat_result (defined elsewhere in this module).
    return stat_result()
def mkdir(*args,**kw):
    """mkdir(path, mode=0o777, *, dir_fd=None)
    Create a directory.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError.
    The mode argument is ignored on Windows."""
    pass
def open(*args,**kw):
    """open(path, flags, mode=0o777, *, dir_fd=None)
    Open a file for low level IO.  Returns a file handle (integer).
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass
def pipe(*args,**kw):
    """pipe() -> (read_end, write_end)
    Create a pipe."""
    pass
def putenv(*args,**kw):
    """putenv(key, value)
    Change or add an environment variable."""
    pass
def read(*args,**kw):
    """read(fd, buffersize) -> string
    Read a file descriptor."""
    pass
def readlink(*args,**kw):
    """readlink(path, *, dir_fd=None) -> path
    Return a string representing the path to which the symbolic link points.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass
# Brython stubs: filesystem mutation is unavailable in the browser; the
# docstrings mirror CPython's so help() output stays informative.

def remove(*args,**kw):
    """remove(path, *, dir_fd=None)
    Remove a file (same as unlink()).
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser

def rename(*args,**kw):
    """rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
    Rename a file or directory.
    If either src_dir_fd or dst_dir_fd is not None, it should be a file
    descriptor open to a directory, and the respective path string (src or dst)
    should be relative; the path will then be relative to that directory.
    src_dir_fd and dst_dir_fd, may not be implemented on your platform.
    If they are unavailable, using them will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser

def replace(*args,**kw):
    """replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
    Rename a file or directory, overwriting the destination.
    If either src_dir_fd or dst_dir_fd is not None, it should be a file
    descriptor open to a directory, and the respective path string (src or dst)
    should be relative; the path will then be relative to that directory.
    src_dir_fd and dst_dir_fd, may not be implemented on your platform.
    If they are unavailable, using them will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser

def rmdir(*args,**kw):
    """rmdir(path, *, dir_fd=None)
    Remove a directory.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser
# Brython stubs: process creation is impossible in the browser.

def spawnv(*args,**kw):
    """spawnv(mode, path, args)
    Execute the program 'path' in a new process.
    mode: mode of process creation
    path: path of executable file
    args: tuple or list of strings"""
    pass  # stub: no subprocesses in the browser

def spawnve(*args,**kw):
    """spawnve(mode, path, args, env)
    Execute the program 'path' in a new process.
    mode: mode of process creation
    path: path of executable file
    args: tuple or list of arguments
    env: dictionary of strings mapping to strings"""
    pass  # stub: no subprocesses in the browser

def startfile(*args,**kw):
    """startfile(filepath [, operation]) - Start a file with its associated application.
    When "operation" is not specified or "open", this acts like
    double-clicking the file in Explorer, or giving the file name as an
    argument to the DOS "start" command: the file is opened with whatever
    application (if any) its extension is associated.
    When another "operation" is given, it specifies what should be done with
    the file. A typical operation is "print".
    startfile returns as soon as the associated application is launched.
    There is no option to wait for the application to close, and no way
    to retrieve the application's exit status.
    The filepath is relative to the current directory. If you want to use
    an absolute path, make sure the first character is not a slash ("/");
    the underlying Win32 ShellExecute function doesn't work if it is."""
    pass  # stub: no way to launch applications from the browser
def stat(*args,**kw):
    """stat(path, *, dir_fd=None, follow_symlinks=True) -> stat result
    Perform a stat system call on the given path.
    path may be specified as either a string or as an open file descriptor.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be supported on your platform; if it is unavailable, using
    it will raise a NotImplementedError.
    If follow_symlinks is False, and the last element of the path is a symbolic
    link, stat will examine the symbolic link itself instead of the file the
    link points to.
    It is an error to use dir_fd or follow_symlinks when specifying path as
    an open file descriptor."""
    # Brython: ignores all arguments and returns the same fake metadata
    # object regardless of path (see the stat_result class in this module).
    return stat_result()

def stat_float_times(*args,**kw):
    """stat_float_times([newval]) -> oldval
    Determine whether os.[lf]stat represents time stamps as float objects.
    If newval is True, future calls to stat() return floats, if it is False,
    future calls return ints.
    If newval is omitted, return the current setting.
    """
    pass  # stub: the setting has no effect here, stat() always fakes values
class stat_result:
    # Brython substitute for os.stat_result: there is no real filesystem,
    # so every instance carries the same fabricated metadata.
    def __init__(self):
        """st_mode - protection bits,
        st_ino - inode number,
        st_dev - device,
        st_nlink - number of hard links,
        st_uid - user id of owner,
        st_gid - group id of owner,
        st_size - size of file, in bytes,
        st_atime - time of most recent access expressed in seconds,
        st_mtime - time of most recent content modification expressed in
        seconds,
        st_ctime - platform dependent; time of most recent metadata change on
        Unix, or the time of creation on Windows, expressed in seconds
        st_atime_ns - time of most recent access expressed in nanoseconds as an
        integer,
        st_mtime_ns - time of most recent content modification expressed in
        nanoseconds as an integer,
        st_ctime_ns - platform dependent; time of most recent metadata change
        on Unix, or the time of creation on Windows, expressed in
        nanoseconds as an integer """
        # Brython : fake values
        # NOTE(review): CPython exposes these as numbers (seconds /
        # nanoseconds); here one shared datetime object backs all six time
        # attributes — confirm callers tolerate a datetime instead of a float.
        self.st_atime = datetime.datetime.now()
        self.st_mtime = self.st_ctime = self.st_atime_ns = \
            self.st_mtime_ns = self.st_ctime_ns = self.st_atime
        # -1 / 0 / 1 are sentinel values; nothing meaningful can be reported.
        self.st_uid = self.st_gid = self.st_ino = -1
        self.st_mode = 0
        self.st_size = 1
class statvfs_result:
    # Placeholder so "from posix import *" style imports find the name.
    pass

def strerror(*args,**kw):
    """strerror(code) -> string
    Translate an error code to a message string."""
    pass  # stub: no errno table in the browser

def symlink(*args,**kw):
    """symlink(src, dst, target_is_directory=False, *, dir_fd=None)
    Create a symbolic link pointing to src named dst.
    target_is_directory is required on Windows if the target is to be
    interpreted as a directory. (On Windows, symlink requires
    Windows 6.0 or greater, and raises a NotImplementedError otherwise.)
    target_is_directory is ignored on non-Windows platforms.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser

def system(*args,**kw):
    """system(command) -> exit_status
    Execute the command (a string) in a subshell."""
    pass  # stub: no shell in the browser

class terminal_size:
    # Placeholder type; there is no terminal to measure.
    pass
def times(*args,**kw):
    """times() -> times_result
    Return an object containing floating point numbers indicating process
    times. The object behaves like a named tuple with these fields:
    (utime, stime, cutime, cstime, elapsed_time)"""
    pass  # stub: no process accounting in the browser

class times_result:
    # Placeholder type for the times() return value.
    pass

def umask(*args,**kw):
    """umask(new_mask) -> old_mask
    Set the current numeric umask and return the previous umask."""
    pass  # stub: no permission bits in the browser

class uname_result:
    # Placeholder type for the uname() return value.
    pass

def unlink(*args,**kw):
    """unlink(path, *, dir_fd=None)
    Remove a file (same as remove()).
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    dir_fd may not be implemented on your platform.
    If it is unavailable, using it will raise a NotImplementedError."""
    pass  # stub: no filesystem in the browser
def urandom(n):
    """urandom(n) -> str
    Return n random bytes suitable for cryptographic use."""
    # NOTE(review): __random is Brython's plain PRNG, not a CSPRNG, so
    # despite the docstring this output is NOT cryptographically secure.
    import __random
    # BUG FIX: randint is inclusive on BOTH ends, so the upper bound must be
    # 255 — the original randint(0, 256) could produce chr(256), which is
    # outside the valid byte range 0..255.
    randbytes = [chr(__random.randint(0, 255)) for i in range(n)]
    return ''.join(randbytes)
def utime(*args,**kw):
    """utime(path, times=None, *, ns=None, dir_fd=None, follow_symlinks=True) Set the access and modified time of path.
    path may always be specified as a string.
    On some platforms, path may also be specified as an open file descriptor.
    If this functionality is unavailable, using it raises an exception.
    If times is not None, it must be a tuple (atime, mtime);
    atime and mtime should be expressed as float seconds since the epoch.
    If ns is not None, it must be a tuple (atime_ns, mtime_ns);
    atime_ns and mtime_ns should be expressed as integer nanoseconds
    since the epoch.
    If both times and ns are None, utime uses the current time.
    Specifying tuples for both times and ns is an error.
    If dir_fd is not None, it should be a file descriptor open to a directory,
    and path should be relative; path will then be relative to that directory.
    If follow_symlinks is False, and the last element of the path is a symbolic
    link, utime will modify the symbolic link itself instead of the file the
    link points to.
    It is an error to use dir_fd or follow_symlinks when specifying path
    as an open file descriptor.
    dir_fd and follow_symlinks may not be available on your platform.
    If they are unavailable, using them will raise a NotImplementedError."""
    pass  # stub: no filesystem timestamps in the browser
def waitpid(*args,**kw):
    """waitpid(pid, options) -> (pid, status << 8)
    Wait for completion of a given process. options is ignored on Windows."""
    pass  # stub: no child processes in the browser

def write(*args,**kw):
    """write(fd, string) -> byteswritten
    Write a string to a file descriptor."""
    pass  # stub: no file descriptors in the browser
## put WIFSIGNALED here. its needed by os module, and os module imports all
## functions in this module
# BUG FIX: WIFSIGNALED was defined twice in this module (the later,
# identical definition silently shadowed the first).  The duplicate has been
# removed; behavior is unchanged — there are no signals in the browser, so
# the answer is always False.
def WIFSIGNALED(status):
    "Return True if the process exited due to a signal, otherwise return False"
    return False
def WTERMSIG(status):
    """Return the signal that terminated the process (always 0 here)."""
    return 0
def WIFEXITED(status):
    # Stub: process-status introspection is meaningless in the browser.
    return False
def WEXITSTATUS(status):
    # Stub: always returns None.
    pass
def WNOHANG():
    # NOTE(review): in CPython, os.WNOHANG is an int constant, not a callable
    # returning a tuple — confirm how Brython's os module consumes this name.
    return (0,0)
|
gpl-2.0
|
mistio/libcloud
|
libcloud/common/gogrid.py
|
28
|
6395
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import time
from libcloud.utils.py3 import b
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.types import MalformedResponseError
from libcloud.common.base import ConnectionUserAndKey, JsonResponse
from libcloud.compute.base import NodeLocation
# GoGrid API endpoint and version used by all requests in this module.
HOST = 'api.gogrid.com'
PORTS_BY_SECURITY = {True: 443, False: 80}  # secure -> HTTPS, else plain HTTP
API_VERSION = '1.8'

# Public API of this module.
__all__ = [
    "GoGridResponse",
    "GoGridConnection",
    "GoGridIpAddress",
    "BaseGoGridDriver",
]
class GoGridResponse(JsonResponse):
    """JSON response wrapper for the GoGrid API.

    Maps GoGrid's HTTP status codes and JSON ``status`` field onto
    libcloud's success/error semantics.
    """

    def __init__(self, *args, **kwargs):
        # Record the driver class so raised errors can reference it.
        self.driver = BaseGoGridDriver
        super(GoGridResponse, self).__init__(*args, **kwargs)

    def success(self):
        """Return True/False for the API status, or None for an empty body.

        :raises InvalidCredsError: on HTTP 403 (bad key) or 401 (no rights).
        :raises MalformedResponseError: when the body is not valid JSON.
        """
        if self.status == 403:
            raise InvalidCredsError('Invalid credentials', self.driver)
        if self.status == 401:
            raise InvalidCredsError('API Key has insufficient rights',
                                    self.driver)
        if not self.body:
            return None
        try:
            parsed = self.parse_body()
        except ValueError:
            raise MalformedResponseError('Malformed reply',
                                         body=self.body,
                                         driver=self.driver)
        return parsed['status'] == 'success'

    def parse_error(self):
        """Best-effort extraction of the API error message; None if absent."""
        try:
            body = self.parse_body()
            return body["list"][0]["message"]
        except (ValueError, KeyError):
            return None
class GoGridConnection(ConnectionUserAndKey):
    """
    Connection class for the GoGrid driver
    """
    host = HOST
    responseCls = GoGridResponse

    def add_default_params(self, params):
        """Attach credentials, API version, response format and signature."""
        params.update({
            "api_key": self.user_id,
            "v": API_VERSION,
            "format": 'json',
            "sig": self.get_signature(self.user_id, self.key),
        })
        return params

    def get_signature(self, key, secret):
        """ create sig from md5 of key + secret + time """
        payload = b(key + secret + str(int(time.time())))
        return hashlib.md5(payload).hexdigest()

    def request(self, action, params=None, data='', headers=None, method='GET',
                raw=False):
        # Pure pass-through to the base class; kept so the full signature is
        # documented at this level.
        return super(GoGridConnection, self).request(action, params, data,
                                                     headers, method, raw)
class GoGridIpAddress(object):
    """
    IP Address
    """

    def __init__(self, id, ip, public, state, subnet):
        # Plain value object: one GoGrid-assigned IP address record.
        self.id = id          # GoGrid object id
        self.ip = ip          # dotted-quad address string
        self.public = public  # public vs. private address
        self.state = state    # e.g. "Assigned" / "Unassigned"
        self.subnet = subnet  # subnet the address belongs to
class BaseGoGridDriver(object):
    """GoGrid has common object model for services they
    provide, like locations and IP, so keep handling of
    these things in a single place."""
    name = "GoGrid"

    def _get_ip(self, element):
        # Pull the bare address string out of a nested JSON element.
        return element.get('ip').get('ip')

    def _to_ip(self, element):
        # Convert one JSON element into a GoGridIpAddress with a .location.
        address = GoGridIpAddress(id=element['id'],
                                  ip=element['ip'],
                                  public=element['public'],
                                  subnet=element['subnet'],
                                  state=element["state"]["name"])
        address.location = self._to_location(element['datacenter'])
        return address

    def _to_ips(self, object):
        return [self._to_ip(item) for item in object['list']]

    def _to_location(self, element):
        # pylint: disable=no-member
        return NodeLocation(id=element['id'],
                            name=element['name'],
                            country="US",
                            driver=self.connection.driver)

    def _to_locations(self, object):
        return [self._to_location(item) for item in object['list']]

    def ex_list_ips(self, **kwargs):
        """Return list of IP addresses assigned to
        the account.

        :keyword public: set to True to list only public IPs, False for
            only private IPs; leave unset (or None) for no type filter.
        :type public: ``bool``

        :keyword assigned: set to True to list only addresses assigned to
            servers, False for unassigned ones; leave unset (or None) for
            no state filter.
        :type assigned: ``bool``

        :keyword location: filter IP addresses by location
        :type location: :class:`NodeLocation`

        :rtype: ``list`` of :class:`GoGridIpAddress`
        """
        params = {}
        public = kwargs.get("public")
        if public is not None:
            params["ip.type"] = "Public" if public else "Private"
        assigned = kwargs.get("assigned")
        if assigned is not None:
            params["ip.state"] = "Assigned" if assigned else "Unassigned"
        location = kwargs.get("location")
        if location is not None:
            params['datacenter'] = location.id
        # pylint: disable=no-member
        response = self.connection.request('/api/grid/ip/list', params=params)
        return self._to_ips(response.object)

    def _get_first_ip(self, location=None):
        # Grab any free public IP; GoGrid requires one to create a server.
        candidates = self.ex_list_ips(public=True, assigned=False,
                                      location=location)
        if not candidates:
            # pylint: disable=no-member
            raise LibcloudError('No public unassigned IPs left',
                                self.driver)
        return candidates[0].ip
|
apache-2.0
|
zbigniewz/jenkins-build-failure-analyzer
|
analyzer.py
|
2
|
3862
|
#!/usr/bin/python
import pprint
import re
import argparse
from datetime import datetime
from statsd import StatsClient
from utils import failureReasons, JenkinsClient
def is_build_failed(job):
    """Return True when the job's most recent build finished with FAILURE."""
    last_build = job.get('lastBuild')
    if last_build is None or 'result' not in last_build:
        return False
    return last_build['result'] == 'FAILURE'
def was_built_in_last_24h(job):
    """Return True when the job's last build started less than 24 hours ago.

    Jenkins reports the build timestamp in milliseconds since the epoch
    (UTC); it is compared against the current UTC time.
    """
    if 'lastBuild' in job and job['lastBuild']:
        # Jenkins timestamps are milliseconds -> convert to seconds.
        build_date_time = datetime.utcfromtimestamp(job['lastBuild']['timestamp'] / 1e3)
        # BUG FIX: the elapsed time must be measured against UTC "now".
        # The original compared a UTC-naive build time against
        # datetime.now() (local-naive), skewing the 24h window by the
        # machine's UTC offset.
        time_diff_in_hours = (datetime.utcnow() - build_date_time).total_seconds() / 60 / 60  # seconds to hours
        if time_diff_in_hours < 24:
            return True
    return False
def find_failure_reason(console_output):
    """Match console output against the known failure patterns.

    Returns the name of the first reason whose regex list matches, or the
    "unknown" reason's name when nothing matches.
    """
    for reason in failureReasons.possible_reasons:
        if any(re.search(pattern, console_output) for pattern in reason['regex']):
            return reason['name']
    return failureReasons.unknown_reason['name']
def update_results(results, reason, job):
    """Increment the counter for *reason* and record the failed build's URL.

    Mutates and returns *results*; entries whose name does not match are
    left untouched.
    """
    for entry in results:
        if entry['name'] != reason:
            continue
        entry['count'] += 1
        build_url = '{job_url}{build_number}/console'.format(
            job_url=job['url'], build_number=job['lastBuild']['number'])
        entry['job'].append({'job name': job['name'], 'build url': build_url})
        break
    return results
def analyze_jobs(filtered_jobs, jenkins_server):
    """Classify each failed job's console output and tally failure reasons.

    Returns a list of reason dicts, each with a fresh 'count' and the list
    of failed builds ('job') attributed to it.
    """
    # BUG FIX: the original did `results = failureReasons.possible_reasons`
    # and appended unknown_reason to it, mutating the shared module-level
    # list.  main() calls this function twice, so the second call appended
    # unknown_reason again (duplicate entries) and made find_failure_reason
    # iterate over unknown_reason, which has no 'regex' key.  Work on a
    # shallow copy of the list instead.
    results = list(failureReasons.possible_reasons)
    results.append(failureReasons.unknown_reason)
    for entry in results:
        entry['count'] = 0
        entry['job'] = []
    counter = 0
    for job in filtered_jobs:
        counter += 1
        print("Analyzing job {id} / {all}".format(id=counter, all=len(filtered_jobs)))
        console_output = jenkins_server.get_job_console_output(job)
        failure_reason = find_failure_reason(console_output)
        results = update_results(results, failure_reason, job)
    return results
def print_results(results):
    """Pretty-print the full results, then a compact reason/count summary."""
    printer = pprint.PrettyPrinter()
    printer.pprint(results)
    print('\n\n\n Quick summary:\n')
    for entry in results:
        print('{reason} : {count}'.format(reason=entry['name'], count=entry['count']))
def report_to_graphite(host, port, prefix, results):
    """Push every reason's failure count to statsd/graphite as a gauge."""
    client = StatsClient(host=host, port=port, prefix=prefix, maxudpsize=512)
    for entry in results:
        client.gauge(entry['graphite key'], entry['count'])
def create_arg_parser():
    """Build the CLI parser: Jenkins credentials plus the statsd target."""
    parser = argparse.ArgumentParser(
        description='Analyze jenkins failures and report them to graphite server')
    for positional in ('jenkins_host', 'jenkins_user', 'jenkins_pass',
                       'statsd_host', 'statsd_port', 'graphite_key'):
        parser.add_argument(positional)
    return parser
def main():
    """Entry point: fetch Jenkins jobs, classify failures, report metrics."""
    parser = create_arg_parser()
    args = parser.parse_args()
    jenkins_server = JenkinsClient.JenkinsClient(args.jenkins_host, args.jenkins_user, args.jenkins_pass)
    all_jobs = jenkins_server.get_all_jobs()
    only_failed_jobs = list(filter(is_build_failed, all_jobs))
    only_failed_in_last_24h_jobs = list(filter(was_built_in_last_24h, only_failed_jobs))
    # Only the last-24h results are sent to graphite; the all-time analysis
    # below is printed only.
    results = analyze_jobs(only_failed_in_last_24h_jobs, jenkins_server)
    report_to_graphite(args.statsd_host, args.statsd_port, args.graphite_key, results)
    print('\n Full results (only jobs that were built and failed in last 24 hours):\n')
    print_results(results)
    # NOTE(review): analyze_jobs mutates failureReasons.possible_reasons on
    # every call, so this second invocation operates on shared module state —
    # confirm it behaves as intended when run twice in one process.
    results = analyze_jobs(only_failed_jobs, jenkins_server)
    print('\n Full results (all jobs that failed):\n')
    print_results(results)
if __name__ == '__main__':
main()
|
apache-2.0
|
vikas-parashar/zulip
|
zerver/context_processors.py
|
13
|
2865
|
from __future__ import absolute_import
from typing import Dict, Any
from django.http import HttpRequest
from django.conf import settings
from zerver.models import UserProfile, get_realm_by_string_id
from zproject.backends import (password_auth_enabled, dev_auth_enabled,
google_auth_enabled, github_auth_enabled)
from zerver.lib.utils import get_subdomain
def common_context(user):
    # type: (UserProfile) -> Dict[str, Any]
    """Context variables shared wherever a user's realm is known."""
    context = dict(
        realm_uri=user.realm.uri,
        server_uri=settings.SERVER_URI,
        external_uri_scheme=settings.EXTERNAL_URI_SCHEME,
        external_host=settings.EXTERNAL_HOST,
    )
    return context
def add_settings(request):
    # type: (HttpRequest) -> Dict[str, Any]
    """Template context processor: server-wide settings plus realm URIs.

    The realm is taken from the authenticated user when available; with
    subdomain-based realms it is otherwise inferred from the request's
    subdomain.  Everything else is a direct passthrough of settings values.
    """
    realm = None
    if hasattr(request.user, "realm"):
        realm = request.user.realm
    elif settings.REALMS_HAVE_SUBDOMAINS:
        subdomain = get_subdomain(request)
        realm = get_realm_by_string_id(subdomain)
    if realm is not None:
        realm_uri = realm.uri
    else:
        # Fall back to the server-wide URI when no realm can be determined.
        realm_uri = settings.SERVER_URI
    return {
        'custom_logo_url': settings.CUSTOM_LOGO_URL,
        'register_link_disabled': settings.REGISTER_LINK_DISABLED,
        'login_link_disabled': settings.LOGIN_LINK_DISABLED,
        'about_link_disabled': settings.ABOUT_LINK_DISABLED,
        'show_oss_announcement': settings.SHOW_OSS_ANNOUNCEMENT,
        'zulip_admin': settings.ZULIP_ADMINISTRATOR,
        'terms_of_service': settings.TERMS_OF_SERVICE,
        'login_url': settings.HOME_NOT_LOGGED_IN,
        'only_sso': settings.ONLY_SSO,
        'external_api_path': settings.EXTERNAL_API_PATH,
        'external_api_uri': settings.EXTERNAL_API_URI,
        'external_host': settings.EXTERNAL_HOST,
        'external_uri_scheme': settings.EXTERNAL_URI_SCHEME,
        'realm_uri': realm_uri,
        'server_uri': settings.SERVER_URI,
        'api_site_required': settings.EXTERNAL_API_PATH != "api.zulip.com",
        'email_integration_enabled': settings.EMAIL_GATEWAY_BOT != "",
        'email_gateway_example': settings.EMAIL_GATEWAY_EXAMPLE,
        'open_realm_creation': settings.OPEN_REALM_CREATION,
        # Which authentication backends are enabled may depend on the realm.
        'password_auth_enabled': password_auth_enabled(realm),
        'dev_auth_enabled': dev_auth_enabled(realm),
        'google_auth_enabled': google_auth_enabled(realm),
        'github_auth_enabled': github_auth_enabled(realm),
        'development_environment': settings.DEVELOPMENT,
        'support_email': settings.ZULIP_ADMINISTRATOR,
    }
def add_metrics(request):
    # type: (HttpRequest) -> Dict[str, str]
    """Expose the Dropbox app key to templates that embed the file chooser."""
    return dict(dropboxAppKey=settings.DROPBOX_APP_KEY)
|
apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.