# Author: Steven J. Bethard <[email protected]>.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.1'
__all__ = [
'ArgumentParser',
'ArgumentError',
'ArgumentTypeError',
'FileType',
'HelpFormatter',
'ArgumentDefaultsHelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
'MetavarTypeHelpFormatter',
'Namespace',
'Action',
'ONE_OR_MORE',
'OPTIONAL',
'PARSER',
'REMAINDER',
'SUPPRESS',
'ZERO_OR_MORE',
]
import collections as _collections
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _, ngettext
SUPPRESS = '==SUPPRESS=='
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
star_args = {}
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
if name.isidentifier():
arg_strings.append('%s=%r' % (name, value))
else:
star_args[name] = value
if star_args:
arg_strings.append('**%s' % repr(star_args))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
# ===============
# Formatting Help
# ===============
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):
        # default setting for width: terminal width minus a small margin
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2
        self._prog = prog
        self._indent_increment = indent_increment
        # FIX: the original assigned max_help_position unconditionally and
        # then immediately overwrote it with the clamped value; keep only
        # the clamped assignment (help column never starts past width - 20).
        self._max_help_position = min(max_help_position,
                                      max(width - 20, indent_increment * 2))
        self._width = width
        self._current_indent = 0
        self._level = 0
        self._action_max_length = 0
        self._root_section = self._Section(self, None)
        self._current_section = self._root_section
        self._whitespace_matcher = _re.compile(r'\s+', _re.ASCII)
        self._long_break_matcher = _re.compile(r'\n\n\n+')

    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1

    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1

    class _Section(object):
        """One (possibly nested) titled group of help items."""

        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []

        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()
            # return nothing if the section was empty
            if not item_help:
                return ''
            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''
            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])

    def _add_item(self, func, args):
        self._current_section.items.append((func, args))

    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        """Open a nested section; all add_* calls go into it until end_section."""
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section

    def end_section(self):
        self._current_section = self._current_section.parent
        self._dedent()

    def add_text(self, text):
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])

    def add_usage(self, usage, actions, groups, prefix=None):
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)

    def add_argument(self, action):
        if action.help is not SUPPRESS:
            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))
            # update the maximum item length (used to align help columns)
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)
            # add the item to the list
            self._add_item(self._format_action, [action])

    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)

    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        """Render everything added so far into the final help string."""
        help = self._root_section.format_help()
        if help:
            # collapse runs of 3+ newlines and normalize the edges
            help = self._long_break_matcher.sub('\n\n', help)
            help = help.strip('\n') + '\n'
        return help

    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])

    def _format_usage(self, usage, actions, groups, prefix):
        """Build the 'usage: prog ...' line(s), wrapping when too long."""
        if prefix is None:
            prefix = _('usage: ')
        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)
        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)
        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)
            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)
            # build full usage string
            format = self._format_actions_usage
            action_usage = format(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])
            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:
                # break usage into wrappable parts (bracketed groups stay whole)
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = format(optionals, groups)
                pos_usage = format(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage

                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width and line:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines

                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]
                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines
                # join lines into usage
                usage = '\n'.join(lines)
        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)

    def _format_actions_usage(self, actions, groups):
        """Format actions as a usage fragment, adding []/() and | for
        mutually exclusive groups."""
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                # only mark the group when its actions are contiguous here
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'
        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):
            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)
            # produce all arg strings
            elif not action.option_strings:
                default = self._get_default_metavar_for_positional(action)
                part = self._format_args(action, default)
                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]
                # add the action string to the list
                parts.append(part)
            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]
                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string
                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = self._get_default_metavar_for_optional(action)
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)
                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part
                # add the action string to the list
                parts.append(part)
        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]
        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])
        # clean up separators for mutually exclusive groups
        open = r'[\[(]'
        close = r'[\])]'
        text = _re.sub(r'(%s) ' % open, r'\1', text)
        text = _re.sub(r' (%s)' % close, r'\1', text)
        text = _re.sub(r'%s *%s' % (open, close), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()
        # return the text
        return text

    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = max(self._width - self._current_indent, 11)
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'

    def _format_action(self, action):
        """Format one action's invocation plus (wrapped, aligned) help text."""
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = max(self._width - help_position, 11)
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)
        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0
        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position
        # collect the pieces of the action help
        parts = [action_header]
        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))
        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')
        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))
        # return a single string
        return self._join_parts(parts)

    def _format_action_invocation(self, action):
        if not action.option_strings:
            default = self._get_default_metavar_for_positional(action)
            metavar, = self._metavar_formatter(action, default)(1)
            return metavar
        else:
            parts = []
            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)
            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = self._get_default_metavar_for_optional(action)
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))
            return ', '.join(parts)

    def _metavar_formatter(self, action, default_metavar):
        """Return a callable producing a tuple of metavar strings of a
        requested size (explicit metavar > {choices} > default)."""
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar

        def format(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return format

    def _format_args(self, action, default_metavar):
        """Render the metavar(s) for an action according to its nargs."""
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            formats = ['%s' for _ in range(action.nargs)]
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result

    def _expand_help(self, action):
        """%-expand an action's help string against its own attributes."""
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params

    def _iter_indented_subactions(self, action):
        # only subparser-style actions provide _get_subactions
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            yield from get_subactions()
            self._dedent()

    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)

    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                              subsequent_indent=indent)

    def _get_help_string(self, action):
        return action.help

    def _get_default_metavar_for_optional(self, action):
        return action.dest.upper()

    def _get_default_metavar_for_positional(self, action):
        return action.dest
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Do not re-wrap; just prepend the indent to every line (including
        # blank ones), keeping the author's line breaks intact.
        pieces = []
        for line in text.splitlines(keepends=True):
            pieces.append(indent)
            pieces.append(line)
        return ''.join(pieces)
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # width is deliberately ignored: help text is shown verbatim,
        # one output line per source line.
        return list(text.splitlines())
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        help = action.help
        # never double-report a default the author already mentions
        if '%(default)' in action.help:
            return help
        if action.default is SUPPRESS:
            return help
        # only optionals, and positionals that may legally be omitted,
        # get the default appended
        if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
            help += ' (default: %(default)s)'
        return help
class MetavarTypeHelpFormatter(HelpFormatter):
    """Help message formatter which uses the argument 'type' as the default
    metavar value (instead of the argument 'dest')

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_default_metavar_for_optional(self, action):
        # e.g. ``--count int`` rather than ``--count COUNT``
        return action.type.__name__

    def _get_default_metavar_for_positional(self, action):
        # e.g. ``int`` rather than ``count``
        return action.type.__name__
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
if argument is None:
return None
elif argument.option_strings:
return '/'.join(argument.option_strings)
elif argument.metavar not in (None, SUPPRESS):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
else:
return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        if self.argument_name is None:
            template = '%(message)s'
        else:
            template = 'argument %(argument_name)s: %(message)s'
        return template % dict(message=self.message,
                               argument_name=self.argument_name)
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type.

    Raised by type= callables (see FileType.__call__ for an example); the
    parser reports it as a clean command-line error message.
    """
    pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line. The keyword arguments to the Action constructor are also
    all attributes of Action instances.

    Keyword Arguments:
        - option_strings -- A list of command-line option strings which
            should be associated with this action.
        - dest -- The name of the attribute to hold the created object(s)
        - nargs -- The number of command-line arguments that should be
            consumed. By default, one argument will be consumed and a single
            value will be produced.  Other values include:
                - N (an integer) consumes N arguments (and produces a list)
                - '?' consumes zero or one arguments
                - '*' consumes zero or more arguments (and produces a list)
                - '+' consumes one or more arguments (and produces a list)
            Note that the difference between the default and nargs=1 is that
            with the default, a single value will be produced, while with
            nargs=1, a list containing a single value will be produced.
        - const -- The value to be produced if the option is specified and the
            option uses an action that takes no values.
        - default -- The value to be produced if the option is not specified.
        - type -- A callable that accepts a single string argument, and
            returns the converted value.  The standard Python types str, int,
            float, and complex are useful examples of such callables.  If None,
            str is used.
        - choices -- A container of values that should be allowed. If not None,
            after a command-line argument has been converted to the appropriate
            type, an exception will be raised if it is not a member of this
            collection.
        - required -- True if the action must always be specified at the
            command line. This is only meaningful for optional command-line
            arguments.
        - help -- The help string describing the argument.
        - metavar -- The name to be used for the option's argument with the
            help string. If None, the 'dest' value will be used as the name.
    """

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # Every constructor keyword becomes a same-named instance attribute.
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    def _get_kwargs(self):
        # Attributes shown by _AttributeHolder.__repr__, in this fixed order.
        # NOTE: 'required' is deliberately omitted from the repr.
        names = [
            'option_strings',
            'dest',
            'nargs',
            'const',
            'default',
            'type',
            'choices',
            'help',
            'metavar',
        ]
        return [(name, getattr(self, name)) for name in names]

    def __call__(self, parser, namespace, values, option_string=None):
        # Abstract: concrete actions must override to apply `values` to
        # `namespace` (and/or perform side effects like printing help).
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Default action: store the converted value(s) on the namespace."""

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         nargs=nargs,
                         const=const,
                         default=default,
                         type=type,
                         choices=choices,
                         required=required,
                         help=help,
                         metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Last occurrence wins: simply overwrite.
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Store a fixed constant when the option is seen (takes no values)."""

    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        # NOTE: metavar is accepted for signature symmetry but, matching the
        # original behavior, is not forwarded (nargs=0 shows no metavar).
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         nargs=0,
                         const=const,
                         default=default,
                         required=required,
                         help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """store_const specialized to True (default False)."""

    def __init__(self, option_strings, dest, default=False, required=False,
                 help=None):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         const=True,
                         default=default,
                         required=required,
                         help=help)
class _StoreFalseAction(_StoreConstAction):
    """store_const specialized to False (default True)."""

    def __init__(self, option_strings, dest, default=True, required=False,
                 help=None):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         const=False,
                         default=default,
                         required=required,
                         help=help)
class _AppendAction(Action):
    """Append each occurrence's value to a list on the namespace."""

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         nargs=nargs,
                         const=const,
                         default=default,
                         type=type,
                         choices=choices,
                         required=required,
                         help=help,
                         metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy before appending so a shared default list is never mutated.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
    """Append a fixed constant to a list each time the option is seen."""

    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         nargs=0,
                         const=const,
                         default=default,
                         required=required,
                         help=help,
                         metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy before appending so a shared default list is never mutated.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
class _CountAction(Action):
    """Count occurrences of the option (e.g. -vvv -> 3)."""

    def __init__(self, option_strings, dest, default=None, required=False,
                 help=None):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         nargs=0,
                         default=default,
                         required=required,
                         help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # Missing/None counts start at 0.
        setattr(namespace, self.dest,
                _ensure_value(namespace, self.dest, 0) + 1)
class _HelpAction(Action):
    """Print the parser's help and exit (the -h/--help action)."""

    def __init__(self, option_strings, dest=SUPPRESS, default=SUPPRESS,
                 help=None):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         default=default,
                         nargs=0,
                         help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Print version information and exit."""

    def __init__(self, option_strings, version=None, dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super().__init__(option_strings=option_strings,
                         dest=dest,
                         default=default,
                         nargs=0,
                         help=help)
        self.version = version

    def __call__(self, parser, namespace, values, option_string=None):
        # Fall back to the parser-level version attribute when the action
        # itself was not given one.
        version = self.version
        if version is None:
            version = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(version)
        parser._print_message(formatter.format_help(), _sys.stdout)
        parser.exit()
class _SubParsersAction(Action):
    # Action that consumes a sub-command name plus all remaining args and
    # dispatches them to the matching sub-parser (see add_subparsers()).

    class _ChoicesPseudoAction(Action):
        # Argument-less pseudo-action used only so each sub-command (and its
        # aliases) appears as an entry in the help listing.

        def __init__(self, name, aliases, help):
            metavar = dest = name
            if aliases:
                metavar += ' (%s)' % ', '.join(aliases)
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=dest, help=help,
                         metavar=metavar)

    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        # OrderedDict keeps sub-commands in registration order; it doubles
        # as the choices= container so name validation comes for free.
        self._name_parser_map = _collections.OrderedDict()
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)

    def add_parser(self, name, **kwargs):
        """Create, register and return a sub-parser for command *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
        aliases = kwargs.pop('aliases', ())
        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, aliases, help)
            self._choices_actions.append(choice_action)
        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        # make parser available under aliases also
        for alias in aliases:
            self._name_parser_map[alias] = parser
        return parser

    def _get_subactions(self):
        # Hook used by HelpFormatter._iter_indented_subactions.
        return self._choices_actions

    def __call__(self, parser, namespace, values, option_string=None):
        # values[0] is the sub-command name; the rest belongs to it.
        parser_name = values[0]
        arg_strings = values[1:]
        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)
        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            args = {'parser_name': parser_name,
                    'choices': ', '.join(self._name_parser_map)}
            msg = _('unknown parser %(parser_name)r (choices: %(choices)s)') % args
            raise ArgumentError(self, msg)
        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        # In case this subparser defines new defaults, we parse them
        # in a new namespace object and then update the original
        # namespace for the relevant parts.
        subnamespace, arg_strings = parser.parse_known_args(arg_strings, None)
        for key, value in vars(subnamespace).items():
            setattr(namespace, key, value)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
# ==============
# Type classes
# ==============
class FileType(object):
    """Factory for creating file object types

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.

    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function.
        - encoding -- The file's encoding. Accepts the same values as the
            builtin open() function.
        - errors -- A string indicating how encoding and decoding errors are to
            be handled. Accepts the same value as the builtin open() function.
    """

    def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None):
        self._mode = mode
        self._bufsize = bufsize
        self._encoding = encoding
        self._errors = errors

    def __call__(self, string):
        # the special argument "-" means sys.std{in,out}
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            if 'w' in self._mode:
                return _sys.stdout
            msg = _('argument "-" with mode %r') % self._mode
            raise ValueError(msg)
        # all other arguments are used as file names
        try:
            return open(string, self._mode, self._bufsize, self._encoding,
                        self._errors)
        except OSError as e:
            message = _("can't open '%s': %s")
            raise ArgumentTypeError(message % (string, e))

    def __repr__(self):
        # Positional parts: mode/bufsize, skipping the -1 bufsize default.
        pieces = [repr(value)
                  for value in (self._mode, self._bufsize)
                  if value != -1]
        # Keyword parts: only those explicitly set.
        for name, value in (('encoding', self._encoding),
                            ('errors', self._errors)):
            if value is not None:
                pieces.append('%s=%r' % (name, value))
        return '%s(%s)' % (type(self).__name__, ', '.join(pieces))
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.

    Implements equality by attribute names and values, and provides a simple
    string representation.
    """

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __eq__(self, other):
        # Only comparable with other Namespaces; compare attribute dicts.
        if not isinstance(other, Namespace):
            return NotImplemented
        return vars(self) == vars(other)

    def __contains__(self, key):
        return key in vars(self)
class _ActionsContainer(object):
    """Base class for objects that hold argument Actions.

    Both ArgumentParser and _ArgumentGroup derive from this class.  It
    implements the 'action'/'type' registries, the add_argument() entry
    point, argument-group creation, and option-string conflict handling.
    """
    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()
        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler
        # set up registries
        self._registries = {}
        # register actions
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)
        # raise an exception if the conflict handler is invalid
        self._get_handler()
        # action storage
        self._actions = []
        self._option_string_actions = {}
        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []
        # defaults storage
        self._defaults = {}
        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []
    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map *value* to *object* in the registry named *registry_name*."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object
    def _registry_get(self, registry_name, value, default=None):
        """Look up *value* in a registry, returning *default* if absent."""
        return self._registries[registry_name].get(value, default)
    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set parser-level defaults, also updating any matching actions."""
        self._defaults.update(kwargs)
        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]
    def get_default(self, dest):
        """Return the default for *dest*, preferring action-level defaults."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)
    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """
        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)
        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)
        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default
        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not callable(action_class):
            raise ValueError('unknown action "%s"' % (action_class,))
        action = action_class(**kwargs)
        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not callable(type_func):
            raise ValueError('%r is not callable' % (type_func,))
        # raise an error if the metavar does not match the type
        # (only parsers have _get_formatter; plain groups skip this check)
        if hasattr(self, "_get_formatter"):
            try:
                self._get_formatter()._format_args(action, None)
            except TypeError:
                raise ValueError("length of metavar tuple does not match nargs")
        return self._add_action(action)
    def add_argument_group(self, *args, **kwargs):
        """Create an _ArgumentGroup attached to this container and return it."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group
    def add_mutually_exclusive_group(self, **kwargs):
        """Create a _MutuallyExclusiveGroup attached to this container."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group
    def _add_action(self, action):
        """Register *action* with this container and return it."""
        # resolve any conflicts
        self._check_conflict(action)
        # add to actions list
        self._actions.append(action)
        action.container = self
        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action
        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)
        # return the created action
        return action
    def _remove_action(self, action):
        """Remove *action* from this container's action list."""
        self._actions.remove(action)
    def _add_container_actions(self, container):
        """Copy the actions of another container (e.g. a parent parser) here,
        recreating its argument groups and mutually exclusive groups."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group
        # map each action to its group
        group_map = {}
        for group in container._action_groups:
            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)
            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]
        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)
            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group
        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)
    def _get_positional_kwargs(self, dest, **kwargs):
        """Normalize add_argument() keywords for a positional argument."""
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)
        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True
        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])
    def _get_optional_kwargs(self, *args, **kwargs):
        """Normalize add_argument() keywords for an optional argument."""
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                args = {'option': option_string,
                        'prefix_chars': self.prefix_chars}
                msg = _('invalid option string %(option)r: '
                        'must start with a character %(prefix_chars)r')
                raise ValueError(msg % args)
            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)
        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')
        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)
    def _pop_action_class(self, kwargs, default=None):
        """Pop 'action' from kwargs and resolve it through the registry."""
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)
    def _get_handler(self):
        """Return the bound method implementing self.conflict_handler."""
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)
    def _check_conflict(self, action):
        """Invoke the conflict handler for any already-used option strings."""
        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))
        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)
    def _handle_conflict_error(self, action, conflicting_actions):
        """conflict_handler='error': raise ArgumentError on any conflict."""
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)
    def _handle_conflict_resolve(self, action, conflicting_actions):
        """conflict_handler='resolve': strip option strings from old actions."""
        # remove all conflicting options
        for option_string, action in conflicting_actions:
            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)
            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """Container for a titled group of arguments within a parser.

    The group shares its action and registry storage with the parent
    container, so arguments added here are also visible to the parser.
    """

    def __init__(self, container, title=None, description=None, **kwargs):
        # fill in any keyword arguments the caller omitted by copying
        # the corresponding settings from the parent container
        kwargs.setdefault('conflict_handler', container.conflict_handler)
        kwargs.setdefault('prefix_chars', container.prefix_chars)
        kwargs.setdefault('argument_default', container.argument_default)
        super(_ArgumentGroup, self).__init__(description=description, **kwargs)

        # group attributes
        self.title = title
        self._group_actions = []

        # share most attributes with the container
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals
        self._mutually_exclusive_groups = container._mutually_exclusive_groups

    def _add_action(self, action):
        # record the action in the shared storage and in this group
        action = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        # drop the action from the shared storage and from this group
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose arguments may not be combined on the command line."""

    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self._container = container
        self.required = required

    def _add_action(self, action):
        # a required argument can never be mutually exclusive: giving one
        # member of the group would forbid giving the required one
        if action.required:
            raise ValueError(_('mutually exclusive arguments must be optional'))
        added = self._container._add_action(action)
        self._group_actions.append(added)
        return added

    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
- add_help -- Add a -h/-help option
- allow_abbrev -- Allow long options to be abbreviated unambiguously
"""
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True,
                 allow_abbrev=True):
        # NOTE: parents=[] is a mutable default, but it is only iterated,
        # never mutated, so sharing the default list is safe here.
        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)
        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])
        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help
        self.allow_abbrev = allow_abbrev
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None
        # register types
        def identity(string):
            return string
        self.register('type', None, identity)
        # add help argument if necessary
        # (using explicit default to override global argument_default)
        # prefer '-' for the help flags; otherwise use the first prefix char
        default_prefix = '-' if '-' in prefix_chars else prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                # parents need not be full _ActionsContainers
                pass
            else:
                self._defaults.update(defaults)
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
    def add_subparsers(self, **kwargs):
        """Create and return the _SubParsersAction for this parser.

        Only one subparsers action may be added per parser; a second call
        is reported via self.error().
        """
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))
        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals
        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()
        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)
        # return the created parsers action
        return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
def parse_known_args(self, args=None, namespace=None):
if args is None:
# args default to the system args
args = _sys.argv[1:]
else:
# make sure that args are mutable
args = list(args)
# default Namespace built from parser defaults
if namespace is None:
namespace = Namespace()
# add any action defaults that aren't present
for action in self._actions:
if action.dest is not SUPPRESS:
if not hasattr(namespace, action.dest):
if action.default is not SUPPRESS:
setattr(namespace, action.dest, action.default)
# add any parser defaults that aren't present
for dest in self._defaults:
if not hasattr(namespace, dest):
setattr(namespace, dest, self._defaults[dest])
# parse the arguments and exit if there are any errors
try:
namespace, args = self._parse_known_args(args, namespace)
if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
return namespace, args
except ArgumentError:
err = _sys.exc_info()[1]
self.error(str(err))
    def _parse_known_args(self, arg_strings, namespace):
        """Core parsing loop: consume optionals and positionals in turn.

        Returns (namespace, extras) where extras are the arg strings that
        matched no action.  Raises ArgumentError for mismatched arguments
        and calls self.error() for missing required arguments or
        unsatisfied required mutually exclusive groups.
        """
        # replace arg strings that are file references
        if self.fromfile_prefix_chars is not None:
            arg_strings = self._read_args_from_files(arg_strings)
        # map all mutually exclusive arguments to the other arguments
        # they can't occur with
        action_conflicts = {}
        for mutex_group in self._mutually_exclusive_groups:
            group_actions = mutex_group._group_actions
            for i, mutex_action in enumerate(mutex_group._group_actions):
                conflicts = action_conflicts.setdefault(mutex_action, [])
                conflicts.extend(group_actions[:i])
                conflicts.extend(group_actions[i + 1:])
        # find all option indices, and determine the arg_string_pattern
        # which has an 'O' if there is an option at an index,
        # an 'A' if there is an argument, or a '-' if there is a '--'
        option_string_indices = {}
        arg_string_pattern_parts = []
        arg_strings_iter = iter(arg_strings)
        for i, arg_string in enumerate(arg_strings_iter):
            # all args after -- are non-options
            if arg_string == '--':
                arg_string_pattern_parts.append('-')
                for arg_string in arg_strings_iter:
                    arg_string_pattern_parts.append('A')
            # otherwise, add the arg to the arg strings
            # and note the index if it was an option
            else:
                option_tuple = self._parse_optional(arg_string)
                if option_tuple is None:
                    pattern = 'A'
                else:
                    option_string_indices[i] = option_tuple
                    pattern = 'O'
                arg_string_pattern_parts.append(pattern)
        # join the pieces together to form the pattern
        arg_strings_pattern = ''.join(arg_string_pattern_parts)
        # converts arg strings to the appropriate and then takes the action
        seen_actions = set()
        seen_non_default_actions = set()
        def take_action(action, argument_strings, option_string=None):
            # convert the strings and invoke the action's __call__
            seen_actions.add(action)
            argument_values = self._get_values(action, argument_strings)
            # error if this argument is not allowed with other previously
            # seen arguments, assuming that actions that use the default
            # value don't really count as "present"
            if argument_values is not action.default:
                seen_non_default_actions.add(action)
                for conflict_action in action_conflicts.get(action, []):
                    if conflict_action in seen_non_default_actions:
                        msg = _('not allowed with argument %s')
                        action_name = _get_action_name(conflict_action)
                        raise ArgumentError(action, msg % action_name)
            # take the action if we didn't receive a SUPPRESS value
            # (e.g. from a default)
            if argument_values is not SUPPRESS:
                action(self, namespace, argument_values, option_string)
        # function to convert arg_strings into an optional action
        def consume_optional(start_index):
            # get the optional identified at this index
            option_tuple = option_string_indices[start_index]
            action, option_string, explicit_arg = option_tuple
            # identify additional optionals in the same arg string
            # (e.g. -xyz is the same as -x -y -z if no args are required)
            match_argument = self._match_argument
            action_tuples = []
            while True:
                # if we found no optional action, skip it
                if action is None:
                    extras.append(arg_strings[start_index])
                    return start_index + 1
                # if there is an explicit argument, try to match the
                # optional's string arguments to only this
                if explicit_arg is not None:
                    arg_count = match_argument(action, 'A')
                    # if the action is a single-dash option and takes no
                    # arguments, try to parse more single-dash options out
                    # of the tail of the option string
                    chars = self.prefix_chars
                    if arg_count == 0 and option_string[1] not in chars:
                        action_tuples.append((action, [], option_string))
                        char = option_string[0]
                        option_string = char + explicit_arg[0]
                        new_explicit_arg = explicit_arg[1:] or None
                        optionals_map = self._option_string_actions
                        if option_string in optionals_map:
                            action = optionals_map[option_string]
                            explicit_arg = new_explicit_arg
                        else:
                            msg = _('ignored explicit argument %r')
                            raise ArgumentError(action, msg % explicit_arg)
                    # if the action expect exactly one argument, we've
                    # successfully matched the option; exit the loop
                    elif arg_count == 1:
                        stop = start_index + 1
                        args = [explicit_arg]
                        action_tuples.append((action, args, option_string))
                        break
                    # error if a double-dash option did not use the
                    # explicit argument
                    else:
                        msg = _('ignored explicit argument %r')
                        raise ArgumentError(action, msg % explicit_arg)
                # if there is no explicit argument, try to match the
                # optional's string arguments with the following strings
                # if successful, exit the loop
                else:
                    start = start_index + 1
                    selected_patterns = arg_strings_pattern[start:]
                    arg_count = match_argument(action, selected_patterns)
                    stop = start + arg_count
                    args = arg_strings[start:stop]
                    action_tuples.append((action, args, option_string))
                    break
            # add the Optional to the list and return the index at which
            # the Optional's string args stopped
            assert action_tuples
            for action, args, option_string in action_tuples:
                take_action(action, args, option_string)
            return stop
        # the list of Positionals left to be parsed; this is modified
        # by consume_positionals()
        positionals = self._get_positional_actions()
        # function to convert arg_strings into positional actions
        def consume_positionals(start_index):
            # match as many Positionals as possible
            match_partial = self._match_arguments_partial
            selected_pattern = arg_strings_pattern[start_index:]
            arg_counts = match_partial(positionals, selected_pattern)
            # slice off the appropriate arg strings for each Positional
            # and add the Positional and its args to the list
            for action, arg_count in zip(positionals, arg_counts):
                args = arg_strings[start_index: start_index + arg_count]
                start_index += arg_count
                take_action(action, args)
            # slice off the Positionals that we just parsed and return the
            # index at which the Positionals' string args stopped
            positionals[:] = positionals[len(arg_counts):]
            return start_index
        # consume Positionals and Optionals alternately, until we have
        # passed the last option string
        extras = []
        start_index = 0
        if option_string_indices:
            max_option_string_index = max(option_string_indices)
        else:
            max_option_string_index = -1
        while start_index <= max_option_string_index:
            # consume any Positionals preceding the next option
            next_option_string_index = min([
                index
                for index in option_string_indices
                if index >= start_index])
            if start_index != next_option_string_index:
                positionals_end_index = consume_positionals(start_index)
                # only try to parse the next optional if we didn't consume
                # the option string during the positionals parsing
                if positionals_end_index > start_index:
                    start_index = positionals_end_index
                    continue
                else:
                    start_index = positionals_end_index
            # if we consumed all the positionals we could and we're not
            # at the index of an option string, there were extra arguments
            if start_index not in option_string_indices:
                strings = arg_strings[start_index:next_option_string_index]
                extras.extend(strings)
                start_index = next_option_string_index
            # consume the next optional and any arguments for it
            start_index = consume_optional(start_index)
        # consume any positionals following the last Optional
        stop_index = consume_positionals(start_index)
        # if we didn't consume all the argument strings, there were extras
        extras.extend(arg_strings[stop_index:])
        # make sure all required actions were present and also convert
        # action defaults which were not given as arguments
        required_actions = []
        for action in self._actions:
            if action not in seen_actions:
                if action.required:
                    required_actions.append(_get_action_name(action))
                else:
                    # Convert action default now instead of doing it before
                    # parsing arguments to avoid calling convert functions
                    # twice (which may fail) if the argument was given, but
                    # only if it was defined already in the namespace
                    if (action.default is not None and
                        isinstance(action.default, str) and
                        hasattr(namespace, action.dest) and
                        action.default is getattr(namespace, action.dest)):
                        setattr(namespace, action.dest,
                                self._get_value(action, action.default))
        if required_actions:
            self.error(_('the following arguments are required: %s') %
                       ', '.join(required_actions))
        # make sure all required groups had one option present
        for group in self._mutually_exclusive_groups:
            if group.required:
                for action in group._group_actions:
                    if action in seen_non_default_actions:
                        break
                # if no actions were used, report the error
                else:
                    names = [_get_action_name(action)
                             for action in group._group_actions
                             if action.help is not SUPPRESS]
                    msg = _('one of the arguments %s is required')
                    self.error(msg % ' '.join(names))
        # return the updated namespace and the extra arguments
        return namespace, extras
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
with open(arg_string[1:]) as args_file:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
except OSError:
err = _sys.exc_info()[1]
self.error(str(err))
# return the modified argument list
return new_arg_strings
    def convert_arg_line_to_args(self, arg_line):
        """Hook for subclasses: split one args-file line into arguments.

        By default each line is one argument; override e.g. to split on
        whitespace.
        """
        return [arg_line]
def _match_argument(self, action, arg_strings_pattern):
# match the pattern for this action to the arg strings
nargs_pattern = self._get_nargs_pattern(action)
match = _re.match(nargs_pattern, arg_strings_pattern)
# raise an exception if we weren't able to find a match
if match is None:
nargs_errors = {
None: _('expected one argument'),
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
default = ngettext('expected %s argument',
'expected %s arguments',
action.nargs) % action.nargs
msg = nargs_errors.get(action.nargs, default)
raise ArgumentError(action, msg)
# return the number of arguments matched
return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
    def _parse_optional(self, arg_string):
        """Classify *arg_string*; return an option tuple or None.

        Returns None when the string should be treated as a positional.
        Otherwise returns (action, option_string, explicit_arg); action is
        None when the string looks like an option but is unknown to this
        parser (it might still be valid for a subparser).
        """
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None
        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None
        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None
        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None
        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg
        if self.allow_abbrev:
            # search through all possible prefixes of the option string
            # and all actions in the parser for possible interpretations
            option_tuples = self._get_option_tuples(arg_string)
            # if multiple actions match, the option string was ambiguous
            if len(option_tuples) > 1:
                options = ', '.join([option_string
                    for action, option_string, explicit_arg in option_tuples])
                args = {'option': arg_string, 'matches': options}
                msg = _('ambiguous option: %(option)s could match %(matches)s')
                self.error(msg % args)
            # if exactly one action matched, this segmentation is good,
            # so return the parsed action
            elif len(option_tuples) == 1:
                option_tuple, = option_tuples
                return option_tuple
        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None
        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None
        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None
    def _get_option_tuples(self, option_string):
        """Return candidate (action, option_string, explicit_arg) tuples
        for an abbreviated *option_string*, one per registered option it
        could be a prefix of.  Used only when allow_abbrev is enabled.
        """
        result = []
        # option strings starting with two prefix characters are only
        # split at the '='
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            # NOTE: the loop variable deliberately shadows the parameter;
            # from here on option_string is each registered option string
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # single character options can be concatenated with their arguments
        # but multiple character options always have to have their argument
        # separate
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]
            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # shouldn't ever get here
        else:
            self.error(_('unexpected option string: %s') % option_string)
        # return the collected option tuples
        return result
def _get_nargs_pattern(self, action):
# in all examples below, we have to allow for '--' args
# which are represented as '-' in the pattern
nargs = action.nargs
# the default (None) is assumed to be a single argument
if nargs is None:
nargs_pattern = '(-*A-*)'
# allow zero or one arguments
elif nargs == OPTIONAL:
nargs_pattern = '(-*A?-*)'
# allow zero or more arguments
elif nargs == ZERO_OR_MORE:
nargs_pattern = '(-*[A-]*)'
# allow one or more arguments
elif nargs == ONE_OR_MORE:
nargs_pattern = '(-*A[A-]*)'
# allow any number of options or arguments
elif nargs == REMAINDER:
nargs_pattern = '([-AO]*)'
# allow one argument followed by any number of options or arguments
elif nargs == PARSER:
nargs_pattern = '(-*A[-AO]*)'
# all others should be integers
else:
nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
# if this is an optional action, -- is not allowed
if action.option_strings:
nargs_pattern = nargs_pattern.replace('-*', '')
nargs_pattern = nargs_pattern.replace('-', '')
# return the pattern
return nargs_pattern
# ========================
# Value conversion methods
# ========================
    def _get_values(self, action, arg_strings):
        """Convert the raw argument strings matched for *action* into the
        final value, honouring the action's nargs semantics, applying type
        conversion via _get_value() and validating via _check_value().
        """
        # for everything but PARSER, REMAINDER args, strip out first '--'
        if action.nargs not in [PARSER, REMAINDER]:
            try:
                arg_strings.remove('--')
            except ValueError:
                pass

        # optional argument produces a default when not present
        if not arg_strings and action.nargs == OPTIONAL:
            # for optionals absence means the const; for positionals,
            # the default
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            if isinstance(value, str):
                value = self._get_value(action, value)
                self._check_value(action, value)

        # when nargs='*' on a positional, if there were no command-line
        # args, use the default if it is anything other than None
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)

        # single argument or optional argument produces a single value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)

        # REMAINDER arguments convert all values, checking none
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]

        # PARSER arguments convert all values, but check only the first
        # (the first is the sub-command name; the rest belong to the
        # sub-parser)
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])

        # all other types of nargs produce a list
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)

        # return the converted value
        return value
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
# convert the value to the appropriate type
try:
result = type_func(arg_string)
# ArgumentTypeErrors indicate errors
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
# TypeErrors or ValueErrors also indicate errors
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
args = {'type': name, 'value': arg_string}
msg = _('invalid %(type)s value: %(value)r')
raise ArgumentError(action, msg % args)
# return the converted value
return result
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
args = {'value': value,
'choices': ', '.join(map(repr, action.choices))}
msg = _('invalid choice: %(value)r (choose from %(choices)s)')
raise ArgumentError(action, msg % args)
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
formatter = self._get_formatter()
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
return formatter.format_help()
def format_help(self):
formatter = self._get_formatter()
# usage
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
# positionals, optionals and user-defined groups
for action_group in self._action_groups:
formatter.start_section(action_group.title)
formatter.add_text(action_group.description)
formatter.add_arguments(action_group._group_actions)
formatter.end_section()
# epilog
formatter.add_text(self.epilog)
# determine help from format above
return formatter.format_help()
def _get_formatter(self):
return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_usage(), file)
def print_help(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_help(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
# ===============
# Exiting methods
# ===============
def exit(self, status=0, message=None):
if message:
self._print_message(message, _sys.stderr)
_sys.exit(status)
def error(self, message):
"""error(message: string)
Prints a usage message incorporating the message to stderr and
exits.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
self.print_usage(_sys.stderr)
args = {'prog': self.prog, 'message': message}
self.exit(2, _('%(prog)s: error: %(message)s\n') % args)
| {
"pile_set_name": "Github"
} |
/*--------------------------------------------------------*\
| |
| hprose |
| |
| Official WebSite: https://hprose.com |
| |
| UInt32Deserializer.cs |
| |
| UInt32Deserializer class for C#. |
| |
| LastModified: Jun 30, 2020 |
| Author: Ma Bingyao <[email protected]> |
| |
\*________________________________________________________*/
namespace Hprose.IO.Deserializers {
    using static Tags;

    internal class UInt32Deserializer : Deserializer<uint> {
        // Decode a uint from the next value in the stream, dispatching on
        // the tag byte that the caller has already consumed.
        public override uint Read(Reader reader, int tag) {
            // single-digit tags encode the digit's own value
            if (tag >= '0' && tag <= '9') {
                return (uint)(tag - '0');
            }
            switch (tag) {
                case TagInteger:
                    return (uint)ValueReader.ReadInt(reader.Stream);
                case TagLong:
                    return (uint)ValueReader.ReadLong(reader.Stream);
                case TagDouble:
                    return (uint)ValueReader.ReadDouble(reader.Stream);
                case TagTrue:
                    return 1;
                case TagFalse:
                case TagEmpty:
                    return 0;
                case TagUTF8Char:
                    return Converter<uint>.Convert(ValueReader.ReadUTF8Char(reader.Stream));
                case TagString:
                    return Converter<uint>.Convert(ReferenceReader.ReadString(reader));
                default:
                    // unknown tags fall back to the base deserializer
                    return base.Read(reader, tag);
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
#N canvas 394 1 692 722 10;
#X obj 45 615 dac~ 1 2;
#X obj 45 574 *~ 0.1;
#X obj 45 553 osc~ 440;
#X text 464 21 ............................;
#X text 464 31 . ____ ._____ _ .......___ .;
#X text 464 41 .| __ )| ____| |....../ _ |.;
#X text 464 61 .| |_) | |___| |___ / ___ |.;
#X text 464 71 .|____/|_____|_____/_/..._|.io;
#X text 464 81 ............................;
#X text 464 51 .| __ || __|.| |...../ _| |.;
#X text 35 32 Using custom render.cpp files;
#X text 35 42 =============================;
#X text 35 58 It is possible to modify the default libpd and heavy
;
#X text 35 72 wrapper templates in order to combine c++ code with;
#X text 35 86 your puredata patches.;
#X text 35 100 In this example you should hear a tremolo effect being
;
#X text 35 114 applied to the output. This is done by taking the output
;
#X text 35 128 buffer returned from libpd or heavy and applying further
;
#X text 35 142 processing before writing the buffer to the Bela context.
;
#X text 35 156 It is also possible to define more input or output channels
;
#X text 35 170 within the render.cpp file in order to pass auxiliary
;
#X text 35 198 code.;
#X text 35 184 signals (e.g. envelopes) across the pd patch and the
c++;
#X obj 46 716 adc~ 3;
#X obj 46 738 snapshot~;
#X obj 97 694 loadbang;
#X obj 97 716 metro 5;
#X obj 46 804 * 20;
#X obj 46 826 + 0.5;
#X text 94 826 map to exponential range 0.5Hz<->20Hz;
#X text 194 848 <<< this receiver doesn't exist in the patch but is
parsed;
#X text 36 520 Simple 440Hz sine wave;
#X text 36 530 ----------------------;
#X text 36 660 Sending messages to modified render.cpp;
#X text 36 670 ---------------------------------------;
#X text 35 211 In this example we are sending float values to a receiver
;
#X text 35 225 named 'tremoloRate' which is parsed by the modified
;
#X text 35 239 render.cpp file and used to control the rate of the
tremolo;
#X text 35 253 effect applied to the output.;
#X obj 46 760 t f f;
#X obj 46 782 *;
#X text 35 267 See the render.cpp file in the project folder for the
libpd;
#X text 35 282 implementation. The heavy implementation can be found
inside;
#X text 35 295 the enclosed /heavy folder. (This is where custom render.cpp
;
#X text 35 309 files for heavy need to be placed when compiling with
heavy);
#X text 219 860 by a hook function in the modified render.cpp file.
;
#X text 240 897 Bela_floatHook(const char *source \, float value);
#X text 220 962 For heavy this is:;
#X text 219 884 For libpd this function is:;
#X text 240 973 sendHook( double timestamp \, const char *receiverName
;
#X text 300 986 \, const HvMessage *const m \, void *userData);
#X text 220 911 Note that in libpd the receiver name needs to be registered
;
#X text 220 925 using the libpd_bind(const char *sym) function (see
the;
#X text 220 939 contents of setup());
#X text 35 324 Search for 'MODIFICATION' (no quotation marks) inside
either;
#X text 35 338 of the render files to inspect all the modifications
that;
#X text 36 352 were made for this example.;
#X text 36 386 Running the patch with the modified render.cpp file
;
#X text 36 396 ---------------------------------------------------
;
#X text 36 409 If using libpd \, you can simply run this patch from
the;
#X text 36 423 Bela IDE. The system will automatically detect the;
#X text 36 437 modified file and use it instead of the template.;
#X text 36 457 Similarly \, the build script for the Heavy compiler
;
#X text 36 471 will detect the presence of a render.cpp file inside
;
#X text 36 485 the enclosed /heavy/ folder and bypass the default wrapper.
;
#X obj 46 848 s tremoloRate @hv_param;
#X text 33 895 The "@hv_param" parameter;
#X text 33 905 is required by Heavy;
#X text 33 915 and it is ignored by libpd;
#X text 220 1002 The receiver name does not need to be registered anywhere
\,;
#X text 143 874 ^;
#X text 143 868 ^;
#X text 143 880 ^;
#X text 220 1012 but we need to use the @hv_param bit.;
#X connect 1 0 0 1;
#X connect 1 0 0 0;
#X connect 2 0 1 0;
#X connect 23 0 24 0;
#X connect 24 0 39 0;
#X connect 25 0 26 0;
#X connect 26 0 24 0;
#X connect 27 0 28 0;
#X connect 28 0 65 0;
#X connect 39 0 40 0;
#X connect 39 1 40 1;
#X connect 40 0 27 0;
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.core.thing.binding;
import java.util.List;
import java.util.Map;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.config.core.Configuration;
import org.openhab.core.config.core.validation.ConfigValidationException;
import org.openhab.core.thing.Bridge;
import org.openhab.core.thing.Channel;
import org.openhab.core.thing.ChannelGroupUID;
import org.openhab.core.thing.ChannelUID;
import org.openhab.core.thing.Thing;
import org.openhab.core.thing.ThingStatusInfo;
import org.openhab.core.thing.ThingTypeUID;
import org.openhab.core.thing.ThingUID;
import org.openhab.core.thing.binding.builder.ChannelBuilder;
import org.openhab.core.thing.type.ChannelGroupType;
import org.openhab.core.thing.type.ChannelGroupTypeUID;
import org.openhab.core.thing.type.ChannelType;
import org.openhab.core.thing.type.ChannelTypeUID;
import org.openhab.core.types.Command;
import org.openhab.core.types.State;
/**
 * {@link ThingHandlerCallback} is callback interface for {@link ThingHandler}s. The implementation of a
 * {@link ThingHandler} must use the callback to inform the framework about changes like state updates, status updates
 * or an update of the whole thing.
 *
 * @author Dennis Nobel - Initial contribution
 * @author Stefan Bußweiler - Added new thing status info, added new configuration update info
 * @author Christoph Weitkamp - Moved OSGI ServiceTracker from BaseThingHandler to ThingHandlerCallback
 * @author Christoph Weitkamp - Added preconfigured ChannelGroupBuilder
 */
@NonNullByDefault
public interface ThingHandlerCallback {

    /**
     * Informs about an updated state for a channel.
     *
     * @param channelUID channel UID (must not be null)
     * @param state state (must not be null)
     */
    void stateUpdated(ChannelUID channelUID, State state);

    /**
     * Informs about a command, which is sent from the channel.
     *
     * @param channelUID channel UID
     * @param command command
     */
    void postCommand(ChannelUID channelUID, Command command);

    /**
     * Informs about an updated status of a thing.
     *
     * @param thing thing (must not be null)
     * @param thingStatus thing status (must not be null)
     */
    void statusUpdated(Thing thing, ThingStatusInfo thingStatus);

    /**
     * Informs about an update of the whole thing.
     *
     * @param thing thing that was updated (must not be null)
     * @throws IllegalStateException if the {@link Thing} is read-only.
     */
    void thingUpdated(Thing thing);

    /**
     * Validates the given configuration parameters against the configuration description.
     *
     * @param thing thing with the updated configuration (must not be null)
     * @param configurationParameters the configuration parameters to be validated
     * @throws ConfigValidationException if one or more of the given configuration parameters do not match
     *             their declarations in the configuration description
     */
    void validateConfigurationParameters(Thing thing, Map<String, Object> configurationParameters);

    /**
     * Informs about an updated configuration of a thing.
     *
     * @param thing thing with the updated configuration (must not be null)
     */
    void configurationUpdated(Thing thing);

    /**
     * Informs the framework that the ThingType of the given {@link Thing} should be changed.
     *
     * @param thing thing that should be migrated to another ThingType (must not be null)
     * @param thingTypeUID the new type of the thing (must not be null)
     * @param configuration a configuration that should be applied to the given {@link Thing}
     */
    void migrateThingType(Thing thing, ThingTypeUID thingTypeUID, Configuration configuration);

    /**
     * Informs the framework that a channel has been triggered.
     *
     * @param thing thing (must not be null)
     * @param channelUID UID of the channel over which has been triggered.
     * @param event Event.
     */
    void channelTriggered(Thing thing, ChannelUID channelUID, String event);

    /**
     * Creates a {@link ChannelBuilder} which is preconfigured with values from the given {@link ChannelType}.
     *
     * @param channelUID the UID of the {@link Channel} to be created
     * @param channelTypeUID the {@link ChannelTypeUID} for which the {@link Channel} should be created
     * @return a preconfigured {@link ChannelBuilder}
     * @throws IllegalArgumentException if the referenced {@link ChannelType} is not known
     */
    ChannelBuilder createChannelBuilder(ChannelUID channelUID, ChannelTypeUID channelTypeUID);

    /**
     * Creates a {@link ChannelBuilder} which is preconfigured with values from the given {@link Channel} and allows to
     * modify it. The methods {@link BaseThingHandler#editThing(Thing)} and {@link BaseThingHandler#updateThing(Thing)}
     * must be called to persist the changes.
     *
     * @param thing {@link Thing} (must not be null)
     * @param channelUID the UID of the {@link Channel} to be edited
     * @return a preconfigured {@link ChannelBuilder}
     * @throws IllegalArgumentException if no {@link Channel} with the given UID exists for the given {@link Thing}
     */
    ChannelBuilder editChannel(Thing thing, ChannelUID channelUID);

    /**
     * Creates a list of {@link ChannelBuilder}s which are preconfigured with values from the given
     * {@link ChannelGroupType}.
     *
     * @param channelGroupUID the UID of the channel group to be created
     * @param channelGroupTypeUID the {@link ChannelGroupTypeUID} for which the {@link Channel}s should be created
     * @return a list of preconfigured {@link ChannelBuilder}s
     * @throws IllegalArgumentException if the referenced {@link ChannelGroupType} is not known
     */
    List<ChannelBuilder> createChannelBuilders(ChannelGroupUID channelGroupUID,
            ChannelGroupTypeUID channelGroupTypeUID);

    /**
     * Returns whether at least one item is linked for the given UID of the channel.
     *
     * @param channelUID UID of the channel (must not be null)
     * @return true if at least one item is linked, false otherwise
     */
    boolean isChannelLinked(ChannelUID channelUID);

    /**
     * Returns the bridge of the thing.
     *
     * @param bridgeUID {@link ThingUID} UID of the bridge (must not be null)
     * @return returns the bridge of the thing or null if the thing has no bridge
     */
    @Nullable
    Bridge getBridge(ThingUID bridgeUID);
}
| {
"pile_set_name": "Github"
} |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Text;
#if DNXCORE50
using Xunit;
using Test = Xunit.FactAttribute;
using Assert = Newtonsoft.Json.Tests.XUnitAssert;
#else
using NUnit.Framework;
#endif
namespace Newtonsoft.Json.Tests.Documentation.Samples.Serializer
{
    // Documentation sample: with MemberSerialization.OptIn, only members
    // explicitly marked with [JsonProperty] are serialized; all others
    // (here, Id) are skipped.  The #region markers delimit the snippets
    // extracted into the documentation -- do not remove them.
    [TestFixture]
    public class JsonObjectAttributeOptIn : TestFixtureBase
    {
        #region Types
        [JsonObject(MemberSerialization.OptIn)]
        public class File
        {
            // excluded from serialization
            // does not have JsonPropertyAttribute
            public Guid Id { get; set; }

            [JsonProperty]
            public string Name { get; set; }

            [JsonProperty]
            public int Size { get; set; }
        }
        #endregion

        // Serializes a File and verifies that only the opted-in members
        // (Name, Size) appear in the resulting JSON.
        [Test]
        public void Example()
        {
            #region Usage
            File file = new File
            {
                Id = Guid.NewGuid(),
                Name = "ImportantLegalDocuments.docx",
                Size = 50 * 1024
            };

            string json = JsonConvert.SerializeObject(file, Formatting.Indented);

            Console.WriteLine(json);
            // {
            //   "Name": "ImportantLegalDocuments.docx",
            //   "Size": 51200
            // }
            #endregion

            StringAssert.AreEqual(@"{
  ""Name"": ""ImportantLegalDocuments.docx"",
  ""Size"": 51200
}", json);
        }
    }
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<title>Rgaa32017 Test.11.9.2 Passed 01</title>
</head>
<body class="Passed">
<div>
<h1>Rgaa32017 Test.11.9.2 Passed 01</h1>
<!-- START [test-detail] -->
<div class="test-detail" lang="fr">
Dans chaque formulaire, l'intitulé de chaque <a href="http://references.modernisation.gouv.fr/rgaa-accessibilite/glossaire.html#bouton-formulaire">bouton</a> implémenté via une propriété ARIA <code lang="en">aria-labelledby</code> vérifie-t-il ces conditions ? <ul><li><a href="http://references.modernisation.gouv.fr/rgaa-accessibilite/glossaire.html#passage-texte-aria">Le passage de texte</a> servant d'intitulé possède un attribut <code lang="en">id</code>.</li> <li>La valeur de l'attribut <code lang="en">id</code> est unique.</li> <li>Les valeurs de la propriété ARIA <code lang="en">aria-labelledby</code> sont égales aux valeurs des attributs <code lang="en">id</code> des passages de texte utilisés pour créer l'intitulé.</li> <li><a href="http://references.modernisation.gouv.fr/rgaa-accessibilite/glossaire.html#passage-texte-aria">Le passage de texte</a> est pertinent.</li> </ul>
</div>
<!-- END [test-detail] -->
<div class="testcase">
</div>
<div class="test-explanation">
Passed.
</div>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
# Date : 29th July
# Author : Alexander Baker
import sys
from pylab import figure, show, clf, savefig, cm
from numpy.fft import fft, fft2, ifft, ifft2, fftshift
from optparse import OptionParser
from numpy import *
def generateResolution(n):
    """Return (2**n, 2**n - 1) for exponent *n*.

    The first element is the number of grid points along one axis; the
    second is the largest valid index, used as the sample-count multiplier
    when building the coordinate ranges in main().
    """
    # compute the power once instead of twice
    points = 2 ** n
    return points, points - 1
def store_value(option, opt_str, value, parser):
    """optparse callback: record *value* on parser.values under the
    option's dest name."""
    setattr(parser.values, option.dest, value)
def main():
    """Run the split-step Fourier beam-propagation simulation.

    Parses command-line overrides for grid/beam parameters, builds an
    initial vortex beam profile, propagates it with the split-step FFT
    method, and saves start/end phase and amplitude images as PNGs.
    (Python 2 script; plotting uses pylab.)
    """
    parser = OptionParser()
    parser.add_option("-r", "--resolution", action="callback", callback=store_value, type="int", nargs=1, dest="resolution", help="resolution of the grid parameter")
    parser.add_option("-g", "--grid", action="callback", callback=store_value, type="int", nargs=1, dest="grid", help="grid size parameter")
    parser.add_option("-s", "--steps", action="callback", callback=store_value, type="int", nargs=1, dest="steps", help="number of steps")
    parser.add_option("-b", "--beam", action="callback", callback=store_value, type="int", nargs=1, dest="beam", help="beam size")
    parser.add_option("-c", "--core", action="callback", callback=store_value, type="float", nargs=1, dest="core", help="beam size")
    parser.add_option("-p", "--phase", action="callback", callback=store_value, type="int", nargs=1, dest="phase", help="phase size parameter")
    parser.add_option("-i", "--image", action="callback", callback=store_value, type="string", nargs=1, dest="image", help="phase size parameter")
    (options, args) = parser.parse_args()
    # default simulation parameters; command-line options override below
    grid_resolution = 6
    grid_size = 8
    steps = 35
    beam = 8
    core = 0.3
    phase = 15
    image = 'bw'
    if options.resolution:
        grid_resolution = options.resolution
    if options.grid:
        grid_size = options.grid
    if options.steps:
        steps = options.steps
    if options.beam:
        beam = options.beam
    if options.core:
        core = options.core
    if options.phase:
        phase = options.phase
    if options.image:
        image = options.image
    print '\n######################################################'
    print '# Numerical Lattice Model'
    print '# Alexander Baker - August 2007'
    print '######################################################\n'
    # grid_resolution becomes the point count 2**n; grid_resolution_1 = 2**n - 1
    grid_resolution, grid_resolution_1 = generateResolution(grid_resolution)
    # The grid size effects the number of steps we apply to get to the upper limit
    print "grid_size : [%d]\ngrid_resolution [%d]\ngrid_resolution-1 [%d]" %(grid_size, grid_resolution, grid_resolution_1)
    # spatial grid axes spanning [-grid_size, +grid_size)
    x1 = arange(-1 * 1.0* grid_size,(-1 * grid_size) + grid_resolution_1 * 1.0 * (2 * grid_size)/grid_resolution, (grid_size * 1.0)/grid_resolution)
    x2 = arange(-1 * 1.0 * grid_size,(-1 * grid_size) + grid_resolution_1 * 1.0 * (2 * grid_size)/grid_resolution, (grid_size * 1.0)/grid_resolution)
    xmin, xmax, ymin, ymax = -1 * 1.0 * grid_size, (-1 * grid_size) + grid_resolution_1 * 1.0 * (2 * grid_size)/grid_resolution, -1 * 1.0 * grid_size, (-1 * grid_size) + grid_resolution_1 * 1.0 * (2 * grid_size)/grid_resolution
    extent = xmin, xmax, ymin, ymax
    print "\ngrid extent X-[%f][%f] Y-[%f][%f]" % (xmin, xmax, ymin, ymax)
    print "shape x1", shape(x1)
    print "shape x2", shape(x2)
    print "step [%f]" % ((grid_size * 1.0)/grid_resolution)
    print "start [%f], end[%f]" % (-1 * 1.0* grid_size, grid_resolution_1 * 1.0 * (2 * grid_size)/grid_resolution)
    [x1,x2] = meshgrid(x1,x2)
    # beam radius, vortex core width, vortex position flag and step size
    rbeam = beam
    wcore = core
    vpos = 1
    _del = 1
    print "\nrbeam [%d]\nwcore [%f]\nvpos [%f]\n_del [%f]" % (rbeam, wcore, vpos, _del)
    #
    # Step 1. Take your initial beam profile (gaussian, a vortex etc etc) at z = 0
    #
    # gaussian envelope * azimuthal phase winding (charge = phase) * tanh core
    y = 1.0*exp(-2*(x1**2 + x2**2)/rbeam**2)*exp(1j* phase *(+arctan2(x2,x1))) * tanh(pow((x1**2 + x2**2), 0.5)/wcore)
    # plot the initial phase (left) and amplitude (right)
    fig1 = figure(1)
    fig1.clf()
    ax1a = fig1.add_subplot(121)
    if image == 'bw':
        ax1a.imshow(angle((y)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
    else:
        ax1a.imshow(angle((y)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
    ax1a.set_title(r'Angle')
    ax1b = fig1.add_subplot(122)
    if image == 'bw':
        ax1b.imshow(abs((y)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
    else:
        ax1b.imshow(abs((y)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
    ax1b.set_title(r'Amplitude')
    savefig('big_start_' + str(wcore)+'_' + str(vpos) +'.png')
    # frequency-domain grid for the linear propagation factor
    u1 = arange(-1.0,-1+1.0*grid_resolution_1* ((2 * 1.0)/grid_resolution), 1.0/grid_resolution)
    u2 = arange(-1.0,-1+1.0*grid_resolution_1* ((2 * 1.0)/grid_resolution), 1.0/grid_resolution)
    u1min, u1max, u2min, u2max = -1.0, -1+1.0*grid_resolution_1* ((2 * 1.0)/grid_resolution), -1.0, -1+1.0*grid_resolution_1* ((2 * 1.0)/grid_resolution)
    print "\npropagation grid X-[%f][%f] Y-[%f][%f]" % (u1min, u1max, u2min, u2max)
    print "shape u1", shape(u1)
    print "shape u2", shape(u2)
    print "step [%f]" % (1.0/grid_resolution)
    print "start [%f], end[%f]" % (-1.0, -1+1.0*grid_resolution_1* ((2 * 1.0)/grid_resolution))
    print "\nbeam power (start) - [%f]" % (sum(sum(abs(y**2))))
    [u1,u2] = meshgrid(u1,u2)
    # quadratic spectral phase, shifted to match fft2's frequency ordering
    t = exp(2*pi*1j*(u1**2 + u2**2)*_del)
    w = fftshift(t)
    #
    # Step 2. Split step progagation
    #
    for i in arange(100,100+steps, 1):
        # linear half-step in the frequency domain
        z = fft2(y)
        zp = z * w
        yp = ifft2(zp)
        # nonlinear phase step in the spatial domain
        # NOTE(review): the trailing '*_del' multiplies the whole phase
        # factor rather than appearing inside the exponent -- verify intent
        p = (exp(+0.01*pi*1j*(x1**2 + x2**2)*_del + 0.05*pi*1j*y*conj(y))*_del);
        yp = yp * p
        y = yp
        # second linear half-step
        zp = fft2(yp)
        zp = zp * w
        yp = ifft2(zp)
        # per-step diagnostic plot of the current field
        fig3 = figure(3)
        fig3.clf()
        ax3 = fig3.add_subplot(111)
        if image == 'bw':
            ax3.imshow(abs((yp)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
        else:
            ax3.imshow(abs((yp)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
        ax3 = fig3.add_subplot(111)
        if image == 'bw':
            ax3.imshow(angle((yp)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
        else :
            ax3.imshow(angle((yp)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
        print sum(sum(abs(yp**2))), i-100
    print "beam power (end) - [%f]" % (sum(sum(abs(yp**2))))
    # plot the final phase (left) and amplitude (right)
    fig2 = figure(2)
    fig2.clf()
    ax2a = fig2.add_subplot(121)
    if image == 'bw':
        ax2a.imshow(angle((yp)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
    else:
        ax2a.imshow(angle((yp)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
    ax2a.set_title(r'Angle')
    ax2b = fig2.add_subplot(122)
    if image == 'bw':
        ax2b.imshow(abs((yp)), cmap=cm.gist_gray, alpha=.9, interpolation='bilinear', extent=extent)
    else:
        ax2b.imshow(abs((yp)), cmap=cm.jet, alpha=.9, interpolation='bilinear', extent=extent)
    ax2b.set_title(r'Amplitude')
    savefig('big_end_' + str(wcore)+'_' + str(vpos) +'.png')
    print '\ndone. ok'
if __name__ == "__main__":
main()
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package http2
// A list of the possible cipher suite ids. Taken from
// http://www.iana.org/assignments/tls-parameters/tls-parameters.txt
const (
cipher_TLS_NULL_WITH_NULL_NULL uint16 = 0x0000
cipher_TLS_RSA_WITH_NULL_MD5 uint16 = 0x0001
cipher_TLS_RSA_WITH_NULL_SHA uint16 = 0x0002
cipher_TLS_RSA_EXPORT_WITH_RC4_40_MD5 uint16 = 0x0003
cipher_TLS_RSA_WITH_RC4_128_MD5 uint16 = 0x0004
cipher_TLS_RSA_WITH_RC4_128_SHA uint16 = 0x0005
cipher_TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5 uint16 = 0x0006
cipher_TLS_RSA_WITH_IDEA_CBC_SHA uint16 = 0x0007
cipher_TLS_RSA_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x0008
cipher_TLS_RSA_WITH_DES_CBC_SHA uint16 = 0x0009
cipher_TLS_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0x000A
cipher_TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x000B
cipher_TLS_DH_DSS_WITH_DES_CBC_SHA uint16 = 0x000C
cipher_TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA uint16 = 0x000D
cipher_TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x000E
cipher_TLS_DH_RSA_WITH_DES_CBC_SHA uint16 = 0x000F
cipher_TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0x0010
cipher_TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x0011
cipher_TLS_DHE_DSS_WITH_DES_CBC_SHA uint16 = 0x0012
cipher_TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA uint16 = 0x0013
cipher_TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x0014
cipher_TLS_DHE_RSA_WITH_DES_CBC_SHA uint16 = 0x0015
cipher_TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0x0016
cipher_TLS_DH_anon_EXPORT_WITH_RC4_40_MD5 uint16 = 0x0017
cipher_TLS_DH_anon_WITH_RC4_128_MD5 uint16 = 0x0018
cipher_TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA uint16 = 0x0019
cipher_TLS_DH_anon_WITH_DES_CBC_SHA uint16 = 0x001A
cipher_TLS_DH_anon_WITH_3DES_EDE_CBC_SHA uint16 = 0x001B
// Reserved uint16 = 0x001C-1D
cipher_TLS_KRB5_WITH_DES_CBC_SHA uint16 = 0x001E
cipher_TLS_KRB5_WITH_3DES_EDE_CBC_SHA uint16 = 0x001F
cipher_TLS_KRB5_WITH_RC4_128_SHA uint16 = 0x0020
cipher_TLS_KRB5_WITH_IDEA_CBC_SHA uint16 = 0x0021
cipher_TLS_KRB5_WITH_DES_CBC_MD5 uint16 = 0x0022
cipher_TLS_KRB5_WITH_3DES_EDE_CBC_MD5 uint16 = 0x0023
cipher_TLS_KRB5_WITH_RC4_128_MD5 uint16 = 0x0024
cipher_TLS_KRB5_WITH_IDEA_CBC_MD5 uint16 = 0x0025
cipher_TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA uint16 = 0x0026
cipher_TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA uint16 = 0x0027
cipher_TLS_KRB5_EXPORT_WITH_RC4_40_SHA uint16 = 0x0028
cipher_TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5 uint16 = 0x0029
cipher_TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5 uint16 = 0x002A
cipher_TLS_KRB5_EXPORT_WITH_RC4_40_MD5 uint16 = 0x002B
cipher_TLS_PSK_WITH_NULL_SHA uint16 = 0x002C
cipher_TLS_DHE_PSK_WITH_NULL_SHA uint16 = 0x002D
cipher_TLS_RSA_PSK_WITH_NULL_SHA uint16 = 0x002E
cipher_TLS_RSA_WITH_AES_128_CBC_SHA uint16 = 0x002F
cipher_TLS_DH_DSS_WITH_AES_128_CBC_SHA uint16 = 0x0030
cipher_TLS_DH_RSA_WITH_AES_128_CBC_SHA uint16 = 0x0031
cipher_TLS_DHE_DSS_WITH_AES_128_CBC_SHA uint16 = 0x0032
cipher_TLS_DHE_RSA_WITH_AES_128_CBC_SHA uint16 = 0x0033
cipher_TLS_DH_anon_WITH_AES_128_CBC_SHA uint16 = 0x0034
cipher_TLS_RSA_WITH_AES_256_CBC_SHA uint16 = 0x0035
cipher_TLS_DH_DSS_WITH_AES_256_CBC_SHA uint16 = 0x0036
cipher_TLS_DH_RSA_WITH_AES_256_CBC_SHA uint16 = 0x0037
cipher_TLS_DHE_DSS_WITH_AES_256_CBC_SHA uint16 = 0x0038
cipher_TLS_DHE_RSA_WITH_AES_256_CBC_SHA uint16 = 0x0039
cipher_TLS_DH_anon_WITH_AES_256_CBC_SHA uint16 = 0x003A
cipher_TLS_RSA_WITH_NULL_SHA256 uint16 = 0x003B
cipher_TLS_RSA_WITH_AES_128_CBC_SHA256 uint16 = 0x003C
cipher_TLS_RSA_WITH_AES_256_CBC_SHA256 uint16 = 0x003D
cipher_TLS_DH_DSS_WITH_AES_128_CBC_SHA256 uint16 = 0x003E
cipher_TLS_DH_RSA_WITH_AES_128_CBC_SHA256 uint16 = 0x003F
cipher_TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 uint16 = 0x0040
cipher_TLS_RSA_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0041
cipher_TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0042
cipher_TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0043
cipher_TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0044
cipher_TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0045
cipher_TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA uint16 = 0x0046
// Reserved uint16 = 0x0047-4F
// Reserved uint16 = 0x0050-58
// Reserved uint16 = 0x0059-5C
// Unassigned uint16 = 0x005D-5F
// Reserved uint16 = 0x0060-66
cipher_TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 uint16 = 0x0067
cipher_TLS_DH_DSS_WITH_AES_256_CBC_SHA256 uint16 = 0x0068
cipher_TLS_DH_RSA_WITH_AES_256_CBC_SHA256 uint16 = 0x0069
cipher_TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 uint16 = 0x006A
cipher_TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 uint16 = 0x006B
cipher_TLS_DH_anon_WITH_AES_128_CBC_SHA256 uint16 = 0x006C
cipher_TLS_DH_anon_WITH_AES_256_CBC_SHA256 uint16 = 0x006D
// Unassigned uint16 = 0x006E-83
cipher_TLS_RSA_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0084
cipher_TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0085
cipher_TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0086
cipher_TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0087
cipher_TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0088
cipher_TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA uint16 = 0x0089
cipher_TLS_PSK_WITH_RC4_128_SHA uint16 = 0x008A
cipher_TLS_PSK_WITH_3DES_EDE_CBC_SHA uint16 = 0x008B
cipher_TLS_PSK_WITH_AES_128_CBC_SHA uint16 = 0x008C
cipher_TLS_PSK_WITH_AES_256_CBC_SHA uint16 = 0x008D
cipher_TLS_DHE_PSK_WITH_RC4_128_SHA uint16 = 0x008E
cipher_TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA uint16 = 0x008F
cipher_TLS_DHE_PSK_WITH_AES_128_CBC_SHA uint16 = 0x0090
cipher_TLS_DHE_PSK_WITH_AES_256_CBC_SHA uint16 = 0x0091
cipher_TLS_RSA_PSK_WITH_RC4_128_SHA uint16 = 0x0092
cipher_TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA uint16 = 0x0093
cipher_TLS_RSA_PSK_WITH_AES_128_CBC_SHA uint16 = 0x0094
cipher_TLS_RSA_PSK_WITH_AES_256_CBC_SHA uint16 = 0x0095
cipher_TLS_RSA_WITH_SEED_CBC_SHA uint16 = 0x0096
cipher_TLS_DH_DSS_WITH_SEED_CBC_SHA uint16 = 0x0097
cipher_TLS_DH_RSA_WITH_SEED_CBC_SHA uint16 = 0x0098
cipher_TLS_DHE_DSS_WITH_SEED_CBC_SHA uint16 = 0x0099
cipher_TLS_DHE_RSA_WITH_SEED_CBC_SHA uint16 = 0x009A
cipher_TLS_DH_anon_WITH_SEED_CBC_SHA uint16 = 0x009B
cipher_TLS_RSA_WITH_AES_128_GCM_SHA256 uint16 = 0x009C
cipher_TLS_RSA_WITH_AES_256_GCM_SHA384 uint16 = 0x009D
cipher_TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 uint16 = 0x009E
cipher_TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 uint16 = 0x009F
cipher_TLS_DH_RSA_WITH_AES_128_GCM_SHA256 uint16 = 0x00A0
cipher_TLS_DH_RSA_WITH_AES_256_GCM_SHA384 uint16 = 0x00A1
cipher_TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 uint16 = 0x00A2
cipher_TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 uint16 = 0x00A3
cipher_TLS_DH_DSS_WITH_AES_128_GCM_SHA256 uint16 = 0x00A4
cipher_TLS_DH_DSS_WITH_AES_256_GCM_SHA384 uint16 = 0x00A5
cipher_TLS_DH_anon_WITH_AES_128_GCM_SHA256 uint16 = 0x00A6
cipher_TLS_DH_anon_WITH_AES_256_GCM_SHA384 uint16 = 0x00A7
cipher_TLS_PSK_WITH_AES_128_GCM_SHA256 uint16 = 0x00A8
cipher_TLS_PSK_WITH_AES_256_GCM_SHA384 uint16 = 0x00A9
cipher_TLS_DHE_PSK_WITH_AES_128_GCM_SHA256 uint16 = 0x00AA
cipher_TLS_DHE_PSK_WITH_AES_256_GCM_SHA384 uint16 = 0x00AB
cipher_TLS_RSA_PSK_WITH_AES_128_GCM_SHA256 uint16 = 0x00AC
cipher_TLS_RSA_PSK_WITH_AES_256_GCM_SHA384 uint16 = 0x00AD
cipher_TLS_PSK_WITH_AES_128_CBC_SHA256 uint16 = 0x00AE
cipher_TLS_PSK_WITH_AES_256_CBC_SHA384 uint16 = 0x00AF
cipher_TLS_PSK_WITH_NULL_SHA256 uint16 = 0x00B0
cipher_TLS_PSK_WITH_NULL_SHA384 uint16 = 0x00B1
cipher_TLS_DHE_PSK_WITH_AES_128_CBC_SHA256 uint16 = 0x00B2
cipher_TLS_DHE_PSK_WITH_AES_256_CBC_SHA384 uint16 = 0x00B3
cipher_TLS_DHE_PSK_WITH_NULL_SHA256 uint16 = 0x00B4
cipher_TLS_DHE_PSK_WITH_NULL_SHA384 uint16 = 0x00B5
cipher_TLS_RSA_PSK_WITH_AES_128_CBC_SHA256 uint16 = 0x00B6
cipher_TLS_RSA_PSK_WITH_AES_256_CBC_SHA384 uint16 = 0x00B7
cipher_TLS_RSA_PSK_WITH_NULL_SHA256 uint16 = 0x00B8
cipher_TLS_RSA_PSK_WITH_NULL_SHA384 uint16 = 0x00B9
cipher_TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BA
cipher_TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BB
cipher_TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BC
cipher_TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BD
cipher_TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BE
cipher_TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0x00BF
cipher_TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C0
cipher_TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C1
cipher_TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C2
cipher_TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C3
cipher_TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C4
cipher_TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256 uint16 = 0x00C5
// Unassigned uint16 = 0x00C6-FE
cipher_TLS_EMPTY_RENEGOTIATION_INFO_SCSV uint16 = 0x00FF
// Unassigned uint16 = 0x01-55,*
cipher_TLS_FALLBACK_SCSV uint16 = 0x5600
// Unassigned uint16 = 0x5601 - 0xC000
cipher_TLS_ECDH_ECDSA_WITH_NULL_SHA uint16 = 0xC001
cipher_TLS_ECDH_ECDSA_WITH_RC4_128_SHA uint16 = 0xC002
cipher_TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC003
cipher_TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA uint16 = 0xC004
cipher_TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA uint16 = 0xC005
cipher_TLS_ECDHE_ECDSA_WITH_NULL_SHA uint16 = 0xC006
cipher_TLS_ECDHE_ECDSA_WITH_RC4_128_SHA uint16 = 0xC007
cipher_TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC008
cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA uint16 = 0xC009
cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA uint16 = 0xC00A
cipher_TLS_ECDH_RSA_WITH_NULL_SHA uint16 = 0xC00B
cipher_TLS_ECDH_RSA_WITH_RC4_128_SHA uint16 = 0xC00C
cipher_TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC00D
cipher_TLS_ECDH_RSA_WITH_AES_128_CBC_SHA uint16 = 0xC00E
cipher_TLS_ECDH_RSA_WITH_AES_256_CBC_SHA uint16 = 0xC00F
cipher_TLS_ECDHE_RSA_WITH_NULL_SHA uint16 = 0xC010
cipher_TLS_ECDHE_RSA_WITH_RC4_128_SHA uint16 = 0xC011
cipher_TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC012
cipher_TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA uint16 = 0xC013
cipher_TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA uint16 = 0xC014
cipher_TLS_ECDH_anon_WITH_NULL_SHA uint16 = 0xC015
cipher_TLS_ECDH_anon_WITH_RC4_128_SHA uint16 = 0xC016
cipher_TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA uint16 = 0xC017
cipher_TLS_ECDH_anon_WITH_AES_128_CBC_SHA uint16 = 0xC018
cipher_TLS_ECDH_anon_WITH_AES_256_CBC_SHA uint16 = 0xC019
cipher_TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC01A
cipher_TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA uint16 = 0xC01B
cipher_TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA uint16 = 0xC01C
cipher_TLS_SRP_SHA_WITH_AES_128_CBC_SHA uint16 = 0xC01D
cipher_TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA uint16 = 0xC01E
cipher_TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA uint16 = 0xC01F
cipher_TLS_SRP_SHA_WITH_AES_256_CBC_SHA uint16 = 0xC020
cipher_TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA uint16 = 0xC021
cipher_TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA uint16 = 0xC022
cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 uint16 = 0xC023
cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 uint16 = 0xC024
cipher_TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 uint16 = 0xC025
cipher_TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 uint16 = 0xC026
cipher_TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 uint16 = 0xC027
cipher_TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 uint16 = 0xC028
cipher_TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 uint16 = 0xC029
cipher_TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 uint16 = 0xC02A
cipher_TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 uint16 = 0xC02B
cipher_TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 uint16 = 0xC02C
cipher_TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 uint16 = 0xC02D
cipher_TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 uint16 = 0xC02E
cipher_TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 uint16 = 0xC02F
cipher_TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 uint16 = 0xC030
cipher_TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 uint16 = 0xC031
cipher_TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 uint16 = 0xC032
cipher_TLS_ECDHE_PSK_WITH_RC4_128_SHA uint16 = 0xC033
cipher_TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA uint16 = 0xC034
cipher_TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA uint16 = 0xC035
cipher_TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA uint16 = 0xC036
cipher_TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256 uint16 = 0xC037
cipher_TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384 uint16 = 0xC038
cipher_TLS_ECDHE_PSK_WITH_NULL_SHA uint16 = 0xC039
cipher_TLS_ECDHE_PSK_WITH_NULL_SHA256 uint16 = 0xC03A
cipher_TLS_ECDHE_PSK_WITH_NULL_SHA384 uint16 = 0xC03B
cipher_TLS_RSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC03C
cipher_TLS_RSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC03D
cipher_TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC03E
cipher_TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC03F
cipher_TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC040
cipher_TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC041
cipher_TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC042
cipher_TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC043
cipher_TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC044
cipher_TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC045
cipher_TLS_DH_anon_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC046
cipher_TLS_DH_anon_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC047
cipher_TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC048
cipher_TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC049
cipher_TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC04A
cipher_TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC04B
cipher_TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC04C
cipher_TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC04D
cipher_TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC04E
cipher_TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC04F
cipher_TLS_RSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC050
cipher_TLS_RSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC051
cipher_TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC052
cipher_TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC053
cipher_TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC054
cipher_TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC055
cipher_TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC056
cipher_TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC057
cipher_TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC058
cipher_TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC059
cipher_TLS_DH_anon_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC05A
cipher_TLS_DH_anon_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC05B
cipher_TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC05C
cipher_TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC05D
cipher_TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC05E
cipher_TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC05F
cipher_TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC060
cipher_TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC061
cipher_TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC062
cipher_TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC063
cipher_TLS_PSK_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC064
cipher_TLS_PSK_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC065
cipher_TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC066
cipher_TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC067
cipher_TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC068
cipher_TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC069
cipher_TLS_PSK_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC06A
cipher_TLS_PSK_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC06B
cipher_TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC06C
cipher_TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC06D
cipher_TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256 uint16 = 0xC06E
cipher_TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384 uint16 = 0xC06F
cipher_TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256 uint16 = 0xC070
cipher_TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384 uint16 = 0xC071
cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC072
cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC073
cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC074
cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC075
cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC076
cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC077
cipher_TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC078
cipher_TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC079
cipher_TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC07A
cipher_TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC07B
cipher_TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC07C
cipher_TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC07D
cipher_TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC07E
cipher_TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC07F
cipher_TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC080
cipher_TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC081
cipher_TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC082
cipher_TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC083
cipher_TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC084
cipher_TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC085
cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC086
cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC087
cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC088
cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC089
cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC08A
cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC08B
cipher_TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC08C
cipher_TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC08D
cipher_TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC08E
cipher_TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC08F
cipher_TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC090
cipher_TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC091
cipher_TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256 uint16 = 0xC092
cipher_TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384 uint16 = 0xC093
cipher_TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC094
cipher_TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC095
cipher_TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC096
cipher_TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC097
cipher_TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC098
cipher_TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC099
cipher_TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 uint16 = 0xC09A
cipher_TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 uint16 = 0xC09B
cipher_TLS_RSA_WITH_AES_128_CCM uint16 = 0xC09C
cipher_TLS_RSA_WITH_AES_256_CCM uint16 = 0xC09D
cipher_TLS_DHE_RSA_WITH_AES_128_CCM uint16 = 0xC09E
cipher_TLS_DHE_RSA_WITH_AES_256_CCM uint16 = 0xC09F
cipher_TLS_RSA_WITH_AES_128_CCM_8 uint16 = 0xC0A0
cipher_TLS_RSA_WITH_AES_256_CCM_8 uint16 = 0xC0A1
cipher_TLS_DHE_RSA_WITH_AES_128_CCM_8 uint16 = 0xC0A2
cipher_TLS_DHE_RSA_WITH_AES_256_CCM_8 uint16 = 0xC0A3
cipher_TLS_PSK_WITH_AES_128_CCM uint16 = 0xC0A4
cipher_TLS_PSK_WITH_AES_256_CCM uint16 = 0xC0A5
cipher_TLS_DHE_PSK_WITH_AES_128_CCM uint16 = 0xC0A6
cipher_TLS_DHE_PSK_WITH_AES_256_CCM uint16 = 0xC0A7
cipher_TLS_PSK_WITH_AES_128_CCM_8 uint16 = 0xC0A8
cipher_TLS_PSK_WITH_AES_256_CCM_8 uint16 = 0xC0A9
cipher_TLS_PSK_DHE_WITH_AES_128_CCM_8 uint16 = 0xC0AA
cipher_TLS_PSK_DHE_WITH_AES_256_CCM_8 uint16 = 0xC0AB
cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CCM uint16 = 0xC0AC
cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CCM uint16 = 0xC0AD
cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8 uint16 = 0xC0AE
cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8 uint16 = 0xC0AF
// Unassigned uint16 = 0xC0B0-FF
// Unassigned uint16 = 0xC1-CB,*
// Unassigned uint16 = 0xCC00-A7
cipher_TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCA8
cipher_TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCA9
cipher_TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCAA
cipher_TLS_PSK_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCAB
cipher_TLS_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCAC
cipher_TLS_DHE_PSK_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCAD
cipher_TLS_RSA_PSK_WITH_CHACHA20_POLY1305_SHA256 uint16 = 0xCCAE
)
// isBadCipher reports whether the cipher is blacklisted by the HTTP/2 spec.
//
// References:
// https://tools.ietf.org/html/rfc7540#appendix-A
// Reject cipher suites from Appendix A.
// "This list includes those cipher suites that do not
// offer an ephemeral key exchange and those that are
// based on the TLS null, stream or block cipher type"
func isBadCipher(cipher uint16) bool {
	// The case list below is a transcription of RFC 7540 Appendix A
	// ("TLS 1.2 Cipher Suite Black List"). Each entry either lacks an
	// ephemeral key exchange or uses a null/stream/CBC block cipher.
	switch cipher {
	case cipher_TLS_NULL_WITH_NULL_NULL,
		cipher_TLS_RSA_WITH_NULL_MD5,
		cipher_TLS_RSA_WITH_NULL_SHA,
		cipher_TLS_RSA_EXPORT_WITH_RC4_40_MD5,
		cipher_TLS_RSA_WITH_RC4_128_MD5,
		cipher_TLS_RSA_WITH_RC4_128_SHA,
		cipher_TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5,
		cipher_TLS_RSA_WITH_IDEA_CBC_SHA,
		cipher_TLS_RSA_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_RSA_WITH_DES_CBC_SHA,
		cipher_TLS_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_DES_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_DES_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_DES_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_DES_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DH_anon_EXPORT_WITH_RC4_40_MD5,
		cipher_TLS_DH_anon_WITH_RC4_128_MD5,
		cipher_TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA,
		cipher_TLS_DH_anon_WITH_DES_CBC_SHA,
		cipher_TLS_DH_anon_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_KRB5_WITH_DES_CBC_SHA,
		cipher_TLS_KRB5_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_KRB5_WITH_RC4_128_SHA,
		cipher_TLS_KRB5_WITH_IDEA_CBC_SHA,
		cipher_TLS_KRB5_WITH_DES_CBC_MD5,
		cipher_TLS_KRB5_WITH_3DES_EDE_CBC_MD5,
		cipher_TLS_KRB5_WITH_RC4_128_MD5,
		cipher_TLS_KRB5_WITH_IDEA_CBC_MD5,
		cipher_TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA,
		cipher_TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA,
		cipher_TLS_KRB5_EXPORT_WITH_RC4_40_SHA,
		cipher_TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5,
		cipher_TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5,
		cipher_TLS_KRB5_EXPORT_WITH_RC4_40_MD5,
		cipher_TLS_PSK_WITH_NULL_SHA,
		cipher_TLS_DHE_PSK_WITH_NULL_SHA,
		cipher_TLS_RSA_PSK_WITH_NULL_SHA,
		cipher_TLS_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_AES_128_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_DH_anon_WITH_AES_128_CBC_SHA,
		cipher_TLS_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_AES_256_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_DH_anon_WITH_AES_256_CBC_SHA,
		cipher_TLS_RSA_WITH_NULL_SHA256,
		cipher_TLS_RSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_RSA_WITH_AES_256_CBC_SHA256,
		cipher_TLS_DH_DSS_WITH_AES_128_CBC_SHA256,
		cipher_TLS_DH_RSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_DHE_DSS_WITH_AES_128_CBC_SHA256,
		cipher_TLS_RSA_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_DH_DSS_WITH_AES_256_CBC_SHA256,
		cipher_TLS_DH_RSA_WITH_AES_256_CBC_SHA256,
		cipher_TLS_DHE_DSS_WITH_AES_256_CBC_SHA256,
		cipher_TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
		cipher_TLS_DH_anon_WITH_AES_128_CBC_SHA256,
		cipher_TLS_DH_anon_WITH_AES_256_CBC_SHA256,
		cipher_TLS_RSA_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA,
		cipher_TLS_PSK_WITH_RC4_128_SHA,
		cipher_TLS_PSK_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_PSK_WITH_AES_128_CBC_SHA,
		cipher_TLS_PSK_WITH_AES_256_CBC_SHA,
		cipher_TLS_DHE_PSK_WITH_RC4_128_SHA,
		cipher_TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_DHE_PSK_WITH_AES_128_CBC_SHA,
		cipher_TLS_DHE_PSK_WITH_AES_256_CBC_SHA,
		cipher_TLS_RSA_PSK_WITH_RC4_128_SHA,
		cipher_TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_RSA_PSK_WITH_AES_128_CBC_SHA,
		cipher_TLS_RSA_PSK_WITH_AES_256_CBC_SHA,
		cipher_TLS_RSA_WITH_SEED_CBC_SHA,
		cipher_TLS_DH_DSS_WITH_SEED_CBC_SHA,
		cipher_TLS_DH_RSA_WITH_SEED_CBC_SHA,
		cipher_TLS_DHE_DSS_WITH_SEED_CBC_SHA,
		cipher_TLS_DHE_RSA_WITH_SEED_CBC_SHA,
		cipher_TLS_DH_anon_WITH_SEED_CBC_SHA,
		cipher_TLS_RSA_WITH_AES_128_GCM_SHA256,
		cipher_TLS_RSA_WITH_AES_256_GCM_SHA384,
		cipher_TLS_DH_RSA_WITH_AES_128_GCM_SHA256,
		cipher_TLS_DH_RSA_WITH_AES_256_GCM_SHA384,
		cipher_TLS_DH_DSS_WITH_AES_128_GCM_SHA256,
		cipher_TLS_DH_DSS_WITH_AES_256_GCM_SHA384,
		cipher_TLS_DH_anon_WITH_AES_128_GCM_SHA256,
		cipher_TLS_DH_anon_WITH_AES_256_GCM_SHA384,
		cipher_TLS_PSK_WITH_AES_128_GCM_SHA256,
		cipher_TLS_PSK_WITH_AES_256_GCM_SHA384,
		cipher_TLS_RSA_PSK_WITH_AES_128_GCM_SHA256,
		cipher_TLS_RSA_PSK_WITH_AES_256_GCM_SHA384,
		cipher_TLS_PSK_WITH_AES_128_CBC_SHA256,
		cipher_TLS_PSK_WITH_AES_256_CBC_SHA384,
		cipher_TLS_PSK_WITH_NULL_SHA256,
		cipher_TLS_PSK_WITH_NULL_SHA384,
		cipher_TLS_DHE_PSK_WITH_AES_128_CBC_SHA256,
		cipher_TLS_DHE_PSK_WITH_AES_256_CBC_SHA384,
		cipher_TLS_DHE_PSK_WITH_NULL_SHA256,
		cipher_TLS_DHE_PSK_WITH_NULL_SHA384,
		cipher_TLS_RSA_PSK_WITH_AES_128_CBC_SHA256,
		cipher_TLS_RSA_PSK_WITH_AES_256_CBC_SHA384,
		cipher_TLS_RSA_PSK_WITH_NULL_SHA256,
		cipher_TLS_RSA_PSK_WITH_NULL_SHA384,
		cipher_TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256,
		cipher_TLS_EMPTY_RENEGOTIATION_INFO_SCSV,
		cipher_TLS_ECDH_ECDSA_WITH_NULL_SHA,
		cipher_TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
		cipher_TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_NULL_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDH_RSA_WITH_NULL_SHA,
		cipher_TLS_ECDH_RSA_WITH_RC4_128_SHA,
		cipher_TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDH_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDHE_RSA_WITH_NULL_SHA,
		cipher_TLS_ECDHE_RSA_WITH_RC4_128_SHA,
		cipher_TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDH_anon_WITH_NULL_SHA,
		cipher_TLS_ECDH_anon_WITH_RC4_128_SHA,
		cipher_TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDH_anon_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDH_anon_WITH_AES_256_CBC_SHA,
		cipher_TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_SRP_SHA_WITH_AES_128_CBC_SHA,
		cipher_TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA,
		cipher_TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA,
		cipher_TLS_SRP_SHA_WITH_AES_256_CBC_SHA,
		cipher_TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA,
		cipher_TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384,
		cipher_TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
		cipher_TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256,
		cipher_TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384,
		cipher_TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256,
		cipher_TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384,
		cipher_TLS_ECDHE_PSK_WITH_RC4_128_SHA,
		cipher_TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA,
		cipher_TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA,
		cipher_TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA,
		cipher_TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256,
		cipher_TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384,
		cipher_TLS_ECDHE_PSK_WITH_NULL_SHA,
		cipher_TLS_ECDHE_PSK_WITH_NULL_SHA256,
		cipher_TLS_ECDHE_PSK_WITH_NULL_SHA384,
		cipher_TLS_RSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_RSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DH_anon_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DH_anon_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_RSA_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_RSA_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_DH_anon_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_DH_anon_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_PSK_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_PSK_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_PSK_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_PSK_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256,
		cipher_TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384,
		cipher_TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384,
		cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256,
		cipher_TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384,
		cipher_TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256,
		cipher_TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384,
		cipher_TLS_RSA_WITH_AES_128_CCM,
		cipher_TLS_RSA_WITH_AES_256_CCM,
		cipher_TLS_RSA_WITH_AES_128_CCM_8,
		cipher_TLS_RSA_WITH_AES_256_CCM_8,
		cipher_TLS_PSK_WITH_AES_128_CCM,
		cipher_TLS_PSK_WITH_AES_256_CCM,
		cipher_TLS_PSK_WITH_AES_128_CCM_8,
		cipher_TLS_PSK_WITH_AES_256_CCM_8:
		return true
	default:
		// Anything not on the Appendix A black list — including cipher
		// values unknown to this package — is treated as acceptable.
		return false
	}
}
| {
"pile_set_name": "Github"
} |
<?xml version='1.0' ?>
<Module id='205' name='Rmdir_Target'>
<Interfaces>
<Interface id='0x01010031' provider='0x01011005' />
</Interfaces>
<Dependencies>
</Dependencies>
<Lp>Yes</Lp>
<Target>Yes</Target>
<Architecture type='i386'>
<Platform family='winnt'>
<Version major='*' minor='*' other='*'>
<File loadtype='file'>i386-winnt-vc9s/release/Rmdir_Target.dll</File>
<File loadtype='memory'>i386-winnt-vc9s/release/Rmdir_Target.dll</File>
</Version>
</Platform>
</Architecture>
<Architecture type='x64'>
<Platform family='winnt'>
<Version major='*' minor='*' other='*'>
<File loadtype='file'>x64-winnt-vc9s/release/Rmdir_Target.dll</File>
<File loadtype='memory'>x64-winnt-vc9s/release/Rmdir_Target.dll</File>
</Version>
</Platform>
</Architecture>
</Module>
| {
"pile_set_name": "Github"
} |
// Poll for single-page-app route changes once per second and (re-)render
// mermaid diagrams whenever the pathname changes.
let currentPath = null

setInterval(function () {
  const pathname = window.location.pathname
  if (currentPath === pathname) {
    return
  }
  currentPath = pathname

  // For every mermaid code block, insert a class-less clone of the source
  // followed by an <hr> directly before the original node, so the final DOM
  // order is: plain source copy, <hr>, original (.lang-mermaid) node.
  // The original keeps its class and is rendered by mermaid below.
  for (const node of document.querySelectorAll('.lang-mermaid')) {
    const sourceCopy = node.cloneNode(true)
    sourceCopy.removeAttribute('class')
    const divider = document.createElement('hr')
    node.parentNode.insertBefore(divider, node)
    node.parentNode.insertBefore(sourceCopy, divider)
  }

  window.mermaid.initialize({ theme: 'default' })
  window.mermaid.init(undefined, document.querySelectorAll('.lang-mermaid'))
}, 1000)
| {
"pile_set_name": "Github"
} |
# MiMa binary-compatibility filter: GrpcClientSettings.grpcLoadBalancingType
# was removed before the 1.0.0 release, so its absence is deliberately not
# reported as a breaking change.
ProblemFilters.exclude[DirectMissingMethodProblem]("akka.grpc.GrpcClientSettings.grpcLoadBalancingType")
| {
"pile_set_name": "Github"
} |
/***************************************************************************************
Toolkit for WPF
Copyright (C) 2007-2017 Xceed Software Inc.
This program is provided to you under the terms of the Microsoft Public
License (Ms-PL) as published at http://wpftoolkit.codeplex.com/license
For more features, controls, and fast professional support,
pick up the Plus Edition at https://xceed.com/xceed-toolkit-plus-for-wpf/
Stay informed: follow @datagrid on Twitter or Like http://facebook.com/datagrids
*************************************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Data;
using System.Globalization;
using System.Windows;
namespace Xceed.Wpf.Toolkit.LiveExplorer.Samples.Theming.Converters
{
  // NOTE(review): this namespace is currently empty — presumably a
  // placeholder for theming value converters (IValueConverter types).
  // Confirm it is intentionally empty before deleting the file.
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.roaster.model.source;
import org.jboss.forge.roaster.Internal;
/**
 * Represents an imported element in a {@link JavaSource}.
 *
 * @author <a href="mailto:[email protected]">Lincoln Baxter, III</a>
 *
 */
public interface Import extends Internal, StaticCapableSource<Import>
{
   /** The simple name used for wildcard ({@code *}) imports. */
   public static final String WILDCARD = "*";
   /**
    * Returns the package part of an import.
    *
    * @return the package of this import
    */
   String getPackage();
   /**
    * Returns the simple name of an import.
    *
    * @return the simple class name of an import, or {@link #WILDCARD} if this is a wildcard import.
    * @see Class#getSimpleName()
    */
   String getSimpleName();
   /**
    * Returns the qualified name, i.e. the same as '{@code getPackage() + "." + getSimpleName()}'. In the case this
    * is a wildcard import, the whole import including a '*' at the end is returned.
    *
    * @return the qualified name, or the full name if this is a wildcard import
    */
   String getQualifiedName();
   /**
    * Checks if this import is a wildcard ({@code *}) import.
    *
    * @return true if this is a wildcard import, false otherwise
    */
   boolean isWildcard();
   /**
    * Sets the data of this import object. The data is the part of an {@code import} statement which is between
    * {@code import} and {@code ;}.
    *
    * <p>
    * This method is <b>not</b> intended to be called from externally.
    * </p>
    *
    * @param name the actual data of the import
    * @return {@code this}
    */
   Import setName(final String name);
} | {
"pile_set_name": "Github"
} |
Feedback
========
.. note::
All participants must follow the `pimutils Code of Conduct
<http://pimutils.org/coc>`_.
Please do provide feedback if *khal* works for you or, even more importantly,
if it doesn't. Feature requests and other ideas on how to improve khal are
also welcome (see below).
In case you are not satisfied with khal, there are at least two other
projects with similar aims you might want to check out: calendar-cli_ (no
offline storage and a bit different scope) and gcalcli_ (only works with
google's calendar).
.. _calendar-cli: https://github.com/tobixen/calendar-cli
.. _gcalcli: https://github.com/insanum/gcalcli
Submitting a Bug
----------------
If you found a bug or any part of khal isn't working as you expected, please
check if that bug is also present in the latest version from github (see
:doc:`install`) and is not already reported_ (you still might want to comment on
an already open issue).
If it isn't, please open a new bug. In case you submit a new bug report,
please include:
* how you ran khal (please run in verbose mode with `-v DEBUG`)
* what you expected khal to do
* what it did instead
* everything khal printed to the screen (you may redact private details)
* in case khal complains about a specific .ics file, please include that as
well (or create a .ics which leads to the same error without any private
information)
* the version of khal and python you are using, which operating system you are
using and how you installed khal
Suggesting Features
-------------------
If you believe khal is lacking a useful feature or some part of khal is not
working the way you think it should, please first check if there isn't already
a relevant issue_ for it and otherwise open a new one.
.. _contact:
Contact
-------
* You might get quick answers on the `#pimutils`_ IRC channel on Freenode, if
nobody is answering you, please hang around for a bit. You can also use this
channel for general discussions about :command:`khal` and `related tools`_.
* Open a github issue_
* If the above mentioned methods do not work, you can always contact the `main
developer`_.
.. _#pimutils: irc://#pimutils@Freenode
.. _related tools: https://github.com/pimutils/
.. _issue: https://github.com/pimutils/khal/issues
.. _reported: https://github.com/pimutils/khal/issues
.. _main developer: https://lostpackets.de
| {
"pile_set_name": "Github"
} |
Ember uses the [Handlebars templating library](http://www.handlebarsjs.com)
to power your app's user interface. Handlebars templates contain static HTML and dynamic content inside Handlebars expressions, which are invoked with double curly braces: `{{}}`.
Dynamic content inside a Handlebars expression is rendered with data-binding. This means if you update a property, your usage of that property in a template will be automatically updated to the latest value.
### Displaying Properties
Templates are backed with a context. A context is an object from which
Handlebars expressions read their properties. In Ember this is often a component. For templates rendered by a route (like `application.hbs`), the context is a controller.
For example, this `application.hbs` template will render a first and last name:
```handlebars {data-filename=app/templates/application.hbs}
Hello, <strong>{{firstName}} {{lastName}}</strong>!
```
The `firstName` and `lastName` properties are read from the
context (the application controller in this case), and rendered inside the
`<strong>` HTML tag.
To provide a `firstName` and `lastName` to the above template, properties
must be added to the application controller. If you are following along with
an Ember CLI application, you may need to create this file:
```javascript {data-filename=app/controllers/application.js}
import Controller from '@ember/controller';
export default Controller.extend({
firstName: 'Trek',
lastName: 'Glowacki'
});
```
The above template and controller render as the following HTML:
```html
Hello, <strong>Trek Glowacki</strong>!
```
Remember that `{{firstName}}` and `{{lastName}}` are bound data. That means
if the value of one of those properties changes, the DOM will be updated
automatically.
As an application grows in size, it will have many templates backed by
controllers and components.
### Helpers
Ember Helpers are functions that can compute values and can be used in any template.
Ember gives you the ability to [write your own helpers](../writing-helpers/), letting you bring a minimal amount of logic into Ember templating.
For example, let's say you would like the ability to add a few numbers together, without needing to define a computed property everywhere you would like to do so.
```javascript {data-filename=app/helpers/sum.js}
import { helper } from '@ember/component/helper';
export function sum(params) {
return params.reduce((a, b) => {
return a + b;
});
}
export default helper(sum);
```
The above code will allow you invoke the `sum()` function as a `{{sum}}` handlebars "helper" in your templates:
```html
<p>Total: {{sum 1 2 3}}</p>
```
This helper will output a value of `6`.
Ember ships with several built-in helpers, which you will learn more about in the following guides.
#### Nested Helpers
Helpers have the ability to be nested within other helper invocations and also component invocations.
This gives you the flexibility to compute a value _before_ it is passed in as an argument or an attribute of another.
It is not possible to nest curly braces `{{}}`, so the correct way to nest a helper is by using parentheses `()`:
```html
{{sum (multiply 2 4) 2}}
```
In this example, we are using a helper to multiply `2` and `4` _before_ passing the value into `{{sum}}`.
Thus, the output of these combined helpers is `10`.
As you move forward with these template guides, keep in mind that a helper can be used anywhere a normal value can be used.
Thus, many of Ember's built-in helpers (as well as your custom helpers) can be used in nested form.
| {
"pile_set_name": "Github"
} |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// +k8s:deepcopy-gen=package
// +k8s:protobuf-gen=package
// +groupName=authentication.k8s.io
// +k8s:openapi-gen=true
package v1beta1 // import "k8s.io/api/authentication/v1beta1"
| {
"pile_set_name": "Github"
} |
/*!
* Start Bootstrap - SB Admin 2 Bootstrap Admin Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
body {
background-color: #f8f8f8;
}
#wrapper {
width: 100%;
}
#page-wrapper {
padding: 0 15px;
min-height: 568px;
background-color: #fff;
}
@media(min-width:768px) {
#page-wrapper {
position: inherit;
margin: 0 0 0 250px;
padding: 0 30px;
border-left: 1px solid #e7e7e7;
}
}
.navbar-top-links {
margin-right: 0;
}
.navbar-top-links li {
display: inline-block;
}
.navbar-top-links li:last-child {
margin-right: 15px;
}
.navbar-top-links li a {
padding: 15px;
min-height: 50px;
}
.navbar-top-links .dropdown-menu li {
display: block;
}
.navbar-top-links .dropdown-menu li:last-child {
margin-right: 0;
}
.navbar-top-links .dropdown-menu li a {
padding: 3px 20px;
min-height: 0;
}
.navbar-top-links .dropdown-menu li a div {
white-space: normal;
}
.navbar-top-links .dropdown-messages,
.navbar-top-links .dropdown-tasks,
.navbar-top-links .dropdown-alerts {
width: 310px;
min-width: 0;
}
.navbar-top-links .dropdown-messages {
margin-left: 5px;
}
.navbar-top-links .dropdown-tasks {
margin-left: -59px;
}
.navbar-top-links .dropdown-alerts {
margin-left: -123px;
}
.navbar-top-links .dropdown-user {
right: 0;
left: auto;
}
.sidebar .sidebar-nav.navbar-collapse {
padding-right: 0;
padding-left: 0;
}
.sidebar .sidebar-search {
padding: 15px;
}
.sidebar ul li {
border-bottom: 1px solid #e7e7e7;
}
.sidebar ul li a.active {
background-color: #eee;
}
.sidebar .arrow {
float: right;
}
.sidebar .fa.arrow:before {
content: "\f104";
}
.sidebar .active>a>.fa.arrow:before {
content: "\f107";
}
.sidebar .nav-second-level li,
.sidebar .nav-third-level li {
border-bottom: 0!important;
}
.sidebar .nav-second-level li a {
padding-left: 37px;
}
.sidebar .nav-third-level li a {
padding-left: 52px;
}
@media(min-width:768px) {
.sidebar {
z-index: 1;
position: absolute;
width: 250px;
margin-top: 51px;
}
.navbar-top-links .dropdown-messages,
.navbar-top-links .dropdown-tasks,
.navbar-top-links .dropdown-alerts {
margin-left: auto;
}
}
.btn-outline {
color: inherit;
background-color: transparent;
transition: all .5s;
}
.btn-primary.btn-outline {
color: #428bca;
}
.btn-success.btn-outline {
color: #5cb85c;
}
.btn-info.btn-outline {
color: #5bc0de;
}
.btn-warning.btn-outline {
color: #f0ad4e;
}
.btn-danger.btn-outline {
color: #d9534f;
}
.btn-primary.btn-outline:hover,
.btn-success.btn-outline:hover,
.btn-info.btn-outline:hover,
.btn-warning.btn-outline:hover,
.btn-danger.btn-outline:hover {
color: #fff;
}
.chat {
margin: 0;
padding: 0;
list-style: none;
}
.chat li {
margin-bottom: 10px;
padding-bottom: 5px;
border-bottom: 1px dotted #999;
}
.chat li.left .chat-body {
margin-left: 60px;
}
.chat li.right .chat-body {
margin-right: 60px;
}
.chat li .chat-body p {
margin: 0;
}
.panel .slidedown .glyphicon,
.chat .glyphicon {
margin-right: 5px;
}
.chat-panel .panel-body {
height: 350px;
overflow-y: scroll;
}
.login-panel {
margin-top: 25%;
}
.flot-chart {
display: block;
height: 400px;
}
.flot-chart-content {
width: 100%;
height: 100%;
}
table.dataTable thead .sorting,
table.dataTable thead .sorting_asc,
table.dataTable thead .sorting_desc,
table.dataTable thead .sorting_asc_disabled,
table.dataTable thead .sorting_desc_disabled {
background: 0 0;
}
table.dataTable thead .sorting_asc:after {
content: "\f0de";
float: right;
font-family: fontawesome;
}
table.dataTable thead .sorting_desc:after {
content: "\f0dd";
float: right;
font-family: fontawesome;
}
table.dataTable thead .sorting:after {
content: "\f0dc";
float: right;
font-family: fontawesome;
color: rgba(50,50,50,.5);
}
.btn-circle {
width: 30px;
height: 30px;
padding: 6px 0;
border-radius: 15px;
text-align: center;
font-size: 12px;
line-height: 1.428571429;
}
.btn-circle.btn-lg {
width: 50px;
height: 50px;
padding: 10px 16px;
border-radius: 25px;
font-size: 18px;
line-height: 1.33;
}
.btn-circle.btn-xl {
width: 70px;
height: 70px;
padding: 10px 16px;
border-radius: 35px;
font-size: 24px;
line-height: 1.33;
}
.show-grid [class^=col-] {
padding-top: 10px;
padding-bottom: 10px;
border: 1px solid #ddd;
background-color: #eee!important;
}
.show-grid {
margin: 15px 0;
}
.huge {
font-size: 40px;
}
.panel-green {
border-color: #5cb85c;
}
.panel-green .panel-heading {
border-color: #5cb85c;
color: #fff;
background-color: #5cb85c;
}
.panel-green a {
color: #5cb85c;
}
.panel-green a:hover {
color: #3d8b3d;
}
.panel-red {
border-color: #d9534f;
}
.panel-red .panel-heading {
border-color: #d9534f;
color: #fff;
background-color: #d9534f;
}
.panel-red a {
color: #d9534f;
}
.panel-red a:hover {
color: #b52b27;
}
.panel-yellow {
border-color: #f0ad4e;
}
.panel-yellow .panel-heading {
border-color: #f0ad4e;
color: #fff;
background-color: #f0ad4e;
}
.panel-yellow a {
color: #f0ad4e;
}
.panel-yellow a:hover {
color: #df8a13;
}
| {
"pile_set_name": "Github"
} |
commandlinefu_id: 9694
translator:
weibo: ''
hide: true
command: |-
ls -l =gcc
summary: |-
Get the dir listing of an executable without knowing its location
| {
"pile_set_name": "Github"
} |
DROP TABLE part;
-- data setup
CREATE TABLE part(
p_partkey INT,
p_name STRING,
p_mfgr STRING,
p_brand STRING,
p_type STRING,
p_size INT,
p_container STRING,
p_retailprice DOUBLE,
p_comment STRING
);
LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
DROP TABLE lineitem;
CREATE TABLE lineitem (L_ORDERKEY INT,
L_PARTKEY INT,
L_SUPPKEY INT,
L_LINENUMBER INT,
L_QUANTITY DOUBLE,
L_EXTENDEDPRICE DOUBLE,
L_DISCOUNT DOUBLE,
L_TAX DOUBLE,
L_RETURNFLAG STRING,
L_LINESTATUS STRING,
l_shipdate STRING,
L_COMMITDATE STRING,
L_RECEIPTDATE STRING,
L_SHIPINSTRUCT STRING,
L_SHIPMODE STRING,
L_COMMENT STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '|';
LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
-- non agg, non corr
explain
select *
from src
where src.key in (select key from src s1 where s1.key > '9')
;
select *
from src
where src.key in (select key from src s1 where s1.key > '9')
order by key
;
-- non agg, corr
explain
select *
from src b
where b.key in
(select a.key
from src a
where b.value = a.value and a.key > '9'
)
;
select *
from src b
where b.key in
(select a.key
from src a
where b.value = a.value and a.key > '9'
)
order by b.key
;
-- agg, non corr
explain
select p_name, p_size
from
part where part.p_size in
(select avg(p_size)
from (select p_size, rank() over(partition by p_mfgr order by p_size) as r from part) a
where r <= 2
)
;
select p_name, p_size
from
part where part.p_size in
(select avg(p_size)
from (select p_size, rank() over(partition by p_mfgr order by p_size) as r from part) a
where r <= 2
)
order by p_name
;
-- agg, corr
explain
select p_mfgr, p_name, p_size
from part b where b.p_size in
(select min(p_size)
from (select p_mfgr, p_size, rank() over(partition by p_mfgr order by p_size) as r from part) a
where r <= 2 and b.p_mfgr = a.p_mfgr
)
;
select p_mfgr, p_name, p_size
from part b where b.p_size in
(select min(p_size)
from (select p_mfgr, p_size, rank() over(partition by p_mfgr order by p_size) as r from part) a
where r <= 2 and b.p_mfgr = a.p_mfgr
)
order by p_mfgr, p_name, p_size
;
-- distinct, corr
explain
select *
from src b
where b.key in
(select distinct a.key
from src a
where b.value = a.value and a.key > '9'
)
;
select *
from src b
where b.key in
(select distinct a.key
from src a
where b.value = a.value and a.key > '9'
)
order by b.key
;
-- non agg, non corr, windowing
select p_mfgr, p_name, p_size
from part
where part.p_size in
(select first_value(p_size) over(partition by p_mfgr order by p_size) from part)
order by p_mfgr, p_name, p_size
;
-- non agg, non corr, with join in Parent Query
explain
select p.p_partkey, li.l_suppkey
from (select distinct l_partkey as p_partkey from lineitem) p join lineitem li on p.p_partkey = li.l_partkey
where li.l_linenumber = 1 and
li.l_orderkey in (select l_orderkey from lineitem where l_shipmode = 'AIR')
;
select p.p_partkey, li.l_suppkey
from (select distinct l_partkey as p_partkey from lineitem) p join lineitem li on p.p_partkey = li.l_partkey
where li.l_linenumber = 1 and
li.l_orderkey in (select l_orderkey from lineitem where l_shipmode = 'AIR')
order by p.p_partkey, li.l_suppkey
;
-- non agg, corr, with join in Parent Query
select p.p_partkey, li.l_suppkey
from (select distinct l_partkey as p_partkey from lineitem) p join lineitem li on p.p_partkey = li.l_partkey
where li.l_linenumber = 1 and
li.l_orderkey in (select l_orderkey from lineitem where l_shipmode = 'AIR' and l_linenumber = li.l_linenumber)
order by p.p_partkey, li.l_suppkey
;
| {
"pile_set_name": "Github"
} |
/*
** Copyright (c) 2012 The Khronos Group Inc.
**
** Permission is hereby granted, free of charge, to any person obtaining a
** copy of this software and/or associated documentation files (the
** "Materials"), to deal in the Materials without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Materials, and to
** permit persons to whom the Materials are furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be included
** in all copies or substantial portions of the Materials.
**
** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
*/
#ifdef GL_ES
#extension GL_OES_standard_derivatives : enable
precision mediump float;
#endif
// setting a boundary for cases where screen sizes may exceed the precision
// of the arithmetic used.
#define SAFETY_BOUND 500.0
// Macro to scale/bias the range of output. If input is [-1.0, 1.0], maps to [0.5, 1.0]..
// Accounts for precision errors magnified by derivative operation.
#define REDUCE_RANGE(A) ((A) + 3.0) / 4.0
varying vec2 vertXY;
uniform float viewportwidth;
uniform float viewportheight;
// Conformance-style check of the fwidth() derivative: paints a grayscale value
// derived from the screen-space rate of change of a vertical sine wave.
void main (void)
{
    const float M_PI = 3.14159265358979323846;
    float cosine;
    float sine;
#ifdef GL_OES_standard_derivatives
    // fwidth of vertical sine wave with a period of 128 pixels, scaled to go from -1 to +1
    sine = sin(fract(gl_FragCoord.y / 128.0) * (2.0 * M_PI));
    // Scale the derivative back to [-1, 1], then REDUCE_RANGE maps it into
    // [0.5, 1.0] to absorb precision error magnified by differentiation.
    cosine = REDUCE_RANGE((128.0 / (2.0 * M_PI)) * fwidth(sine));
#else
    // Extension unavailable: fall back to a constant mid-gray.
    cosine = 0.5;
#endif
    // Only shade fragments inside SAFETY_BOUND; beyond it the arithmetic may
    // exceed the available precision (see SAFETY_BOUND definition), so discard.
    if( (gl_FragCoord.x < SAFETY_BOUND) && (gl_FragCoord.y < SAFETY_BOUND) )
    {
        gl_FragColor = vec4(cosine, cosine, cosine, 1.0);
    }
    else discard;
}
| {
"pile_set_name": "Github"
} |
#coding=utf-8
# coding=utf-8
'''
Created on 2014-1-5
@author: ETHAN
'''
import ast

from django.http import HttpResponse
from django.contrib.auth.decorators import login_required

from gatesidelib.common.simplelogger import SimpleLogger
from teamvision.resources.project.resource_string import Project as ProjectRes
from teamvision.project.models import Project
from teamvision.project.pagefactory.project_common_pageworker import ProjectCommonControllPageWorker
from teamvision.project.pagefactory.project_settings_pageworker import ProjectSettingsPageWorker
from business.project.memeber_service import MemberService
@login_required
def add(request, projectid):
    """Add the users posted in ``user_ids`` as members of project ``projectid``.

    ``user_ids`` is a Python-literal sequence such as ``"1,2,3"``; the
    sentinel string ``"null"`` means nothing was selected.  Responds with
    ``True`` on success, otherwise a localized error message.
    """
    result = True
    try:
        member_ids = request.POST.get("user_ids")
        if member_ids != "null":
            # SECURITY: parse the posted literal with ast.literal_eval instead
            # of eval(), so request data can never execute arbitrary code.
            # The trailing comma keeps a single id (e.g. "5") parsing as a
            # tuple, matching the original eval(member_ids + ",") behaviour.
            for member_id in ast.literal_eval(member_ids + ","):
                MemberService.add_member(int(member_id), projectid, request.user)
    except Exception as ex:
        result = ProjectRes.project_member_save_fail
        SimpleLogger.error(ex)
    return HttpResponse(result)
@login_required
def import_member(request, projectid):
    """Copy every member of the posted ``from_project`` into this project.

    Users already belonging to the target project are skipped.  Responds with
    ``True`` on success, otherwise a localized error message.
    """
    response_value = True
    try:
        source_project_id = int(request.POST.get("from_project"))
        source_members = MemberService.get_member_users(source_project_id)
        existing_members = MemberService.get_member_users(int(projectid))
        for user in source_members:
            if user in existing_members:
                continue  # already a member of the target project
            MemberService.add_member(int(user.id), projectid, request.user)
    except Exception as error:
        response_value = ProjectRes.project_member_save_fail
        SimpleLogger.error(error)
    return HttpResponse(response_value)
@login_required
def remove(request, projectid):
    """Remove a member from the project; respond ``True`` or an error message."""
    response_value = True
    try:
        MemberService.remove_member(request, projectid)
    except Exception as error:
        SimpleLogger.error(error)
        response_value = ProjectRes.project_member_remove_fail
    return HttpResponse(response_value)
@login_required
def update_member_role(request, projectid, userid):
    """Change the project role of one member; respond ``True`` or an error message."""
    response_value = True
    try:
        MemberService.update_role(request, projectid, userid)
    except Exception as error:
        SimpleLogger.error(error)
        response_value = ProjectRes.project_member_update_role_fail
    return HttpResponse(response_value)
@login_required
def get_member_list(request, projectid):
    """Render the member-list control for the project settings page.

    Responds with the rendered HTML, or ``False`` when rendering fails.
    """
    rendered = False
    try:
        worker = ProjectSettingsPageWorker(request)
        rendered = worker.get_project_member_list_controll(projectid, request.user)
    except Exception as error:
        SimpleLogger.error(error)
    return HttpResponse(rendered)
@login_required
def get_member_add_dialog(request, projectid):
    """Render the "add member" dialog for the project settings page.

    Responds with the rendered HTML, or ``False`` when rendering fails.
    """
    rendered = False
    try:
        worker = ProjectSettingsPageWorker(request)
        rendered = worker.get_project_member_add_dialog(request, projectid)
    except Exception as error:
        SimpleLogger.error(error)
    return HttpResponse(rendered)
@login_required
def member_dropdownlist(request,project_id):
    """Render a drop-down list of the project's member users.

    Responds with the rendered control, or an empty string when the project
    lookup yields nothing.
    """
    result=""
    # NOTE(review): Project.objects.get() is called with a positional argument
    # and its result is truth-tested below; a stock Django manager expects
    # keyword lookups (pk=...) and raises DoesNotExist rather than returning a
    # falsy value -- presumably a custom manager here; confirm.
    project=Project.objects.get(int(project_id))
    if project:
        page_worker=ProjectCommonControllPageWorker(request)
        member_users=MemberService.get_member_users(project_id)
        # Final 0 argument: selected-value/flag passed through to the page worker
        # -- semantics not visible here; see get_member_dropdownlist.
        result=page_worker.get_member_dropdownlist(member_users, project.id,0)
    return HttpResponse(result)
| {
"pile_set_name": "Github"
} |
const fs = require("fs");
export class CreateProjectParameters {
  public TemplatesDirectory: string;
  public ProjectType: string;
  public ProjectName: string;
  public ProjectDescription: string;
  public ORM: string;
  public Database: string;
  public ConnectionString: string;
  public UseDefaultPlugins: string;
  public OutputDirectory: string;
  public AvailableProductTypes: string[] = ["AspNetCore", "AspNet", "Owin"];
  public AvailableORM: string[] = ["Dapper", "EFCore", "MongoDB", "NHibernate"];
  public AvailableDatabases: any = {
    Dapper: ["MSSQL", "SQLite", "MySQL", "PostgreSQL"],
    EFCore: ["MSSQL", "SQLite", "MySQL", "PostgreSQL", "InMemory"],
    MongoDB: ["MongoDB"],
    NHibernate: ["PostgreSQL", "SQLite", "MySQL", "MSSQL"],
  };
  /**
   * Validate the collected parameters as a chain of guard clauses.
   *
   * Returns `{ isSuccess: true }` when everything is valid, otherwise
   * `{ isSuccess: false, msgPrefix, args? }` describing the first failure.
   * Validation order matters: the ORM is checked before the Database so that
   * `AvailableDatabases[this.ORM]` is only indexed with a known ORM.
   */
  public Check(): any {
    if (this.AvailableProductTypes.indexOf(this.ProjectType) < 0) {
      return {
        isSuccess: false,
        msgPrefix: "ProjectTypeMustBeOneOf",
        args: this.AvailableProductTypes.join(","),
      };
    }
    if (!this.ProjectName) {
      return {
        isSuccess: false,
        msgPrefix: "ProjectNameCantBeEmpty",
      };
    }
    if (this.AvailableORM.indexOf(this.ORM) < 0) {
      return {
        isSuccess: false,
        msgPrefix: "ORMMustBeOneOf",
        args: this.AvailableORM.join(","),
      };
    }
    if (this.AvailableDatabases[this.ORM].indexOf(this.Database) < 0) {
      return {
        isSuccess: false,
        msgPrefix: "DatabaseMustBeOneOf",
        args: this.AvailableDatabases[this.ORM].join(","),
      };
    }
    // An in-memory database is the only one that needs no connection string.
    if (this.Database !== "InMemory" && !this.ConnectionString) {
      return {
        isSuccess: false,
        msgPrefix: "ConnectionStringCantBeEmpty",
      };
    }
    if (!this.OutputDirectory) {
      return {
        isSuccess: false,
        msgPrefix: "OutputDirectoryCantBeEmpty",
      };
    }
    if (this.UseDefaultPlugins) {
      // UseDefaultPlugins holds a path to a plugin-collection JSON file.
      if (!fs.existsSync(this.UseDefaultPlugins)) {
        return {
          isSuccess: false,
          msgPrefix: "DefaultPluginsFileNotFound",
        };
      }
      const pluginCollection = JSON.parse(fs.readFileSync(this.UseDefaultPlugins, "utf8"));
      const ormSupported = pluginCollection.SupportedORM.some((orm: string) => orm === this.ORM);
      if (!ormSupported) {
        return {
          isSuccess: false,
          msgPrefix: "ORMMustBeOneOf",
          args: pluginCollection.SupportedORM.join(","),
        };
      }
    }
    return {
      isSuccess: true,
    };
  }
}
| {
"pile_set_name": "Github"
} |
namespace com.clusterrr.hakchi_gui
{
    /// <summary>
    /// Designer half of the console-selection dialog (NES / Famicom / SNES /
    /// Super Famicom buttons).  Layout and localized text come from the .resx
    /// resources via ApplyResources; edit through the WinForms designer.
    /// </summary>
    partial class SelectConsoleDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;
        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }
        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(SelectConsoleDialog));
            this.labelSelectConsole = new System.Windows.Forms.Label();
            this.buttonNes = new System.Windows.Forms.Button();
            this.buttonFamicom = new System.Windows.Forms.Button();
            this.buttonSnes = new System.Windows.Forms.Button();
            this.buttonSuperFamicom = new System.Windows.Forms.Button();
            this.SuspendLayout();
            // 
            // labelSelectConsole
            // 
            resources.ApplyResources(this.labelSelectConsole, "labelSelectConsole");
            this.labelSelectConsole.Name = "labelSelectConsole";
            // 
            // buttonNes
            // 
            resources.ApplyResources(this.buttonNes, "buttonNes");
            this.buttonNes.Name = "buttonNes";
            this.buttonNes.UseVisualStyleBackColor = true;
            this.buttonNes.Click += new System.EventHandler(this.buttonNes_Click);
            // 
            // buttonFamicom
            // 
            resources.ApplyResources(this.buttonFamicom, "buttonFamicom");
            this.buttonFamicom.Name = "buttonFamicom";
            this.buttonFamicom.UseVisualStyleBackColor = true;
            this.buttonFamicom.Click += new System.EventHandler(this.buttonFamicom_Click);
            // 
            // buttonSnes
            // 
            resources.ApplyResources(this.buttonSnes, "buttonSnes");
            this.buttonSnes.Name = "buttonSnes";
            this.buttonSnes.UseVisualStyleBackColor = true;
            this.buttonSnes.Click += new System.EventHandler(this.buttonSnes_Click);
            // 
            // buttonSuperFamicom
            // 
            resources.ApplyResources(this.buttonSuperFamicom, "buttonSuperFamicom");
            this.buttonSuperFamicom.Name = "buttonSuperFamicom";
            this.buttonSuperFamicom.UseVisualStyleBackColor = true;
            this.buttonSuperFamicom.Click += new System.EventHandler(this.buttonSuperFamicom_Click);
            // 
            // SelectConsoleDialog
            // 
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ControlBox = false;
            this.Controls.Add(this.buttonSuperFamicom);
            this.Controls.Add(this.buttonSnes);
            this.Controls.Add(this.buttonFamicom);
            this.Controls.Add(this.buttonNes);
            this.Controls.Add(this.labelSelectConsole);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedToolWindow;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "SelectConsoleDialog";
            this.ResumeLayout(false);
        }
        #endregion
        // Controls instantiated in InitializeComponent; Click handlers live in
        // the other half of this partial class.
        private System.Windows.Forms.Label labelSelectConsole;
        private System.Windows.Forms.Button buttonNes;
        private System.Windows.Forms.Button buttonFamicom;
        private System.Windows.Forms.Button buttonSnes;
        private System.Windows.Forms.Button buttonSuperFamicom;
    }
} | {
"pile_set_name": "Github"
} |
---
http_interactions:
- request:
method: get
uri: http://ps.pndsn.com/v2/auth/grant/sub-key/sub-a-mock-key?auth=ruby-authkey&pnsdk=PubNub-Ruby/4.1.0&r=0&signature=WcY3UFpAJZ8pWNbzqp3Hhez_jdYL7omiywfMqME8C-o=×tamp=1464194054&ttl=0&uuid=ruby-test-uuid-client-one&w=0
body:
encoding: UTF-8
string: ''
headers:
User-Agent:
- HTTPClient/1.0 (2.8.0, ruby 2.3.0 (2015-12-25))
Accept:
- "*/*"
Date:
- Wed, 25 May 2016 16:34:14 GMT
response:
status:
code: 400
message: Bad Request
headers:
Server:
- nginx
Date:
- Wed, 25 May 2016 16:34:14 GMT
Content-Type:
- text/javascript; charset=UTF-8
Transfer-Encoding:
- chunked
Connection:
- close
Access-Control-Allow-Origin:
- "*"
Access-Control-Allow-Methods:
- GET
body:
encoding: UTF-8
string: '{"message":"Auth-only grants are reserved for future use","error":true,"service":"Access
Manager","status":400}
'
http_version:
recorded_at: Wed, 25 May 2016 16:34:14 GMT
recorded_with: VCR 3.0.1
| {
"pile_set_name": "Github"
} |
/*****************************************************************************
Copyright (c) 2014, Intel Corp.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************
* Contents: Native middle-level C interface to LAPACK function cgeqrf
* Author: Intel Corporation
* Generated November 2015
*****************************************************************************/
#include "lapacke_utils.h"
/* Middle-level wrapper for LAPACK cgeqrf (complex single-precision QR
 * factorization).  Column-major input is forwarded to Fortran directly;
 * row-major input is transposed into a temporary column-major copy first.
 * Returns 0 on success, a negative value identifying a bad argument, or
 * LAPACK_TRANSPOSE_MEMORY_ERROR if the temporary allocation fails. */
lapack_int LAPACKE_cgeqrf_work( int matrix_layout, lapack_int m, lapack_int n,
                                lapack_complex_float* a, lapack_int lda,
                                lapack_complex_float* tau,
                                lapack_complex_float* work, lapack_int lwork )
{
    lapack_int info = 0;
    if( matrix_layout == LAPACK_COL_MAJOR ) {
        /* Call LAPACK function and adjust info */
        LAPACK_cgeqrf( &m, &n, a, &lda, tau, work, &lwork, &info );
        if( info < 0 ) {
            /* shift the failing-argument index reported by Fortran
             * (presumably to account for the extra matrix_layout argument
             * of the C interface -- standard LAPACKE convention) */
            info = info - 1;
        }
    } else if( matrix_layout == LAPACK_ROW_MAJOR ) {
        lapack_int lda_t = MAX(1,m);
        lapack_complex_float* a_t = NULL;
        /* Check leading dimension(s): row-major lda counts columns */
        if( lda < n ) {
            info = -5;
            LAPACKE_xerbla( "LAPACKE_cgeqrf_work", info );
            return info;
        }
        /* Query optimal working array(s) size if requested */
        if( lwork == -1 ) {
            LAPACK_cgeqrf( &m, &n, a, &lda_t, tau, work, &lwork, &info );
            return (info < 0) ? (info - 1) : info;
        }
        /* Allocate memory for temporary array(s) */
        a_t = (lapack_complex_float*)
            LAPACKE_malloc( sizeof(lapack_complex_float) * lda_t * MAX(1,n) );
        if( a_t == NULL ) {
            info = LAPACK_TRANSPOSE_MEMORY_ERROR;
            goto exit_level_0;
        }
        /* Transpose input matrices */
        LAPACKE_cge_trans( matrix_layout, m, n, a, lda, a_t, lda_t );
        /* Call LAPACK function and adjust info */
        LAPACK_cgeqrf( &m, &n, a_t, &lda_t, tau, work, &lwork, &info );
        if( info < 0 ) {
            info = info - 1;
        }
        /* Transpose output matrices back into the caller's row-major a */
        LAPACKE_cge_trans( LAPACK_COL_MAJOR, m, n, a_t, lda_t, a, lda );
        /* Release memory and exit */
        LAPACKE_free( a_t );
exit_level_0:
        if( info == LAPACK_TRANSPOSE_MEMORY_ERROR ) {
            LAPACKE_xerbla( "LAPACKE_cgeqrf_work", info );
        }
    } else {
        /* Unknown layout constant */
        info = -1;
        LAPACKE_xerbla( "LAPACKE_cgeqrf_work", info );
    }
    return info;
}
| {
"pile_set_name": "Github"
} |
package walparser
// BlockLocation identifies a single data block: the relation it belongs to
// plus the block number inside that relation.
type BlockLocation struct {
	RelationFileNode RelFileNode // tablespace/database/relation identifier -- see RelFileNode
	BlockNo uint32 // block number within the relation
}
// NewBlockLocation builds a BlockLocation for the relation identified by the
// given tablespace/database/relation oids and the given block number.
func NewBlockLocation(spcNode, dbNode, relNode Oid, blockNo uint32) *BlockLocation {
	location := BlockLocation{
		RelationFileNode: RelFileNode{
			SpcNode: spcNode,
			DBNode:  dbNode,
			RelNode: relNode,
		},
		BlockNo: blockNo,
	}
	return &location
}
| {
"pile_set_name": "Github"
} |
%apply size_t { eastl_size_t };
%apply const size_t& { const eastl_size_t& };
%include "eastl_vector.i"
// Instantiate the eastl::vector proxy classes exposed to the target language.
// Each %template name must be unique within the module: SWIG binds the first
// instantiation and a second use of the same name is a conflict.
%template(StringHashList) eastl::vector<Urho3D::StringHash>;
%template(Vector2List) eastl::vector<Urho3D::Vector2>;
%template(Vector3List) eastl::vector<Urho3D::Vector3>;
%template(Vector3Matrix) eastl::vector<eastl::vector<Urho3D::Vector3>>;
%template(Vector4List) eastl::vector<Urho3D::Vector4>;
%template(IntVector2List) eastl::vector<Urho3D::IntVector2>;
%template(IntVector3List) eastl::vector<Urho3D::IntVector3>;
%template(QuaternionList) eastl::vector<Urho3D::Quaternion>;
%template(RectList) eastl::vector<Urho3D::Rect>;
%template(IntRectList) eastl::vector<Urho3D::IntRect>;
%template(Matrix3x4List) eastl::vector<Urho3D::Matrix3x4>;
%template(BoolArray) eastl::vector<bool>;
%template(CharArray) eastl::vector<char>;
%template(ShortArray) eastl::vector<short>;
%template(IntArray) eastl::vector<int>;
%template(UCharArray) eastl::vector<unsigned char>;
%template(UShortArray) eastl::vector<unsigned short>;
%template(UIntArray) eastl::vector<unsigned int>;
%template(FloatArray) eastl::vector<float>;
%template(DoubleArray) eastl::vector<double>;
%template(ObjectList) eastl::vector<Urho3D::Object*>;
%template(SoundSourceList) eastl::vector<Urho3D::SoundSource*>;
//%template(BatchList) eastl::vector<Urho3D::Batch*>;
//%template(BatchGroupList) eastl::vector<Urho3D::BatchGroup*>;
%template(ComponentList) eastl::vector<Urho3D::Component*>;
%template(ComponentRefList) eastl::vector<Urho3D::SharedPtr<Urho3D::Component>>;
%template(DrawableList) eastl::vector<Urho3D::Drawable*>;
%template(ImageList) eastl::vector<Urho3D::Image*>;
%template(LightList) eastl::vector<Urho3D::Light*>;
%template(NodeList) eastl::vector<Urho3D::Node*>;
%template(NodeRefList) eastl::vector<Urho3D::SharedPtr<Urho3D::Node>>;
%template(PassList) eastl::vector<Urho3D::Pass*>;
%template(ReplicationStateList) eastl::vector<Urho3D::ReplicationState*>;
%template(ResourceList) eastl::vector<Urho3D::Resource*>;
//%template(RigidBodyList) eastl::vector<Urho3D::RigidBody*>;
%template(UIElementList) eastl::vector<Urho3D::UIElement*>;
%template(UIElementRefList) eastl::vector<Urho3D::SharedPtr<Urho3D::UIElement>>;
%template(VAnimEventFrameList) eastl::vector<const Urho3D::VAnimEventFrame*>;
%template(VertexElementList) eastl::vector<Urho3D::VertexElement>;
%template(VertexBufferList) eastl::vector<Urho3D::VertexBuffer*>;
%template(VertexBufferRefList) eastl::vector<Urho3D::SharedPtr<Urho3D::VertexBuffer>>;
%template(IndexBufferList) eastl::vector<Urho3D::IndexBuffer*>;
%template(IndexBufferRefList) eastl::vector<Urho3D::SharedPtr<Urho3D::IndexBuffer>>;
%template(BillboardList) eastl::vector<Urho3D::Billboard>;
%template(DecalVertexList) eastl::vector<Urho3D::DecalVertex>;
%template(CustomGeometryVerticesList) eastl::vector<Urho3D::CustomGeometryVertex>;
%template(CustomGeometryVerticesMatrix) eastl::vector<eastl::vector<Urho3D::CustomGeometryVertex>>;
%template(RayQueryResultList) eastl::vector<Urho3D::RayQueryResult>;
%template(SourceBatchList) eastl::vector<Urho3D::SourceBatch>;
%template(CameraList) eastl::vector<Urho3D::Camera*>;
%template(StringList) eastl::vector<eastl::string>;
%template(VariantList) eastl::vector<Urho3D::Variant>;
%template(AttributeInfoList) eastl::vector<Urho3D::AttributeInfo>;
%template(JSONList) eastl::vector<Urho3D::JSONValue>;
%template(PListValueList) eastl::vector<Urho3D::PListValue>;
%template(PackageFileList) eastl::vector<Urho3D::SharedPtr<Urho3D::PackageFile>>;
%template(Texture2DList) eastl::vector<Urho3D::SharedPtr<Urho3D::Texture2D>>;
%template(ComponentVector2) eastl::vector<Urho3D::WeakPtr<Urho3D::Component>>;
//%template(VAnimKeyFrameList) eastl::vector<Urho3D::VAnimKeyFrame>; // some issue with const
%template(GeometryList) eastl::vector<Urho3D::SharedPtr<Urho3D::Geometry>>;
//%template(ConnectionList) eastl::vector<Urho3D::SharedPtr<Urho3D::Connection>>;
%template(GeometriesList) eastl::vector<eastl::vector<Urho3D::SharedPtr<Urho3D::Geometry>>>;
%template(RenderPathCommandList) eastl::vector<Urho3D::RenderPathCommand>;
%template(RenderTargetInfoList) eastl::vector<Urho3D::RenderTargetInfo>;
%template(BonesList) eastl::vector<Urho3D::Bone>;
%template(AnimationControlList) eastl::vector<Urho3D::AnimationControl>;
%template(ModelMorphList) eastl::vector<Urho3D::ModelMorph>;
%template(AnimationStateList) eastl::vector<Urho3D::SharedPtr<Urho3D::AnimationState>>;
%template(UIntArrayList) eastl::vector<eastl::vector<unsigned int>>;
%template(Matrix3x4ArrayList) eastl::vector<eastl::vector<Urho3D::Matrix3x4>>;
%template(AnimationKeyFrameList) eastl::vector<Urho3D::AnimationKeyFrame>;
%template(AnimationTrackList) eastl::vector<Urho3D::AnimationTrack>;
%template(AnimationTriggerPointList) eastl::vector<Urho3D::AnimationTriggerPoint>;
%template(ShaderVariationList) eastl::vector<Urho3D::SharedPtr<Urho3D::ShaderVariation>>;
%template(ColorFrameList) eastl::vector<Urho3D::ColorFrame>;
%template(TextureFrameList) eastl::vector<Urho3D::TextureFrame>;
%template(TechniqueEntryList) eastl::vector<Urho3D::TechniqueEntry>;
// Disabled: this re-used the name CustomGeometryVerticesList (bound above to
// eastl::vector<Urho3D::CustomGeometryVertex>) for a different element type;
// the nested vector is already exported as CustomGeometryVerticesMatrix.
//%template(CustomGeometryVerticesList) eastl::vector<eastl::vector<Urho3D::CustomGeometryVertex>>;
| {
"pile_set_name": "Github"
} |
# LSB Oracle (crypto 150)
###ENG
[PL](#pl-version)
The task was pretty much the same idea as https://github.com/p4-team/ctf/tree/master/2016-04-15-plaid-ctf/crypto_rabit with the exception that in Plaid CTF we had Rabin cryptosystem and there it was RSA.
We get a [binary](lsb_oracle.vmp.exe.zip) which can give us RSA public key and also it can tell us LSB of decrypted ciphertext.
We also get an encrypted flag.
We approach it the same way as for Rabit on Plaid CTF - we can multiply the plaintext by 2 if we multiply the ciphertext by `pow(2,e,n)`.
This is because:
```
ct = pt^e mod n
ct' = ct * 2^e mod n = pt^e mod n * 2^e mod n = 2pt^e mod n
ct'^d = (2pt^e mod n)^d mod n = 2pt^ed mod n = 2pt mod n
```
LSB from oracle tells us if the plaintext is even or odd.
Modulus `n` is a product of 2 large primes, so it has to be odd.
`2*x` has to be even.
This means that if LSB of `2*x mod n` is 0 (number is still even) this number was smaller than modulus `n`.
Otherwise the number was bigger than modulus.
We can combine this using binary search approach to get upper and lower bounds of the flag in relation to `n`.
We used a Python script for this (slightly more accurate than the one in Rabit, which was messing up the last character):
```python
from subprocess import Popen, PIPE
from Crypto.Util.number import long_to_bytes
def oracle(ciphertext):
    """Ask the challenge binary for the least significant bit of the
    decryption of *ciphertext*.

    Spawns ``lsb_oracle.vmp.exe /decrypt``, feeds it the ciphertext followed
    by ``-1`` (the tool's quit command), and extracts the answer bit from its
    stdout.

    NOTE(review): written for Python 2 -- under Python 3, ``communicate``
    would need bytes and ``result[0][97]`` would be an integer byte value,
    not a digit character.  The fixed offset 97 into stdout is specific to
    this tool's output format; confirm against the binary.
    """
    print("sent ciphertext " + str(ciphertext))
    p = Popen(['lsb_oracle.vmp.exe', '/decrypt'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    result = p.communicate(str(ciphertext) + "\n-1")
    lsb = int(result[0][97])  # the answer digit sits at a fixed offset in stdout
    print(lsb, result)
    return lsb
def brute_flag(encrypted_flag, n, e, oracle_fun):
    """Recover the RSA plaintext of *encrypted_flag* using an LSB oracle.

    Repeatedly multiplies the hidden plaintext by 2 (by multiplying the
    ciphertext with ``pow(2, e, n)``) and asks ``oracle_fun`` for the parity
    of the result.  Since n is odd, the parity reveals whether the doubled
    value wrapped around the modulus, which narrows the interval
    [flag_lower_bound, flag_upper_bound] like a binary search.  Converges
    after about log2(n) oracle queries; returns the upper bound, which ends
    within 1 of the plaintext.

    ``oracle_fun(c)`` must return the least significant bit (0 or 1) of the
    decryption of ciphertext ``c``.
    """

    def to_bytes(value):
        # Stand-in for Crypto.Util.number.long_to_bytes, used only for the
        # progress printout, so this routine has no third-party dependency.
        return value.to_bytes(max(1, (value.bit_length() + 7) // 8), "big")

    flag_count = n_count = 1
    flag_lower_bound = 0
    flag_upper_bound = n
    ciphertext = encrypted_flag
    mult = 1
    while flag_upper_bound > flag_lower_bound + 1:
        ciphertext = (ciphertext * pow(2, e, n)) % n
        flag_count *= 2
        n_count = n_count * 2 - 1
        print("upper = %d" % flag_upper_bound)
        print("upper flag = %s" % to_bytes(flag_upper_bound))
        print("lower = %d" % flag_lower_bound)
        print("lower flag = %s" % to_bytes(flag_lower_bound))
        print("bit = %d" % mult)
        mult += 1
        # Floor division (//) keeps the bounds exact integers.  The original
        # "/" was integer division only under Python 2; under Python 3 it
        # yields floats, which cannot represent 1024-bit values and silently
        # corrupt the search.
        if oracle_fun(ciphertext) == 0:
            flag_upper_bound = n * n_count // flag_count
        else:
            flag_lower_bound = n * n_count // flag_count
            n_count += 1
    return flag_upper_bound
def main():
n = 120357855677795403326899325832599223460081551820351966764960386843755808156627131345464795713923271678835256422889567749230248389850643801263972231981347496433824450373318688699355320061986161918732508402417281836789242987168090513784426195519707785324458125521673657185406738054328228404365636320530340758959
ct = 2201077887205099886799419505257984908140690335465327695978150425602737431754769971309809434546937184700758848191008699273369652758836177602723960420562062515168299835193154932988833308912059796574355781073624762083196012981428684386588839182461902362533633141657081892129830969230482783192049720588548332813
print(long_to_bytes(brute_flag(ct, n, 65537, oracle)))
main()
```
And after a short while we got the flag: `SharifCTF{65d7551577a6a613c99c2b4023039b0a}`
Sadly the flag was at the very end of the plaintext so we had to wait for the whole 1024 bits.
###PL version
Zadanie jest generalnie bardzo podobne do https://github.com/p4-team/ctf/tree/master/2016-04-15-plaid-ctf/crypto_rabit z tą różnicą że na Plaid CTF szyfrowanie odbywało się algorytmem Rabina a tutaj było to RSA.
Dostajemy [binarke](lsb_oracle.vmp.exe.zip) która podaje nam klucz publiczny RSA i potrafi powiedzieć czy najniższy bit plaintextu jest 0 czy 1.
Dostajemy też zaszyfrowaną flagę.
Nasze podejście jest takie samo jak dla Rabit z Plaid CTF - możemy mnożyć plaintext przez 2 poprzez mnożenie ciphertextu przez `pow(2,e,n)`.
Wynika to z tego, że:
```
ct = pt^e mod n
ct' = ct * 2^e mod n = pt^e mod n * 2^e mod n = 2pt^e mod n
ct'^d = (2pt^e mod n)^d mod n = 2pt^ed mod n = 2pt mod n
```
Wyrocznia najniższego bitu mówi nam czy plaintext jest parzysty czy nieparzysty.
Modulus `n` jest iloczynem 2 dużych liczb pierwszych więc musi być nieparzysty.
`2*x` musi być parzyste.
To oznacza, że jeśli LSB `2*x mod n` jest 0 (liczba nadal jest parzysta) to liczba musiała być mniejsza od `n`.
W innym wypadku liczba była większa od `n`.
Możemy to uogólnić i użyć szukania binarnego, aby uzyskać dolne i górne ograniczenie dla flagi, względem `n`.
Wykorzystaliśmy do tego skrypt (trochę bardziej dokładny od tego z Rabit, który psuł ostatni znak):
```python
from subprocess import Popen, PIPE
from Crypto.Util.number import long_to_bytes
def oracle(ciphertext):
    """Ask the challenge binary for the least significant bit of the
    decryption of *ciphertext*.

    Spawns ``lsb_oracle.vmp.exe /decrypt``, feeds it the ciphertext followed
    by ``-1`` (the tool's quit command), and extracts the answer bit from its
    stdout.

    NOTE(review): written for Python 2 -- under Python 3, ``communicate``
    would need bytes and ``result[0][97]`` would be an integer byte value,
    not a digit character.  The fixed offset 97 into stdout is specific to
    this tool's output format; confirm against the binary.
    """
    print("sent ciphertext " + str(ciphertext))
    p = Popen(['lsb_oracle.vmp.exe', '/decrypt'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    result = p.communicate(str(ciphertext) + "\n-1")
    lsb = int(result[0][97])  # the answer digit sits at a fixed offset in stdout
    print(lsb, result)
    return lsb
def brute_flag(encrypted_flag, n, e, oracle_fun):
    """Recover the RSA plaintext of *encrypted_flag* using an LSB oracle.

    Repeatedly multiplies the hidden plaintext by 2 (by multiplying the
    ciphertext with ``pow(2, e, n)``) and asks ``oracle_fun`` for the parity
    of the result.  Since n is odd, the parity reveals whether the doubled
    value wrapped around the modulus, which narrows the interval
    [flag_lower_bound, flag_upper_bound] like a binary search.  Converges
    after about log2(n) oracle queries; returns the upper bound, which ends
    within 1 of the plaintext.

    ``oracle_fun(c)`` must return the least significant bit (0 or 1) of the
    decryption of ciphertext ``c``.
    """

    def to_bytes(value):
        # Stand-in for Crypto.Util.number.long_to_bytes, used only for the
        # progress printout, so this routine has no third-party dependency.
        return value.to_bytes(max(1, (value.bit_length() + 7) // 8), "big")

    flag_count = n_count = 1
    flag_lower_bound = 0
    flag_upper_bound = n
    ciphertext = encrypted_flag
    mult = 1
    while flag_upper_bound > flag_lower_bound + 1:
        ciphertext = (ciphertext * pow(2, e, n)) % n
        flag_count *= 2
        n_count = n_count * 2 - 1
        print("upper = %d" % flag_upper_bound)
        print("upper flag = %s" % to_bytes(flag_upper_bound))
        print("lower = %d" % flag_lower_bound)
        print("lower flag = %s" % to_bytes(flag_lower_bound))
        print("bit = %d" % mult)
        mult += 1
        # Floor division (//) keeps the bounds exact integers.  The original
        # "/" was integer division only under Python 2; under Python 3 it
        # yields floats, which cannot represent 1024-bit values and silently
        # corrupt the search.
        if oracle_fun(ciphertext) == 0:
            flag_upper_bound = n * n_count // flag_count
        else:
            flag_lower_bound = n * n_count // flag_count
            n_count += 1
    return flag_upper_bound
def main():
n = 120357855677795403326899325832599223460081551820351966764960386843755808156627131345464795713923271678835256422889567749230248389850643801263972231981347496433824450373318688699355320061986161918732508402417281836789242987168090513784426195519707785324458125521673657185406738054328228404365636320530340758959
ct = 2201077887205099886799419505257984908140690335465327695978150425602737431754769971309809434546937184700758848191008699273369652758836177602723960420562062515168299835193154932988833308912059796574355781073624762083196012981428684386588839182461902362533633141657081892129830969230482783192049720588548332813
print(long_to_bytes(brute_flag(ct, n, 65537, oracle)))
main()
```
I po chwili dostaliśmy flagę: `SharifCTF{65d7551577a6a613c99c2b4023039b0a}`
Niestety flaga była na samym końcu plaintextu więc musieliśmy czekać na całe 1024 bity.
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License, version 2.0,
* as published by the Free Software Foundation.
*
* This program is also distributed with certain software (including
* but not limited to OpenSSL) that is licensed under separate terms,
* as designated in a particular file or component or in included license
* documentation. The authors of MySQL hereby grant you an additional
* permission to link the program and your derivative works with the
* separately licensed software that they have included with MySQL.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License, version 2.0, for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef PLUGIN_X_SRC_HELPER_MULTITHREAD_LOCK_CONTAINER_H_
#define PLUGIN_X_SRC_HELPER_MULTITHREAD_LOCK_CONTAINER_H_
#include <utility>
namespace xpl {
// Couples a pointer to a container with a Locker object constructed from a
// Lock, so the container can only be reached while the Locker member is
// alive.  NOTE(review): assumes Locker acquires in its constructor and
// releases in its destructor (scoped-lock style) -- confirm at call sites.
template <typename Container, typename Locker, typename Lock>
class Locked_container {
 public:
  Locked_container(Container *container, Lock *lock)
      : m_locker(lock), m_ptr(container) {}

  // Move transfers both the lock and the container pointer; the moved-from
  // object is left detached (null container).
  Locked_container(Locked_container &&locked_container)
      : m_locker(std::move(locked_container.m_locker)),
        m_ptr(locked_container.m_ptr) {
    locked_container.m_ptr = nullptr;
  }

  // Copying was already implicitly deleted by the user-declared move
  // constructor; spell it out so the intent is explicit.
  Locked_container(const Locked_container &) = delete;
  Locked_container &operator=(const Locked_container &) = delete;

  // Access the guarded container (valid while this object is alive).
  Container *operator->() { return m_ptr; }
  Container *container() { return m_ptr; }

 private:
  Locker m_locker;   // holds the lock for the lifetime of this object
  Container *m_ptr;  // guarded container; null after being moved from
};
} // namespace xpl
#endif // PLUGIN_X_SRC_HELPER_MULTITHREAD_LOCK_CONTAINER_H_
| {
"pile_set_name": "Github"
} |
# vuex-mock-store [](https://circleci.com/gh/posva/vuex-mock-store) [](https://www.npmjs.com/package/vuex-mock-store) [](https://codecov.io/github/posva/vuex-mock-store) [](https://github.com/posva/thanks)
> Simple and straightforward mock for Vuex v3.x Store
Automatically creates spies on `commit` and `dispatch` so you can focus on testing your component without executing your store code.
**Help me keep working on Open Source in a sustainable way 🚀**. Help me with as little as \$1 a month, [sponsor me on Github](https://github.com/sponsors/posva).
<h3 align="center">Silver Sponsors</h3>
<p align="center">
<a href="https://www.vuemastery.com" title="Vue Mastery" target="_blank">
<img src="https://www.vuemastery.com/images/lgo-vuemastery.svg" alt="Vue Mastery logo" height="48px">
</a>
</p>
<p align="center">
<a href="https://vuetifyjs.com" target="_blank" title="Vuetify">
<img src="https://vuejs.org/images/vuetify.png" alt="Vuetify logo" height="48px">
</a>
</p>
<h3 align="center">Bronze Sponsors</h3>
<p align="center">
<a href="https://www.storyblok.com" target="_blank" title="Storyblok">
<img src="https://a.storyblok.com/f/51376/3856x824/fea44d52a9/colored-full.png" alt="Storyblok logo" height="32px">
</a>
</p>
---
## Installation
```sh
npm install -D vuex-mock-store
# with yarn
yarn add -D vuex-mock-store
```
## Usage
ℹ️: _All examples use [Jest](https://jestjs.io) API_. See [below](#providing-custom-spies) to use a different mock library.
Usage with [vue-test-utils](https://github.com/vuejs/vue-test-utils):
Given a component `MyComponent.vue`:
```vue
<template>
<div>
<p class="count">{{ count }}</p>
<p class="doubleCount">{{ doubleCount }}</p>
<button class="increment" @click="increment">+</button>
<button class="decrement" @click="decrement">-</button>
<hr />
<button class="save" @click="save({ count })">Save</button>
</div>
</template>
<script>
import { mapState, mapGetters, mapActions, mapMutations } from 'vuex'
export default {
computed: {
...mapState(['count']),
...mapGetters(['doubleCount']),
},
methods: {
...mapMutations(['increment', 'decrement']),
...mapActions(['save']),
},
}
</script>
```
You can test interactions without relying on the behaviour of your actions and mutations:
```js
import { Store } from 'vuex-mock-store'
import { mount } from '@vue/test-utils'
import MyComponent from '@/components/MyComponent.vue'
// create the Store mock
const store = new Store({
state: { count: 0 },
getters: { doubleCount: 0 },
})
// add other mocks here so they are accessible in every component
const mocks = {
$store: store,
}
// reset spies, initial state and getters
afterEach(() => store.reset())
describe('MyComponent.vue', () => {
let wrapper
beforeEach(() => {
wrapper = mount(MyComponent, { mocks })
})
it('calls increment', () => {
wrapper.find('button.increment').trigger('click')
expect(store.commit).toHaveBeenCalledOnce()
expect(store.commit).toHaveBeenCalledWith('increment')
})
it('dispatch save with count', () => {
wrapper.find('button.save').trigger('click')
expect(store.dispatch).toHaveBeenCalledOnce()
expect(store.dispatch).toHaveBeenCalledWith('save', { count: 0 })
})
})
```
⚠️ The mocked `dispatch` method returns `undefined` instead of a Promise. If you rely on this, you will have to call the appropriate function to make the `dispatch` spy return a Promise:
```js
store.dispatch.mockReturnValue(Promise.resolve(42))
```
If you are using Jest, you can check the documentation [here](https://jestjs.io/docs/en/mock-function-api#mockfnmockreturnvaluevalue)
### Initial state and getters
You can provide a `getters`, and `state` object to mock them:
```js
const store = new Store({
getters: {
name: 'Eduardo',
},
state: {
counter: 0,
},
})
```
### Modules
#### State
To mock module's `state`, provide a nested object in `state` with the same name of the module. As if you were writing the state yourself:
```js
new Store({
state: {
value: 'from root',
moduleA: {
value: 'from A',
moduleC: {
value: 'from A/C',
},
},
moduleB: {
value: 'from B',
},
},
})
```
That will cover the following calls:
```js
import { mapState } from 'vuex'
mapState(['value']) // from root
mapState('moduleA', ['value']) // from A
mapState('moduleB', ['value']) // from B
mapState('moduleA/moduleC', ['value']) // from C
```
_When testing `state`, it doesn't change anything for the module to be namespaced or not_
#### Getters
To mock module's `getters`, provide the correct name based on whether the module is _namespaced_ or not. Given the following modules:
```js
const moduleA = {
namespaced: true,
getters: {
getter: () => 'from A',
},
// nested modules
modules: {
moduleC: {
namespaced: true,
getter: () => 'from A/C',
},
moduleD: {
// not namespaced!
getter: () => 'from A/D',
},
},
}
const moduleB = {
// not namespaced
getters: {
getter: () => 'from B',
},
}
new Vuex.Store({ modules: { moduleA, moduleC } })
```
We need to use the following getters:
```js
new Store({
getters: {
getter: 'from root',
'moduleA/getter': 'from A',
'moduleA/moduleC/getter': 'from A/C',
'moduleA/getter': 'from A/D', // moduleD isn't namespaced
'moduleB/getter': 'from B',
},
})
```
#### Actions/Mutations
As with _getters_, testing actions and mutations depends whether your [modules are namespaced](https://vuex.vuejs.org/guide/modules.html#namespacing) or not. If they are namespaced, make sure to provide the full action/mutation name:
```js
// namespaced module
expect(store.commit).toHaveBeenCalledWith('moduleA/setValue')
expect(store.dispatch).toHaveBeenCalledWith('moduleA/postValue')
// non-namespaced, but could be inside of a module
expect(store.commit).toHaveBeenCalledWith('setValue')
expect(store.dispatch).toHaveBeenCalledWith('postValue')
```
_Refer to the module example below using `getters` for a more detailed example, even though it is using only `getters`, it's exactly the same for `actions` and `mutations`_
### Mutating `state`, providing custom `getters`
You can [modify](#state) the `state` and `getters` directly for any test. Calling [`store.reset()`](#reset) will reset them to the initial values provided.
## API
### `Store` class
#### `constructor(options)`
- `options`
- `state`: initial state object, _default_: `{}`
- `getters`: getters object, _default_: `{}`
- `spy`: interface to create spies. [details below](#providing-custom-spies)
#### `state`
Store state. You can directly modify it to change state:
```js
store.state.name = 'Jeff'
```
#### `getters`
Store getters. You can directly modify it to change a value:
```js
store.getters.upperCaseName = 'JEFF'
```
ℹ️ _Why no functions?_: if you provide a function to a getter, you're reimplementing it. During a test, you know the value, you should be able to provide it directly and be **completely sure** about the value that will be used in the component you are testing.
#### `reset`
Reset `commit` and `dispatch` spies and restore `getters` and `state` to their initial values
#### Providing custom spies
By default, the Store will call `jest.fn()` to create the spies. This will throw an error if you are using `mocha` or any other test framework that isn't Jest. In that situation, you will have to provide an interface to _create_ spies. This is the default interface that uses `jest.fn()`:
```js
new Store({
spy: {
create: handler => jest.fn(handler),
},
})
```
The handler is an optional argument that mocks the implementation of the spy.
If you use Jest, you don't need to do anything.
If you are using something else like [Sinon](https://sinonjs.org), you could provide this interface:
```js
import sinon from 'sinon'
new Store({
spy: {
create: handler => sinon.spy(handler),
},
})
```
### `commit` & `dispatch`
Spies. Dependent on the testing framework
- [jest.fn](https://jestjs.io/docs/en/jest-object#jestfnimplementation)
- [sinon.spy](https://sinonjs.org/releases/v6.3.4/spies)
## Related
- [vue-test-utils](https://github.com/vuejs/vue-test-utils)
- [vuex](https://github.com/vuejs/vuex)
## License
[MIT](http://opensource.org/licenses/MIT)
| {
"pile_set_name": "Github"
} |
/****************************************************************************
****************************************************************************
***
*** This header was automatically generated from a Linux kernel header
*** of the same name, to make information necessary for userspace to
*** call into the kernel available to libc. It contains only constants,
*** structures, and macros generated from the original header, and thus,
*** contains no copyrightable information.
***
*** To edit the content of this header, modify the corresponding
*** source file (e.g. under external/kernel-headers/original/) then
*** run bionic/libc/kernel/tools/update_all.py
***
*** Any manual change here will be lost the next time this script will
*** be run. You've been warned!
***
****************************************************************************
****************************************************************************/
#ifndef _XT_RATEEST_MATCH_H
#define _XT_RATEEST_MATCH_H
#include <linux/types.h>
#include <linux/if.h>
enum xt_rateest_match_flags {
XT_RATEEST_MATCH_INVERT = 1 << 0,
XT_RATEEST_MATCH_ABS = 1 << 1,
XT_RATEEST_MATCH_REL = 1 << 2,
XT_RATEEST_MATCH_DELTA = 1 << 3,
XT_RATEEST_MATCH_BPS = 1 << 4,
XT_RATEEST_MATCH_PPS = 1 << 5,
};
enum xt_rateest_match_mode {
XT_RATEEST_MATCH_NONE,
XT_RATEEST_MATCH_EQ,
XT_RATEEST_MATCH_LT,
XT_RATEEST_MATCH_GT,
};
struct xt_rateest_match_info {
char name1[IFNAMSIZ];
char name2[IFNAMSIZ];
__u16 flags;
__u16 mode;
__u32 bps1;
__u32 pps1;
__u32 bps2;
__u32 pps2;
struct xt_rateest * est1 __attribute__((aligned(8)));
struct xt_rateest * est2 __attribute__((aligned(8)));
};
#endif
| {
"pile_set_name": "Github"
} |
#ifndef NTL_vec_vec_long__H
#define NTL_vec_vec_long__H

#include <NTL/vec_long.h>

NTL_OPEN_NNS

// vec_vec_long: a Vec of Vec<long>, i.e. a (possibly ragged) two-dimensional
// array of long values, placed inside NTL's namespace via the NNS macros.
typedef Vec< Vec<long> > vec_vec_long;

NTL_CLOSE_NNS

#endif
| {
"pile_set_name": "Github"
} |
From 9af545e2824df54e8cef7f3a62d96c37360cb0a0 Mon Sep 17 00:00:00 2001
From: Christian Hewitt <[email protected]>
Date: Fri, 24 Apr 2020 03:09:12 +0000
Subject: [PATCH 2/3] boards: amlogic: add Odroid C4 support
Odroid C4 is an SM1 device, the board config is adapted from VIM3L and
README is based on the README.odroid-n2 from the same vendor.
Signed-off-by: Christian Hewitt <[email protected]>
Signed-off-by: Neil Armstrong <[email protected]>
---
board/amlogic/w400/MAINTAINERS | 1 +
board/amlogic/w400/README.odroid-c4 | 134 ++++++++++++++++++++++++++++
configs/odroid-c4_defconfig | 62 +++++++++++++
3 files changed, 197 insertions(+)
create mode 100644 board/amlogic/w400/README.odroid-c4
create mode 100644 configs/odroid-c4_defconfig
diff --git a/board/amlogic/w400/MAINTAINERS b/board/amlogic/w400/MAINTAINERS
index 2ff90039..b28dd7f0 100644
--- a/board/amlogic/w400/MAINTAINERS
+++ b/board/amlogic/w400/MAINTAINERS
@@ -6,3 +6,4 @@ F: board/amlogic/w400/
F: configs/khadas-vim3_defconfig
F: configs/khadas-vim3l_defconfig
F: configs/odroid-n2_defconfig
+F: configs/odroid-c4_defconfig
diff --git a/board/amlogic/w400/README.odroid-c4 b/board/amlogic/w400/README.odroid-c4
new file mode 100644
index 00000000..b1bca758
--- /dev/null
+++ b/board/amlogic/w400/README.odroid-c4
@@ -0,0 +1,134 @@
+U-Boot for ODROID-C4
+====================
+
+ODROID-C4 is a single board computer manufactured by Hardkernel
+Co. Ltd with the following specifications:
+
+ - Amlogic S905X3 Arm Cortex-A55 quad-core SoC
+ - 2GB or 4GB LPDDR4 SDRAM
+ - Gigabit Ethernet
+ - HDMI 2.1 display
+ - 40-pin GPIO header
+ - 7-pin GPIO expansion header
+ - 4x USB 3.0 Host
+ - 1x USB 2.0 Host/OTG (micro)
+ - eMMC, microSD
+ - UART serial
+ - Infrared receiver
+
+Schematics are available on the manufacturer website.
+
+Currently the U-Boot port supports the following devices:
+ - serial
+ - eMMC, microSD
+ - Ethernet
+ - I2C
+ - Regulators
+ - Reset controller
+ - Clock controller
+ - ADC
+
+u-boot compilation
+==================
+
+ > export ARCH=arm
+ > export CROSS_COMPILE=aarch64-none-elf-
+ > make odroid-c4_defconfig
+ > make
+
+Image creation
+==============
+
+Amlogic doesn't provide sources for the firmware and for tools needed
+to create the bootloader image, so it is necessary to obtain them from
+the git tree published by the board vendor:
+
+ > wget https://releases.linaro.org/archive/13.11/components/toolchain/binaries/gcc-linaro-aarch64-none-elf-4.8-2013.11_linux.tar.xz
+ > wget https://releases.linaro.org/archive/14.04/components/toolchain/binaries/gcc-linaro-arm-none-eabi-4.8-2014.04_linux.tar.xz
+ > tar xvfJ gcc-linaro-aarch64-none-elf-4.8-2013.11_linux.tar.xz
+ > tar xvf gcc-linaro-arm-none-eabi-4.8-2014.04_linux.tar.xz
+ > export PATH=$PWD/gcc-linaro-aarch64-none-elf-4.8-2013.11_linux/bin:$PWD/gcc-linaro-arm-none-eabi-4.8-2014.04_linux/bin:$PATH
+
+ > DIR=odroidc4-u-boot
+ > git clone --depth 1 \
+ https://github.com/hardkernel/u-boot.git -b odroidg12-v2015.01 \
+ $DIR
+
+ > cd odroidc4-u-boot
+ > make odroidc4_defconfig
+ > make
+ > export UBOOTDIR=$PWD
+
+ Go back to mainline U-Boot source tree then :
+ > mkdir fip
+
+ > cp $UBOOTDIR/build/scp_task/bl301.bin fip/
+ > cp $UBOOTDIR/build/board/hardkernel/odroidc4/firmware/acs.bin fip/
+ > cp $UBOOTDIR/fip/g12a/bl2.bin fip/
+ > cp $UBOOTDIR/fip/g12a/bl30.bin fip/
+ > cp $UBOOTDIR/fip/g12a/bl31.img fip/
+ > cp $UBOOTDIR/fip/g12a/ddr3_1d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/ddr4_1d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/ddr4_2d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/diag_lpddr4.fw fip/
+ > cp $UBOOTDIR/fip/g12a/lpddr3_1d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/lpddr4_1d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/lpddr4_2d.fw fip/
+ > cp $UBOOTDIR/fip/g12a/piei.fw fip/
+ > cp $UBOOTDIR/fip/g12a/aml_ddr.fw fip/
+ > cp u-boot.bin fip/bl33.bin
+
+ > sh fip/blx_fix.sh \
+ fip/bl30.bin \
+ fip/zero_tmp \
+ fip/bl30_zero.bin \
+ fip/bl301.bin \
+ fip/bl301_zero.bin \
+ fip/bl30_new.bin \
+ bl30
+
+ > sh fip/blx_fix.sh \
+ fip/bl2.bin \
+ fip/zero_tmp \
+ fip/bl2_zero.bin \
+ fip/acs.bin \
+ fip/bl21_zero.bin \
+ fip/bl2_new.bin \
+ bl2
+
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bl30sig --input fip/bl30_new.bin \
+ --output fip/bl30_new.bin.g12a.enc \
+ --level v3
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bl3sig --input fip/bl30_new.bin.g12a.enc \
+ --output fip/bl30_new.bin.enc \
+ --level v3 --type bl30
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bl3sig --input fip/bl31.img \
+ --output fip/bl31.img.enc \
+ --level v3 --type bl31
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bl3sig --input fip/bl33.bin --compress lz4 \
+ --output fip/bl33.bin.enc \
+ --level v3 --type bl33 --compress lz4
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bl2sig --input fip/bl2_new.bin \
+ --output fip/bl2.n.bin.sig
+ > $UBOOTDIR/fip/g12a/aml_encrypt_g12a --bootmk \
+ --output fip/u-boot.bin \
+ --bl2 fip/bl2.n.bin.sig \
+ --bl30 fip/bl30_new.bin.enc \
+ --bl31 fip/bl31.img.enc \
+ --bl33 fip/bl33.bin.enc \
+ --ddrfw1 fip/ddr4_1d.fw \
+ --ddrfw2 fip/ddr4_2d.fw \
+ --ddrfw3 fip/ddr3_1d.fw \
+ --ddrfw4 fip/piei.fw \
+ --ddrfw5 fip/lpddr4_1d.fw \
+ --ddrfw6 fip/lpddr4_2d.fw \
+ --ddrfw7 fip/diag_lpddr4.fw \
+ --ddrfw8 fip/aml_ddr.fw \
+ --ddrfw9 fip/lpddr3_1d.fw \
+ --level v3
+
+and then write the image to SD with:
+
+ > DEV=/dev/your_sd_device
+ > dd if=fip/u-boot.bin.sd.bin of=$DEV conv=fsync,notrunc bs=512 skip=1 seek=1
+ > dd if=fip/u-boot.bin.sd.bin of=$DEV conv=fsync,notrunc bs=1 count=444
diff --git a/configs/odroid-c4_defconfig b/configs/odroid-c4_defconfig
new file mode 100644
index 00000000..ab7d588e
--- /dev/null
+++ b/configs/odroid-c4_defconfig
@@ -0,0 +1,62 @@
+CONFIG_ARM=y
+CONFIG_SYS_BOARD="w400"
+CONFIG_ARCH_MESON=y
+CONFIG_SYS_TEXT_BASE=0x01000000
+CONFIG_ENV_SIZE=0x2000
+CONFIG_DM_GPIO=y
+CONFIG_MESON_G12A=y
+CONFIG_NR_DRAM_BANKS=1
+CONFIG_DEBUG_UART_BASE=0xff803000
+CONFIG_DEBUG_UART_CLOCK=24000000
+CONFIG_IDENT_STRING=" odroid-c4"
+CONFIG_DEBUG_UART=y
+CONFIG_OF_BOARD_SETUP=y
+CONFIG_MISC_INIT_R=y
+# CONFIG_DISPLAY_CPUINFO is not set
+# CONFIG_CMD_BDI is not set
+# CONFIG_CMD_IMI is not set
+CONFIG_CMD_GPIO=y
+# CONFIG_CMD_LOADS is not set
+CONFIG_CMD_MMC=y
+CONFIG_CMD_USB=y
+CONFIG_CMD_USB_MASS_STORAGE=y
+# CONFIG_CMD_SETEXPR is not set
+CONFIG_CMD_REGULATOR=y
+CONFIG_OF_CONTROL=y
+CONFIG_DEFAULT_DEVICE_TREE="meson-sm1-odroid-c4"
+CONFIG_SYS_RELOC_GD_ENV_ADDR=y
+CONFIG_NET_RANDOM_ETHADDR=y
+CONFIG_DM_MMC=y
+CONFIG_MMC_MESON_GX=y
+CONFIG_PHY_REALTEK=y
+CONFIG_DM_ETH=y
+CONFIG_ETH_DESIGNWARE=y
+CONFIG_MESON_G12A_USB_PHY=y
+CONFIG_PINCTRL=y
+CONFIG_PINCTRL_MESON_G12A=y
+CONFIG_POWER_DOMAIN=y
+CONFIG_MESON_EE_POWER_DOMAIN=y
+CONFIG_DM_REGULATOR=y
+CONFIG_DM_REGULATOR_FIXED=y
+CONFIG_DM_RESET=y
+CONFIG_DEBUG_UART_MESON=y
+CONFIG_DEBUG_UART_ANNOUNCE=y
+CONFIG_DEBUG_UART_SKIP_INIT=y
+CONFIG_MESON_SERIAL=y
+CONFIG_USB=y
+CONFIG_DM_USB=y
+CONFIG_USB_XHCI_HCD=y
+CONFIG_USB_XHCI_DWC3=y
+CONFIG_USB_DWC3=y
+# CONFIG_USB_DWC3_GADGET is not set
+CONFIG_USB_DWC3_MESON_G12A=y
+CONFIG_USB_GADGET=y
+CONFIG_USB_GADGET_VENDOR_NUM=0x1b8e
+CONFIG_USB_GADGET_PRODUCT_NUM=0xfada
+CONFIG_USB_GADGET_DWC2_OTG=y
+CONFIG_USB_GADGET_DWC2_OTG_PHY_BUS_WIDTH_8=y
+CONFIG_USB_GADGET_DOWNLOAD=y
+CONFIG_DM_VIDEO=y
+CONFIG_VIDEO_MESON=y
+CONFIG_VIDEO_DT_SIMPLEFB=y
+CONFIG_OF_LIBFDT_OVERLAY=y
--
2.22.0
| {
"pile_set_name": "Github"
} |
<!-- Device row template: the [[...]] placeholders are presumably substituted
     by the page generator (TODO confirm).  Clicking the row calls
     selectDevice() with the device identifier. -->
<ul class="device-info" onclick="selectDevice('[[DEVICE_IDENTIFIER]]', this);">
  <li>[[DEVICE_RESULT]]<h3 class="device-name">[[DEVICE_NAME]]</h3></li>
  <li class="device-os">iOS [[DEVICE_OS]]</li>
  <li class="device-model">Model: [[DEVICE_MODEL]]</li>
  <li class="device-identifier">Identifier: [[DEVICE_IDENTIFIER]]</li>
</ul>
| {
"pile_set_name": "Github"
} |
#!/usr/local/bin/perl
# mkdir-p.pl
# On some systems, the -p option to mkdir (= also create any missing parent
# directories) is not available.

# Normalise each argument to forward slashes (so backslash-separated paths
# also work) and create it with all missing parents.
my $arg;
foreach $arg (@ARGV) {
    $arg =~ tr|\\|/|;
    &do_mkdir_p($arg);
}

# Create directory $dir and any missing parent directories, like `mkdir -p`.
# Dies with a message if a directory cannot be created.
sub do_mkdir_p {
    local($dir) = @_;
    $dir =~ s|/*\Z(?!\n)||s;    # strip trailing slashes (\Z(?!\n) == absolute end of string)
    if (-d $dir) {
        return;                 # already exists -- nothing to do
    }
    if ($dir =~ m|[^/]/|s) {    # path has a parent component -> create it first
        local($parent) = $dir;
        $parent =~ s|[^/]*\Z(?!\n)||s;    # drop the last path component
        do_mkdir_p($parent);
    }
    mkdir($dir, 0777) || die "Cannot create directory $dir: $!\n";
    print "created directory `$dir'\n";
}
| {
"pile_set_name": "Github"
} |
# CodeQL static-analysis workflow for GitHub code scanning.
name: "CodeQL"

on:
  push:
    branches: [master, dev]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [master]
  schedule:
    - cron: '0 3 * * 4'  # weekly scan: 03:00 UTC every Thursday

jobs:
  analyse:
    name: Analyse
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can checkout the head.
          fetch-depth: 2

      # If this run was triggered by a pull request event, then checkout
      # the head of the pull request instead of the merge commit.
      - run: git checkout HEAD^2
        if: ${{ github.event_name == 'pull_request' }}

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language
      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
| {
"pile_set_name": "Github"
} |
/*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
* All rights reserved.
*/
/** \file
* \ingroup MEM
*
* \brief Read \ref MEMPage
*
* \page MEMPage Guarded memory(de)allocation
*
* \section aboutmem c-style guarded memory allocation
*
* \subsection memabout About the MEM module
*
* MEM provides guarded malloc/calloc calls. All memory is enclosed by
* pads, to detect out-of-bound writes. All blocks are placed in a
* linked list, so they remain reachable at all times. There is no
* back-up in case the linked-list related data is lost.
*
* \subsection memissues Known issues with MEM
*
* There are currently no known issues with MEM. Note that there is a
* second intern/ module with MEM_ prefix, for use in c++.
*
* \subsection memdependencies Dependencies
* - stdlib
* - stdio
*
* \subsection memdocs API Documentation
* See \ref MEM_guardedalloc.h
*/
#ifndef __MEM_GUARDEDALLOC_H__
#define __MEM_GUARDEDALLOC_H__
#include <stdio.h> /* needed for FILE* */
/* Needed for uintptr_t and attributes. This is an exception; don't use BLI anywhere else in MEM_*. */
#include "../../source/blender/blenlib/BLI_compiler_attrs.h"
#include "../../source/blender/blenlib/BLI_sys_types.h"
#ifdef __cplusplus
extern "C" {
#endif
/** Returns the length of the allocated memory segment pointed at
* by vmemh. If the pointer was not previously allocated by this
* module, the result is undefined.*/
extern size_t (*MEM_allocN_len)(const void *vmemh) ATTR_WARN_UNUSED_RESULT;
/**
* Release memory previously allocated by this module.
*/
extern void (*MEM_freeN)(void *vmemh);
#if 0 /* UNUSED */
/**
* Return zero if memory is not in allocated list
*/
extern short (*MEM_testN)(void *vmemh);
#endif
/**
* Duplicates a block of memory, and returns a pointer to the
* newly allocated block. */
extern void *(*MEM_dupallocN)(const void *vmemh) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT;
/**
* Reallocates a block of memory, and returns pointer to the newly
* allocated block, the old one is freed. this is not as optimized
* as a system realloc but just makes a new allocation and copies
* over from existing memory. */
extern void *(*MEM_reallocN_id)(void *vmemh,
size_t len,
const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(2);
/**
* A variant of realloc which zeros new bytes
*/
extern void *(*MEM_recallocN_id)(void *vmemh,
size_t len,
const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(2);
#define MEM_reallocN(vmemh, len) MEM_reallocN_id(vmemh, len, __func__)
#define MEM_recallocN(vmemh, len) MEM_recallocN_id(vmemh, len, __func__)
/**
* Allocate a block of memory of size len, with tag name str. The
* memory is cleared. The name must be static, because only a
* pointer to it is stored ! */
extern void *(*MEM_callocN)(size_t len, const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(1) ATTR_NONNULL(2);
/**
* Allocate a block of memory of size (len * size), with tag name
* str, aborting in case of integer overflows to prevent vulnerabilities.
* The memory is cleared. The name must be static, because only a
* pointer to it is stored ! */
extern void *(*MEM_calloc_arrayN)(size_t len,
size_t size,
const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(1, 2) ATTR_NONNULL(3);
/**
* Allocate a block of memory of size len, with tag name str. The
* name must be a static, because only a pointer to it is stored !
* */
extern void *(*MEM_mallocN)(size_t len, const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(1) ATTR_NONNULL(2);
/**
* Allocate a block of memory of size (len * size), with tag name str,
* aborting in case of integer overflow to prevent vulnerabilities. The
* name must be a static, because only a pointer to it is stored !
* */
extern void *(*MEM_malloc_arrayN)(size_t len,
size_t size,
const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(1, 2) ATTR_NONNULL(3);
/**
* Allocate an aligned block of memory of size len, with tag name str. The
* name must be a static, because only a pointer to it is stored !
* */
extern void *(*MEM_mallocN_aligned)(size_t len,
size_t alignment,
const char *str) /* ATTR_MALLOC */ ATTR_WARN_UNUSED_RESULT
ATTR_ALLOC_SIZE(1) ATTR_NONNULL(3);
/** Print a list of the names and sizes of all allocated memory
* blocks. as a python dict for easy investigation */
extern void (*MEM_printmemlist_pydict)(void);
/** Print a list of the names and sizes of all allocated memory
* blocks. */
extern void (*MEM_printmemlist)(void);
/** calls the function on all allocated memory blocks. */
extern void (*MEM_callbackmemlist)(void (*func)(void *));
/** Print statistics about memory usage */
extern void (*MEM_printmemlist_stats)(void);
/** Set the callback function for error output. */
extern void (*MEM_set_error_callback)(void (*func)(const char *));
/**
* Are the start/end block markers still correct ?
*
* \retval true for correct memory, false for corrupted memory. */
extern bool (*MEM_consistency_check)(void);
/** Attempt to enforce OSX (or other OS's) to have malloc and stack nonzero */
extern void (*MEM_set_memory_debug)(void);
/** Memory usage stats. */
extern size_t (*MEM_get_memory_in_use)(void);
/** Get amount of memory blocks in use. */
extern unsigned int (*MEM_get_memory_blocks_in_use)(void);
/** Reset the peak memory statistic to zero. */
extern void (*MEM_reset_peak_memory)(void);
/** Get the peak memory usage in bytes, including mmap allocations. */
extern size_t (*MEM_get_peak_memory)(void) ATTR_WARN_UNUSED_RESULT;
#ifdef __GNUC__
# define MEM_SAFE_FREE(v) \
do { \
typeof(&(v)) _v = &(v); \
if (*_v) { \
/* Cast so we can free constant arrays. */ \
MEM_freeN((void *)*_v); \
*_v = NULL; \
} \
} while (0)
#else
# define MEM_SAFE_FREE(v) \
do { \
void **_v = (void **)&(v); \
if (*_v) { \
MEM_freeN(*_v); \
*_v = NULL; \
} \
} while (0)
#endif
/* overhead for lockfree allocator (use to avoid slop-space) */
#define MEM_SIZE_OVERHEAD sizeof(size_t)
#define MEM_SIZE_OPTIMAL(size) ((size)-MEM_SIZE_OVERHEAD)
#ifndef NDEBUG
extern const char *(*MEM_name_ptr)(void *vmemh);
#endif
/** This should be called as early as possible in the program. When it has been called, information
* about memory leaks will be printed on exit. */
void MEM_init_memleak_detection(void);
/**
* Use this if we want to call #exit during argument parsing for example,
* without having to free all data.
*/
void MEM_use_memleak_detection(bool enabled);
/** When this has been called and memory leaks have been detected, the process will have an exit
* code that indicates failure. This can be used for when checking for memory leaks with automated
* tests. */
void MEM_enable_fail_on_memleak(void);
/* Switch allocator to slower but fully guarded mode. */
void MEM_use_guarded_allocator(void);
#ifdef __cplusplus
}
#endif /* __cplusplus */
#ifdef __cplusplus
/* alloc funcs for C++ only */
# define MEM_CXX_CLASS_ALLOC_FUNCS(_id) \
public: \
void *operator new(size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id); \
} \
void operator delete(void *mem) \
{ \
if (mem) { \
MEM_freeN(mem); \
} \
} \
void *operator new[](size_t num_bytes) \
{ \
return MEM_mallocN(num_bytes, _id "[]"); \
} \
void operator delete[](void *mem) \
{ \
if (mem) { \
MEM_freeN(mem); \
} \
} \
void *operator new(size_t /*count*/, void *ptr) \
{ \
return ptr; \
} \
/* This is the matching delete operator to the placement-new operator above. Both parameters \
* will have the same value. Without this, we get the warning C4291 on windows. */ \
void operator delete(void * /*ptr_to_free*/, void * /*ptr*/) \
{ \
}
/* Needed when type includes a namespace, then the namespace should not be
* specified after ~, so using a macro fails. */
/* Run T's destructor on `what` WITHOUT freeing its memory; the caller is
 * responsible for releasing the allocation (see OBJECT_GUARDED_DELETE below,
 * which pairs this with MEM_freeN). A template is used instead of a macro so
 * that namespaced types deduce correctly: `what->~T()` works where a
 * token-pasted `~Namespace::Type()` spelling would not. */
template<class T> inline void OBJECT_GUARDED_DESTRUCTOR(T *what)
{
  what->~T();
}
# if defined __GNUC__
# define OBJECT_GUARDED_NEW(type, args...) new (MEM_mallocN(sizeof(type), __func__)) type(args)
# else
# define OBJECT_GUARDED_NEW(type, ...) \
new (MEM_mallocN(sizeof(type), __FUNCTION__)) type(__VA_ARGS__)
# endif
# define OBJECT_GUARDED_DELETE(what, type) \
{ \
if (what) { \
OBJECT_GUARDED_DESTRUCTOR((type *)what); \
MEM_freeN(what); \
} \
} \
(void)0
# define OBJECT_GUARDED_SAFE_DELETE(what, type) \
{ \
if (what) { \
OBJECT_GUARDED_DESTRUCTOR((type *)what); \
MEM_freeN(what); \
what = NULL; \
} \
} \
(void)0
#endif /* __cplusplus */
#endif /* __MEM_GUARDEDALLOC_H__ */
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: b2173b9606b6741fda0af840ae51b353
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 2
mipmaps:
mipMapMode: 0
enableMipMap: 1
linearTexture: 0
correctGamma: 0
fadeOut: 0
borderMipMap: 0
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: .25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 0
seamlessCubemap: 0
textureFormat: -1
maxTextureSize: 1024
textureSettings:
filterMode: -1
aniso: 16
mipBias: -1
wrapMode: 1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 1
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: .5, y: .5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaIsTransparency: 1
textureType: 8
buildTargetSettings: []
spriteSheet:
sprites: []
spritePackingTag:
userData:
| {
"pile_set_name": "Github"
} |
THE CRAPL v0 BETA 1
0. Information about the CRAPL
If you have questions or concerns about the CRAPL, or you need more
information about this license, please contact:
Matthew Might
http://matt.might.net/
I. Preamble
Science thrives on openness.
In modern science, it is often infeasible to replicate claims without
access to the software underlying those claims.
Let's all be honest: when scientists write code, aesthetics and
software engineering principles take a back seat to having running,
working code before a deadline.
So, let's release the ugly. And, let's be proud of that.
II. Definitions
1. "This License" refers to version 0 beta 1 of the Community
Research and Academic Programming License (the CRAPL).
2. "The Program" refers to the medley of source code, shell scripts,
executables, objects, libraries and build files supplied to You,
or these files as modified by You.
[Any appearance of design in the Program is purely coincidental and
should not in any way be mistaken for evidence of thoughtful
software construction.]
3. "You" refers to the person or persons brave and daft enough to use
the Program.
4. "The Documentation" refers to the Program.
5. "The Author" probably refers to the caffeine-addled graduate
student that got the Program to work moments before a submission
deadline.
III. Terms
1. By reading this sentence, You have agreed to the terms and
conditions of this License.
2. If the Program shows any evidence of having been properly tested
or verified, You will disregard this evidence.
3. You agree to hold the Author free from shame, embarrassment or
ridicule for any hacks, kludges or leaps of faith found within the
Program.
4. You recognize that any request for support for the Program will be
discarded with extreme prejudice.
5. The Author reserves all rights to the Program, except for any
rights granted under any additional licenses attached to the
Program.
IV. Permissions
1. You are permitted to use the Program to validate published
scientific claims.
2. You are permitted to use the Program to validate scientific claims
submitted for peer review, under the condition that You keep
modifications to the Program confidential until those claims have
been published.
3. You are permitted to use and/or modify the Program for the
validation of novel scientific claims if You make a good-faith
attempt to notify the Author of Your work and Your claims prior to
submission for publication.
4. If You publicly release any claims or data that were supported or
generated by the Program or a modification thereof, in whole or in
part, You will release any inputs supplied to the Program and any
   modifications You made to the Program. This License will be in
effect for the modified program.
V. Disclaimer of Warranty
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
VI. Limitation of Liability
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 1e6cce25c02c8454c9036b887d7f3056
timeCreated: 1435485319
licenseType: Free
NativeFormatImporter:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
"""This file makes every line between a --@generated and --@usercode comment
read only and the background colored in grey.
This is an example of using overlays"""
import GPS
import os.path
def on_open_generated_file(name, file):
    """Hook callback: grey out and lock the generated regions of *file*.

    A file whose first line is exactly ``--@generated`` is treated as
    generated code: every line is made read-only with a grey background,
    except regions between ``@usercode:`` and ``@end`` markers, which
    remain editable.

    :param name: hook name (unused, supplied by GPS).
    :param file: the GPS.File that was just opened/edited.
    """
    ebuf = GPS.EditorBuffer.get()

    def apply_overlay(overlay, from_line, to_line, line_len):
        # The overlay spans column 1 of from_line through column
        # line_len of to_line.
        start_loc = ebuf.at(from_line, 1)
        end_loc = ebuf.at(to_line, line_len)
        ebuf.apply_overlay(overlay, start_loc, end_loc)

    f_path = os.path.abspath(file.name())
    # Context manager guarantees the handle is closed even if readlines fails.
    with open(f_path) as f:
        text = f.readlines()
    if not text or text[0].strip() != "--@generated":
        # Empty file, or not marked as generated: nothing to protect.
        # (The original indexed text[0] unconditionally and raised
        # IndexError on an empty file.)
        return
    grey = ebuf.create_overlay(f_path)
    grey.set_property("background", "grey")
    grey.set_property("editable", False)
    start = 0
    start_found = False
    for line_num, line in enumerate(text):
        if not start_found and line.find('@usercode:') > -1:
            # Close the current generated region at the marker line.
            apply_overlay(grey, start, line_num + 1, len(line))
            start_found = True
        elif start_found and line.find('@end') > -1:
            # User region ends; the next generated region starts after it.
            start = line_num + 1
            start_found = False
    # Protect the trailing generated region down to end-of-file.
    apply_overlay(grey, start, len(text), len(text[-1]))


GPS.Hook("file_edited").add(on_open_generated_file)
| {
"pile_set_name": "Github"
} |
p + p {
text-indent: 2em;
margin-top: -1.5em;
/* Don't want this in forms. */ }
form p + p {
text-indent: 0; }
p.incr,
.incr p {
font-size: 0.833em;
line-height: 1.44em;
margin-bottom: 1.5em; }
.caps {
font-variant: small-caps;
letter-spacing: 1px;
text-transform: lowercase;
font-size: 1.2em;
line-height: 1%;
font-weight: bold;
padding: 0 2px; }
.dquo {
margin-left: -0.5em; }
.alt {
color: #666666;
font-family: "Warnock Pro", "Goudy Old Style", "Palatino", "Book Antiqua", Georgia, serif;
font-style: italic;
font-weight: normal; }
| {
"pile_set_name": "Github"
} |
/*
* Linux defines for values that are not yet included in common C libraries
* Copyright (c) 2014, Jouni Malinen <[email protected]>
*
* This software may be distributed under the terms of the BSD license.
* See README for more details.
*/
#ifndef LINUX_DEFINES_H
#define LINUX_DEFINES_H
#ifndef SO_WIFI_STATUS
# if defined(__sparc__)
# define SO_WIFI_STATUS 0x0025
# elif defined(__parisc__)
# define SO_WIFI_STATUS 0x4022
# else
# define SO_WIFI_STATUS 41
# endif
# define SCM_WIFI_STATUS SO_WIFI_STATUS
#endif
#ifndef SO_EE_ORIGIN_TXSTATUS
#define SO_EE_ORIGIN_TXSTATUS 4
#endif
#ifndef PACKET_TX_TIMESTAMP
#define PACKET_TX_TIMESTAMP 16
#endif
#ifndef IFF_LOWER_UP
#define IFF_LOWER_UP 0x10000 /* driver signals L1 up */
#endif
#ifndef IFF_DORMANT
#define IFF_DORMANT 0x20000 /* driver signals dormant */
#endif
#ifndef IF_OPER_DORMANT
#define IF_OPER_DORMANT 5
#endif
#ifndef IF_OPER_UP
#define IF_OPER_UP 6
#endif
#endif /* LINUX_DEFINES_H */
| {
"pile_set_name": "Github"
} |
// Copyright 2019 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build !windows
package prometheus
import (
"github.com/prometheus/procfs"
)
// canCollectProcess reports whether a default procfs mount is available,
// i.e. whether per-process metrics can be gathered on this system.
func canCollectProcess() bool {
	if _, err := procfs.NewDefaultFS(); err != nil {
		return false
	}
	return true
}
// processCollect gathers per-process metrics (CPU, memory, FDs, limits) from
// procfs for the configured PID and sends them on ch. Any lookup failure is
// reported via c.reportError instead of aborting the whole collection.
func (c *processCollector) processCollect(ch chan<- Metric) {
	pid, err := c.pidFn()
	if err != nil {
		c.reportError(ch, nil, err)
		return
	}

	proc, err := procfs.NewProc(pid)
	if err != nil {
		c.reportError(ch, nil, err)
		return
	}

	stat, err := proc.Stat()
	if err != nil {
		c.reportError(ch, nil, err)
	} else {
		ch <- MustNewConstMetric(c.cpuTotal, CounterValue, stat.CPUTime())
		ch <- MustNewConstMetric(c.vsize, GaugeValue, float64(stat.VirtualMemory()))
		ch <- MustNewConstMetric(c.rss, GaugeValue, float64(stat.ResidentMemory()))
		startTime, err := stat.StartTime()
		if err != nil {
			c.reportError(ch, c.startTime, err)
		} else {
			ch <- MustNewConstMetric(c.startTime, GaugeValue, startTime)
		}
	}

	fds, err := proc.FileDescriptorsLen()
	if err != nil {
		c.reportError(ch, c.openFDs, err)
	} else {
		ch <- MustNewConstMetric(c.openFDs, GaugeValue, float64(fds))
	}

	limits, err := proc.Limits()
	if err != nil {
		c.reportError(ch, nil, err)
	} else {
		ch <- MustNewConstMetric(c.maxFDs, GaugeValue, float64(limits.OpenFiles))
		ch <- MustNewConstMetric(c.maxVsize, GaugeValue, float64(limits.AddressSpace))
	}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package time_test
import (
"testing"
. "time"
)
// TestAndroidTzdata forces the Android tzdata code path and verifies that a
// well-known zone can still be loaded through it.
func TestAndroidTzdata(t *testing.T) {
	ForceAndroidTzdataForTest(true)
	defer ForceAndroidTzdataForTest(false)
	_, err := LoadLocation("America/Los_Angeles")
	if err != nil {
		t.Error(err)
	}
}
| {
"pile_set_name": "Github"
} |
---
# Only the main Sass file needs front matter (the dashes are enough)
---
@import 'tale';
summary {
color: $blue;
padding-left: 6px;
padding-right: 6px;
border: 1px solid #ee0;
display: inline-block;
border-radius: 4px;
}
details[open] summary {
margin-bottom: 6px;
}
details[open] {
margin-bottom: 6px;
}
a.colab-root {
display: inline-block;
background: rgba(255, 255, 120, 0.75);
padding: 4px 8px;
border-radius: 4px;
font-size: 11px!important;
text-decoration: none;
color: $blue;
border: none;
font-weight: 300;
border: solid 1px rgba(0, 0, 0, 0.08);
border-bottom-color: rgba(0, 0, 0, 0.15);
text-transform: uppercase;
line-height: 16px;
&:hover {
color: #666;
background: rgba(255, 255, 0, 1);
border-color: rgba(0, 0, 0, 0.2);
text-decoration: none;
}
span {
background-image: url(/assets/colab.svg);
background-repeat: no-repeat;
background-size: 20px;
background-position-y: 2px;
display: inline-block;
padding-left: 24px;
border-radius: 4px;
text-decoration: none;
}
}
.request-feedback, .welcome {
border: 1px solid #bbb;
background: #eee;
padding-left: 12px;
padding-right: 12px;
padding-top: 8px;
padding-bottom: 8px;
text-align: center;
line-height: 1.6;
margin-bottom: 35px;
border-radius: 4px;
.inner {
margin-left: auto;
margin-right: auto;
}
}
.welcome {
margin-top: 25px;
margin-bottom: 25px;
.welcome-blurb {
text-align: left;
}
}
@media only screen and (min-width: 680px) {
.request-feedback, .welcome {
.inner {
width: 500px;
}
}
}
iframe.deps {
border: 0;
overflow: hidden;
margin-bottom: 10px;
}
div.deps-graph {
text-align: center;
font-size: 75%;
}
@media only screen and (max-width: 599px) {
div.deps-graph {
display: block;
margin-left: auto;
margin-right: auto;
margin-top: 10px;
margin-bottom: 20px;
}
}
@media only screen and (min-width: 600px) {
div.deps-graph {
float: right;
margin-left: 40px;
margin-top: 10px;
margin-bottom: 20px;
}
}
span.more-arrow {
color: $blue;
}
| {
"pile_set_name": "Github"
} |
"%@ of %@" = "%1$@ of %2$@";
"%@ %@ has been installed and will be ready to use next time %@ starts! Would you like to relaunch now?" = "%1$@ %2$@ הותקנה ותהיה מוכנה לפעולה בפעם הבאה שתריץ את %3$@ ! ברצונך לאתחל?";
"%@ %@ is currently the newest version available." = "%1$@ %2$@ היא הגרסה האחונה הזמינה.";
"%@ %@ is currently the newest version available.\n(You are currently running version %@.)" = "%1$@ %2$@ היא הגרסה האחונה הזמינה.\n(You are currently running version %3$@.)";
/* Description text for SUUpdateAlert when the update is downloadable. */
"%@ %@ is now available--you have %@. Would you like to download it now?" = "%1$@ %2$@ זמין כעת (לך יש %3$@). ברצונך להוריד כעת?";
/* Description text for SUUpdateAlert when the update informational with no download. */
"%@ %@ is now available--you have %@. Would you like to learn more about this update on the web?" = "%1$@ %2$@ is now available--you have %3$@. Would you like to learn more about this update on the web?";
"%@ does not have permission to write to the application's directory! Are you running off a disk image? If not, ask your system administrator for help." = "ל %@ אין הרשאה לכתוב לתקיית התוכניות! האם אתה פועל מ- disc image? אם לא, בקש עזרה ממנהל הרשת.";
"A new version of %@ has been installed!" = "גרסה חדשה של %@ הותקנה!";
"A new version of %@ is available!" = "גרסה חדשה של %@ זמינה!";
"Cancel Update" = "בטל עדכון";
"An error occurred during installation. Please try again later." = "שגיאה בהתקנה. אנא נסה שנית במועד מאוחר יותר.";
"An error occurred in retrieving update information. Please try again later." = "שגיאה בקבלת מידע על עדכונים. אנא נסה שנית במועד מאוחר יותר.";
"An error occurred while extracting the archive. Please try again later." = "שגיאה בפתיחת הקובת המקווץ. אנא נסה שנית במועד מאוחר יותר.";
"An error occurred while trying to download the file. Please try again later." = "שגיאה בהורדת הקובץ. אנא נסה שנית במועד מאוחר יותר.";
"An update is already in progress!" = "עדכון כבר מתבצע";
"Cancel" = "ביטול";
"Check for updates on startup?" = "לבדוק עדכונים באתחול?";
"Downloading update..." = "מוריד עדכון…";
"Extracting update..." = "פותח עדכון…";
"Install and Relaunch" = "התקן ואתחל";
"Installing update..." = "מתקין עידכון…";
"No" = "לא";
"OK" = "אישור";
"Ready to Install" = "מוכן להתקנה!";
"Update Error!" = "שגיאה בעדכון!";
"Updating %@" = "מעדכן %@";
"Would you like %@ to check for updates on startup? If not, you can initiate the check manually from the application menu." = "האם ברצונך ש%@ יבדוק עדכונים באתחול? אם לא, תוכל לבדוק ידנית מתפריט התכנה.";
"Yes" = "כן";
"You're up-to-date!" = "התכנה עדכנית!";
| {
"pile_set_name": "Github"
} |
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"fmt"
"log"
"reflect"
"strconv"
"strings"
"time"
"github.com/hashicorp/terraform-plugin-sdk/helper/customdiff"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"google.golang.org/api/googleapi"
)
// isDiskShrinkage reports whether the planned disk size is strictly smaller
// than the current one. Shrinking is not supported in-place, so a true result
// is used to force resource recreation.
func isDiskShrinkage(old, new, _ interface{}) bool {
	// Removing the size attribute entirely (either side nil) is allowed
	// and is not treated as shrinkage.
	if old == nil || new == nil {
		return false
	}
	oldSize, newSize := old.(int), new.(int)
	return newSize < oldSize
}
// diskImageDiffSuppress decides whether the difference between the image URL
// stored in state ("old", always a full self link from the API) and the
// user-supplied image reference ("new", in any of several accepted shorthand
// forms) is purely cosmetic and can be ignored.
//
// We cannot suppress the diff for the case when family name is not part of the image name since we can't
// make a network call in a DiffSuppressFunc.
func diskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool {
	// Understand that this function solves a messy problem ("how do we tell if the diff between two images
	// is 'ForceNew-worthy', without making a network call?") in the best way we can: through a series of special
	// cases and regexes. If you find yourself here because you are trying to add a new special case,
	// you are probably looking for the diskImageFamilyEquals function and its subfunctions.
	// In order to keep this maintainable, we need to ensure that the positive and negative examples
	// in resource_compute_disk_test.go are as complete as possible.

	// 'old' is read from the API.
	// It always has the format 'https://www.googleapis.com/compute/v1/projects/(%s)/global/images/(%s)'
	matches := resolveImageLink.FindStringSubmatch(old)
	if matches == nil {
		// Image read from the API doesn't have the expected format. In practice, it should never happen
		return false
	}
	oldProject := matches[1]
	oldName := matches[2]

	// Each branch below matches one accepted spelling of "new", extracts its
	// project/family/image components, and compares them against the API value.
	// The resolveImage* patterns are package-level compiled regexes declared
	// elsewhere in this package.

	// Partial or full self link family
	if resolveImageProjectFamily.MatchString(new) {
		// Value matches pattern "projects/{project}/global/images/family/{family-name}$"
		matches := resolveImageProjectFamily.FindStringSubmatch(new)
		newProject := matches[1]
		newFamilyName := matches[2]

		return diskImageProjectNameEquals(oldProject, newProject) && diskImageFamilyEquals(oldName, newFamilyName)
	}

	// Partial or full self link image
	if resolveImageProjectImage.MatchString(new) {
		// Value matches pattern "projects/{project}/global/images/{image-name}$"
		matches := resolveImageProjectImage.FindStringSubmatch(new)
		newProject := matches[1]
		newImageName := matches[2]

		return diskImageProjectNameEquals(oldProject, newProject) && diskImageEquals(oldName, newImageName)
	}

	// Partial link without project family
	if resolveImageGlobalFamily.MatchString(new) {
		// Value is "global/images/family/{family-name}"
		matches := resolveImageGlobalFamily.FindStringSubmatch(new)
		familyName := matches[1]

		return diskImageFamilyEquals(oldName, familyName)
	}

	// Partial link without project image
	if resolveImageGlobalImage.MatchString(new) {
		// Value is "global/images/{image-name}"
		matches := resolveImageGlobalImage.FindStringSubmatch(new)
		imageName := matches[1]

		return diskImageEquals(oldName, imageName)
	}

	// Family shorthand
	if resolveImageFamilyFamily.MatchString(new) {
		// Value is "family/{family-name}"
		matches := resolveImageFamilyFamily.FindStringSubmatch(new)
		familyName := matches[1]

		return diskImageFamilyEquals(oldName, familyName)
	}

	// Shorthand for image or family
	if resolveImageProjectImageShorthand.MatchString(new) {
		// Value is "{project}/{image-name}" or "{project}/{family-name}"
		matches := resolveImageProjectImageShorthand.FindStringSubmatch(new)
		newProject := matches[1]
		newName := matches[2]

		return diskImageProjectNameEquals(oldProject, newProject) &&
			(diskImageEquals(oldName, newName) || diskImageFamilyEquals(oldName, newName))
	}

	// Image or family only
	if diskImageEquals(oldName, new) || diskImageFamilyEquals(oldName, new) {
		// Value is "{image-name}" or "{family-name}"
		return true
	}

	// No accepted form matched: the diff is real and must not be suppressed.
	return false
}
// diskImageProjectNameEquals compares two image project names, expanding a
// short alias in project2 (e.g. "centos" -> "centos-cloud") via the
// package-level imageMap before comparing.
func diskImageProjectNameEquals(project1, project2 string) bool {
	if fullName, ok := imageMap[project2]; ok {
		return project1 == fullName
	}
	return project1 == project2
}
// diskImageEquals reports whether two image names are exactly equal.
// Kept as a named helper (rather than inline ==) for symmetry with
// diskImageFamilyEquals at the call sites.
func diskImageEquals(oldImageName, newImageName string) bool {
	return oldImageName == newImageName
}
// diskImageFamilyEquals reports whether imageName plausibly belongs to the
// image family familyName, using a substring fast path plus the
// vendor-specific naming-scheme checks below.
func diskImageFamilyEquals(imageName, familyName string) bool {
	// Fast path: the family name is often embedded verbatim in the image
	// name, e.g. image "debian-9-drawfork-v20180109", family "debian-9".
	return strings.Contains(imageName, familyName) ||
		suppressCanonicalFamilyDiff(imageName, familyName) ||
		suppressWindowsSqlFamilyDiff(imageName, familyName) ||
		suppressWindowsFamilyDiff(imageName, familyName)
}
// suppressCanonicalFamilyDiff matches Canonical's Ubuntu LTS naming scheme.
// e.g. image: ubuntu-1404-trusty-v20180122, family: ubuntu-1404-lts
func suppressCanonicalFamilyDiff(imageName, familyName string) bool {
	m := canonicalUbuntuLtsImage.FindStringSubmatch(imageName)
	if len(m) != 3 {
		return false
	}
	return familyName == fmt.Sprintf("ubuntu-%s%s-lts", m[1], m[2])
}
// suppressWindowsSqlFamilyDiff matches the SQL-Server-on-Windows naming scheme.
// e.g. image: sql-2017-standard-windows-2016-dc-v20180109, family: sql-std-2017-win-2016
// e.g. image: sql-2017-express-windows-2012-r2-dc-v20180109, family: sql-exp-2017-win-2012-r2
func suppressWindowsSqlFamilyDiff(imageName, familyName string) bool {
	parts := windowsSqlImage.FindStringSubmatch(imageName)
	if len(parts) != 5 {
		return false
	}
	sqlVersion, edition, windowsVersion, revision := parts[1], parts[2], parts[3], parts[4]

	// Family names abbreviate the edition; unknown editions pass through
	// unchanged so that e.g. "web" stays "web".
	switch edition {
	case "enterprise":
		edition = "ent"
	case "standard":
		edition = "std"
	case "express":
		edition = "exp"
	}

	expected := fmt.Sprintf("sql-%s-%s-win-%s", edition, sqlVersion, windowsVersion)
	if revision != "" {
		// Windows revision, e.g. "-r2" in "win-2012-r2".
		expected += fmt.Sprintf("-r%s", revision)
	}
	return expected == familyName
}
// suppressWindowsFamilyDiff matches the plain Windows Server naming scheme by
// normalizing both sides: the family gains the "server-" infix and the image
// drops its "-dc-" (datacenter) marker before a substring comparison.
// e.g. image: windows-server-1709-dc-core-v20180109, family: windows-1709-core
// e.g. image: windows-server-1709-dc-core-for-containers-v20180109, family: windows-1709-core-for-containers
func suppressWindowsFamilyDiff(imageName, familyName string) bool {
	family := strings.Replace(familyName, "windows-", "windows-server-", 1)
	image := strings.Replace(imageName, "-dc-", "-", 1)
	return strings.Contains(image, family)
}
// resourceComputeDisk defines the google_compute_disk Terraform resource:
// its CRUD entry points, import support, operation timeouts, and schema.
func resourceComputeDisk() *schema.Resource {
	return &schema.Resource{
		Create: resourceComputeDiskCreate,
		Read:   resourceComputeDiskRead,
		Update: resourceComputeDiskUpdate,
		Delete: resourceComputeDiskDelete,
		Importer: &schema.ResourceImporter{
			State: resourceComputeDiskImport,
		},
		Timeouts: &schema.ResourceTimeout{
			Create: schema.DefaultTimeout(5 * time.Minute),
			Update: schema.DefaultTimeout(4 * time.Minute),
			Delete: schema.DefaultTimeout(4 * time.Minute),
		},
		// Disks can be grown in place, but the API cannot shrink them:
		// a size decrease forces the disk to be recreated.
		CustomizeDiff: customdiff.All(
			customdiff.ForceNewIfChange("size", isDiskShrinkage)),
		Schema: map[string]*schema.Schema{
			"name": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
				Description: `Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression '[a-z]([-a-z0-9]*[a-z0-9])?' which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.`,
			},
			"description": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
				Description: `An optional description of this resource. Provide this property when
you create the resource.`,
			},
			"disk_encryption_key": {
				Type:     schema.TypeList,
				Optional: true,
				ForceNew: true,
				Description: `Encrypts the disk using a customer-supplied encryption key.
After you encrypt a disk with a customer-supplied key, you must
provide the same key if you use the disk later (e.g. to create a disk
snapshot or an image, or to attach the disk to a virtual machine).
Customer-supplied encryption keys do not protect access to metadata of
the disk.
If you do not provide an encryption key when creating the disk, then
the disk will be encrypted using an automatically generated key and
you do not need to provide a key to use the disk later.`,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"kms_key_self_link": {
							Type:             schema.TypeString,
							Optional:         true,
							ForceNew:         true,
							DiffSuppressFunc: compareSelfLinkRelativePaths,
							Description: `The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
in the cloud console. Your project's Compute Engine System service account
('service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com') must have
'roles/cloudkms.cryptoKeyEncrypterDecrypter' to use this feature.
See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys`,
						},
						"raw_key": {
							Type:     schema.TypeString,
							Optional: true,
							ForceNew: true,
							Description: `Specifies a 256-bit customer-supplied encryption key, encoded in
RFC 4648 base64 to either encrypt or decrypt this resource.`,
							Sensitive: true,
						},
						"sha256": {
							Type:     schema.TypeString,
							Computed: true,
							Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.`,
						},
					},
				},
			},
			"image": {
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
				// Image family references resolve to image names server-side;
				// the suppress func keeps plans clean when they match.
				DiffSuppressFunc: diskImageDiffSuppress,
				Description: `The image from which to initialize this disk. This can be
one of: the image's 'self_link', 'projects/{project}/global/images/{image}',
'projects/{project}/global/images/family/{family}', 'global/images/{image}',
'global/images/family/{family}', 'family/{family}', '{project}/{family}',
'{project}/{image}', '{family}', or '{image}'. If referred by family, the
images names must include the family name. If they don't, use the
[google_compute_image data source](/docs/providers/google/d/datasource_compute_image.html).
For instance, the image 'centos-6-v20180104' includes its family name 'centos-6'.
These images can be referred by family name here.`,
			},
			"labels": {
				Type:        schema.TypeMap,
				Optional:    true,
				Description: `Labels to apply to this disk.  A list of key->value pairs.`,
				Elem:        &schema.Schema{Type: schema.TypeString},
			},
			"physical_block_size_bytes": {
				Type:     schema.TypeInt,
				Computed: true,
				Optional: true,
				ForceNew: true,
				Description: `Physical block size of the persistent disk, in bytes. If not present
in a request, a default value is used. Currently supported sizes
are 4096 and 16384, other sizes may be added in the future.
If an unsupported value is requested, the error message will list
the supported values for the caller's project.`,
			},
			"size": {
				Type:     schema.TypeInt,
				Computed: true,
				Optional: true,
				Description: `Size of the persistent disk, specified in GB. You can specify this
field when creating a persistent disk using the 'image' or
'snapshot' parameter, or specify it alone to create an empty
persistent disk.
If you specify this field along with 'image' or 'snapshot',
the value must not be less than the size of the image
or the size of the snapshot.`,
			},
			"snapshot": {
				Type:             schema.TypeString,
				Optional:         true,
				ForceNew:         true,
				DiffSuppressFunc: compareSelfLinkOrResourceName,
				Description: `The source snapshot used to create this disk. You can provide this as
a partial or full URL to the resource. If the snapshot is in another
project than this disk, you must supply a full URL. For example, the
following are valid values:
* 'https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot'
* 'projects/project/global/snapshots/snapshot'
* 'global/snapshots/snapshot'
* 'snapshot'`,
			},
			"source_image_encryption_key": {
				Type:     schema.TypeList,
				Optional: true,
				ForceNew: true,
				Description: `The customer-supplied encryption key of the source image. Required if
the source image is protected by a customer-supplied encryption key.`,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"kms_key_self_link": {
							Type:             schema.TypeString,
							Optional:         true,
							ForceNew:         true,
							DiffSuppressFunc: compareSelfLinkRelativePaths,
							Description: `The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
in the cloud console. Your project's Compute Engine System service account
('service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com') must have
'roles/cloudkms.cryptoKeyEncrypterDecrypter' to use this feature.
See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys`,
						},
						// NOTE(review): unlike disk_encryption_key.raw_key, this raw_key is
						// not marked Sensitive — confirm whether that is intentional.
						"raw_key": {
							Type:     schema.TypeString,
							Optional: true,
							ForceNew: true,
							Description: `Specifies a 256-bit customer-supplied encryption key, encoded in
RFC 4648 base64 to either encrypt or decrypt this resource.`,
						},
						"sha256": {
							Type:     schema.TypeString,
							Computed: true,
							Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.`,
						},
					},
				},
			},
			"source_snapshot_encryption_key": {
				Type:     schema.TypeList,
				Optional: true,
				ForceNew: true,
				Description: `The customer-supplied encryption key of the source snapshot. Required
if the source snapshot is protected by a customer-supplied encryption
key.`,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"kms_key_self_link": {
							Type:             schema.TypeString,
							Optional:         true,
							ForceNew:         true,
							DiffSuppressFunc: compareSelfLinkRelativePaths,
							Description: `The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
in the cloud console. Your project's Compute Engine System service account
('service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com') must have
'roles/cloudkms.cryptoKeyEncrypterDecrypter' to use this feature.
See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys`,
						},
						// NOTE(review): not marked Sensitive, unlike disk_encryption_key.raw_key
						// — confirm whether that is intentional.
						"raw_key": {
							Type:     schema.TypeString,
							Optional: true,
							ForceNew: true,
							Description: `Specifies a 256-bit customer-supplied encryption key, encoded in
RFC 4648 base64 to either encrypt or decrypt this resource.`,
						},
						"sha256": {
							Type:     schema.TypeString,
							Computed: true,
							Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.`,
						},
					},
				},
			},
			"type": {
				Type:             schema.TypeString,
				Optional:         true,
				ForceNew:         true,
				DiffSuppressFunc: compareSelfLinkOrResourceName,
				Description: `URL of the disk type resource describing which disk type to use to
create the disk. Provide this when creating the disk.`,
				Default: "pd-standard",
			},
			"zone": {
				Type:             schema.TypeString,
				Computed:         true,
				Optional:         true,
				ForceNew:         true,
				DiffSuppressFunc: compareSelfLinkOrResourceName,
				Description:      `A reference to the zone where the disk resides.`,
			},
			// The fields below are output-only and populated from the API on read.
			"creation_timestamp": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: `Creation timestamp in RFC3339 text format.`,
			},
			"label_fingerprint": {
				Type:     schema.TypeString,
				Computed: true,
				Description: `The fingerprint used for optimistic locking of this resource.  Used
internally during updates.`,
			},
			"last_attach_timestamp": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: `Last attach timestamp in RFC3339 text format.`,
			},
			"last_detach_timestamp": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: `Last detach timestamp in RFC3339 text format.`,
			},
			"source_image_id": {
				Type:     schema.TypeString,
				Computed: true,
				Description: `The ID value of the image used to create this disk. This value
identifies the exact image that was used to create this persistent
disk. For example, if you created the persistent disk from an image
that was later deleted and recreated under the same name, the source
image ID would identify the exact version of the image that was used.`,
			},
			"source_snapshot_id": {
				Type:     schema.TypeString,
				Computed: true,
				Description: `The unique ID of the snapshot used to create this disk. This value
identifies the exact snapshot that was used to create this persistent
disk. For example, if you created the persistent disk from a snapshot
that was later deleted and recreated under the same name, the source
snapshot ID would identify the exact version of the snapshot that was
used.`,
			},
			"users": {
				Type:     schema.TypeList,
				Computed: true,
				Description: `Links to the users of the disk (attached instances) in form:
project/zones/zone/instances/instance`,
				Elem: &schema.Schema{
					Type:             schema.TypeString,
					DiffSuppressFunc: compareSelfLinkOrResourceName,
				},
			},
			"project": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				ForceNew: true,
			},
			"self_link": {
				Type:     schema.TypeString,
				Computed: true,
			},
		},
	}
}
// resourceComputeDiskCreate expands the configured fields into a Disk API
// request body, POSTs it to the zonal disks collection, and blocks until the
// resulting operation completes before delegating to Read to populate state.
func resourceComputeDiskCreate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)
	// Build the API object. Each property is set only when it is present in
	// configuration (or its expanded form differs from the raw value) and the
	// expanded value is non-empty.
	obj := make(map[string]interface{})
	labelFingerprintProp, err := expandComputeDiskLabelFingerprint(d.Get("label_fingerprint"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("label_fingerprint"); !isEmptyValue(reflect.ValueOf(labelFingerprintProp)) && (ok || !reflect.DeepEqual(v, labelFingerprintProp)) {
		obj["labelFingerprint"] = labelFingerprintProp
	}
	descriptionProp, err := expandComputeDiskDescription(d.Get("description"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) {
		obj["description"] = descriptionProp
	}
	labelsProp, err := expandComputeDiskLabels(d.Get("labels"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(labelsProp)) && (ok || !reflect.DeepEqual(v, labelsProp)) {
		obj["labels"] = labelsProp
	}
	nameProp, err := expandComputeDiskName(d.Get("name"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("name"); !isEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) {
		obj["name"] = nameProp
	}
	// Note the schema field "size" maps to the API field "sizeGb".
	sizeGbProp, err := expandComputeDiskSize(d.Get("size"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("size"); !isEmptyValue(reflect.ValueOf(sizeGbProp)) && (ok || !reflect.DeepEqual(v, sizeGbProp)) {
		obj["sizeGb"] = sizeGbProp
	}
	physicalBlockSizeBytesProp, err := expandComputeDiskPhysicalBlockSizeBytes(d.Get("physical_block_size_bytes"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("physical_block_size_bytes"); !isEmptyValue(reflect.ValueOf(physicalBlockSizeBytesProp)) && (ok || !reflect.DeepEqual(v, physicalBlockSizeBytesProp)) {
		obj["physicalBlockSizeBytes"] = physicalBlockSizeBytesProp
	}
	typeProp, err := expandComputeDiskType(d.Get("type"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("type"); !isEmptyValue(reflect.ValueOf(typeProp)) && (ok || !reflect.DeepEqual(v, typeProp)) {
		obj["type"] = typeProp
	}
	// "image" maps to the API field "sourceImage".
	sourceImageProp, err := expandComputeDiskImage(d.Get("image"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("image"); !isEmptyValue(reflect.ValueOf(sourceImageProp)) && (ok || !reflect.DeepEqual(v, sourceImageProp)) {
		obj["sourceImage"] = sourceImageProp
	}
	zoneProp, err := expandComputeDiskZone(d.Get("zone"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("zone"); !isEmptyValue(reflect.ValueOf(zoneProp)) && (ok || !reflect.DeepEqual(v, zoneProp)) {
		obj["zone"] = zoneProp
	}
	sourceImageEncryptionKeyProp, err := expandComputeDiskSourceImageEncryptionKey(d.Get("source_image_encryption_key"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("source_image_encryption_key"); !isEmptyValue(reflect.ValueOf(sourceImageEncryptionKeyProp)) && (ok || !reflect.DeepEqual(v, sourceImageEncryptionKeyProp)) {
		obj["sourceImageEncryptionKey"] = sourceImageEncryptionKeyProp
	}
	diskEncryptionKeyProp, err := expandComputeDiskDiskEncryptionKey(d.Get("disk_encryption_key"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("disk_encryption_key"); !isEmptyValue(reflect.ValueOf(diskEncryptionKeyProp)) && (ok || !reflect.DeepEqual(v, diskEncryptionKeyProp)) {
		obj["diskEncryptionKey"] = diskEncryptionKeyProp
	}
	// "snapshot" maps to the API field "sourceSnapshot".
	sourceSnapshotProp, err := expandComputeDiskSnapshot(d.Get("snapshot"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("snapshot"); !isEmptyValue(reflect.ValueOf(sourceSnapshotProp)) && (ok || !reflect.DeepEqual(v, sourceSnapshotProp)) {
		obj["sourceSnapshot"] = sourceSnapshotProp
	}
	sourceSnapshotEncryptionKeyProp, err := expandComputeDiskSourceSnapshotEncryptionKey(d.Get("source_snapshot_encryption_key"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("source_snapshot_encryption_key"); !isEmptyValue(reflect.ValueOf(sourceSnapshotEncryptionKeyProp)) && (ok || !reflect.DeepEqual(v, sourceSnapshotEncryptionKeyProp)) {
		obj["sourceSnapshotEncryptionKey"] = sourceSnapshotEncryptionKeyProp
	}
	// Give the resource-specific encoder a chance to rewrite the request body.
	obj, err = resourceComputeDiskEncoder(d, meta, obj)
	if err != nil {
		return err
	}
	url, err := replaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/disks")
	if err != nil {
		return err
	}
	log.Printf("[DEBUG] Creating new Disk: %#v", obj)
	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	res, err := sendRequestWithTimeout(config, "POST", project, url, obj, d.Timeout(schema.TimeoutCreate))
	if err != nil {
		return fmt.Errorf("Error creating Disk: %s", err)
	}
	// Store the ID now, so a failure while waiting below still leaves the
	// resource trackable in state.
	id, err := replaceVars(d, config, "projects/{{project}}/zones/{{zone}}/disks/{{name}}")
	if err != nil {
		return fmt.Errorf("Error constructing id: %s", err)
	}
	d.SetId(id)
	err = computeOperationWaitTime(
		config, res, project, "Creating Disk",
		d.Timeout(schema.TimeoutCreate))
	if err != nil {
		// The resource didn't actually create
		d.SetId("")
		return fmt.Errorf("Error waiting to create Disk: %s", err)
	}
	log.Printf("[DEBUG] Finished creating Disk %q: %#v", d.Id(), res)
	return resourceComputeDiskRead(d, meta)
}
// resourceComputeDiskRead fetches the disk from the API, runs the
// resource-specific decoder, and flattens each API field into state.
// A 404 clears the resource from state via handleNotFoundError.
func resourceComputeDiskRead(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)
	url, err := replaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/disks/{{name}}")
	if err != nil {
		return err
	}
	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	res, err := sendRequest(config, "GET", project, url, nil)
	if err != nil {
		return handleNotFoundError(err, d, fmt.Sprintf("ComputeDisk %q", d.Id()))
	}
	res, err = resourceComputeDiskDecoder(d, meta, res)
	if err != nil {
		return err
	}
	if res == nil {
		// Decoding the object has resulted in it being gone. It may be marked deleted
		log.Printf("[DEBUG] Removing ComputeDisk because it no longer exists.")
		d.SetId("")
		return nil
	}
	if err := d.Set("project", project); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("label_fingerprint", flattenComputeDiskLabelFingerprint(res["labelFingerprint"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("creation_timestamp", flattenComputeDiskCreationTimestamp(res["creationTimestamp"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("description", flattenComputeDiskDescription(res["description"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("last_attach_timestamp", flattenComputeDiskLastAttachTimestamp(res["lastAttachTimestamp"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("last_detach_timestamp", flattenComputeDiskLastDetachTimestamp(res["lastDetachTimestamp"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("labels", flattenComputeDiskLabels(res["labels"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("name", flattenComputeDiskName(res["name"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("size", flattenComputeDiskSize(res["sizeGb"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("users", flattenComputeDiskUsers(res["users"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("physical_block_size_bytes", flattenComputeDiskPhysicalBlockSizeBytes(res["physicalBlockSizeBytes"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("type", flattenComputeDiskType(res["type"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("image", flattenComputeDiskImage(res["sourceImage"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("zone", flattenComputeDiskZone(res["zone"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("source_image_encryption_key", flattenComputeDiskSourceImageEncryptionKey(res["sourceImageEncryptionKey"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("source_image_id", flattenComputeDiskSourceImageId(res["sourceImageId"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("disk_encryption_key", flattenComputeDiskDiskEncryptionKey(res["diskEncryptionKey"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("snapshot", flattenComputeDiskSnapshot(res["sourceSnapshot"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("source_snapshot_encryption_key", flattenComputeDiskSourceSnapshotEncryptionKey(res["sourceSnapshotEncryptionKey"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	if err := d.Set("source_snapshot_id", flattenComputeDiskSourceSnapshotId(res["sourceSnapshotId"], d, config)); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	// NOTE(review): this assumes res["selfLink"] is always a non-nil string in
	// the API response; a missing selfLink would panic here — confirm upstream.
	if err := d.Set("self_link", ConvertSelfLinkToV1(res["selfLink"].(string))); err != nil {
		return fmt.Errorf("Error reading Disk: %s", err)
	}
	return nil
}
// resourceComputeDiskUpdate applies in-place updates. Only two update paths
// exist: labels (via the setLabels sub-endpoint, which also needs the
// label fingerprint for optimistic locking) and size (via resize). All other
// changed fields are ForceNew and never reach this function.
func resourceComputeDiskUpdate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)
	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	// Partial mode: only fields whose update call succeeded are persisted.
	d.Partial(true)
	if d.HasChange("label_fingerprint") || d.HasChange("labels") {
		obj := make(map[string]interface{})
		labelFingerprintProp, err := expandComputeDiskLabelFingerprint(d.Get("label_fingerprint"), d, config)
		if err != nil {
			return err
		} else if v, ok := d.GetOkExists("label_fingerprint"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelFingerprintProp)) {
			obj["labelFingerprint"] = labelFingerprintProp
		}
		labelsProp, err := expandComputeDiskLabels(d.Get("labels"), d, config)
		if err != nil {
			return err
		} else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) {
			obj["labels"] = labelsProp
		}
		url, err := replaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/disks/{{name}}/setLabels")
		if err != nil {
			return err
		}
		res, err := sendRequestWithTimeout(config, "POST", project, url, obj, d.Timeout(schema.TimeoutUpdate))
		if err != nil {
			return fmt.Errorf("Error updating Disk %q: %s", d.Id(), err)
		}
		err = computeOperationWaitTime(
			config, res, project, "Updating Disk",
			d.Timeout(schema.TimeoutUpdate))
		if err != nil {
			return err
		}
		d.SetPartial("label_fingerprint")
		d.SetPartial("labels")
	}
	if d.HasChange("size") {
		obj := make(map[string]interface{})
		sizeGbProp, err := expandComputeDiskSize(d.Get("size"), d, config)
		if err != nil {
			return err
		} else if v, ok := d.GetOkExists("size"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, sizeGbProp)) {
			obj["sizeGb"] = sizeGbProp
		}
		url, err := replaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/disks/{{name}}/resize")
		if err != nil {
			return err
		}
		res, err := sendRequestWithTimeout(config, "POST", project, url, obj, d.Timeout(schema.TimeoutUpdate))
		if err != nil {
			return fmt.Errorf("Error updating Disk %q: %s", d.Id(), err)
		}
		err = computeOperationWaitTime(
			config, res, project, "Updating Disk",
			d.Timeout(schema.TimeoutUpdate))
		if err != nil {
			return err
		}
		d.SetPartial("size")
	}
	d.Partial(false)
	return resourceComputeDiskRead(d, meta)
}
// resourceComputeDiskDelete deletes the disk. Because the API refuses to
// delete a disk that is attached, it first reads the disk's current users,
// detaches it from each still-existing instance, then issues the DELETE and
// waits for the operation.
func resourceComputeDiskDelete(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)
	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	url, err := replaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/disks/{{name}}")
	if err != nil {
		return err
	}
	// The DELETE request carries no body; obj intentionally stays nil.
	var obj map[string]interface{}
	readRes, err := sendRequest(config, "GET", project, url, nil)
	if err != nil {
		return handleNotFoundError(err, d, fmt.Sprintf("ComputeDisk %q", d.Id()))
	}
	// if disks are attached to instances, they must be detached before the disk can be deleted
	if v, ok := readRes["users"].([]interface{}); ok {
		type detachArgs struct{ project, zone, instance, deviceName string }
		var detachCalls []detachArgs
		for _, instance := range convertStringArr(v) {
			// NOTE(review): self is loop-invariant and could be hoisted above
			// the loop; left in place to keep behavior byte-identical.
			self := d.Get("self_link").(string)
			instanceProject, instanceZone, instanceName, err := GetLocationalResourcePropertiesFromSelfLinkString(instance)
			if err != nil {
				return err
			}
			i, err := config.clientCompute.Instances.Get(instanceProject, instanceZone, instanceName).Do()
			if err != nil {
				// An already-deleted instance cannot hold an attachment; skip it.
				if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 {
					log.Printf("[WARN] instance %q not found, not bothering to detach disks", instance)
					continue
				}
				return fmt.Errorf("Error retrieving instance %s: %s", instance, err.Error())
			}
			// Find which of the instance's attached disks is this one, to get
			// the device name required by the detachDisk call.
			for _, disk := range i.Disks {
				if compareSelfLinkOrResourceName("", disk.Source, self, nil) {
					detachCalls = append(detachCalls, detachArgs{
						project:    instanceProject,
						zone:       GetResourceNameFromSelfLink(i.Zone),
						instance:   i.Name,
						deviceName: disk.DeviceName,
					})
				}
			}
		}
		for _, call := range detachCalls {
			op, err := config.clientCompute.Instances.DetachDisk(call.project, call.zone, call.instance, call.deviceName).Do()
			if err != nil {
				return fmt.Errorf("Error detaching disk %s from instance %s/%s/%s: %s", call.deviceName, call.project,
					call.zone, call.instance, err.Error())
			}
			err = computeOperationWaitTime(config, op, call.project,
				fmt.Sprintf("Detaching disk from %s/%s/%s", call.project, call.zone, call.instance), d.Timeout(schema.TimeoutDelete))
			if err != nil {
				// Instance vanished mid-detach: the attachment is gone anyway.
				if opErr, ok := err.(ComputeOperationError); ok && len(opErr.Errors) == 1 && opErr.Errors[0].Code == "RESOURCE_NOT_FOUND" {
					log.Printf("[WARN] instance %q was deleted while awaiting detach", call.instance)
					continue
				}
				return err
			}
		}
	}
	log.Printf("[DEBUG] Deleting Disk %q", d.Id())
	res, err := sendRequestWithTimeout(config, "DELETE", project, url, obj, d.Timeout(schema.TimeoutDelete))
	if err != nil {
		return handleNotFoundError(err, d, "Disk")
	}
	err = computeOperationWaitTime(
		config, res, project, "Deleting Disk",
		d.Timeout(schema.TimeoutDelete))
	if err != nil {
		return err
	}
	log.Printf("[DEBUG] Finished deleting Disk %q: %#v", d.Id(), res)
	return nil
}
// resourceComputeDiskImport parses the user-supplied import ID (full relative
// path, "project/zone/name", "zone/name", or bare "name" — missing parts fall
// back to provider defaults) and normalizes the resource ID to the canonical
// "projects/{project}/zones/{zone}/disks/{name}" form.
func resourceComputeDiskImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	config := meta.(*Config)
	if err := parseImportId([]string{
		"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/disks/(?P<name>[^/]+)",
		"(?P<project>[^/]+)/(?P<zone>[^/]+)/(?P<name>[^/]+)",
		"(?P<zone>[^/]+)/(?P<name>[^/]+)",
		"(?P<name>[^/]+)",
	}, d, config); err != nil {
		return nil, err
	}
	// Replace import id for the resource id
	id, err := replaceVars(d, config, "projects/{{project}}/zones/{{zone}}/disks/{{name}}")
	if err != nil {
		return nil, fmt.Errorf("Error constructing id: %s", err)
	}
	d.SetId(id)
	return []*schema.ResourceData{d}, nil
}
// flattenComputeDiskLabelFingerprint passes the API value through unchanged.
func flattenComputeDiskLabelFingerprint(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskCreationTimestamp passes the API value through unchanged.
func flattenComputeDiskCreationTimestamp(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskDescription passes the API value through unchanged.
func flattenComputeDiskDescription(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskLastAttachTimestamp passes the API value through unchanged.
func flattenComputeDiskLastAttachTimestamp(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskLastDetachTimestamp passes the API value through unchanged.
func flattenComputeDiskLastDetachTimestamp(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskLabels passes the API labels map through unchanged.
func flattenComputeDiskLabels(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskName passes the API value through unchanged.
func flattenComputeDiskName(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSize converts the API's sizeGb value to an integer for
// state. The API may return it either as a string-encoded fixed64 or as a
// JSON number (float64); anything else is passed through for terraform core
// to handle.
func flattenComputeDiskSize(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	switch typed := v.(type) {
	case string:
		// Handles the string fixed64 format.
		if parsed, err := strconv.ParseInt(typed, 10, 64); err == nil {
			return parsed
		}
	case float64:
		// JSON numbers are decoded as float64.
		return int(typed)
	}
	return v // let terraform core handle it otherwise
}
// flattenComputeDiskUsers normalizes each attached-instance self link in the
// API users list to its v1 form; a nil value is returned as-is.
func flattenComputeDiskUsers(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v != nil {
		return convertAndMapStringArr(v.([]interface{}), ConvertSelfLinkToV1)
	}
	return v
}
// flattenComputeDiskPhysicalBlockSizeBytes converts the API value to an
// integer for state. The API may return it either as a string-encoded fixed64
// or as a JSON number (float64); anything else is passed through for
// terraform core to handle.
func flattenComputeDiskPhysicalBlockSizeBytes(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	switch typed := v.(type) {
	case string:
		// Handles the string fixed64 format.
		if parsed, err := strconv.ParseInt(typed, 10, 64); err == nil {
			return parsed
		}
	case float64:
		// JSON numbers are decoded as float64.
		return int(typed)
	}
	return v // let terraform core handle it otherwise
}
// flattenComputeDiskType reduces the disk-type self link from the API to its
// short resource name; a nil value is returned as-is.
func flattenComputeDiskType(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v != nil {
		return NameFromSelfLinkStateFunc(v)
	}
	return v
}
// flattenComputeDiskImage passes the API sourceImage value through unchanged.
func flattenComputeDiskImage(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskZone reduces the zone self link from the API to its short
// resource name; a nil value is returned as-is.
func flattenComputeDiskZone(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v != nil {
		return NameFromSelfLinkStateFunc(v)
	}
	return v
}
// flattenComputeDiskSourceImageEncryptionKey converts the API's nested
// encryption-key object into the single-element list form the schema uses.
// Nil or empty API objects flatten to nil (an unset block).
func flattenComputeDiskSourceImageEncryptionKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v == nil {
		return nil
	}
	apiObj := v.(map[string]interface{})
	if len(apiObj) == 0 {
		return nil
	}
	// The API's kmsKeyName corresponds to the schema's kms_key_self_link.
	return []interface{}{map[string]interface{}{
		"raw_key":           flattenComputeDiskSourceImageEncryptionKeyRawKey(apiObj["rawKey"], d, config),
		"sha256":            flattenComputeDiskSourceImageEncryptionKeySha256(apiObj["sha256"], d, config),
		"kms_key_self_link": flattenComputeDiskSourceImageEncryptionKeyKmsKeySelfLink(apiObj["kmsKeyName"], d, config),
	}}
}
// flattenComputeDiskSourceImageEncryptionKeyRawKey passes the API value through unchanged.
func flattenComputeDiskSourceImageEncryptionKeyRawKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceImageEncryptionKeySha256 passes the API value through unchanged.
func flattenComputeDiskSourceImageEncryptionKeySha256(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceImageEncryptionKeyKmsKeySelfLink passes the API value through unchanged.
func flattenComputeDiskSourceImageEncryptionKeyKmsKeySelfLink(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceImageId passes the API value through unchanged.
func flattenComputeDiskSourceImageId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskDiskEncryptionKey converts the API's nested
// encryption-key object into the single-element list form the schema uses.
// Nil or empty API objects flatten to nil (an unset block).
func flattenComputeDiskDiskEncryptionKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v == nil {
		return nil
	}
	apiObj := v.(map[string]interface{})
	if len(apiObj) == 0 {
		return nil
	}
	// The API's kmsKeyName corresponds to the schema's kms_key_self_link.
	return []interface{}{map[string]interface{}{
		"raw_key":           flattenComputeDiskDiskEncryptionKeyRawKey(apiObj["rawKey"], d, config),
		"sha256":            flattenComputeDiskDiskEncryptionKeySha256(apiObj["sha256"], d, config),
		"kms_key_self_link": flattenComputeDiskDiskEncryptionKeyKmsKeySelfLink(apiObj["kmsKeyName"], d, config),
	}}
}
// flattenComputeDiskDiskEncryptionKeyRawKey passes the API value through unchanged.
func flattenComputeDiskDiskEncryptionKeyRawKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskDiskEncryptionKeySha256 passes the API value through unchanged.
func flattenComputeDiskDiskEncryptionKeySha256(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskDiskEncryptionKeyKmsKeySelfLink passes the API value through unchanged.
func flattenComputeDiskDiskEncryptionKeyKmsKeySelfLink(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSnapshot normalizes the sourceSnapshot self link from the
// API to its v1 form; a nil value is returned as-is.
func flattenComputeDiskSnapshot(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v != nil {
		return ConvertSelfLinkToV1(v.(string))
	}
	return v
}
// flattenComputeDiskSourceSnapshotEncryptionKey converts the API's nested
// encryption-key object into the single-element list form the schema uses.
// Nil or empty API objects flatten to nil (an unset block).
func flattenComputeDiskSourceSnapshotEncryptionKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v == nil {
		return nil
	}
	apiObj := v.(map[string]interface{})
	if len(apiObj) == 0 {
		return nil
	}
	// The API's kmsKeyName corresponds to the schema's kms_key_self_link.
	return []interface{}{map[string]interface{}{
		"raw_key":           flattenComputeDiskSourceSnapshotEncryptionKeyRawKey(apiObj["rawKey"], d, config),
		"kms_key_self_link": flattenComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink(apiObj["kmsKeyName"], d, config),
		"sha256":            flattenComputeDiskSourceSnapshotEncryptionKeySha256(apiObj["sha256"], d, config),
	}}
}
// flattenComputeDiskSourceSnapshotEncryptionKeyRawKey passes the API value through unchanged.
func flattenComputeDiskSourceSnapshotEncryptionKeyRawKey(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink passes the API value through unchanged.
func flattenComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceSnapshotEncryptionKeySha256 passes the API value through unchanged.
func flattenComputeDiskSourceSnapshotEncryptionKeySha256(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenComputeDiskSourceSnapshotId passes the API value through unchanged.
func flattenComputeDiskSourceSnapshotId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// expandComputeDiskLabelFingerprint passes the config value through unchanged.
func expandComputeDiskLabelFingerprint(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskDescription passes the config value through unchanged.
func expandComputeDiskDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskLabels converts the schema's generic labels map into the
// map[string]string the API expects. A nil input expands to an empty
// (non-nil) map.
func expandComputeDiskLabels(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) {
	labels := map[string]string{}
	if v == nil {
		return labels, nil
	}
	for key, value := range v.(map[string]interface{}) {
		labels[key] = value.(string)
	}
	return labels, nil
}
// expandComputeDiskName passes the config value through unchanged.
func expandComputeDiskName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSize passes the config value through unchanged.
func expandComputeDiskSize(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskPhysicalBlockSizeBytes is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskPhysicalBlockSizeBytes(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskType resolves the user-supplied disk type (name, partial
// URI, or full URI) into the zonal relative link the API requires.
func expandComputeDiskType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	f, err := parseZonalFieldValue("diskTypes", v.(string), "project", "zone", d, config, true)
	if err != nil {
		return nil, fmt.Errorf("Invalid value for type: %s", err)
	}
	return f.RelativeLink(), nil
}
// expandComputeDiskImage is a pass-through; full image resolution happens later in resourceComputeDiskEncoder.
func expandComputeDiskImage(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskZone resolves the user-supplied zone (name or URI) into
// the relative link form the API requires.
func expandComputeDiskZone(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	f, err := parseGlobalFieldValue("zones", v.(string), "project", d, config, true)
	if err != nil {
		return nil, fmt.Errorf("Invalid value for zone: %s", err)
	}
	return f.RelativeLink(), nil
}
// expandComputeDiskSourceImageEncryptionKey converts the schema's
// single-element source_image_encryption_key list into the API object,
// skipping empty sub-fields. Returns nil when the block is absent.
// (Machine-generated; schema snake_case keys map to API camelCase.)
func expandComputeDiskSourceImageEncryptionKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})
	transformedRawKey, err := expandComputeDiskSourceImageEncryptionKeyRawKey(original["raw_key"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedRawKey); val.IsValid() && !isEmptyValue(val) {
		transformed["rawKey"] = transformedRawKey
	}
	transformedSha256, err := expandComputeDiskSourceImageEncryptionKeySha256(original["sha256"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedSha256); val.IsValid() && !isEmptyValue(val) {
		transformed["sha256"] = transformedSha256
	}
	transformedKmsKeySelfLink, err := expandComputeDiskSourceImageEncryptionKeyKmsKeySelfLink(original["kms_key_self_link"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedKmsKeySelfLink); val.IsValid() && !isEmptyValue(val) {
		transformed["kmsKeyName"] = transformedKmsKeySelfLink
	}
	return transformed, nil
}
// expandComputeDiskSourceImageEncryptionKeyRawKey is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceImageEncryptionKeyRawKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSourceImageEncryptionKeySha256 is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceImageEncryptionKeySha256(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSourceImageEncryptionKeyKmsKeySelfLink is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceImageEncryptionKeyKmsKeySelfLink(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskDiskEncryptionKey converts the schema's single-element
// disk_encryption_key list into the API object, skipping empty sub-fields.
// Returns nil when the block is absent. (Machine-generated; schema
// snake_case keys map to API camelCase.)
func expandComputeDiskDiskEncryptionKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})
	transformedRawKey, err := expandComputeDiskDiskEncryptionKeyRawKey(original["raw_key"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedRawKey); val.IsValid() && !isEmptyValue(val) {
		transformed["rawKey"] = transformedRawKey
	}
	transformedSha256, err := expandComputeDiskDiskEncryptionKeySha256(original["sha256"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedSha256); val.IsValid() && !isEmptyValue(val) {
		transformed["sha256"] = transformedSha256
	}
	transformedKmsKeySelfLink, err := expandComputeDiskDiskEncryptionKeyKmsKeySelfLink(original["kms_key_self_link"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedKmsKeySelfLink); val.IsValid() && !isEmptyValue(val) {
		transformed["kmsKeyName"] = transformedKmsKeySelfLink
	}
	return transformed, nil
}
// expandComputeDiskDiskEncryptionKeyRawKey is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskDiskEncryptionKeyRawKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskDiskEncryptionKeySha256 is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskDiskEncryptionKeySha256(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskDiskEncryptionKeyKmsKeySelfLink is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskDiskEncryptionKeyKmsKeySelfLink(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSnapshot resolves the user-supplied snapshot (name or URI)
// into the relative link form the API requires.
func expandComputeDiskSnapshot(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	f, err := parseGlobalFieldValue("snapshots", v.(string), "project", d, config, true)
	if err != nil {
		return nil, fmt.Errorf("Invalid value for snapshot: %s", err)
	}
	return f.RelativeLink(), nil
}
// expandComputeDiskSourceSnapshotEncryptionKey converts the schema's
// single-element source_snapshot_encryption_key list into the API object,
// skipping empty sub-fields. Returns nil when the block is absent.
// (Machine-generated; schema snake_case keys map to API camelCase.)
func expandComputeDiskSourceSnapshotEncryptionKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	if len(l) == 0 || l[0] == nil {
		return nil, nil
	}
	raw := l[0]
	original := raw.(map[string]interface{})
	transformed := make(map[string]interface{})
	transformedRawKey, err := expandComputeDiskSourceSnapshotEncryptionKeyRawKey(original["raw_key"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedRawKey); val.IsValid() && !isEmptyValue(val) {
		transformed["rawKey"] = transformedRawKey
	}
	transformedKmsKeySelfLink, err := expandComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink(original["kms_key_self_link"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedKmsKeySelfLink); val.IsValid() && !isEmptyValue(val) {
		transformed["kmsKeyName"] = transformedKmsKeySelfLink
	}
	transformedSha256, err := expandComputeDiskSourceSnapshotEncryptionKeySha256(original["sha256"], d, config)
	if err != nil {
		return nil, err
	} else if val := reflect.ValueOf(transformedSha256); val.IsValid() && !isEmptyValue(val) {
		transformed["sha256"] = transformedSha256
	}
	return transformed, nil
}
// expandComputeDiskSourceSnapshotEncryptionKeyRawKey is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceSnapshotEncryptionKeyRawKey(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceSnapshotEncryptionKeyKmsKeySelfLink(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandComputeDiskSourceSnapshotEncryptionKeySha256 is a pass-through: the schema value is sent to the API unchanged.
func expandComputeDiskSourceSnapshotEncryptionKeySha256(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// resourceComputeDiskEncoder post-processes the request object before it is
// sent to the API: it re-resolves "type" through the API (readDiskType) and
// resolves the short image name in "image" to a full sourceImage URL.
func resourceComputeDiskEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
	config := meta.(*Config)
	project, err := getProject(d, config)
	if err != nil {
		return nil, err
	}
	if v, ok := d.GetOk("type"); ok {
		log.Printf("[DEBUG] Loading disk type: %s", v.(string))
		diskType, err := readDiskType(config, d, v.(string))
		if err != nil {
			return nil, fmt.Errorf(
				"Error loading disk type '%s': %s",
				v.(string), err)
		}
		obj["type"] = diskType.RelativeLink()
	}
	if v, ok := d.GetOk("image"); ok {
		log.Printf("[DEBUG] Resolving image name: %s", v.(string))
		imageUrl, err := resolveImage(config, project, v.(string))
		if err != nil {
			return nil, fmt.Errorf(
				"Error resolving image name '%s': %s",
				v.(string), err)
		}
		obj["sourceImage"] = imageUrl
		log.Printf("[DEBUG] Image name resolved to: %s", imageUrl)
	}
	return obj, nil
}
// resourceComputeDiskDecoder normalizes the three encryption-key blocks in
// the API response before flattening: the API never echoes rawKey back, so
// it is restored from state, and kmsKeyName is stripped of its
// /cryptoKeyVersions suffix so it matches the user's configuration.
func resourceComputeDiskDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) {
	if v, ok := res["diskEncryptionKey"]; ok {
		original := v.(map[string]interface{})
		transformed := make(map[string]interface{})
		// The raw key won't be returned, so we need to use the original.
		transformed["rawKey"] = d.Get("disk_encryption_key.0.raw_key")
		transformed["sha256"] = original["sha256"]
		if kmsKeyName, ok := original["kmsKeyName"]; ok {
			// The response for crypto keys often includes the version of the key which needs to be removed
			// format: projects/<project>/locations/<region>/keyRings/<keyring>/cryptoKeys/<key>/cryptoKeyVersions/1
			transformed["kmsKeyName"] = strings.Split(kmsKeyName.(string), "/cryptoKeyVersions")[0]
		}
		res["diskEncryptionKey"] = transformed
	}
	if v, ok := res["sourceImageEncryptionKey"]; ok {
		original := v.(map[string]interface{})
		transformed := make(map[string]interface{})
		// The raw key won't be returned, so we need to use the original.
		transformed["rawKey"] = d.Get("source_image_encryption_key.0.raw_key")
		transformed["sha256"] = original["sha256"]
		if kmsKeyName, ok := original["kmsKeyName"]; ok {
			// The response for crypto keys often includes the version of the key which needs to be removed
			// format: projects/<project>/locations/<region>/keyRings/<keyring>/cryptoKeys/<key>/cryptoKeyVersions/1
			transformed["kmsKeyName"] = strings.Split(kmsKeyName.(string), "/cryptoKeyVersions")[0]
		}
		res["sourceImageEncryptionKey"] = transformed
	}
	if v, ok := res["sourceSnapshotEncryptionKey"]; ok {
		original := v.(map[string]interface{})
		transformed := make(map[string]interface{})
		// The raw key won't be returned, so we need to use the original.
		transformed["rawKey"] = d.Get("source_snapshot_encryption_key.0.raw_key")
		transformed["sha256"] = original["sha256"]
		if kmsKeyName, ok := original["kmsKeyName"]; ok {
			// The response for crypto keys often includes the version of the key which needs to be removed
			// format: projects/<project>/locations/<region>/keyRings/<keyring>/cryptoKeys/<key>/cryptoKeyVersions/1
			transformed["kmsKeyName"] = strings.Split(kmsKeyName.(string), "/cryptoKeyVersions")[0]
		}
		res["sourceSnapshotEncryptionKey"] = transformed
	}
	return res, nil
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
"""Betamax test configuration: populate per-service credential environment
variables (USERNAME_*, PRIVATE_KEY_*, *_NAMESPACE) either with bogus
placeholders on CI or with values pulled from the local ~/.gitconfig."""
import os

import betamax
from betamax_serializers import pretty_json

from git_repo.services.service import RepositoryService

record_mode = 'once'
services = list(RepositoryService.service_map.keys())
if os.environ.get('TRAVIS_GH3'):
    # create default bogus values for tokens and namespaces if missing for pytest
    # to run without environment values
    # also if an environment variable is not set, then we don't want to record cassettes
    record_mode = 'never'
    for service in services:
        user_name = 'USERNAME_{}'.format(service.upper())
        token_name = 'PRIVATE_KEY_{}'.format(service.upper())
        namespace_name = '{}_NAMESPACE'.format(service.upper())
        if user_name not in os.environ:
            # FIX: the format string had no placeholder, so every service got
            # the identical literal '_username_' value.
            os.environ[user_name] = '_username_{}_'.format(service)
        if token_name not in os.environ:
            os.environ[token_name] = '_token_{}_'.format(service)
        if namespace_name not in os.environ:
            os.environ[namespace_name] = '_namespace_{}_'.format(service)
else:
    # if running tests "locally" and not in travis, let's try to extract the keys from
    # the local configuration if there is some local configuration. And exposes them as
    # environment variables.
    import git, getpass
    config = git.config.GitConfigParser(os.path.join(os.environ['HOME'], '.gitconfig'))
    # handle the different forms of token configuration item (yup, technical debt bites here)
    get_section = lambda s: 'gitrepo "{}"'.format(s)
    get_username = lambda s: config.get_value(get_section(s), 'username',
                                              '_username_{}'.format(s)
                                              )
    # Try 'token', then 'private_token', then 'privatekey'; fall back to a
    # bogus token placeholder.
    # FIX: the innermost default used to be '_namespace_{}_' (copy-paste from
    # the namespace default), which mislabelled a missing token.
    get_token = lambda s: config.get_value(get_section(s), 'token',
                          config.get_value(get_section(s), 'private_token',
                           config.get_value(get_section(s), 'privatekey',
                                            '_token_{}_'.format(s)
                                            )))
    for service in services:
        user_name = 'USERNAME_{}'.format(service.upper())
        token_name = 'PRIVATE_KEY_{}'.format(service.upper())
        namespace_name = '{}_NAMESPACE'.format(service.upper())
        if user_name not in os.environ:
            os.environ[user_name] = get_username(service)
        if token_name not in os.environ:
            os.environ[token_name] = get_token(service)
        if namespace_name not in os.environ:
            os.environ[namespace_name] = os.environ.get('GITREPO_NAMESPACE', '_namespace_{}_'.format(service))
def sanitize_token(interaction, current_cassette):
    """Betamax before_record hook: replace any Authorization header value in a
    recorded 200 response with the '<AUTH_TOKEN>' placeholder so real tokens
    never end up in a committed cassette."""
    # Exit early if the request did not return 200 OK because that's the
    # only time we want to look for Authorization-Token headers
    if interaction.data['response']['status']['code'] != 200:
        return
    headers = interaction.data['response']['headers']
    token = headers.get('Authorization')
    # If there was no token header in the response, exit
    if token is None:
        return
    # FIX: 'cassette' was used without ever being imported, so this hook
    # raised NameError whenever a token was actually found. Imported lazily
    # here (after the early returns) to keep module import cheap.
    from betamax.cassette import cassette
    # Otherwise, create a new placeholder so that when cassette is saved,
    # Betamax will replace the token with our placeholder.
    current_cassette.placeholders.append(
        cassette.Placeholder(placeholder='<AUTH_TOKEN>', replace=token)
    )
# Register the pretty-printing JSON serializer and apply the global Betamax
# configuration: record mode chosen above, cassette directory, and one
# placeholder per service so credentials never appear verbatim in cassettes.
betamax.Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with betamax.Betamax.configure() as config:
    config.default_cassette_options['record_mode'] = record_mode
    config.cassette_library_dir = 'tests/integration/cassettes'
    config.default_cassette_options['serialize_with'] = 'prettyjson'
    #config.before_record(callback=sanitize_token)
    # generating placeholders in betamax configuration for each service's key and default namespace
    for service in services:
        config.define_cassette_placeholder('<PRIVATE_KEY_{}>'.format(service.upper()), os.environ.get('PRIVATE_KEY_{}'.format(service.upper())))
        config.define_cassette_placeholder('<{}_NAMESPACE>'.format(service.upper()), os.environ.get('{}_NAMESPACE'.format(service.upper())))
| {
"pile_set_name": "Github"
} |
.class Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;
.super Lcom/d/a/a/h;
# instance fields
.field final synthetic this$0:Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;
.field final synthetic val$weight:Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;
# direct methods
# Synthetic constructor for the anonymous callback: captures the outer
# QQHealth instance and the HealthWeight being posted. The iput-before-init
# ordering is the standard javac pattern for captured fields.
.method constructor <init>(Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;)V
    .locals 0
    iput-object p1, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->this$0:Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;
    iput-object p2, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->val$weight:Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;
    invoke-direct {p0}, Lcom/d/a/a/h;-><init>()V
    return-void
.end method
# virtual methods
# HTTP failure callback: re-queue the weight for a later sync attempt
# (weight time is seconds; multiplied by 0x3e8 = 1000 to get milliseconds),
# log the failure, and dump the response body if one was returned.
.method public onFailure(I[Lorg/apache/http/Header;[BLjava/lang/Throwable;)V
    .locals 5
    iget-object v0, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->this$0:Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;
    iget-object v1, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->val$weight:Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;
    invoke-virtual {v1}, Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;->getTime()I
    move-result v1
    int-to-long v1, v1
    const-wide/16 v3, 0x3e8
    mul-long/2addr v1, v3
    invoke-static {v1, v2}, Ljava/lang/Long;->valueOf(J)Ljava/lang/Long;
    move-result-object v1
    invoke-virtual {v0, v1}, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;->addNeedSyncWeight(Ljava/lang/Long;)V
    const-string v0, "QQ.Health"
    const-string v1, "Post Weight Failed!!"
    invoke-static {v0, v1, p4}, Lcn/com/smartdevices/bracelet/q;->b(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Throwable;)V
    # If a response body (p3) was returned, decode it as a String and log it.
    if-eqz p3, :cond_0
    new-instance v0, Ljava/lang/String;
    invoke-direct {v0, p3}, Ljava/lang/String;-><init>([B)V
    const-string v1, "QQ.Health"
    new-instance v2, Ljava/lang/StringBuilder;
    invoke-direct {v2}, Ljava/lang/StringBuilder;-><init>()V
    const-string v3, "Respone : "
    invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;
    move-result-object v2
    invoke-virtual {v2, v0}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;
    move-result-object v0
    invoke-virtual {v0}, Ljava/lang/StringBuilder;->toString()Ljava/lang/String;
    move-result-object v0
    invoke-static {v1, v0}, Lcn/com/smartdevices/bracelet/q;->d(Ljava/lang/String;Ljava/lang/String;)V
    :cond_0
    return-void
.end method
# HTTP success callback: log the response body, validate it via
# QQHealth.checkPostResult (accessed through synthetic access$000), and on a
# positive result remove the weight from the pending-sync set
# (time in seconds * 0x3e8 = milliseconds, same keying as onFailure).
.method public onSuccess(I[Lorg/apache/http/Header;[B)V
    .locals 5
    if-eqz p3, :cond_0
    const-string v0, "QQ.Health"
    const-string v1, "Post Weight Successed!!"
    invoke-static {v0, v1}, Lcn/com/smartdevices/bracelet/q;->d(Ljava/lang/String;Ljava/lang/String;)V
    new-instance v0, Ljava/lang/String;
    invoke-direct {v0, p3}, Ljava/lang/String;-><init>([B)V
    const-string v1, "QQ.Health"
    new-instance v2, Ljava/lang/StringBuilder;
    invoke-direct {v2}, Ljava/lang/StringBuilder;-><init>()V
    const-string v3, "Respone : "
    invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;
    move-result-object v2
    invoke-virtual {v2, v0}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;
    move-result-object v2
    invoke-virtual {v2}, Ljava/lang/StringBuilder;->toString()Ljava/lang/String;
    move-result-object v2
    invoke-static {v1, v2}, Lcn/com/smartdevices/bracelet/q;->d(Ljava/lang/String;Ljava/lang/String;)V
    iget-object v1, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->this$0:Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;
    # invokes: Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;->checkPostResult(Ljava/lang/String;)Z
    invoke-static {v1, v0}, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;->access$000(Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;Ljava/lang/String;)Z
    move-result v0
    if-eqz v0, :cond_0
    iget-object v0, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->this$0:Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;
    iget-object v1, p0, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth$3;->val$weight:Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;
    invoke-virtual {v1}, Lcn/com/smartdevices/bracelet/tencent/health/HealthWeight;->getTime()I
    move-result v1
    int-to-long v1, v1
    const-wide/16 v3, 0x3e8
    mul-long/2addr v1, v3
    invoke-static {v1, v2}, Ljava/lang/Long;->valueOf(J)Ljava/lang/Long;
    move-result-object v1
    invoke-virtual {v0, v1}, Lcn/com/smartdevices/bracelet/tencent/health/QQHealth;->removeNeedSyncWeight(Ljava/lang/Long;)V
    :cond_0
    return-void
.end method
| {
"pile_set_name": "Github"
} |
/* Base16 Atelier Cave Dark - Theme */
/* by Bram de Haan (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave) */
/* Original Base16 color scheme by Chris Kempson (https://github.com/chriskempson/base16) */
/* Atelier-Cave Comment */
.hljs-comment,
.hljs-quote {
  color: #7e7887;
}
/* Atelier-Cave Red */
.hljs-variable,
.hljs-template-variable,
.hljs-attribute,
.hljs-regexp,
.hljs-link,
.hljs-tag,
.hljs-name,
.hljs-selector-id,
.hljs-selector-class {
  color: #be4678;
}
/* Atelier-Cave Orange */
.hljs-number,
.hljs-meta,
.hljs-built_in,
.hljs-builtin-name,
.hljs-literal,
.hljs-type,
.hljs-params {
  color: #aa573c;
}
/* Atelier-Cave Green */
.hljs-string,
.hljs-symbol,
.hljs-bullet {
  color: #2a9292;
}
/* Atelier-Cave Blue */
.hljs-title,
.hljs-section {
  color: #576ddb;
}
/* Atelier-Cave Purple */
.hljs-keyword,
.hljs-selector-tag {
  color: #955ae7;
}
/* Diff highlighting: full-width rows, dark text on red (removed) / green (added) */
.hljs-deletion,
.hljs-addition {
  color: #19171c;
  display: inline-block;
  width: 100%;
}
.hljs-deletion {
  background-color: #be4678;
}
.hljs-addition {
  background-color: #2a9292;
}
/* Base container: dark background, default foreground */
.hljs {
  display: block;
  overflow-x: auto;
  background: #19171c;
  color: #8b8792;
  padding: 0.5em;
}
.hljs-emphasis {
  font-style: italic;
}
.hljs-strong {
  font-weight: bold;
}
| {
"pile_set_name": "Github"
} |
////////////////////////////////
//
// Copyright 2020 Battelle Energy Alliance, LLC
//
//
////////////////////////////////
namespace CSETWeb_Api.Controllers
{
    /// <summary>
    /// Row/result DTO for the components-summary stored procedure: per-answer
    /// counts and percentages (Yes / No / N.A. / Alternate / Unanswered) plus
    /// totals and the answer label fields.
    /// NOTE(review): the triple-m in "Summmary" is a typo, but the name is
    /// part of the public surface (likely bound to the stored-procedure
    /// name), so it is left unchanged here.
    /// </summary>
    public class usp_getComponentsSummmary
    {
        public double YesNumber { get; set; }
        public double YesPercent { get; set; }
        public double NoNumber { get; set; }
        public double NoPercent { get; set; }
        public double NANumber { get; set; }
        public double NAPercent { get; set; }
        public double AltNumber { get; set; }
        public double AltPercent { get; set; }
        public double UnansweredNumber { get; set; }
        public double UnansweredPercent { get; set; }
        public double TotalNumber { get; set; }
        public double TotalPercent { get; set; }
        public string Answer_Text { get; set; }
        public string Answer_Full_Name { get; set; }
        public int vcount { get; set; }
        public decimal value { get; set; }
    }
}
| {
"pile_set_name": "Github"
} |
{
"pile_set_name": "Github"
} |
|
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
namespace UMA
{
/// <summary>
/// This ScriptableObject class is used for advanced mesh hiding with UMA and the DCS.
/// </summary>
/// <remarks>
/// This class simply stores a link to a SlotDataAsset (the slot to get hiding applied to) and a list of the slot's triangles as a BitArray.
/// Each bit indicates a flag of whether the triangle should be hidden or not in the final generated UMA.
/// After creating the MeshHideAsset, it can then be added to a list in a wardrobe recipes. This makes it so when the wardrobe recipe is active and the slot associated
/// with the MeshHideAsset is found in the UMA recipe, then apply the triangle hiding to the slot. MeshHideAsset's are also unioned, so multiple MeshHideAssets with
/// the same slotData can combine to hide their unioned list.
/// </remarks>
public class MeshHideAsset : ScriptableObject, ISerializationCallbackReceiver
{
/// <summary>
/// The asset we want to apply mesh hiding to if found in the generated UMA.
/// </summary>
/// <value>The SlotDataAsset.</value>
[SerializeField]
public SlotDataAsset asset
{
get
{
if (_asset != null)
{
_assetSlotName = _asset.slotName;
_asset = null;
}
return UMAAssetIndexer.Instance.GetAsset<SlotDataAsset>(_assetSlotName);
}
set
{
if (value != null)
_assetSlotName = value.slotName;
else
{
Debug.Log("Cleared Asset Slot Name");
_assetSlotName = "";
}
}
}
[SerializeField, HideInInspector]
private SlotDataAsset _asset;
public bool HasReference
{
get { return _asset != null; }
}
public string AssetSlotName
{
get {
if (string.IsNullOrEmpty(_assetSlotName))
{
if (_asset != null)
{
_assetSlotName = _asset.slotName;
}
}
return _assetSlotName;
}
set
{
_assetSlotName = value;
}
}
[SerializeField, HideInInspector]
private string _assetSlotName = "";
/// <summary>
/// BitArray of the triangle flags list. The list stores only the first index of the triangle vertex in the asset's triangle list.
/// </summary>
/// <value>The array of BitArrays. A BitArray for each submesh triangle list.</value>
public BitArray[] triangleFlags { get { return _triangleFlags; }}
private BitArray[] _triangleFlags;
[System.Serializable]
public class serializedFlags
{
public int[] flags;
public int Count;
public serializedFlags(int count)
{
Count = count;
flags = new int[(Count + 31) / 32];
}
}
[SerializeField]
private serializedFlags[] _serializedFlags;
public int SubmeshCount
{
get
{
if (_triangleFlags != null)
{
return _triangleFlags.Length;
}
else
return 0;
}
}
/// <summary>
/// If this contains a reference to an asset, it is freed.
/// This asset reference is no longer needed, and
/// forces the asset to be included in the build.
/// It is kept only for upgrading from earlier UMA versions
/// </summary>
public void FreeReference()
{
if (_asset != null)
{
_assetSlotName = _asset.slotName;
_asset = null;
}
}
/// <summary>
/// Gets the total triangle count in the multidimensional triangleFlags.
/// </summary>
/// <value>The triangle count.</value>
public int TriangleCount
{
get
{
if (_triangleFlags != null)
{
int total = 0;
for (int i = 0; i < _triangleFlags.Length; i++)
total += _triangleFlags[i].Count;
return total;
}
else
return 0;
}
}
/// <summary>
/// Gets the hidden triangles count.
/// </summary>
/// <value>The hidden triangles count.</value>
public int HiddenCount
{
get
{
if (_triangleFlags != null)
{
int total = 0;
for (int i = 0; i < _triangleFlags.Length; i++)
{
total += UMAUtils.GetCardinality(_triangleFlags[i]);
}
return total;
}
else
return 0;
}
}
#if UNITY_EDITOR
[ContextMenu("CopyToClipboard")]
public void CopyToClipboard()
{
UnityEditor.EditorGUIUtility.systemCopyBuffer = JsonUtility.ToJson(this);
}
[ContextMenu("PasteFromClipboard")]
public void PasteFromClipboard()
{
JsonUtility.FromJsonOverwrite(UnityEditor.EditorGUIUtility.systemCopyBuffer, this);
}
#endif
/// <summary>
/// Custom serialization to write the BitArray to a boolean array.
/// </summary>
public void OnBeforeSerialize()
{
// _asset = null; // Let's not save this!
if (_triangleFlags == null)
return;
if (TriangleCount > 0)
{
_serializedFlags = new serializedFlags[_triangleFlags.Length];
for (int i = 0; i < _triangleFlags.Length; i++)
{
_serializedFlags[i] = new serializedFlags(_triangleFlags[i].Length);
_serializedFlags[i].flags.Initialize();
}
}
for (int i = 0; i < _triangleFlags.Length; i++)
{
_triangleFlags[i].CopyTo(_serializedFlags[i].flags, 0);
}
if (_serializedFlags == null)
{
if(Debug.isDebugBuild)
Debug.LogError("Serializing triangle flags failed!");
}
}
/// <summary>
/// Custom deserialization to write the boolean array to the BitArray.
/// </summary>
public void OnAfterDeserialize()
{
//We're not logging an error here because we'll get spammed by it for empty/not-set assets.
if (_asset == null && string.IsNullOrEmpty(_assetSlotName))
{
Debug.Log("No reference and no name on MeshHideAsset!");
return;
}
if (_asset != null)
{
_assetSlotName = _asset.slotName;
}
if (_serializedFlags == null)
return;
if (_serializedFlags.Length > 0)
{
_triangleFlags = new BitArray[_serializedFlags.Length];
for (int i = 0; i < _serializedFlags.Length; i++)
{
_triangleFlags[i] = new BitArray(_serializedFlags[i].flags);
_triangleFlags[i].Length = _serializedFlags[i].Count;
}
}
}
/// <summary>
/// Initialize this asset by creating a new boolean array that matches the triangle length in the asset triangle list.
/// </summary>
[ExecuteInEditMode]
public void Initialize()
{
SlotDataAsset slot = asset;
if (slot == null)
{
_triangleFlags = null;
return;
}
if (slot.meshData == null)
return;
_triangleFlags = new BitArray[slot.meshData.subMeshCount];
for (int i = 0; i < slot.meshData.subMeshCount; i++)
{
_triangleFlags[i] = new BitArray(slot.meshData.submeshes[i].triangles.Length / 3);
}
}
/// <summary>
/// Set the triangle flag's boolean value
/// </summary>
/// <param name="triangleIndex">The first index for the triangle to set.</param>
/// <param name="flag">Bool to set the triangle flag to.</param>
/// <param name="submesh">The submesh index to access. Default = 0.</param>
[ExecuteInEditMode]
public void SetTriangleFlag(int triangleIndex, bool flag, int submesh = 0)
{
if (_triangleFlags == null)
{
if(Debug.isDebugBuild)
Debug.LogError("Triangle Array not initialized!");
return;
}
if (triangleIndex >= 0 && (_triangleFlags[submesh].Length - 3) > triangleIndex)
{
_triangleFlags[submesh][triangleIndex] = flag;
}
}
/// <summary>
/// Set the given BitArray to this object's triangleFlag's BitArray.
/// </summary>
/// <param name="selection">The BitArray selection.</param>
[ExecuteInEditMode]
public void SaveSelection( BitArray selection )
{
int submesh = asset.subMeshIndex;
if (selection.Count != _triangleFlags[submesh].Count)
{
if (Debug.isDebugBuild)
Debug.Log("SaveSelection: counts don't match!");
return;
}
//Only works for submesh 0 for now
_triangleFlags[submesh].SetAll(false);
if (selection.Length == _triangleFlags[submesh].Length)
_triangleFlags[submesh] = new BitArray(selection);
else
{
if (Debug.isDebugBuild)
Debug.LogWarning("SaveSelection: counts don't match!");
}
#if UNITY_EDITOR
UnityEditor.EditorUtility.SetDirty(this);
#endif
}
/// <summary>
/// Generates a final BitArray mask from a list of MeshHideAssets.
/// </summary>
/// <returns>The BitArray array mask.</returns>
/// <param name="assets">List of MeshHideAssets.</param>
public static BitArray[] GenerateMask( List<MeshHideAsset> assets )
{
List<BitArray[]> flags = new List<BitArray[]>();
foreach (MeshHideAsset asset in assets)
flags.Add(asset.triangleFlags);
return CombineTriangleFlags(flags);
}
/// <summary>
/// Combines the list of BitArray arrays.
/// </summary>
/// <returns>The final combined BitArray array.</returns>
/// <param name="flags">List of BitArray array flags.</param>
public static BitArray[] CombineTriangleFlags( List<BitArray[]> flags)
{
if (flags == null || flags.Count <= 0)
return null;
BitArray[] final = new BitArray[flags[0].Length];
for(int i = 0; i < flags[0].Length; i++)
{
final[i] = new BitArray(flags[0][i]);
}
for (int i = 1; i < flags.Count; i++)
{
for (int j = 0; j < flags[i].Length; j++)
{
if (flags[i][j].Count == flags[0][j].Count)
final[j].Or(flags[i][j]);
}
}
return final;
}
#if UNITY_EDITOR
#if UMA_HOTKEYS
[UnityEditor.MenuItem("Assets/Create/UMA/Misc/Mesh Hide Asset %#h")]
#else
[UnityEditor.MenuItem("Assets/Create/UMA/Misc/Mesh Hide Asset")]
#endif
public static void CreateMeshHideAsset()
{
UMA.CustomAssetUtility.CreateAsset<MeshHideAsset>();
}
#endif
}
} | {
"pile_set_name": "Github"
} |
GIMP-VBR
1.0
2. Hardness 100
10.000000
25.000000
1.000000
1.000000
0.000000
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2011-2019, Intel Corporation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef _MSC_VER
#define ISPC_IS_WINDOWS
#define NOMINMAX
#elif defined(__linux__)
#define ISPC_IS_LINUX
#elif defined(__APPLE__)
#define ISPC_IS_APPLE
#endif
#include <algorithm>
#include <assert.h>
#include <fcntl.h>
#include <float.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <vector>
#ifdef ISPC_IS_WINDOWS
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#endif
#include "../timing.h"
#include "deferred.h"
#include "kernels_ispc.h"
///////////////////////////////////////////////////////////////////////////
// Benchmark driver: loads a deferred-shading input dump, renders it nframes
// times with the ISPC+tasks path and with the serial C++ path, reports the
// best (minimum) per-frame cycle counts, and writes each result as a PPM.
int main(int argc, char **argv) {
    if (argc < 2) {
        printf(
            "usage: deferred_shading <input_file (e.g. data/pp1280x720.bin)> [tasks iterations] [serial iterations]\n");
        return 1;
    }
    // {tasks iterations, serial iterations, frames-per-timing-run scale};
    // overridden only when exactly 3 extra args are given (argc == 5).
    static unsigned int test_iterations[] = {5, 3, 500}; // last value is for nframes, it is scale.
    if (argc == 5) {
        for (int i = 0; i < 3; i++) {
            test_iterations[i] = atoi(argv[2 + i]);
        }
    }
    InputData *input = CreateInputDataFromFile(argv[1]);
    if (!input) {
        printf("Failed to load input file \"%s\"!\n", argv[1]);
        return 1;
    }
    Framebuffer framebuffer(input->header.framebufferWidth, input->header.framebufferHeight);
    InitDynamicC(input);
    int nframes = test_iterations[2];
    // Keep the minimum over repetitions to reduce timing noise.
    double ispcCycles = 1e30;
    for (unsigned int i = 0; i < test_iterations[0]; ++i) {
        framebuffer.clear();
        reset_and_start_timer();
        for (int j = 0; j < nframes; ++j)
            ispc::RenderStatic(input->header, input->arrays, VISUALIZE_LIGHT_COUNT, framebuffer.r, framebuffer.g,
                               framebuffer.b);
        double mcycles = get_elapsed_mcycles() / nframes;
        printf("@time of ISPC + TASKS run:\t\t\t[%.3f] million cycles\n", mcycles);
        ispcCycles = std::min(ispcCycles, mcycles);
    }
    printf("[ispc static + tasks]:\t\t[%.3f] million cycles to render "
           "%d x %d image\n",
           ispcCycles, input->header.framebufferWidth, input->header.framebufferHeight);
    WriteFrame("deferred-ispc-static.ppm", input, framebuffer);
    // Serial baseline uses fewer frames per run since it is much slower.
    nframes = 3;
    double serialCycles = 1e30;
    for (unsigned int i = 0; i < test_iterations[1]; ++i) {
        framebuffer.clear();
        reset_and_start_timer();
        for (int j = 0; j < nframes; ++j)
            DispatchDynamicC(input, &framebuffer);
        double mcycles = get_elapsed_mcycles() / nframes;
        printf("@time of serial run:\t\t\t[%.3f] million cycles\n", mcycles);
        serialCycles = std::min(serialCycles, mcycles);
    }
    printf("[C++ serial dynamic, 1 core]:\t[%.3f] million cycles to render image\n", serialCycles);
    WriteFrame("deferred-serial-dynamic.ppm", input, framebuffer);
    printf("\t\t\t\t(%.2fx speedup from ISPC + tasks)\n", serialCycles / ispcCycles);
    DeleteInputData(input);
    return 0;
}
| {
"pile_set_name": "Github"
} |
/home/sunzheng/Projects/EasyRSS/obj/local/armeabi/objs/curl/curl/lib/krb5.o: \
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/krb5.c \
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/setup.h \
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/curl_config.h \
/home/sunzheng/Projects/EasyRSS/jni/curl/include/curl/curlbuild.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdint.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/_types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/machine/_types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/cdefs.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/cdefs_elf.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/posix_types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/stddef.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/compiler.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/posix_types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/types.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/machine/kernel.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/sysmacros.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/inttypes.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/socket.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/socket.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/socket.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/sockios.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/sockios.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/uio.h \
/home/sunzheng/Projects/EasyRSS/jni/curl/include/curl/curlrules.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdio.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/assert.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/errno.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/errno.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/errno.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/errno.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/errno-base.h \
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/setup_once.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdlib.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/string.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/malloc.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/alloca.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/strings.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/memory.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/ctype.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/stat.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/time.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/time.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/stat.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/endian.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/endian.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/time.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/siginfo.h \
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/siginfo.h
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/setup.h:
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/curl_config.h:
/home/sunzheng/Projects/EasyRSS/jni/curl/include/curl/curlbuild.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdint.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/_types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/machine/_types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/cdefs.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/cdefs_elf.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/posix_types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/stddef.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/compiler.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/posix_types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/types.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/machine/kernel.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/sysmacros.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/inttypes.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/socket.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/socket.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/socket.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/sockios.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/sockios.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/uio.h:
/home/sunzheng/Projects/EasyRSS/jni/curl/include/curl/curlrules.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdio.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/assert.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/errno.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/errno.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/errno.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/errno.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/errno-base.h:
/home/sunzheng/Projects/EasyRSS/jni/curl/lib/setup_once.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/stdlib.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/string.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/malloc.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/alloca.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/strings.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/memory.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/ctype.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/stat.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/time.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/time.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/linux/stat.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/endian.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/sys/endian.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/time.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm/siginfo.h:
/opt/android-ndk/platforms/android-5/arch-arm/usr/include/asm-generic/siginfo.h:
| {
"pile_set_name": "Github"
} |
(function($) {
    /**
     * Czech language package
     * Translated by @AdwinTrave. Improved by @cuchac
     *
     * NOTE(review): corrected Czech spelling/grammar in user-visible messages:
     * 'Špaňelsko' -> 'Španělsko' (id/vat country names), 'menčí' -> 'menší'
     * (lessThan), 'telefoní' -> 'telefonní' (phone), and the stringCase
     * messages rewritten as grammatical Czech. All other strings unchanged.
     */
    $.fn.bootstrapValidator.i18n = $.extend(true, $.fn.bootstrapValidator.i18n, {
        base64: {
            'default': 'Prosím zadejte správný base64'
        },
        between: {
            'default': 'Prosím zadejte hodnotu mezi %s a %s',
            notInclusive: 'Prosím zadejte hodnotu mezi %s a %s (včetně těchto čísel)'
        },
        callback: {
            'default': 'Prosím zadejte správnou hodnotu'
        },
        choice: {
            'default': 'Prosím vyberte správnou hodnotu',
            less: 'Hodnota musí být minimálně %s',
            more: 'Hodnota nesmí být více jak %s',
            between: 'Prosím vyberte mezi %s a %s'
        },
        color: {
            'default': 'Prosím zadejte správnou barvu'
        },
        creditCard: {
            'default': 'Prosím zadejte správné číslo kreditní karty'
        },
        cusip: {
            'default': 'Prosím zadejte správné CUSIP číslo'
        },
        cvv: {
            'default': 'Prosím zadejte správné CVV číslo'
        },
        date: {
            'default': 'Prosím zadejte správné datum',
            min: 'Prosím zadejte datum před %s',
            max: 'Prosím zadejte datum po %s',
            range: 'Prosím zadejte datum v rozmezí %s až %s'
        },
        different: {
            'default': 'Prosím zadejte jinou hodnotu'
        },
        digits: {
            'default': 'Toto pole může obsahovat pouze čísla'
        },
        ean: {
            'default': 'Prosím zadejte správné EAN číslo'
        },
        emailAddress: {
            'default': 'Prosím zadejte správnou emailovou adresu'
        },
        file: {
            'default': 'Prosím vyberte soubor'
        },
        greaterThan: {
            'default': 'Prosím zadejte hodnotu větší nebo rovnu %s',
            notInclusive: 'Prosím zadejte hodnotu větší než %s'
        },
        grid: {
            'default': 'Prosím zadejte správné GRId číslo'
        },
        hex: {
            'default': 'Prosím zadejte správné hexadecimální číslo'
        },
        hexColor: {
            'default': 'Prosím zadejte správnou hex barvu'
        },
        iban: {
            'default': 'Prosím zadejte správné IBAN číslo',
            countryNotSupported: 'IBAN pro %s není podporován',
            country: 'Prosím zadejte správné IBAN číslo pro %s',
            countries: {
                AD: 'Andorru',
                AE: 'Spojené arabské emiráty',
                AL: 'Albanii',
                AO: 'Angolu',
                AT: 'Rakousko',
                AZ: 'Ázerbajdžán',
                BA: 'Bosnu a Herzegovinu',
                BE: 'Belgie',
                BF: 'Burkina Faso',
                BG: 'Bulharsko',
                BH: 'Bahrajn',
                BI: 'Burundi',
                BJ: 'Benin',
                BR: 'Brazílii',
                CH: 'Švýcarsko',
                CI: 'Pobřeží slonoviny',
                CM: 'Kamerun',
                CR: 'Kostariku',
                CV: 'Cape Verde',
                CY: 'Kypr',
                CZ: 'Českou republiku',
                DE: 'Německo',
                DK: 'Dánsko',
                DO: 'Dominikánskou republiku',
                DZ: 'Alžírsko',
                EE: 'Estonsko',
                ES: 'Španělsko',
                FI: 'Finsko',
                FO: 'Faerské ostrovy',
                FR: 'Francie',
                GB: 'Velkou Británii',
                GE: 'Gruzii',
                GI: 'Gibraltar',
                GL: 'Grónsko',
                GR: 'Řecko',
                GT: 'Guatemala',
                HR: 'Chorvatsko',
                HU: 'Maďarsko',
                IE: 'Irsko',
                IL: 'Israel',
                IR: 'Irán',
                IS: 'Island',
                IT: 'Itálii',
                JO: 'Jordansko',
                KW: 'Kuwait',
                KZ: 'Kazakhstán',
                LB: 'Lebanon',
                LI: 'Lichtenštejnsko',
                LT: 'Litvu',
                LU: 'Lucembursko',
                LV: 'Lotyšsko',
                MC: 'Monaco',
                MD: 'Moldavsko',
                ME: 'Černou Horu',
                MG: 'Madagaskar',
                MK: 'Makedonii',
                ML: 'Mali',
                MR: 'Mauritánii',
                MT: 'Malta',
                MU: 'Mauritius',
                MZ: 'Mosambik',
                NL: 'Nizozemsko',
                NO: 'Norsko',
                PK: 'Pakistán',
                PL: 'Polsko',
                PS: 'Palestinu',
                PT: 'Portugalsko',
                QA: 'Katar',
                RO: 'Rumunsko',
                RS: 'Srbsko',
                SA: 'Saudskou Arábii',
                SE: 'Švédsko',
                SI: 'Slovinsko',
                SK: 'Slovensko',
                SM: 'San Marino',
                SN: 'Senegal',
                TN: 'Tunisko',
                TR: 'Turecko',
                VG: 'Britské Panenské ostrovy'
            }
        },
        id: {
            'default': 'Prosím zadejte správné rodné číslo',
            countryNotSupported: 'Rodné číslo pro %s není podporované',
            country: 'Prosím zadejte správné rodné číslo pro %s',
            countries: {
                BA: 'Bosnu a Hercegovinu',
                BG: 'Bulharsko',
                BR: 'Brazílii',
                CH: 'Švýcarsko',
                CL: 'Chile',
                CN: 'Čína',
                CZ: 'Českou Republiku',
                DK: 'Dánsko',
                EE: 'Estonsko',
                ES: 'Španělsko',
                FI: 'Finsko',
                HR: 'Chorvatsko',
                IE: 'Irsko',
                IS: 'Island',
                LT: 'Litvu',
                LV: 'Lotyšsko',
                ME: 'Montenegro',
                MK: 'Makedonii',
                NL: 'Nizozemí',
                RO: 'Rumunsko',
                RS: 'Srbsko',
                SE: 'Švédsko',
                SI: 'Slovinsko',
                SK: 'Slovensko',
                SM: 'San Marino',
                TH: 'Thajsko',
                ZA: 'Jižní Afriku'
            }
        },
        identical: {
            'default': 'Prosím zadejte stejnou hodnotu'
        },
        imei: {
            'default': 'Prosím zadejte správné IMEI číslo'
        },
        imo: {
            'default': 'Prosím zadejte správné IMO číslo'
        },
        integer: {
            'default': 'Prosím zadejte celé číslo'
        },
        ip: {
            'default': 'Prosím zadejte správnou IP adresu',
            ipv4: 'Prosím zadejte správnou IPv4 adresu',
            ipv6: 'Prosím zadejte správnou IPv6 adresu'
        },
        isbn: {
            'default': 'Prosím zadejte správné ISBN číslo'
        },
        isin: {
            'default': 'Prosím zadejte správné ISIN číslo'
        },
        ismn: {
            'default': 'Prosím zadejte správné ISMN číslo'
        },
        issn: {
            'default': 'Prosím zadejte správné ISSN číslo'
        },
        lessThan: {
            'default': 'Prosím zadejte hodnotu menší nebo rovno %s',
            notInclusive: 'Prosím zadejte hodnotu menší než %s'
        },
        mac: {
            'default': 'Prosím zadejte správnou MAC adresu'
        },
        meid: {
            'default': 'Prosím zadejte správné MEID číslo'
        },
        notEmpty: {
            'default': 'Toto pole nesmí být prázdné'
        },
        numeric: {
            'default': 'Prosím zadejte číselnou hodnotu'
        },
        phone: {
            'default': 'Prosím zadejte správné telefonní číslo',
            countryNotSupported: 'Telefonní číslo pro %s není podporované',
            country: 'Prosím zadejte správné telefonní číslo pro %s',
            countries: {
                BR: 'Brazílii',
                CN: 'Čína',
                CZ: 'Českou Republiku',
                DE: 'Německo',
                DK: 'Dánsko',
                ES: 'Španělsko',
                FR: 'Francie',
                GB: 'Velkou Británii',
                MA: 'Maroko',
                PK: 'Pákistán',
                RO: 'Rumunsko',
                RU: 'Rusko',
                SK: 'Slovensko',
                TH: 'Thajsko',
                US: 'Spojené Státy Americké',
                VE: 'Venezuelský'
            }
        },
        regexp: {
            'default': 'Prosím zadejte hodnotu splňující zadání'
        },
        remote: {
            'default': 'Prosím zadejte správnou hodnotu'
        },
        rtn: {
            'default': 'Prosím zadejte správné RTN číslo'
        },
        sedol: {
            'default': 'Prosím zadejte správné SEDOL číslo'
        },
        siren: {
            'default': 'Prosím zadejte správné SIREN číslo'
        },
        siret: {
            'default': 'Prosím zadejte správné SIRET číslo'
        },
        step: {
            'default': 'Prosím zadejte správný krok %s'
        },
        stringCase: {
            'default': 'Pouze malá písmena jsou povolena v tomto poli',
            upper: 'Pouze velká písmena jsou povolena v tomto poli'
        },
        stringLength: {
            'default': 'Toto pole nesmí být prázdné',
            less: 'Prosím zadejte méně než %s znaků',
            more: 'Prosím zadejte více než %s znaků',
            between: 'Prosím zadejte mezi %s a %s znaky'
        },
        uri: {
            'default': 'Prosím zadejte správnou URI'
        },
        uuid: {
            'default': 'Prosím zadejte správné UUID číslo',
            version: 'Prosím zadejte správné UUID verze %s'
        },
        vat: {
            'default': 'Prosím zadejte správné VAT číslo',
            countryNotSupported: 'VAT pro %s není podporované',
            country: 'Prosím zadejte správné VAT číslo pro %s',
            countries: {
                AT: 'Rakousko',
                BE: 'Belgii',
                BG: 'Bulharsko',
                BR: 'Brazílii',
                CH: 'Švýcarsko',
                CY: 'Kypr',
                CZ: 'Českou Republiku',
                DE: 'Německo',
                DK: 'Dánsko',
                EE: 'Estonsko',
                ES: 'Španělsko',
                FI: 'Finsko',
                FR: 'Francie',
                GB: 'Velkou Británii',
                GR: 'Řecko',
                EL: 'Řecko',
                HU: 'Maďarsko',
                HR: 'Chorvatsko',
                IE: 'Irsko',
                IS: 'Island',
                IT: 'Itálie',
                LT: 'Litvu',
                LU: 'Lucembursko',
                LV: 'Lotyšsko',
                MT: 'Maltu',
                NL: 'Nizozemí',
                NO: 'Norsko',
                PL: 'Polsko',
                PT: 'Portugalsko',
                RO: 'Rumunsko',
                RU: 'Rusko',
                RS: 'Srbsko',
                SE: 'Švédsko',
                SI: 'Slovinsko',
                SK: 'Slovensko',
                VE: 'Venezuelský',
                ZA: 'Jižní Afriku'
            }
        },
        vin: {
            'default': 'Prosím zadejte správné VIN číslo'
        },
        zipCode: {
            'default': 'Prosím zadejte správné PSČ',
            countryNotSupported: '%s není podporované',
            country: 'Prosím zadejte správné PSČ pro %s',
            countries: {
                AT: 'Rakousko',
                BR: 'Brazílie',
                CA: 'Kanada',
                CH: 'Švýcarsko',
                CZ: 'Českou Republiku',
                DE: 'Německo',
                DK: 'Dánsko',
                FR: 'Francie',
                GB: 'Velkou Británii',
                IE: 'Irsko',
                IT: 'Itálie',
                MA: 'Maroko',
                NL: 'Nizozemí',
                PT: 'Portugalsko',
                RO: 'Rumunsko',
                RU: 'Rusko',
                SE: 'Švédsko',
                SG: 'Singapur',
                SK: 'Slovensko',
                US: 'Spojené Státy Americké'
            }
        }
    });
}(window.jQuery));
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2016, 2018, 2020, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package core
import (
"github.com/oracle/oci-go-sdk/v25/common"
"net/http"
)
// ListServicesRequest wrapper for the ListServices operation.
// NOTE: this file is generated ("Code generated. DO NOT EDIT.") — code left
// byte-identical; comments only.
type ListServicesRequest struct {
// For list pagination. The maximum number of results per page, or items to return in a paginated
// "List" call. For important details about how pagination works, see
// List Pagination (https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
// Example: `50`
Limit *int `mandatory:"false" contributesTo:"query" name:"limit"`
// For list pagination. The value of the `opc-next-page` response header from the previous "List"
// call. For important details about how pagination works, see
// List Pagination (https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
Page *string `mandatory:"false" contributesTo:"query" name:"page"`
// Unique Oracle-assigned identifier for the request.
// If you need to contact Oracle about a particular request, please provide the request ID.
OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata
}
// String implements fmt.Stringer. Pointer fields are dereferenced by
// common.PointerString so values (not addresses) appear in the output.
func (request ListServicesRequest) String() string {
return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface: it builds an http.Request
// for the given method and path from the struct's contributesTo tags.
func (request ListServicesRequest) HTTPRequest(method, path string) (http.Request, error) {
return common.MakeDefaultHTTPRequestWithTaggedStruct(method, path, request)
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the
// retry policy supplied by the caller in RequestMetadata (may be nil).
func (request ListServicesRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// ListServicesResponse wrapper for the ListServices operation.
// NOTE: generated code — comments only; fields byte-identical.
type ListServicesResponse struct {
// The underlying http response
RawResponse *http.Response
// A list of []Service instances
Items []Service `presentIn:"body"`
// For list pagination. When this header appears in the response, additional pages
// of results remain. For important details about how pagination works, see
// List Pagination (https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
OpcNextPage *string `presentIn:"header" name:"opc-next-page"`
// Unique Oracle-assigned identifier for the request. If you need to contact
// Oracle about a particular request, please provide the request ID.
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
}
// String implements fmt.Stringer; pointer fields are dereferenced for display.
func (response ListServicesResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface, exposing the raw
// underlying *http.Response for callers that need headers/status directly.
func (response ListServicesResponse) HTTPResponse() *http.Response {
return response.RawResponse
}
| {
"pile_set_name": "Github"
} |
# Docker file that builds a Centos 6 image ready for GRR installation.
#
# To build a new image on your local machine, cd to this file's directory
# and run (note the period at the end):
#
#   docker build -t grrdocker/centos6 -f Dockerfile.centos6 .
#
# A custom Python version is built and installed in /usr/local/bin by this
# script. It is available in the PATH as 'python2.7'. The old (default) Python
# is still available in the PATH as 'python'.
FROM centos:6
LABEL maintainer="[email protected]"
# Scratch directory for all source builds; removed at the end to keep the
# final image small.
WORKDIR /tmp/grrdocker-scratch
# Install pre-requisites for building Python, as well as GRR prerequisites.
RUN yum update -y && yum install -y zlib-devel bzip2-devel ncurses-devel \
  readline-devel tk-devel gdbm-devel db4-devel libpcap-devel \
  xz-devel epel-release python-devel wget which java-1.8.0-openjdk \
  libffi-devel openssl-devel zip git gcc gcc-c++ redhat-rpm-config rpm-build \
  rpm-sign
# Install a recent version of sqlite. CentOS-provided one is too old
# for Python 3.
RUN curl -sO https://sqlite.org/2017/sqlite-autoconf-3160200.tar.gz && \
  tar xfz sqlite-autoconf-3160200.tar.gz && \
  cd sqlite-autoconf-3160200 && \
  ./configure && \
  make install && \
  cd .. && \
  rm -rf sqlite-autoconf-3160200*
# Build a recent version of Python 2 from source (Centos 6 has Python 2.6
# installed in /usr/bin). The custom Python version is installed in
# /usr/local/bin. --enable-shared plus the -rpath LDFLAGS lets the shared
# libpython be found at runtime without touching ld.so.conf; 'make altinstall'
# (vs 'install') avoids clobbering the system 'python' binary.
RUN wget https://www.python.org/ftp/python/2.7.14/Python-2.7.14.tgz && \
  tar xzvf Python-2.7.14.tgz && \
  cd Python-2.7.14 && \
  ./configure --enable-shared --enable-ipv6 --enable-unicode=ucs4 \
    --prefix=/usr/local LDFLAGS="-Wl,-rpath /usr/local/lib" && \
  make && \
  make altinstall
# Install Python 2 pip and virtualenv.
# NOTE(review): bootstrap.pypa.io/get-pip.py no longer supports Python 2.7
# by default — if this build breaks, switch to the pinned
# https://bootstrap.pypa.io/pip/2.7/get-pip.py URL; verify before changing.
RUN wget https://bootstrap.pypa.io/get-pip.py && \
  python2.7 get-pip.py && \
  pip install --upgrade pip virtualenv
# Build Python 3 from source (same shared/rpath/altinstall rationale as above).
RUN wget https://www.python.org/ftp/python/3.6.9/Python-3.6.9.tgz && \
  tar xzvf Python-3.6.9.tgz && \
  cd Python-3.6.9 && \
  ./configure --enable-shared --enable-ipv6 --prefix=/usr/local \
    LDFLAGS="-Wl,-rpath /usr/local/lib" && \
  make && \
  make altinstall
# TSK currently fails with sqlite-devel, so we have to remove it from the Docker container.
RUN yum remove -y sqlite-devel || true
# Leave the scratch tree behind and clean it up.
WORKDIR /
RUN rm -rf /tmp/grrdocker-scratch
CMD ["/bin/bash"]
| {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: GPL-2.0 */
static uint32_t gk208_pmu_data[] = {
/* 0x0000: proc_kern */
0x52544e49,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x0058: proc_list_head */
0x54534f48,
0x0000042c,
0x000003df,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x584d454d,
0x000005ee,
0x000005e0,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x46524550,
0x000005f2,
0x000005f0,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x5f433249,
0x000009f3,
0x0000089d,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x54534554,
0x00000a11,
0x000009f5,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x454c4449,
0x00000a1c,
0x00000a1a,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x0268: proc_list_tail */
/* 0x0268: time_prev */
0x00000000,
/* 0x026c: time_next */
0x00000000,
/* 0x0270: fifo_queue */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x02f0: rfifo_queue */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x0370: memx_func_head */
0x00000001,
0x00000000,
0x0000045c,
/* 0x037c: memx_func_next */
0x00000002,
0x00000000,
0x000004cc,
0x00000003,
0x00000002,
0x00000541,
0x00040004,
0x00000000,
0x0000055e,
0x00010005,
0x00000000,
0x00000578,
0x00010006,
0x00000000,
0x0000053c,
0x00000007,
0x00000000,
0x00000584,
/* 0x03c4: memx_func_tail */
/* 0x03c4: memx_ts_start */
0x00000000,
/* 0x03c8: memx_ts_end */
0x00000000,
/* 0x03cc: memx_data_head */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x0bcc: memx_data_tail */
/* 0x0bcc: memx_train_head */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
/* 0x0ccc: memx_train_tail */
/* 0x0ccc: i2c_scl_map */
0x00000400,
0x00000800,
0x00001000,
0x00002000,
0x00004000,
0x00008000,
0x00010000,
0x00020000,
0x00040000,
0x00080000,
/* 0x0cf4: i2c_sda_map */
0x00100000,
0x00200000,
0x00400000,
0x00800000,
0x01000000,
0x02000000,
0x04000000,
0x08000000,
0x10000000,
0x20000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
};
static uint32_t gk208_pmu_code[] = {
0x02f90ef5,
/* 0x0004: rd32 */
0xf607a040,
0x04bd000e,
0x0100018d,
0xf607ac40,
0x04bd000d,
/* 0x0018: rd32_wait */
0xcf07ac4d,
0xd4f100dd,
0x1bf47000,
0x07a44df6,
0xf800ddcf,
/* 0x002d: wr32 */
0x07a04000,
0xbd000ef6,
0x07a44004,
0xbd000df6,
0x00f28d04,
0x07ac4001,
0xbd000df6,
/* 0x0049: wr32_wait */
0x07ac4d04,
0xf100ddcf,
0xf47000d4,
0x00f8f61b,
/* 0x0058: nsec */
0x80f990f9,
0x88cf2c08,
/* 0x0061: nsec_loop */
0xcf2c0900,
0x98bb0099,
0xf49ea602,
0x80fcf61e,
0x00f890fc,
/* 0x0074: wait */
0x80f990f9,
0x88cf2c08,
/* 0x007d: wait_loop */
0x7eeeb200,
0xb2000004,
0x04adfdda,
0x0bf4aca6,
0xcf2c0910,
0x98bb0099,
0xf49ba602,
/* 0x009a: wait_done */
0x80fce61e,
0x00f890fc,
/* 0x00a0: intr_watchdog */
0xb003e998,
0x0bf40096,
0x9a0a9828,
0xf4029abb,
0x010d0e1c,
0x00023e7e,
0x0ef494bd,
/* 0x00bd: intr_watchdog_next_time */
0x9b0a9814,
0xf400a6b0,
0x9aa6080b,
/* 0x00cb: intr_watchdog_next_time_set */
0xb5061cf4,
/* 0x00ce: intr_watchdog_next_proc */
0xe9b59b09,
0x58e0b603,
0x0268e6b1,
0xf8c81bf4,
/* 0x00dd: intr */
0xbd00f900,
0xf980f904,
0xf9a0f990,
0xf9c0f9b0,
0xf9e0f9d0,
0xfe000ff0,
0x80f90188,
0xcf045048,
0x80b60088,
0x04504001,
0xbd0008f6,
0xcf080804,
0x89c40088,
0x1f0bf402,
0x0e9b00b5,
0x00a07e58,
0x9b099800,
0xf40096b0,
0x34000d0b,
0xbd0009f6,
0x9a09b504,
/* 0x0130: intr_skip_watchdog */
0x080089e4,
0x49340bf4,
0x99cf0688,
0x029ac400,
0x4c200bf4,
0xcccf04c0,
0xdec0f900,
0x54534f48,
0x9f7e000d,
0xc0fc0002,
0xf604c040,
0x04bd000c,
/* 0x0160: intr_subintr_skip_fifo */
0xf6068840,
0x04bd0009,
/* 0x0168: intr_skip_subintr */
0xbd00e049,
0x0489fd90,
0x08f60400,
0xfc04bd00,
0x0088fe80,
0xe0fcf0fc,
0xc0fcd0fc,
0xa0fcb0fc,
0x80fc90fc,
0x32f400fc,
/* 0x0193: ticks_from_ns */
0xf901f800,
0x4db0f9c0,
0x527e0144,
0xccec0003,
0xb4b003e8,
0x0e0bf400,
0x03e8eeec,
0x7e01444d,
/* 0x01b3: ticks_from_ns_quit */
0xb2000352,
0xfcb0fcce,
/* 0x01bb: ticks_from_us */
0xf900f8c0,
0x4db0f9c0,
0x527e0144,
0xceb20003,
0xf400b4b0,
0xe4bd050b,
/* 0x01d0: ticks_from_us_quit */
0xc0fcb0fc,
/* 0x01d6: ticks_to_us */
0x444d00f8,
0xecedff01,
/* 0x01de: timer */
0x90f900f8,
0x32f480f9,
0x03f89810,
0xf40086b0,
0x84bd4a1c,
0x08f63800,
0x0804bd00,
0x0088cf34,
0xbb9a0998,
0xe9bb0298,
0x03feb500,
0x88cf0808,
0x0284f000,
0x081c1bf4,
0x0088cf34,
0x0bf4e0a6,
0xf4e8a608,
/* 0x0222: timer_reset */
0x34000d1c,
0xbd000ef6,
0x9a0eb504,
/* 0x022c: timer_enable */
0x38000108,
0xbd0008f6,
/* 0x0235: timer_done */
0x1031f404,
0x90fc80fc,
/* 0x023e: send_proc */
0x80f900f8,
0xe89890f9,
0x04e99805,
0xa60486f0,
0x2a0bf489,
0x940398c4,
0x80b60488,
0x008ebb18,
0xb500fa98,
0x8db5008a,
0x028cb501,
0xb6038bb5,
0x94f00190,
0x04e9b507,
/* 0x0277: send_done */
0xfc0231f4,
0xf880fc90,
/* 0x027d: find */
0x0880f900,
0x0131f458,
/* 0x0284: find_loop */
0xa6008a98,
0x100bf4ae,
0xb15880b6,
0xf4026886,
0x32f4f11b,
/* 0x0299: find_done */
0xfc8eb201,
/* 0x029f: send */
0x7e00f880,
0xf400027d,
0x00f89b01,
/* 0x02a8: recv */
0x80f990f9,
0x9805e898,
0x32f404e9,
0xf489a601,
0x89c43c0b,
0x0180b603,
0xb50784f0,
0xea9805e8,
0xfef0f902,
0xf0f9018f,
0x9994efb2,
0x00e9bb04,
0x9818e0b6,
0xec9803eb,
0x01ed9802,
0xf900ee98,
0xfef0fca5,
0x31f400f8,
/* 0x02f3: recv_done */
0xfcf0fc01,
0xf890fc80,
/* 0x02f9: init */
0x01084100,
0xe70011cf,
0xb6010911,
0x14fe0814,
0x00e04100,
0x01f61c00,
0x0104bd00,
0xf61400ff,
0x04bd0001,
0x15f10201,
0x10000800,
0xbd0001f6,
0x00dd4104,
0xffff14f1,
0xf40010fe,
0x01011031,
0x01f63800,
0x0f04bd00,
/* 0x0341: init_proc */
0x01f19858,
0xf40016b0,
0x15f9fa0b,
0xf458f0b6,
/* 0x0352: mulu32_32_64 */
0x10f9f20e,
0x30f920f9,
0xe19540f9,
0x10d29510,
0xb4bdc4bd,
0xffc0edff,
0x34b2301d,
0xffff34f1,
0xb61034b6,
0xc3bb1045,
0x01b4bb00,
0xb230e2ff,
0xff34f134,
0x1034b6ff,
0xbb1045b6,
0xb4bb00c3,
0x3012ff01,
0xfc00b3bb,
0xfc30fc40,
0xf810fc20,
/* 0x03a1: host_send */
0x04b04100,
0x420011cf,
0x22cf04a0,
0xf412a600,
0x1ec42e0b,
0x04ee9407,
0x0270e0b7,
0x9803eb98,
0xed9802ec,
0x00ee9801,
0x00029f7e,
0xc40110b6,
0xb0400f1e,
0x000ef604,
0x0ef404bd,
/* 0x03dd: host_send_done */
/* 0x03df: host_recv */
0xd100f8c7,
0x52544e49,
0x0bf4e1a6,
/* 0x03e9: host_recv_wait */
0x04cc41bb,
0x420011cf,
0x22cf04c8,
0x0816f000,
0x0bf412a6,
0x0723c4ef,
0xb70434b6,
0xb502f030,
0x3cb5033b,
0x013db502,
0xb6003eb5,
0x24f00120,
0x04c8400f,
0xbd0002f6,
0x00400204,
0x0002f600,
0x00f804bd,
/* 0x042c: host_init */
0xb6008041,
0x15f11014,
0xd0400270,
0x0001f604,
0x804104bd,
0x1014b600,
0x02f015f1,
0xf604dc40,
0x04bd0001,
0xc4400101,
0x0001f604,
0x00f804bd,
/* 0x045c: memx_func_enter */
0x47162046,
0x6eb2f55d,
0x0000047e,
0x87fdd8b2,
0xf960f904,
0xfcd0fc80,
0x002d7ee0,
0xb2fe0700,
0x00047e6e,
0xfdd8b200,
0x60f90487,
0xd0fc80f9,
0x2d7ee0fc,
0xf0460000,
0x7e6eb226,
0xb2000004,
0x0487fdd8,
0x80f960f9,
0xe0fcd0fc,
0x00002d7e,
0xe0400406,
0x0006f607,
/* 0x04b6: memx_func_enter_wait */
0xc04604bd,
0x0066cf07,
0xf40464f0,
0x2c06f70b,
0xb50066cf,
0x00f8f106,
/* 0x04cc: memx_func_leave */
0x66cf2c06,
0xf206b500,
0xe4400406,
0x0006f607,
/* 0x04de: memx_func_leave_wait */
0xc04604bd,
0x0066cf07,
0xf40464f0,
0xf046f71b,
0xb2010726,
0x00047e6e,
0xfdd8b200,
0x60f90587,
0xd0fc80f9,
0x2d7ee0fc,
0x20460000,
0x7e6eb216,
0xb2000004,
0x0587fdd8,
0x80f960f9,
0xe0fcd0fc,
0x00002d7e,
0xb20aa247,
0x00047e6e,
0xfdd8b200,
0x60f90587,
0xd0fc80f9,
0x2d7ee0fc,
0x00f80000,
/* 0x053c: memx_func_wait_vblank */
0xf80410b6,
/* 0x0541: memx_func_wr32 */
0x00169800,
0xb6011598,
0x60f90810,
0xd0fc50f9,
0x2d7ee0fc,
0x42b60000,
0xe81bf402,
/* 0x055e: memx_func_wait */
0x2c0800f8,
0x980088cf,
0x1d98001e,
0x021c9801,
0xb6031b98,
0x747e1010,
0x00f80000,
/* 0x0578: memx_func_delay */
0xb6001e98,
0x587e0410,
0x00f80000,
/* 0x0584: memx_func_train */
/* 0x0586: memx_exec */
0xe0f900f8,
0xc1b2d0f9,
/* 0x058e: memx_exec_next */
0x1398b2b2,
0x0410b600,
0x01f034e7,
0x01e033e7,
0xf00132b6,
0x35980c30,
0xa655f9de,
0xe51ef412,
0x98f10b98,
0xcbbbf20c,
0x07c44b02,
0xfc00bbcf,
0x7ee0fcd0,
0xf800029f,
/* 0x05c5: memx_info */
0x01c67000,
/* 0x05cb: memx_info_data */
0x4c0c0bf4,
0x004b03cc,
0x090ef408,
/* 0x05d4: memx_info_train */
0x4b0bcc4c,
/* 0x05da: memx_info_send */
0x9f7e0100,
0x00f80002,
/* 0x05e0: memx_recv */
0xf401d6b0,
0xd6b0a30b,
0xdc0bf400,
/* 0x05ee: memx_init */
0x00f800f8,
/* 0x05f0: perf_recv */
/* 0x05f2: perf_init */
0x00f800f8,
/* 0x05f4: i2c_drive_scl */
0xf40036b0,
0xe0400d0b,
0x0001f607,
0x00f804bd,
/* 0x0604: i2c_drive_scl_lo */
0xf607e440,
0x04bd0001,
/* 0x060e: i2c_drive_sda */
0x36b000f8,
0x0d0bf400,
0xf607e040,
0x04bd0002,
/* 0x061e: i2c_drive_sda_lo */
0xe44000f8,
0x0002f607,
0x00f804bd,
/* 0x0628: i2c_sense_scl */
0x430132f4,
0x33cf07c4,
0x0431fd00,
0xf4060bf4,
/* 0x063a: i2c_sense_scl_done */
0x00f80131,
/* 0x063c: i2c_sense_sda */
0x430132f4,
0x33cf07c4,
0x0432fd00,
0xf4060bf4,
/* 0x064e: i2c_sense_sda_done */
0x00f80131,
/* 0x0650: i2c_raise_scl */
0x984440f9,
0x7e010308,
/* 0x065b: i2c_raise_scl_wait */
0x4e0005f4,
0x587e03e8,
0x287e0000,
0x01f40006,
0x0142b609,
/* 0x066f: i2c_raise_scl_done */
0xfcef1bf4,
/* 0x0673: i2c_start */
0x7e00f840,
0xf4000628,
0x3c7e0d11,
0x11f40006,
0x2e0ef406,
/* 0x0684: i2c_start_rep */
0xf47e0003,
0x01030005,
0x00060e7e,
0xb60076bb,
0x50f90465,
0xbb046594,
0x50bd0256,
0xfc0475fd,
0x06507e50,
0x0464b600,
/* 0x06af: i2c_start_send */
0x031d11f4,
0x060e7e00,
0x13884e00,
0x0000587e,
0xf47e0003,
0x884e0005,
0x00587e13,
/* 0x06c9: i2c_start_out */
/* 0x06cb: i2c_stop */
0x0300f800,
0x05f47e00,
0x7e000300,
0x4e00060e,
0x587e03e8,
0x01030000,
0x0005f47e,
0x7e13884e,
0x03000058,
0x060e7e01,
0x13884e00,
0x0000587e,
/* 0x06fa: i2c_bitw */
0x0e7e00f8,
0xe84e0006,
0x00587e03,
0x0076bb00,
0xf90465b6,
0x04659450,
0xbd0256bb,
0x0475fd50,
0x507e50fc,
0x64b60006,
0x1711f404,
0x7e13884e,
0x03000058,
0x05f47e00,
0x13884e00,
0x0000587e,
/* 0x0738: i2c_bitw_out */
/* 0x073a: i2c_bitr */
0x010300f8,
0x00060e7e,
0x7e03e84e,
0xbb000058,
0x65b60076,
0x9450f904,
0x56bb0465,
0xfd50bd02,
0x50fc0475,
0x0006507e,
0xf40464b6,
0x3c7e1a11,
0x00030006,
0x0005f47e,
0x7e13884e,
0xf0000058,
0x31f4013c,
/* 0x077d: i2c_bitr_done */
/* 0x077f: i2c_get_byte */
0x0500f801,
/* 0x0783: i2c_get_byte_next */
0xb6080400,
0x76bb0154,
0x0465b600,
0x659450f9,
0x0256bb04,
0x75fd50bd,
0x7e50fc04,
0xb600073a,
0x11f40464,
0x0553fd2a,
0xf40142b6,
0x0103d81b,
0xb60076bb,
0x50f90465,
0xbb046594,
0x50bd0256,
0xfc0475fd,
0x06fa7e50,
0x0464b600,
/* 0x07cc: i2c_get_byte_done */
/* 0x07ce: i2c_put_byte */
0x080400f8,
/* 0x07d0: i2c_put_byte_next */
0xff0142b6,
0x76bb3854,
0x0465b600,
0x659450f9,
0x0256bb04,
0x75fd50bd,
0x7e50fc04,
0xb60006fa,
0x11f40464,
0x0046b034,
0xbbd81bf4,
0x65b60076,
0x9450f904,
0x56bb0465,
0xfd50bd02,
0x50fc0475,
0x00073a7e,
0xf40464b6,
0x76bb0f11,
0x0136b000,
0xf4061bf4,
/* 0x0826: i2c_put_byte_done */
0x00f80132,
/* 0x0828: i2c_addr */
0xb60076bb,
0x50f90465,
0xbb046594,
0x50bd0256,
0xfc0475fd,
0x06737e50,
0x0464b600,
0xe72911f4,
0xb6012ec3,
0x53fd0134,
0x0076bb05,
0xf90465b6,
0x04659450,
0xbd0256bb,
0x0475fd50,
0xce7e50fc,
0x64b60007,
/* 0x086d: i2c_addr_done */
/* 0x086f: i2c_acquire_addr */
0xc700f804,
0xe4b6f8ce,
0x14e0b705,
/* 0x087b: i2c_acquire */
0x7e00f8d0,
0x7e00086f,
0xf0000004,
0x2d7e03d9,
0x00f80000,
/* 0x088c: i2c_release */
0x00086f7e,
0x0000047e,
0x7e03daf0,
0xf800002d,
/* 0x089d: i2c_recv */
0x0132f400,
0xb6f8c1c7,
0x16b00214,
0x341ff528,
0xf413b801,
0x3298000c,
0xcc13b800,
0x3198000c,
0x0231f400,
0xe0f9d0f9,
0x00d6d0f9,
0x92100000,
0x76bb0167,
0x0465b600,
0x659450f9,
0x0256bb04,
0x75fd50bd,
0x7e50fc04,
0xb600087b,
0xd0fc0464,
0xf500d6b0,
0x0500b01b,
0x0076bb00,
0xf90465b6,
0x04659450,
0xbd0256bb,
0x0475fd50,
0x287e50fc,
0x64b60008,
0xcc11f504,
0xe0c5c700,
0xb60076bb,
0x50f90465,
0xbb046594,
0x50bd0256,
0xfc0475fd,
0x07ce7e50,
0x0464b600,
0x00a911f5,
0x76bb0105,
0x0465b600,
0x659450f9,
0x0256bb04,
0x75fd50bd,
0x7e50fc04,
0xb6000828,
0x11f50464,
0x76bb0087,
0x0465b600,
0x659450f9,
0x0256bb04,
0x75fd50bd,
0x7e50fc04,
0xb600077f,
0x11f40464,
0xe05bcb67,
0xb60076bb,
0x50f90465,
0xbb046594,
0x50bd0256,
0xfc0475fd,
0x06cb7e50,
0x0464b600,
0x74bd5bb2,
/* 0x099f: i2c_recv_not_rd08 */
0xb0410ef4,
0x1bf401d6,
0x7e00053b,
0xf4000828,
0xc5c73211,
0x07ce7ee0,
0x2811f400,
0x287e0005,
0x11f40008,
0xe0b5c71f,
0x0007ce7e,
0x7e1511f4,
0xbd0006cb,
0x08c5c774,
0xf4091bf4,
0x0ef40232,
/* 0x09dd: i2c_recv_not_wr08 */
/* 0x09dd: i2c_recv_done */
0xf8cec703,
0x00088c7e,
0xd0fce0fc,
0xb20912f4,
0x029f7e7c,
/* 0x09f1: i2c_recv_exit */
/* 0x09f3: i2c_init */
0xf800f800,
/* 0x09f5: test_recv */
0x04584100,
0xb60011cf,
0x58400110,
0x0001f604,
0x00de04bd,
0x7e134fd9,
0xf80001de,
/* 0x0a11: test_init */
0x08004e00,
0x0001de7e,
/* 0x0a1a: idle_recv */
0x00f800f8,
/* 0x0a1c: idle */
0x410031f4,
0x11cf0454,
0x0110b600,
0xf6045440,
0x04bd0001,
/* 0x0a30: idle_loop */
0x32f45801,
/* 0x0a35: idle_proc */
/* 0x0a35: idle_proc_exec */
0xb210f902,
0x02a87e1e,
0xf410fc00,
0x31f40911,
0xf00ef402,
/* 0x0a48: idle_proc_next */
0xa65810b6,
0xe81bf41f,
0xf4e002f4,
0x0ef40028,
0x000000c6,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
};
| {
"pile_set_name": "Github"
} |
# Acknowledgements
This application makes use of the following third party libraries:
Generated by CocoaPods - https://cocoapods.org
| {
"pile_set_name": "Github"
} |
name: "Hind Jalandhar"
designer: "Indian Type Foundry"
license: "OFL"
category: "SANS_SERIF"
date_added: "2016-01-20"
fonts {
name: "Hind Jalandhar"
style: "normal"
weight: 300
filename: "HindJalandhar-Light.ttf"
post_script_name: "HindJalandhar-Light"
full_name: "Hind Jalandhar Light"
copyright: "Copyright (c) 2015 Indian Type Foundry ([email protected])"
}
fonts {
name: "Hind Jalandhar"
style: "normal"
weight: 400
filename: "HindJalandhar-Regular.ttf"
post_script_name: "HindJalandhar-Regular"
full_name: "Hind Jalandhar"
copyright: "Copyright (c) 2015 Indian Type Foundry ([email protected])"
}
fonts {
name: "Hind Jalandhar"
style: "normal"
weight: 500
filename: "HindJalandhar-Medium.ttf"
post_script_name: "HindJalandhar-Medium"
full_name: "Hind Jalandhar Medium"
copyright: "Copyright (c) 2015 Indian Type Foundry ([email protected])"
}
fonts {
name: "Hind Jalandhar"
style: "normal"
weight: 600
filename: "HindJalandhar-SemiBold.ttf"
post_script_name: "HindJalandhar-SemiBold"
full_name: "Hind Jalandhar SemiBold"
copyright: "Copyright (c) 2015 Indian Type Foundry ([email protected])"
}
fonts {
name: "Hind Jalandhar"
style: "normal"
weight: 700
filename: "HindJalandhar-Bold.ttf"
post_script_name: "HindJalandhar-Bold"
full_name: "Hind Jalandhar Bold"
copyright: "Copyright (c) 2015 Indian Type Foundry ([email protected])"
}
subsets: "menu"
subsets: "gurmukhi"
subsets: "latin"
subsets: "latin-ext"
| {
"pile_set_name": "Github"
} |
/*
* SHA1 hash implementation and interface functions
* Copyright (c) 2003-2005, Jouni Malinen <[email protected]>
*
* This software may be distributed under the terms of the BSD license.
* See README for more details.
*/
#include "includes.h"
#include "common.h"
#include "sha1.h"
#include "sha1_i.h"
#include "md5.h"
#include "crypto.h"
typedef struct SHA1Context SHA1_CTX;
void SHA1Transform(u32 state[5], const unsigned char buffer[64]);
#ifdef CONFIG_CRYPTO_INTERNAL
/**
* sha1_vector - SHA-1 hash for data vector
* @num_elem: Number of elements in the data vector
* @addr: Pointers to the data areas
* @len: Lengths of the data blocks
* @mac: Buffer for the hash
 * Returns: 0 on success, -1 on failure
*/
/*
 * sha1_vector - SHA-1 hash over a scatter/gather list of buffers
 * @num_elem: number of entries in @addr / @len
 * @addr: pointers to the data fragments to hash, in order
 * @len: length in bytes of each corresponding fragment
 * @mac: output buffer for the 20-byte digest
 * Returns: 0 on success, -1 on (simulated) failure
 *
 * The fragments are fed into a single running SHA-1 context, so the
 * result equals hashing the concatenation of all fragments.
 */
int sha1_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	SHA1_CTX ctx;
	size_t idx;

	/* Test-framework hook: lets unit tests inject a failure here. */
	if (TEST_FAIL())
		return -1;

	SHA1Init(&ctx);
	idx = 0;
	while (idx < num_elem) {
		SHA1Update(&ctx, addr[idx], len[idx]);
		idx++;
	}
	SHA1Final(mac, &ctx);
	return 0;
}
#endif /* CONFIG_CRYPTO_INTERNAL */
/* ===== start - public domain SHA1 implementation ===== */
/*
SHA-1 in C
By Steve Reid <[email protected]>
100% Public Domain
-----------------
Modified 7/98
By James H. Brown <[email protected]>
Still 100% Public Domain
Corrected a problem which generated improper hash values on 16 bit machines
Routine SHA1Update changed from
void SHA1Update(SHA1_CTX* context, unsigned char* data, unsigned int
len)
to
void SHA1Update(SHA1_CTX* context, unsigned char* data, unsigned
long len)
The 'len' parameter was declared an int which works fine on 32 bit machines.
However, on 16 bit machines an int is too small for the shifts being done
against
it. This caused the hash function to generate incorrect values if len was
greater than 8191 (8K - 1) due to the 'len << 3' on line 3 of SHA1Update().
Since the file IO in main() reads 16K at a time, any file 8K or larger would
be guaranteed to generate the wrong hash (e.g. Test Vector #3, a million
"a"s).
I also changed the declaration of variables i & j in SHA1Update to
unsigned long from unsigned int for the same reason.
These changes should make no difference to any 32 bit implementations since
an
int and a long are the same size in those environments.
--
I also corrected a few compiler warnings generated by Borland C.
1. Added #include <process.h> for exit() prototype
2. Removed unused variable 'j' in SHA1Final
3. Changed exit(0) to return(0) at end of main.
ALL changes I made can be located by searching for comments containing 'JHB'
-----------------
Modified 8/98
By Steve Reid <[email protected]>
Still 100% public domain
1- Removed #include <process.h> and used return() instead of exit()
2- Fixed overwriting of finalcount in SHA1Final() (discovered by Chris Hall)
3- Changed email address from [email protected] to [email protected]
-----------------
Modified 4/01
By Saul Kravitz <[email protected]>
Still 100% PD
Modified to run on Compaq Alpha hardware.
-----------------
Modified 4/01
By Jouni Malinen <[email protected]>
Minor changes to match the coding style used in Dynamics.
Modified September 24, 2004
By Jouni Malinen <[email protected]>
Fixed alignment issue in SHA1Transform when SHA1HANDSOFF is defined.
*/
/*
Test Vectors (from FIPS PUB 180-1)
"abc"
A9993E36 4706816A BA3E2571 7850C26C 9CD0D89D
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
84983E44 1C3BD26E BAAE4AA1 F95129E5 E54670F1
A million repetitions of "a"
34AA973C D4C4DAA4 F61EEB2B DBAD2731 6534016F
*/
#define SHA1HANDSOFF
#define rol(value, bits) (((value) << (bits)) | ((value) >> (32 - (bits))))
/* blk0() and blk() perform the initial expand. */
/* I got the idea of expanding during the round function from SSLeay */
#ifndef WORDS_BIGENDIAN
#define blk0(i) (block->l[i] = (rol(block->l[i], 24) & 0xFF00FF00) | \
(rol(block->l[i], 8) & 0x00FF00FF))
#else
#define blk0(i) block->l[i]
#endif
#define blk(i) (block->l[i & 15] = rol(block->l[(i + 13) & 15] ^ \
block->l[(i + 8) & 15] ^ block->l[(i + 2) & 15] ^ block->l[i & 15], 1))
/* (R0+R1), R2, R3, R4 are the different operations used in SHA1 */
#define R0(v,w,x,y,z,i) \
z += ((w & (x ^ y)) ^ y) + blk0(i) + 0x5A827999 + rol(v, 5); \
w = rol(w, 30);
#define R1(v,w,x,y,z,i) \
z += ((w & (x ^ y)) ^ y) + blk(i) + 0x5A827999 + rol(v, 5); \
w = rol(w, 30);
#define R2(v,w,x,y,z,i) \
z += (w ^ x ^ y) + blk(i) + 0x6ED9EBA1 + rol(v, 5); w = rol(w, 30);
#define R3(v,w,x,y,z,i) \
z += (((w | x) & y) | (w & x)) + blk(i) + 0x8F1BBCDC + rol(v, 5); \
w = rol(w, 30);
#define R4(v,w,x,y,z,i) \
z += (w ^ x ^ y) + blk(i) + 0xCA62C1D6 + rol(v, 5); \
w=rol(w, 30);
#ifdef VERBOSE /* SAK */
/*
 * SHAPrintContext - debug helper (compiled only under VERBOSE)
 * Dumps the message bit-count words and the five 32-bit state words of
 * the given SHA-1 context to stdout, prefixed by @msg, so intermediate
 * hash state can be traced between update calls.
 */
void SHAPrintContext(SHA1_CTX *context, char *msg)
{
	printf("%s (%d,%d) %x %x %x %x %x\n",
	       msg,
	       context->count[0], context->count[1],
	       context->state[0],
	       context->state[1],
	       context->state[2],
	       context->state[3],
	       context->state[4]);
}
#endif
/* Hash a single 512-bit block. This is the core of the algorithm. */
/*
 * SHA1Transform - compress one 512-bit (64-byte) block into the state
 * @state: five 32-bit chaining variables, updated in place
 * @buffer: the 64-byte input block
 *
 * Core of the algorithm: runs the 80 SHA-1 round operations (fully
 * unrolled via the R0..R4 macros) and adds the working variables back
 * into the chaining state.
 */
void SHA1Transform(u32 state[5], const unsigned char buffer[64])
{
	u32 a, b, c, d, e;
	/* View of the block either as raw bytes or as 16 32-bit words;
	 * the blk0()/blk() macros operate on the word view. */
	typedef union {
		unsigned char c[64];
		u32 l[16];
	} CHAR64LONG16;
	CHAR64LONG16* block;
#ifdef SHA1HANDSOFF
	/* Copy the caller's buffer into a local, properly aligned
	 * workspace so the input is never modified and unaligned input
	 * pointers are safe (see the 2004 alignment fix noted in the
	 * file header). */
	CHAR64LONG16 workspace;
	block = &workspace;
	os_memcpy(block, buffer, 64);
#else
	/* In-place variant: blk0()/blk() will clobber the caller's
	 * buffer, and the cast assumes adequate alignment. */
	block = (CHAR64LONG16 *) buffer;
#endif
	/* Copy context->state[] to working vars */
	a = state[0];
	b = state[1];
	c = state[2];
	d = state[3];
	e = state[4];
	/* 4 rounds of 20 operations each. Loop unrolled. */
	R0(a,b,c,d,e, 0); R0(e,a,b,c,d, 1); R0(d,e,a,b,c, 2); R0(c,d,e,a,b, 3);
	R0(b,c,d,e,a, 4); R0(a,b,c,d,e, 5); R0(e,a,b,c,d, 6); R0(d,e,a,b,c, 7);
	R0(c,d,e,a,b, 8); R0(b,c,d,e,a, 9); R0(a,b,c,d,e,10); R0(e,a,b,c,d,11);
	R0(d,e,a,b,c,12); R0(c,d,e,a,b,13); R0(b,c,d,e,a,14); R0(a,b,c,d,e,15);
	R1(e,a,b,c,d,16); R1(d,e,a,b,c,17); R1(c,d,e,a,b,18); R1(b,c,d,e,a,19);
	R2(a,b,c,d,e,20); R2(e,a,b,c,d,21); R2(d,e,a,b,c,22); R2(c,d,e,a,b,23);
	R2(b,c,d,e,a,24); R2(a,b,c,d,e,25); R2(e,a,b,c,d,26); R2(d,e,a,b,c,27);
	R2(c,d,e,a,b,28); R2(b,c,d,e,a,29); R2(a,b,c,d,e,30); R2(e,a,b,c,d,31);
	R2(d,e,a,b,c,32); R2(c,d,e,a,b,33); R2(b,c,d,e,a,34); R2(a,b,c,d,e,35);
	R2(e,a,b,c,d,36); R2(d,e,a,b,c,37); R2(c,d,e,a,b,38); R2(b,c,d,e,a,39);
	R3(a,b,c,d,e,40); R3(e,a,b,c,d,41); R3(d,e,a,b,c,42); R3(c,d,e,a,b,43);
	R3(b,c,d,e,a,44); R3(a,b,c,d,e,45); R3(e,a,b,c,d,46); R3(d,e,a,b,c,47);
	R3(c,d,e,a,b,48); R3(b,c,d,e,a,49); R3(a,b,c,d,e,50); R3(e,a,b,c,d,51);
	R3(d,e,a,b,c,52); R3(c,d,e,a,b,53); R3(b,c,d,e,a,54); R3(a,b,c,d,e,55);
	R3(e,a,b,c,d,56); R3(d,e,a,b,c,57); R3(c,d,e,a,b,58); R3(b,c,d,e,a,59);
	R4(a,b,c,d,e,60); R4(e,a,b,c,d,61); R4(d,e,a,b,c,62); R4(c,d,e,a,b,63);
	R4(b,c,d,e,a,64); R4(a,b,c,d,e,65); R4(e,a,b,c,d,66); R4(d,e,a,b,c,67);
	R4(c,d,e,a,b,68); R4(b,c,d,e,a,69); R4(a,b,c,d,e,70); R4(e,a,b,c,d,71);
	R4(d,e,a,b,c,72); R4(c,d,e,a,b,73); R4(b,c,d,e,a,74); R4(a,b,c,d,e,75);
	R4(e,a,b,c,d,76); R4(d,e,a,b,c,77); R4(c,d,e,a,b,78); R4(b,c,d,e,a,79);
	/* Add the working vars back into context.state[] */
	state[0] += a;
	state[1] += b;
	state[2] += c;
	state[3] += d;
	state[4] += e;
	/* Wipe variables so no hash state lingers on the stack */
	a = b = c = d = e = 0;
#ifdef SHA1HANDSOFF
	/* Also scrub the local copy of the message block */
	os_memset(block, 0, 64);
#endif
}
/* SHA1Init - Initialize new context */
/*
 * SHA1Init - initialize a SHA-1 context for a new message
 * @context: context to (re)initialize
 *
 * Loads the five standard SHA-1 initialization constants (FIPS 180-1)
 * into the chaining state and zeroes the 64-bit message bit counter.
 */
void SHA1Init(SHA1_CTX* context)
{
	/* SHA1 initialization constants (FIPS 180-1, section 7) */
	static const u32 iv[5] = {
		0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0
	};
	int i;

	for (i = 0; i < 5; i++)
		context->state[i] = iv[i];
	context->count[0] = 0;
	context->count[1] = 0;
}
/* Run your data through this. */
/*
 * SHA1Update - absorb @len bytes of message data into the context
 * @context: running SHA-1 context (from SHA1Init())
 * @_data: pointer to the input bytes
 * @len: number of input bytes
 *
 * Buffers partial blocks in context->buffer and runs SHA1Transform()
 * for every complete 64-byte block. May be called any number of times
 * before SHA1Final().
 */
void SHA1Update(SHA1_CTX* context, const void *_data, u32 len)
{
	u32 i, j;
	const unsigned char *data = _data;
#ifdef VERBOSE
	SHAPrintContext(context, "before");
#endif
	/* j = current fill level of the 64-byte buffer, in bytes
	 * (count[0] holds the message length in *bits*, hence >> 3). */
	j = (context->count[0] >> 3) & 63;
	/* Advance the 64-bit bit counter by len*8; if the low word
	 * wraps, carry into the high word. len >> 29 adds the bits of
	 * len that overflow the low word's 32-bit range. */
	if ((context->count[0] += len << 3) < (len << 3))
		context->count[1]++;
	context->count[1] += (len >> 29);
	if ((j + len) > 63) {
		/* Fill the buffer to a full block and compress it,
		 * remembering in i how many input bytes were consumed. */
		os_memcpy(&context->buffer[j], data, (i = 64-j));
		SHA1Transform(context->state, context->buffer);
		/* Compress all remaining complete blocks directly from
		 * the input, without copying through the buffer. */
		for ( ; i + 63 < len; i += 64) {
			SHA1Transform(context->state, &data[i]);
		}
		j = 0;
	}
	else i = 0;
	/* Stash the trailing partial block (possibly 0 bytes). */
	os_memcpy(&context->buffer[j], &data[i], len - i);
#ifdef VERBOSE
	SHAPrintContext(context, "after ");
#endif
}
/* Add padding and return the message digest. */
/*
 * SHA1Final - apply SHA-1 padding and produce the 20-byte digest
 * @digest: output buffer for the big-endian message digest
 * @context: running context; wiped on return and must be
 *	re-initialized with SHA1Init() before reuse
 */
void SHA1Final(unsigned char digest[20], SHA1_CTX* context)
{
	u32 i;
	unsigned char finalcount[8];

	/* Serialize the 64-bit message bit count big-endian into
	 * finalcount *before* padding modifies the counters. */
	for (i = 0; i < 8; i++) {
		finalcount[i] = (unsigned char)
			((context->count[(i >= 4 ? 0 : 1)] >>
			  ((3-(i & 3)) * 8) ) & 255); /* Endian independent */
	}
	/* Padding: a single 0x80 byte ... */
	SHA1Update(context, (unsigned char *) "\200", 1);
	/* ... then zero bytes until the bit count is 448 (mod 512),
	 * leaving exactly room for the 64-bit length field.
	 * (504 == 0x1F8 masks the bit count modulo 512.) */
	while ((context->count[0] & 504) != 448) {
		SHA1Update(context, (unsigned char *) "\0", 1);
	}
	/* Appending the length fills the block and triggers the final
	 * SHA1Transform(). */
	SHA1Update(context, finalcount, 8);
	/* Emit the five state words big-endian as the digest. */
	for (i = 0; i < 20; i++) {
		digest[i] = (unsigned char)
			((context->state[i >> 2] >> ((3 - (i & 3)) * 8)) &
			 255);
	}
	/* Wipe all hash state so no secrets linger in memory */
	os_memset(context->buffer, 0, 64);
	os_memset(context->state, 0, 20);
	os_memset(context->count, 0, 8);
	os_memset(finalcount, 0, 8);
}
/* ===== end - public domain SHA1 implementation ===== */
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2017 Jos van den Oever <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License or (at your option) version 3 or any later version
* accepted by the membership of KDE e.V. (or its successor approved
* by the membership of KDE e.V.), which shall act as a proxy
* defined in Section 14 of version 3 of the license.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "SortedModel.h"
bool SortedModel::filterAcceptsRow(int source_row, const QModelIndex& source_parent) const
{
if (QSortFilterProxyModel::filterAcceptsRow(source_row, source_parent)) {
return true;
}
QModelIndex source_index = sourceModel()->index(source_row, 0, source_parent);
for (int i = 0 ; i < sourceModel()->rowCount(source_index); ++i) {
if (filterAcceptsRow(i, source_index)) return true;
}
return false;
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ JBoss, Home of Professional Open Source.
~ Copyright 2011, Red Hat, Inc., and individual contributors
~ as indicated by the @author tags. See the copyright.txt file in the
~ distribution for a full listing of individual contributors.
~
~ This is free software; you can redistribute it and/or modify it
~ under the terms of the GNU Lesser General Public License as
~ published by the Free Software Foundation; either version 2.1 of
~ the License, or (at your option) any later version.
~
~ This software is distributed in the hope that it will be useful,
~ but WITHOUT ANY WARRANTY; without even the implied warranty of
~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
~ Lesser General Public License for more details.
~
~ You should have received a copy of the GNU Lesser General Public
~ License along with this software; if not, write to the Free
~ Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
~ 02110-1301 USA, or see the FSF site: http://www.fsf.org.
-->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
targetNamespace="urn:jboss:domain:transactions:1.1"
xmlns="urn:jboss:domain:transactions:1.1"
elementFormDefault="qualified"
attributeFormDefault="unqualified"
version="1.1">
<!-- The transaction subsystem root element -->
<xs:element name="subsystem" type="subsystem"/>
<xs:complexType name="subsystem">
<xs:annotation>
<xs:documentation>
<![CDATA[
The configuration of the transactions subsystem.
]]>
</xs:documentation>
</xs:annotation>
<xs:all>
<xs:element name="core-environment" type="core-environment"/>
<xs:element name="recovery-environment" type="recovery-environment"/>
<xs:element name="coordinator-environment" type="coordinator-environment" minOccurs="0"/>
<xs:element name="object-store" type="object-store" minOccurs="0"/>
<xs:element name="jts" type="jts-Type" minOccurs="0" maxOccurs="1"/>
</xs:all>
</xs:complexType>
<xs:complexType name="recovery-environment">
<xs:annotation>
<xs:documentation>
<![CDATA[
The recovery environment configuration.
The "socket-binding" attribute is used to reference the correct socket binding to use for the
recovery environment.
The "status-socket-binding" attribute is used to reference the correct socket binding to use for the
transaction status manager.
The "recovery-listener" attribute sets if recovery system should listen on a network socket or not.
]]>
</xs:documentation>
</xs:annotation>
<xs:attribute name="socket-binding" type="xs:string" />
<xs:attribute name="status-socket-binding" type="xs:string" />
<xs:attribute name="recovery-listener" type="xs:boolean" default="false"/>
</xs:complexType>
<xs:complexType name="core-environment">
<xs:annotation>
<xs:documentation>
<![CDATA[
The core environment configuration.
                        The process-id element specifies the process id implementation.
The "node-identifier" attribute is used to set the node identifier on the core environment.
The "path" attribute denotes a relative or absolute filesystem path denoting where the transaction
manager core should store data.
The "relative-to" attribute references a global path configuration in the domain model, defaulting
to the JBoss Application Server data directory (jboss.server.data.dir). If the value of the "path" attribute
                        does not specify an absolute pathname, it will be treated as relative to this path.
]]>
</xs:documentation>
</xs:annotation>
<xs:all>
<xs:element name="process-id" type="process-id" />
</xs:all>
<xs:attribute name="node-identifier" type="xs:string" default="1"/>
<xs:attribute name="path" type="xs:string" default="var"/>
<xs:attribute name="relative-to" type="xs:string" default="jboss.server.data.dir"/>
</xs:complexType>
<xs:complexType name="process-id">
<xs:annotation>
<xs:documentation>
<![CDATA[
                       The process identifier implementation
The "node-identifier" attribute is used to set the node identifier on the core environment.
The "socket-process-id-max-ports" attribute is used to set the max ports on the core environment.
]]>
</xs:documentation>
</xs:annotation>
<xs:choice>
<xs:element name="uuid" type="uuid" />
<xs:element name="socket" type="socket-id" />
</xs:choice>
</xs:complexType>
<xs:complexType name="uuid">
<xs:annotation>
<xs:documentation>
<![CDATA[
The UUID based process identifer implementation
]]>
</xs:documentation>
</xs:annotation>
</xs:complexType>
<xs:complexType name="socket-id">
<xs:annotation>
<xs:documentation>
<![CDATA[
                       The socket-based process identifier implementation
The "socket-binding" attribute is used to specify the port to bind to.
The "socket-process-id-max-ports" attribute is used to set the max ports on the core environment.
]]>
</xs:documentation>
</xs:annotation>
<xs:attribute name="socket-binding" type="xs:string" use="required"/>
<xs:attribute name="socket-process-id-max-ports" type="xs:int" default="10" />
</xs:complexType>
<xs:attribute name="socket-process-id-max-ports" type="xs:int" default="10" />
<xs:complexType name="coordinator-environment">
<xs:annotation>
<xs:documentation>
<![CDATA[
The coordinator environment configuration.
                        enable-statistics - true if recording of transaction statistics is enabled, false otherwise.
                        enable-tsm-status - true if the transaction status manager (TSM) service, needed for out of process recovery, should be provided, false otherwise.
default-timeout - the default transaction lifetime, in seconds.
]]>
</xs:documentation>
</xs:annotation>
<xs:attribute name="enable-statistics" type="xs:boolean" default="false"/>
<xs:attribute name="enable-tsm-status" type="xs:boolean" default="false"/>
<xs:attribute name="default-timeout" type="xs:int" default="300" />
</xs:complexType>
<xs:complexType name="object-store">
<xs:annotation>
<xs:documentation>
<![CDATA[
The object store configuration.
The "path" attribute denotes a relative or absolute filesystem path denoting where the transaction
manager object store should store data.
The "relative-to" attribute references a global path configuration in the domain model, defaulting
to the JBoss Application Server data directory (jboss.server.data.dir). If the value of the "path" attribute
                        does not specify an absolute pathname, it will be treated as relative to this path.
]]>
</xs:documentation>
</xs:annotation>
<xs:attribute name="path" type="xs:string" default="tx-object-store"/>
<xs:attribute name="relative-to" type="xs:string" default="jboss.server.data.dir" />
</xs:complexType>
<xs:complexType name="jts-Type">
<xs:annotation>
<xs:documentation>
<![CDATA[
The flag to enable JTS.
]]>
</xs:documentation>
</xs:annotation>
</xs:complexType>
</xs:schema>
| {
"pile_set_name": "Github"
} |
/* Copyright (C) 2002 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <[email protected]>, 2002.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, write to the Free
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
#include <string.h>
#include <pthreadP.h>
/* Initialize *ATTR with the default mutex attributes (normal, non
   process-shared).  Always returns 0.  K&R-style definition kept to
   match the surrounding glibc sources.  */
int
__pthread_mutexattr_init (attr)
     pthread_mutexattr_t *attr;
{
  /* Zero the whole public object only when the internal struct is
     smaller than the public type — presumably so trailing padding bytes
     of pthread_mutexattr_t do not hold garbage; TODO confirm against
     the struct layout in pthreadP.h.  */
  if (sizeof (struct pthread_mutexattr) != sizeof (pthread_mutexattr_t))
    memset (attr, '\0', sizeof (*attr));

  /* We use bit 31 to signal whether the mutex is going to be
     process-shared or not.  By default it is zero, i.e., the mutex is
     not process-shared.  */
  ((struct pthread_mutexattr *) attr)->mutexkind = PTHREAD_MUTEX_NORMAL;

  return 0;
}
strong_alias (__pthread_mutexattr_init, pthread_mutexattr_init)
| {
"pile_set_name": "Github"
} |
// +build !darwin no_native_menus
package glfw
import "fyne.io/fyne"
// hasNativeMenu reports whether this build provides a native menu bar.
// This file is compiled for non-darwin targets (or with the
// no_native_menus build tag), so it always returns false.
func hasNativeMenu() bool {
	return false
}
// setupNativeMenu would install menu into the platform's native menu
// bar. It is a no-op in this build, which has no native menu support
// (see hasNativeMenu).
func setupNativeMenu(menu *fyne.MainMenu) {
	// no-op: no native menu on this platform/build
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env bash
#
# mfnew
#
# Create a new branch from the default target with the given name
#

usage() {
  echo "Usage: $(basename "$0") [1|2] [name]" 1>&2
}

# Reject too many arguments or an explicit help request.
# BUGFIX: use -lt for a numeric test — inside [[ ]] the bare `<` operator
# compares strings lexicographically, so e.g. `[[ 10 < 3 ]]` is true and
# the old check let 10+ arguments through.
[[ $# -lt 3 && $1 != "-h" && $1 != "--help" ]] || { usage; exit 1; }

# mfinfo prints space-separated repo info; field 3 is the default target
# branch. Abort if it fails.
MFINFO=$(mfinfo "$@") || exit 1
IFS=' ' read -r -a INFO <<< "$MFINFO"
TARG=${INFO[3]}

# Default branch name: pr_for_<target>-<timestamp>.
# NOTE(review): %G is the ISO-8601 week-based year, which differs from %Y
# around New Year — confirm %Y was not intended before changing.
BRANCH=pr_for_$TARG-$(date +"%G-%m-%d_%H.%M.%S")

# BRANCH can be given as the last argument; a lone "1"/"2" is an
# mfinfo selector, not a branch name.
case "$#" in
  1 ) case "$1" in
        1|2) ;;
        *) BRANCH=$1 ;;
      esac
      ;;
  2 ) case "$1" in
        1|2) BRANCH=$2 ;;
        *) usage ; exit 1 ;;
      esac
      ;;
esac

# Create the new branch off the freshly fetched upstream target.
git fetch upstream
git checkout --no-track upstream/$TARG -b $BRANCH
| {
"pile_set_name": "Github"
} |
// Barrel file: re-export the ErrorInfo module's default export so callers
// can import from the directory instead of the implementation file.
export { default } from './ErrorInfo';
| {
"pile_set_name": "Github"
} |
'use strict';
var GetIntrinsic = require('../GetIntrinsic');
var $gOPD = require('../helpers/getOwnPropertyDescriptor');
var $SyntaxError = GetIntrinsic('%SyntaxError%');
var $TypeError = GetIntrinsic('%TypeError%');
var isPropertyDescriptor = require('../helpers/isPropertyDescriptor');
var IsAccessorDescriptor = require('./IsAccessorDescriptor');
var IsDataDescriptor = require('./IsDataDescriptor');
var IsExtensible = require('./IsExtensible');
var IsPropertyKey = require('./IsPropertyKey');
var ToPropertyDescriptor = require('./ToPropertyDescriptor');
var SameValue = require('./SameValue');
var Type = require('./Type');
var ValidateAndApplyPropertyDescriptor = require('./ValidateAndApplyPropertyDescriptor');
// https://www.ecma-international.org/ecma-262/6.0/#sec-ordinarydefineownproperty
// OrdinaryDefineOwnProperty(O, P, Desc): the ordinary-object [[DefineOwnProperty]]
// algorithm. Validates the arguments, then reads the current descriptor of O[P]
// (if any) and delegates to ValidateAndApplyPropertyDescriptor. Returns a
// boolean indicating whether the property was successfully defined.
module.exports = function OrdinaryDefineOwnProperty(O, P, Desc) {
	if (Type(O) !== 'Object') {
		throw new $TypeError('Assertion failed: O must be an Object');
	}
	if (!IsPropertyKey(P)) {
		throw new $TypeError('Assertion failed: P must be a Property Key');
	}
	if (!isPropertyDescriptor({
		Type: Type,
		IsDataDescriptor: IsDataDescriptor,
		IsAccessorDescriptor: IsAccessorDescriptor
	}, Desc)) {
		throw new $TypeError('Assertion failed: Desc must be a Property Descriptor');
	}
	// $gOPD is Object.getOwnPropertyDescriptor; when absent (ES3/IE 8) we can
	// only emulate the simple data-property cases via plain assignment.
	if (!$gOPD) {
		// ES3/IE 8 fallback
		if (IsAccessorDescriptor(Desc)) {
			throw new $SyntaxError('This environment does not support accessor property descriptors.');
		}
		// A brand-new fully writable/enumerable/configurable data property can
		// be created with `O[P] = value`.
		var creatingNormalDataProperty = !(P in O)
			&& Desc['[[Writable]]']
			&& Desc['[[Enumerable]]']
			&& Desc['[[Configurable]]']
			&& '[[Value]]' in Desc;
		// An existing property can be updated by assignment as long as Desc
		// does not try to turn any of the three attributes off.
		var settingExistingDataProperty = (P in O)
			&& (!('[[Configurable]]' in Desc) || Desc['[[Configurable]]'])
			&& (!('[[Enumerable]]' in Desc) || Desc['[[Enumerable]]'])
			&& (!('[[Writable]]' in Desc) || Desc['[[Writable]]'])
			&& '[[Value]]' in Desc;
		if (creatingNormalDataProperty || settingExistingDataProperty) {
			O[P] = Desc['[[Value]]']; // eslint-disable-line no-param-reassign
			// SameValue detects silent failures (e.g. a non-writable target).
			return SameValue(O[P], Desc['[[Value]]']);
		}
		throw new $SyntaxError('This environment does not support defining non-writable, non-enumerable, or non-configurable properties');
	}
	var desc = $gOPD(O, P);
	// `current` is undefined when the property does not yet exist.
	var current = desc && ToPropertyDescriptor(desc);
	var extensible = IsExtensible(O);
	return ValidateAndApplyPropertyDescriptor(O, P, extensible, Desc, current);
};
| {
"pile_set_name": "Github"
} |
package br.com.caelum.stella.validation.ie;
import br.com.caelum.stella.MessageProducer;
import br.com.caelum.stella.validation.Validator;
/**
 * Tests for the Goiás state-registration (Inscrição Estadual) validator.
 * The concrete fixtures are handed to the shared {@link IEValidatorTest}
 * harness, which runs the common validation scenarios against them.
 */
public class IEGoiasValidatorTest extends IEValidatorTest {

	public IEGoiasValidatorTest() {
		// Static final constants below are initialized during class loading,
		// before any instance exists, so referencing them here is safe.
		super(wrongCheckDigitUnformattedString, validUnformattedString, validFormattedString, validValues);
	}

	// Same digits as the valid value but with an incorrect check digit.
	private static final String wrongCheckDigitUnformattedString = "109876542";

	private static final String validUnformattedString = "109876547";

	// The formatted rendering of validUnformattedString.
	private static final String validFormattedString = "10.987.654-7";

	private static final String[] validValues = { validFormattedString, "10.103.119-1", "15.368.273-6" };

	@Override
	protected Validator<String> getValidator(MessageProducer messageProducer, boolean isFormatted) {
		return new IEGoiasValidator(messageProducer, isFormatted);
	}
}
| {
"pile_set_name": "Github"
} |
# ----------------------------
# Build configuration. Every value uses ?= so it can be overridden from
# the command line or the environment:
#   NAME        - the program name
#   ICON        - the png icon file name
#   DESCRIPTION - description displayed within a compatible shell
#   COMPRESSED  - set to "YES" to create a compressed program
# ----------------------------
NAME ?= DEMO
COMPRESSED ?= NO
ICON ?= icon.png
DESCRIPTION ?= "CE C SDK Demo"
# ----------------------------
# The CE toolchain's shared makefile does the actual build work.
include $(CEDEV)/include/.makefile
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2010 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once

#include "BaseTextInputType.h"

namespace WebCore {

// Input-type implementation for <input type="email">. Declarations only;
// definitions live in the corresponding .cpp file.
class EmailInputType final : public BaseTextInputType {
public:
    explicit EmailInputType(HTMLInputElement& element) : BaseTextInputType(element) { }

private:
    // Identifier returned for this control type (presumably "email" -- confirm
    // against the implementation file).
    const AtomString& formControlType() const override;
    // Validity hooks: whether a candidate/current value fails e-mail validation,
    // and the localized message reported for that failure.
    bool typeMismatchFor(const String&) const override;
    bool typeMismatch() const override;
    String typeMismatchText() const override;
    bool isEmailField() const override;
    bool supportsSelectionAPI() const override;
    // Normalizes raw input before it is stored as the element's value.
    String sanitizeValue(const String&) const override;
};

} // namespace WebCore
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8" ?>
<links>
<set target="*">
<link name="Developer Guide">https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide</link>
<link name="Service API Reference">https://docs.aws.amazon.com/autoscaling/ec2/APIReference</link>
</set>
</links>
| {
"pile_set_name": "Github"
} |
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// No testdata on Android.
// +build !android
package loader_test
import (
"fmt"
"go/build"
"go/constant"
"go/types"
"path/filepath"
"reflect"
"sort"
"strings"
"sync"
"testing"
"golang.org/x/tools/go/buildutil"
"golang.org/x/tools/go/loader"
)
// TestFromArgs checks that conf.FromArgs populates conf correctly.
// It does no I/O: each case feeds an argument list to FromArgs and
// compares the resulting config fields (and error) against want.
func TestFromArgs(t *testing.T) {
	type result struct {
		Err        string
		Rest       []string
		ImportPkgs map[string]bool
		CreatePkgs []loader.PkgSpec
	}
	for _, test := range []struct {
		args  []string
		tests bool
		want  result
	}{
		// Mix of existing and non-existent packages.
		{
			args: []string{"nosuchpkg", "errors"},
			want: result{
				ImportPkgs: map[string]bool{"errors": false, "nosuchpkg": false},
			},
		},
		// Same, with -test flag.
		{
			args:  []string{"nosuchpkg", "errors"},
			tests: true,
			want: result{
				ImportPkgs: map[string]bool{"errors": true, "nosuchpkg": true},
			},
		},
		// Surplus arguments.
		{
			args: []string{"fmt", "errors", "--", "surplus"},
			want: result{
				Rest:       []string{"surplus"},
				ImportPkgs: map[string]bool{"errors": false, "fmt": false},
			},
		},
		// Ad hoc package specified as *.go files.
		{
			args: []string{"foo.go", "bar.go"},
			want: result{CreatePkgs: []loader.PkgSpec{{
				Filenames: []string{"foo.go", "bar.go"},
			}}},
		},
		// Mixture of *.go and import paths.
		{
			args: []string{"foo.go", "fmt"},
			want: result{
				Err: "named files must be .go files: fmt",
			},
		},
	} {
		var conf loader.Config
		rest, err := conf.FromArgs(test.args, test.tests)
		got := result{
			Rest:       rest,
			ImportPkgs: conf.ImportPkgs,
			CreatePkgs: conf.CreatePkgs,
		}
		if err != nil {
			got.Err = err.Error()
		}
		if !reflect.DeepEqual(got, test.want) {
			t.Errorf("FromArgs(%q) = %+v, want %+v", test.args, got, test.want)
		}
	}
}
// TestLoad_NoInitialPackages checks that Load on an empty Config fails
// with the expected error and returns no Program.
func TestLoad_NoInitialPackages(t *testing.T) {
	var conf loader.Config

	const wantErr = "no initial packages were loaded"

	prog, err := conf.Load()
	if err == nil {
		t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
	} else if err.Error() != wantErr {
		t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
	}
	if prog != nil {
		t.Errorf("Load unexpectedly returned a Program")
	}
}
// TestLoad_MissingInitialPackage checks that a nonexistent initial import
// makes Load fail (AllowErrors is off by default) with no Program.
func TestLoad_MissingInitialPackage(t *testing.T) {
	var conf loader.Config
	conf.Import("nosuchpkg")
	conf.Import("errors")

	const wantErr = "couldn't load packages due to errors: nosuchpkg"

	prog, err := conf.Load()
	if err == nil {
		t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
	} else if err.Error() != wantErr {
		t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
	}
	if prog != nil {
		t.Errorf("Load unexpectedly returned a Program")
	}
}
// TestLoad_MissingInitialPackage_AllowErrors checks that with AllowErrors
// set, Load tolerates a nonexistent initial import and still loads the
// valid packages ("errors" and its external test package).
func TestLoad_MissingInitialPackage_AllowErrors(t *testing.T) {
	var conf loader.Config
	conf.AllowErrors = true
	conf.Import("nosuchpkg")
	conf.ImportWithTests("errors")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed unexpectedly: %v", err)
	}
	if prog == nil {
		t.Fatalf("Load returned a nil Program")
	}
	if got, want := created(prog), "errors_test"; got != want {
		t.Errorf("Created = %s, want %s", got, want)
	}
	if got, want := imported(prog), "errors"; got != want {
		t.Errorf("Imported = %s, want %s", got, want)
	}
}
// TestCreateUnnamedPackage checks that a created package with an empty
// path is rendered as "(unnamed)" among the initial packages.
func TestCreateUnnamedPackage(t *testing.T) {
	var conf loader.Config
	conf.CreateFromFilenames("")
	prog, err := conf.Load()
	if err != nil {
		t.Fatalf("Load failed: %v", err)
	}
	if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
		t.Errorf("InitialPackages = %s, want %s", got, want)
	}
}
// TestLoad_MissingFileInCreatedPackage checks that a created package
// referencing a nonexistent file causes Load to fail with no Program.
func TestLoad_MissingFileInCreatedPackage(t *testing.T) {
	var conf loader.Config
	conf.CreateFromFilenames("", "missing.go")

	const wantErr = "couldn't load packages due to errors: (unnamed)"

	prog, err := conf.Load()
	if prog != nil {
		t.Errorf("Load unexpectedly returned a Program")
	}
	if err == nil {
		t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
	}
	if err.Error() != wantErr {
		t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
	}
}
// TestLoad_MissingFileInCreatedPackage_AllowErrors checks that with
// AllowErrors set, the same missing file still yields a Program whose
// initial package is the "(unnamed)" created package.
func TestLoad_MissingFileInCreatedPackage_AllowErrors(t *testing.T) {
	conf := loader.Config{AllowErrors: true}
	conf.CreateFromFilenames("", "missing.go")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed: %v", err)
	}
	if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
		t.Fatalf("InitialPackages = %s, want %s", got, want)
	}
}
// TestLoad_ParseError checks that a syntax error in a created package's
// source makes Load fail with no Program.
func TestLoad_ParseError(t *testing.T) {
	var conf loader.Config
	conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")

	const wantErr = "couldn't load packages due to errors: badpkg"

	prog, err := conf.Load()
	if prog != nil {
		t.Errorf("Load unexpectedly returned a Program")
	}
	if err == nil {
		t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
	}
	if err.Error() != wantErr {
		t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
	}
}
// TestLoad_ParseError_AllowErrors checks that with AllowErrors set, a
// syntactically broken created package is still returned, with its parse
// error recorded in PackageInfo.Errors.
func TestLoad_ParseError_AllowErrors(t *testing.T) {
	var conf loader.Config
	conf.AllowErrors = true
	conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed unexpectedly: %v", err)
	}
	if prog == nil {
		t.Fatalf("Load returned a nil Program")
	}
	if got, want := created(prog), "badpkg"; got != want {
		t.Errorf("Created = %s, want %s", got, want)
	}

	badpkg := prog.Created[0]
	if len(badpkg.Files) != 1 {
		t.Errorf("badpkg has %d files, want 1", len(badpkg.Files))
	}
	// filepath.Join keeps the expected message portable across OS path separators.
	wantErr := filepath.Join("testdata", "badpkgdecl.go") + ":1:34: expected 'package', found 'EOF'"
	if !hasError(badpkg.Errors, wantErr) {
		t.Errorf("badpkg.Errors = %v, want %s", badpkg.Errors, wantErr)
	}
}
// TestLoad_FromSource_Success checks the happy path of loading a created
// package from multiple source files.
func TestLoad_FromSource_Success(t *testing.T) {
	var conf loader.Config
	conf.CreateFromFilenames("P", "testdata/a.go", "testdata/b.go")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed unexpectedly: %v", err)
	}
	if prog == nil {
		t.Fatalf("Load returned a nil Program")
	}
	if got, want := created(prog), "P"; got != want {
		t.Errorf("Created = %s, want %s", got, want)
	}
}
// TestLoad_FromImports_Success checks the happy path of importing real
// standard-library packages together with their test packages, and spot
// checks the transitive dependency set.
func TestLoad_FromImports_Success(t *testing.T) {
	var conf loader.Config
	conf.ImportWithTests("fmt")
	conf.ImportWithTests("errors")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed unexpectedly: %v", err)
	}
	if prog == nil {
		t.Fatalf("Load returned a nil Program")
	}
	if got, want := created(prog), "errors_test fmt_test"; got != want {
		t.Errorf("Created = %q, want %s", got, want)
	}
	if got, want := imported(prog), "errors fmt"; got != want {
		t.Errorf("Imported = %s, want %s", got, want)
	}
	// Check set of transitive packages.
	// There are >30 and the set may grow over time, so only check a few.
	want := map[string]bool{
		"strings": true,
		"time":    true,
		"runtime": true,
		"testing": true,
		"unicode": true,
	}
	for _, path := range all(prog) {
		delete(want, path)
	}
	if len(want) > 0 {
		t.Errorf("AllPackages is missing these keys: %q", keys(want))
	}
}
// TestLoad_MissingIndirectImport checks that a missing package reached
// only transitively (a -> b -> c, with c absent) still fails Load.
func TestLoad_MissingIndirectImport(t *testing.T) {
	pkgs := map[string]string{
		"a": `package a; import _ "b"`,
		"b": `package b; import _ "c"`,
	}
	conf := loader.Config{Build: fakeContext(pkgs)}
	conf.Import("a")

	const wantErr = "couldn't load packages due to errors: b"

	prog, err := conf.Load()
	if err == nil {
		t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
	} else if err.Error() != wantErr {
		t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
	}
	if prog != nil {
		t.Errorf("Load unexpectedly returned a Program")
	}
}
// TestLoad_BadDependency_AllowErrors checks which packages survive into
// AllPackages when a dependency is missing or broken and AllowErrors is
// set: packages that fail to scan/parse entirely are dropped, while ones
// with mere parse errors are kept.
func TestLoad_BadDependency_AllowErrors(t *testing.T) {
	for _, test := range []struct {
		descr    string
		pkgs     map[string]string
		wantPkgs string
	}{
		{
			descr: "missing dependency",
			pkgs: map[string]string{
				"a": `package a; import _ "b"`,
				"b": `package b; import _ "c"`,
			},
			wantPkgs: "a b",
		},
		{
			descr: "bad package decl in dependency",
			pkgs: map[string]string{
				"a": `package a; import _ "b"`,
				"b": `package b; import _ "c"`,
				"c": `package`,
			},
			wantPkgs: "a b",
		},
		{
			descr: "parse error in dependency",
			pkgs: map[string]string{
				"a": `package a; import _ "b"`,
				"b": `package b; import _ "c"`,
				"c": `package c; var x = `,
			},
			wantPkgs: "a b c",
		},
	} {
		conf := loader.Config{
			AllowErrors: true,
			Build:       fakeContext(test.pkgs),
		}
		conf.Import("a")

		prog, err := conf.Load()
		if err != nil {
			t.Errorf("%s: Load failed unexpectedly: %v", test.descr, err)
		}
		if prog == nil {
			t.Fatalf("%s: Load returned a nil Program", test.descr)
		}

		if got, want := imported(prog), "a"; got != want {
			t.Errorf("%s: Imported = %s, want %s", test.descr, got, want)
		}
		if got := all(prog); strings.Join(got, " ") != test.wantPkgs {
			t.Errorf("%s: AllPackages = %s, want %s", test.descr, got, test.wantPkgs)
		}
	}
}
// TestCwd checks how relative and absolute import arguments are resolved
// against Config.Cwd. An empty want means the load is expected to fail.
func TestCwd(t *testing.T) {
	ctxt := fakeContext(map[string]string{"one/two/three": `package three`})
	for _, test := range []struct {
		cwd, arg, want string
	}{
		{cwd: "/go/src/one", arg: "./two/three", want: "one/two/three"},
		{cwd: "/go/src/one", arg: "../one/two/three", want: "one/two/three"},
		{cwd: "/go/src/one", arg: "one/two/three", want: "one/two/three"},
		{cwd: "/go/src/one/two/three", arg: ".", want: "one/two/three"},
		{cwd: "/go/src/one", arg: "two/three", want: ""},
	} {
		conf := loader.Config{
			Cwd:   test.cwd,
			Build: ctxt,
		}
		conf.Import(test.arg)

		var got string
		prog, err := conf.Load()
		if prog != nil {
			got = imported(prog)
		}
		if got != test.want {
			t.Errorf("Load(%s) from %s: Imported = %s, want %s",
				test.arg, test.cwd, got, test.want)
			if err != nil {
				t.Errorf("Load failed: %v", err)
			}
		}
	}
}
// TestLoad_vendor checks vendor-directory resolution: each importer must
// see its own vendored copy of "x" (or the top-level one, for c).
func TestLoad_vendor(t *testing.T) {
	pkgs := map[string]string{
		"a":          `package a; import _ "x"`,
		"a/vendor":   ``, // mkdir a/vendor
		"a/vendor/x": `package xa`,
		"b":          `package b; import _ "x"`,
		"b/vendor":   ``, // mkdir b/vendor
		"b/vendor/x": `package xb`,
		"c":          `package c; import _ "x"`,
		"x":          `package xc`,
	}
	conf := loader.Config{Build: fakeContext(pkgs)}
	conf.Import("a")
	conf.Import("b")
	conf.Import("c")

	prog, err := conf.Load()
	if err != nil {
		t.Fatal(err)
	}

	// Check that a, b, and c see different versions of x.
	// The package names xa/xb/xc encode which copy was resolved.
	for _, r := range "abc" {
		name := string(r)
		got := prog.Package(name).Pkg.Imports()[0]
		want := "x" + name
		if got.Name() != want {
			t.Errorf("package %s import %q = %s, want %s",
				name, "x", got.Name(), want)
		}
	}
}
// TestVendorCwd tests the interaction of cwd and vendor directories when
// resolving import arguments; want lists the expected AllPackages set
// (empty means the load should fail).
func TestVendorCwd(t *testing.T) {
	// Test the interaction of cwd and vendor directories.
	ctxt := fakeContext(map[string]string{
		"net":          ``, // mkdir net
		"net/http":     `package http; import _ "hpack"`,
		"vendor":       ``, // mkdir vendor
		"vendor/hpack": `package vendorhpack`,
		"hpack":        `package hpack`,
	})
	for i, test := range []struct {
		cwd, arg, want string
	}{
		{cwd: "/go/src/net", arg: "http"}, // not found
		{cwd: "/go/src/net", arg: "./http", want: "net/http vendor/hpack"},
		{cwd: "/go/src/net", arg: "hpack", want: "vendor/hpack"},
		{cwd: "/go/src/vendor", arg: "hpack", want: "vendor/hpack"},
		{cwd: "/go/src/vendor", arg: "./hpack", want: "vendor/hpack"},
	} {
		conf := loader.Config{
			Cwd:   test.cwd,
			Build: ctxt,
		}
		conf.Import(test.arg)

		var got string
		prog, err := conf.Load()
		if prog != nil {
			got = strings.Join(all(prog), " ")
		}
		if got != test.want {
			t.Errorf("#%d: Load(%s) from %s: got %s, want %s",
				i, test.arg, test.cwd, got, test.want)
			if err != nil {
				t.Errorf("Load failed: %v", err)
			}
		}
	}
}
// TestVendorCwdIssue16580 is a regression test for Go issue 16580:
// import decls in "created" packages were vendor-resolved w.r.t. cwd,
// not the parent directory of the package's files. The value of b.X
// observed by the created file must therefore depend only on the file's
// location (which vendored "b" it sees), never on cwd.
func TestVendorCwdIssue16580(t *testing.T) {
	// Regression test for Go issue 16580.
	// Import decls in "created" packages were vendor-resolved
	// w.r.t. cwd, not the parent directory of the package's files.
	ctxt := fakeContext(map[string]string{
		"a":          ``, // mkdir a
		"a/vendor":   ``, // mkdir a/vendor
		"a/vendor/b": `package b; const X = true`,
		"b":          `package b; const X = false`,
	})
	for _, test := range []struct {
		filename, cwd string
		want          bool // expected value of b.X; depends on filename, not on cwd
	}{
		{filename: "c.go", cwd: "/go/src", want: false},
		{filename: "c.go", cwd: "/go/src/a", want: false},
		{filename: "c.go", cwd: "/go/src/a/b", want: false},
		{filename: "c.go", cwd: "/go/src/a/vendor/b", want: false},
		{filename: "/go/src/a/c.go", cwd: "/go/src", want: true},
		{filename: "/go/src/a/c.go", cwd: "/go/src/a", want: true},
		{filename: "/go/src/a/c.go", cwd: "/go/src/a/b", want: true},
		{filename: "/go/src/a/c.go", cwd: "/go/src/a/vendor/b", want: true},
		{filename: "/go/src/c/c.go", cwd: "/go/src", want: false},
		{filename: "/go/src/c/c.go", cwd: "/go/src/a", want: false},
		{filename: "/go/src/c/c.go", cwd: "/go/src/a/b", want: false},
		{filename: "/go/src/c/c.go", cwd: "/go/src/a/vendor/b", want: false},
	} {
		conf := loader.Config{
			Cwd:   test.cwd,
			Build: ctxt,
		}
		f, err := conf.ParseFile(test.filename, `package dummy; import "b"; const X = b.X`)
		if err != nil {
			t.Fatal(f)
		}
		conf.CreateFromFiles("dummy", f)

		prog, err := conf.Load()
		if err != nil {
			t.Errorf("%+v: Load failed: %v", test, err)
			continue
		}
		// Extract the constant X from the loaded dummy package.
		x := constant.BoolVal(prog.Created[0].Pkg.Scope().Lookup("X").(*types.Const).Val())
		if x != test.want {
			t.Errorf("%+v: b.X = %t", test, x)
		}
	}

	// TODO(adonovan): also test imports within XTestGoFiles.
}
// TODO(adonovan): more Load tests:
//
// failures:
// - to parse package decl of *_test.go files
// - to parse package decl of external *_test.go files
// - to parse whole of *_test.go files
// - to parse whole of external *_test.go files
// - to open a *.go file during import scanning
// - to import from binary
// features:
// - InitialPackages
// - PackageCreated hook
// - TypeCheckFuncBodies hook
// TestTransitivelyErrorFreeFlag checks that PackageInfo.TransitivelyErrorFree
// is set exactly for packages whose whole dependency subgraph is error-free.
func TestTransitivelyErrorFreeFlag(t *testing.T) {
	// Create an minimal custom build.Context
	// that fakes the following packages:
	//
	// a --> b --> c!   c has an error
	//   \              d and e are transitively error-free.
	//    e --> d
	//
	// Each package [a-e] consists of one file, x.go.
	pkgs := map[string]string{
		"a": `package a; import (_ "b"; _ "e")`,
		"b": `package b; import _ "c"`,
		"c": `package c; func f() { _ = int(false) }`, // type error within function body
		"d": `package d;`,
		"e": `package e; import _ "d"`,
	}
	conf := loader.Config{
		AllowErrors: true,
		Build:       fakeContext(pkgs),
	}
	conf.Import("a")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed: %s", err)
	}
	if prog == nil {
		t.Fatalf("Load returned nil *Program")
	}

	for pkg, info := range prog.AllPackages {
		// Derive the expected flags per package from the diagram above.
		var wantErr, wantTEF bool
		switch pkg.Path() {
		case "a", "b":
		case "c":
			wantErr = true
		case "d", "e":
			wantTEF = true
		default:
			t.Errorf("unexpected package: %q", pkg.Path())
			continue
		}

		if (info.Errors != nil) != wantErr {
			if wantErr {
				t.Errorf("Package %q.Error = nil, want error", pkg.Path())
			} else {
				t.Errorf("Package %q has unexpected Errors: %v",
					pkg.Path(), info.Errors)
			}
		}

		if info.TransitivelyErrorFree != wantTEF {
			t.Errorf("Package %q.TransitivelyErrorFree=%t, want %t",
				pkg.Path(), info.TransitivelyErrorFree, wantTEF)
		}
	}
}
// Test that syntax (scan/parse), type, and loader errors are recorded
// (in PackageInfo.Errors) and reported (via Config.TypeChecker.Error).
func TestErrorReporting(t *testing.T) {
	pkgs := map[string]string{
		"a": `package a; import (_ "b"; _ "c"); var x int = false`,
		"b": `package b; 'syntax error!`,
	}
	conf := loader.Config{
		AllowErrors: true,
		Build:       fakeContext(pkgs),
	}
	// The error handler may be invoked concurrently, so guard the slice.
	var mu sync.Mutex
	var allErrors []error
	conf.TypeChecker.Error = func(err error) {
		mu.Lock()
		allErrors = append(allErrors, err)
		mu.Unlock()
	}
	conf.Import("a")

	prog, err := conf.Load()
	if err != nil {
		t.Errorf("Load failed: %s", err)
	}
	if prog == nil {
		t.Fatalf("Load returned nil *Program")
	}

	// TODO(adonovan): test keys of ImportMap.

	// Check errors recorded in each PackageInfo.
	for pkg, info := range prog.AllPackages {
		switch pkg.Path() {
		case "a":
			if !hasError(info.Errors, "cannot convert false") {
				t.Errorf("a.Errors = %v, want bool conversion (type) error", info.Errors)
			}
			if !hasError(info.Errors, "could not import c") {
				t.Errorf("a.Errors = %v, want import (loader) error", info.Errors)
			}
		case "b":
			if !hasError(info.Errors, "rune literal not terminated") {
				t.Errorf("b.Errors = %v, want unterminated literal (syntax) error", info.Errors)
			}
		}
	}

	// Check errors reported via error handler.
	if !hasError(allErrors, "cannot convert false") ||
		!hasError(allErrors, "rune literal not terminated") ||
		!hasError(allErrors, "could not import c") {
		t.Errorf("allErrors = %v, want syntax, type and loader errors", allErrors)
	}
}
// TestCycles checks that import cycles (direct and indirect, including one
// reached via a test file's dependency) are detected and reported through
// the type-checker error handler rather than aborting the load.
func TestCycles(t *testing.T) {
	for _, test := range []struct {
		descr   string
		ctxt    *build.Context
		wantErr string
	}{
		{
			"self-cycle",
			fakeContext(map[string]string{
				"main":      `package main; import _ "selfcycle"`,
				"selfcycle": `package selfcycle; import _ "selfcycle"`,
			}),
			`import cycle: selfcycle -> selfcycle`,
		},
		{
			"three-package cycle",
			fakeContext(map[string]string{
				"main": `package main; import _ "a"`,
				"a":    `package a; import _ "b"`,
				"b":    `package b; import _ "c"`,
				"c":    `package c; import _ "a"`,
			}),
			`import cycle: c -> a -> b -> c`,
		},
		{
			"self-cycle in dependency of test file",
			buildutil.FakeContext(map[string]map[string]string{
				"main": {
					"main.go":      `package main`,
					"main_test.go": `package main; import _ "a"`,
				},
				"a": {
					"a.go": `package a; import _ "a"`,
				},
			}),
			`import cycle: a -> a`,
		},
		// TODO(adonovan): fix: these fail
		// {
		// 	"two-package cycle in dependency of test file",
		// 	buildutil.FakeContext(map[string]map[string]string{
		// 		"main": {
		// 			"main.go":      `package main`,
		// 			"main_test.go": `package main; import _ "a"`,
		// 		},
		// 		"a": {
		// 			"a.go": `package a; import _ "main"`,
		// 		},
		// 	}),
		// 	`import cycle: main -> a -> main`,
		// },
		// {
		// 	"self-cycle in augmented package",
		// 	buildutil.FakeContext(map[string]map[string]string{
		// 		"main": {
		// 			"main.go":      `package main`,
		// 			"main_test.go": `package main; import _ "main"`,
		// 		},
		// 	}),
		// 	`import cycle: main -> main`,
		// },
	} {
		conf := loader.Config{
			AllowErrors: true,
			Build:       test.ctxt,
		}
		// Collect reported errors under a mutex; the handler may run concurrently.
		var mu sync.Mutex
		var allErrors []error
		conf.TypeChecker.Error = func(err error) {
			mu.Lock()
			allErrors = append(allErrors, err)
			mu.Unlock()
		}
		conf.ImportWithTests("main")

		prog, err := conf.Load()
		if err != nil {
			t.Errorf("%s: Load failed: %s", test.descr, err)
		}
		if prog == nil {
			t.Fatalf("%s: Load returned nil *Program", test.descr)
		}

		if !hasError(allErrors, test.wantErr) {
			t.Errorf("%s: Load() errors = %q, want %q",
				test.descr, allErrors, test.wantErr)
		}
	}

	// TODO(adonovan):
	// - Test that in a legal test cycle, none of the symbols
	//   defined by augmentation are visible via import.
}
// ---- utilities ----
// fakeContext wraps buildutil.FakeContext for the common case of
// single-file packages: each import path in pkgs maps to the contents of
// its sole source file, which is always named x.go.
func fakeContext(pkgs map[string]string) *build.Context {
	perFile := make(map[string]map[string]string)
	for importPath, src := range pkgs {
		perFile[importPath] = map[string]string{"x.go": src}
	}
	return buildutil.FakeContext(perFile)
}
func hasError(errors []error, substr string) bool {
for _, err := range errors {
if strings.Contains(err.Error(), substr) {
return true
}
}
return false
}
func keys(m map[string]bool) (keys []string) {
for key := range m {
keys = append(keys, key)
}
sort.Strings(keys)
return
}
// all returns the paths of all loaded packages, sorted.
func all(prog *loader.Program) []string {
	var pkgs []string
	for _, info := range prog.AllPackages {
		pkgs = append(pkgs, info.Pkg.Path())
	}
	sort.Strings(pkgs)
	return pkgs
}

// imported returns the paths of the initially imported packages,
// sorted and joined with spaces.
func imported(prog *loader.Program) string {
	var pkgs []string
	for _, info := range prog.Imported {
		pkgs = append(pkgs, info.Pkg.Path())
	}
	sort.Strings(pkgs)
	return strings.Join(pkgs, " ")
}

// created returns the paths of the initially created packages, joined
// with spaces (kept in creation order, deliberately unsorted).
func created(prog *loader.Program) string {
	var pkgs []string
	for _, info := range prog.Created {
		pkgs = append(pkgs, info.Pkg.Path())
	}
	return strings.Join(pkgs, " ")
}
// Load package "io" twice in parallel.
// When run with -race, this is a regression test for Go issue 20718, in
// which the global "unsafe" package was modified concurrently.
func TestLoad1(t *testing.T) { loadIO(t) }
func TestLoad2(t *testing.T) { loadIO(t) }

// loadIO imports "io"; t.Parallel() lets the two callers above overlap.
func loadIO(t *testing.T) {
	t.Parallel()
	conf := &loader.Config{ImportPkgs: map[string]bool{"io": false}}
	if _, err := conf.Load(); err != nil {
		t.Fatal(err)
	}
}
| {
"pile_set_name": "Github"
} |
/* Flowchart variables */
/* Sequence Diagram variables */
/* Gantt chart variables */
.mermaid .label {
color: #323D47;
}
.node rect,
.node circle,
.node ellipse,
.node polygon {
fill: #BDD5EA;
stroke: #81B1DB;
stroke-width: 1px;
}
.edgePath .path {
stroke: lightgrey;
}
.edgeLabel {
background-color: #e8e8e8;
}
.cluster rect {
fill: #6D6D65 !important;
rx: 4 !important;
stroke: rgba(255, 255, 255, 0.25) !important;
stroke-width: 1px !important;
}
.cluster text {
fill: #F9FFFE;
}
.actor {
stroke: #81B1DB;
fill: #BDD5EA;
}
text.actor {
fill: black;
stroke: none;
}
.actor-line {
stroke: lightgrey;
}
/* Sequence-diagram message lines. The dash pattern and marker values were
   previously written as quoted strings ("2 2", "url(#arrowhead)"), which
   is invalid CSS — parsers drop such declarations entirely. Unquoted they
   take effect as declared. */
.messageLine0 {
  stroke-width: 1.5;
  stroke-dasharray: 2, 2;
  marker-end: url(#arrowhead);
  stroke: lightgrey;
}
.messageLine1 {
  stroke-width: 1.5;
  stroke-dasharray: 2, 2;
  stroke: lightgrey;
}
#arrowhead {
fill: lightgrey !important;
}
#crosshead path {
fill: lightgrey !important;
stroke: lightgrey !important;
}
.messageText {
fill: lightgrey;
stroke: none;
}
.labelBox {
stroke: #81B1DB;
fill: #BDD5EA;
}
.labelText {
fill: #323D47;
stroke: none;
}
.loopText {
fill: lightgrey;
stroke: none;
}
/* Loop-box border line. As with the message lines, the dash pattern and
   marker were quoted ("2 2", "url(#arrowhead)"), which is invalid CSS and
   silently ignored; unquoted values make the declarations effective. */
.loopLine {
  stroke-width: 2;
  stroke-dasharray: 2, 2;
  marker-end: url(#arrowhead);
  stroke: #81B1DB;
}
.note {
stroke: rgba(255, 255, 255, 0.25);
fill: #fff5ad;
}
.noteText {
fill: black;
stroke: none;
font-family: 'trebuchet ms', verdana, arial;
font-size: 14px;
}
/** Section styling */
.section {
stroke: none;
opacity: 0.2;
}
.section0 {
fill: rgba(255, 255, 255, 0.3);
}
.section2 {
fill: #EAE8B9;
}
.section1,
.section3 {
fill: white;
opacity: 0.2;
}
.sectionTitle0 {
fill: #F9FFFE;
}
.sectionTitle1 {
fill: #F9FFFE;
}
.sectionTitle2 {
fill: #F9FFFE;
}
.sectionTitle3 {
fill: #F9FFFE;
}
.sectionTitle {
text-anchor: start;
font-size: 11px;
text-height: 14px;
}
/* Grid and axis */
.grid .tick {
stroke: rgba(255, 255, 255, 0.3);
opacity: 0.3;
shape-rendering: crispEdges;
}
.grid .tick text {
fill: lightgrey;
opacity: 0.5;
}
.grid path {
stroke-width: 0;
}
/* Today line */
.today {
fill: none;
stroke: #DB5757;
stroke-width: 2px;
}
/* Task styling */
/* Default task */
.task {
stroke-width: 1;
}
.taskText {
text-anchor: middle;
font-size: 11px;
}
.taskTextOutsideRight {
fill: #323D47;
text-anchor: start;
font-size: 11px;
}
.taskTextOutsideLeft {
fill: #323D47;
text-anchor: end;
font-size: 11px;
}
/* Specific task settings for the sections*/
.taskText0,
.taskText1,
.taskText2,
.taskText3 {
fill: #323D47;
}
.task0,
.task1,
.task2,
.task3 {
fill: #BDD5EA;
stroke: rgba(255, 255, 255, 0.5);
}
.taskTextOutside0,
.taskTextOutside2 {
fill: lightgrey;
}
.taskTextOutside1,
.taskTextOutside3 {
fill: lightgrey;
}
/* Active task */
.active0,
.active1,
.active2,
.active3 {
fill: #81B1DB;
stroke: rgba(255, 255, 255, 0.5);
}
.activeText0,
.activeText1,
.activeText2,
.activeText3 {
fill: #323D47 !important;
}
/* Completed task */
.done0,
.done1,
.done2,
.done3 {
fill: lightgrey;
}
.doneText0,
.doneText1,
.doneText2,
.doneText3 {
fill: #323D47 !important;
}
/* Tasks on the critical line */
.crit0,
.crit1,
.crit2,
.crit3 {
stroke: #E83737;
fill: #E83737;
stroke-width: 2;
}
.activeCrit0,
.activeCrit1,
.activeCrit2,
.activeCrit3 {
stroke: #E83737;
fill: #81B1DB;
stroke-width: 2;
}
.doneCrit0,
.doneCrit1,
.doneCrit2,
.doneCrit3 {
stroke: #E83737;
fill: lightgrey;
stroke-width: 1;
cursor: pointer;
shape-rendering: crispEdges;
}
.doneCritText0,
.doneCritText1,
.doneCritText2,
.doneCritText3 {
fill: lightgrey !important;
}
.activeCritText0,
.activeCritText1,
.activeCritText2,
.activeCritText3 {
fill: #323D47 !important;
}
.titleText {
text-anchor: middle;
font-size: 18px;
fill: lightgrey;
}
/*
*/
.node text {
font-family: 'trebuchet ms', verdana, arial;
font-size: 14px;
}
.node.clickable {
cursor: pointer;
}
div.mermaidTooltip {
position: absolute;
text-align: center;
max-width: 200px;
padding: 2px;
font-family: 'trebuchet ms', verdana, arial;
font-size: 12px;
background: #6D6D65;
border: 1px solid rgba(255, 255, 255, 0.25);
border-radius: 2px;
pointer-events: none;
z-index: 100;
}
| {
"pile_set_name": "Github"
} |
// unused reports unused identifiers (types, functions, ...) in your
// code.
package main // import "honnef.co/go/tools/cmd/unused"
import (
"log"
"os"
"honnef.co/go/tools/lint/lintutil"
"honnef.co/go/tools/unused"
)
// Command-line flags: which classes of identifiers to report and how the
// analysis should behave. Bound to the flag set in main.
var (
	fConstants    bool   // report unused constants
	fFields       bool   // report unused struct fields
	fFunctions    bool   // report unused functions and methods
	fTypes        bool   // report unused types
	fVariables    bool   // report unused variables
	fDebug        string // path of a debug-graph output file ("" = disabled)
	fWholeProgram bool   // treat args as a whole program; report unused exported identifiers
	fReflection   bool   // treat identifiers that may be reached via reflection as used
)
// newChecker builds an unused.Checker for the given check mode, applying
// the global flag settings (debug output file, whole-program analysis,
// reflection handling). Exits the process if the debug file can't be created.
func newChecker(mode unused.CheckMode) *unused.Checker {
	checker := unused.NewChecker(mode)

	if fDebug != "" {
		debug, err := os.Create(fDebug)
		if err != nil {
			log.Fatal("couldn't open debug file:", err)
		}
		checker.Debug = debug
	}

	checker.WholeProgram = fWholeProgram
	checker.ConsiderReflection = fReflection
	return checker
}
// main parses the command-line flags, translates the boolean class flags
// into a unused.CheckMode bitmask, and runs the linter over the remaining
// arguments.
func main() {
	log.SetFlags(0)

	fs := lintutil.FlagSet("unused")
	fs.BoolVar(&fConstants, "consts", true, "Report unused constants")
	fs.BoolVar(&fFields, "fields", true, "Report unused fields")
	fs.BoolVar(&fFunctions, "funcs", true, "Report unused functions and methods")
	fs.BoolVar(&fTypes, "types", true, "Report unused types")
	fs.BoolVar(&fVariables, "vars", true, "Report unused variables")
	fs.StringVar(&fDebug, "debug", "", "Write a debug graph to `file`. Existing files will be overwritten.")
	fs.BoolVar(&fWholeProgram, "exported", false, "Treat arguments as a program and report unused exported identifiers")
	fs.BoolVar(&fReflection, "reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection")
	fs.Parse(os.Args[1:])

	// Accumulate one mode bit per enabled identifier class.
	var mode unused.CheckMode
	if fConstants {
		mode |= unused.CheckConstants
	}
	if fFields {
		mode |= unused.CheckFields
	}
	if fFunctions {
		mode |= unused.CheckFunctions
	}
	if fTypes {
		mode |= unused.CheckTypes
	}
	if fVariables {
		mode |= unused.CheckVariables
	}

	checker := newChecker(mode)
	l := unused.NewLintChecker(checker)
	lintutil.ProcessFlagSet("unused", l, fs)
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2015-2019 Jean-Luc Barriere
*
* This file is part of Noson-App
*
* Noson is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Noson is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Noson. If not, see <http://www.gnu.org/licenses/>.
*
*/
#ifndef NOSONAPPALLSERVICESMODEL_H
#define NOSONAPPALLSERVICESMODEL_H
#include "listmodel.h"
#include "servicesmodel.h"
#include <QAbstractListModel>
namespace nosonapp
{
// List model exposing all known music services to QML.
// Combines Qt's QAbstractListModel (view interface) with the project's
// ListModel<Sonos> (data provider plumbing).
class AllServicesModel : public QAbstractListModel, public ListModel<Sonos>
{
Q_OBJECT
// Row count of the model; change signalled via countChanged().
Q_PROPERTY(int count READ rowCount NOTIFY countChanged)
// True when the last data load failed; signalled via loaded().
Q_PROPERTY(bool failure READ dataFailure NOTIFY loaded)
public:
// Custom item roles made available to QML delegates.
enum AllServiceRoles
{
PayloadRole,
IdRole,
TitleRole,
IconRole,
NickNameRole,
NormalizedRole,
TypeRole,
AuthRole,
};
AllServicesModel(QObject* parent = 0);
virtual ~AllServicesModel();
// Append one service item to the model.
// NOTE(review): ownership of 'item' is not visible from this header —
// presumably the model keeps the pointer; confirm in the implementation.
void addItem(ServiceItem* item);
int rowCount(const QModelIndex& parent = QModelIndex()) const;
QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const;
// Return the row's data as a role-name -> value map (QML-friendly).
Q_INVOKABLE QVariantMap get(int row);
// True while no data has been loaded yet (state is DataBlank).
Q_INVOKABLE bool isNew() { return m_dataState == DataStatus::DataBlank; }
// Bind the model to a provider; optionally fill it immediately.
Q_INVOKABLE bool init(Sonos* provider, bool fill = false) { return ListModel::configure(provider, fill); }
virtual void clearData();
virtual bool loadData();
Q_INVOKABLE bool asyncLoad();
Q_INVOKABLE void resetModel();
virtual void handleDataUpdate();
signals:
void dataUpdated();
void countChanged();
void loaded(bool succeeded);
protected:
QHash<int, QByteArray> roleNames() const;
private:
// m_items: rows currently exposed; m_data: staging list for async updates.
QList<ServiceItem*> m_items;
QList<ServiceItem*> m_data;
};
}
#endif /* NOSONAPPALLSERVICESMODEL_H */
| {
"pile_set_name": "Github"
} |
# Jackson JSON processor
Jackson is a high-performance, Free/Open Source JSON processing library.
It was originally written by Tatu Saloranta ([email protected]), and has
been in development since 2007.
It is currently developed by a community of developers, as well as supported
commercially by FasterXML.com.
## Licensing
Jackson core and extension components may be licensed under different licenses.
To find the details that apply to this artifact see the accompanying LICENSE file.
For more information, including possible other licensing options, contact
FasterXML.com (http://fasterxml.com).
## Credits
A list of contributors may be found from CREDITS file, which is included
in some artifacts (usually source distributions); but is always available
from the source code management (SCM) system the project uses.
| {
"pile_set_name": "Github"
} |
±²³SolveSpaceREVa
Group.h.v=00000001
Group.type=5000
Group.name=#references
Group.color=ff000000
Group.skipFirst=0
Group.predef.swapUV=0
Group.predef.negateU=0
Group.predef.negateV=0
Group.visible=1
Group.suppress=0
Group.relaxConstraints=0
Group.allowRedundant=0
Group.allDimsReference=0
Group.scale=1.00000000000000000000
Group.remap={
}
AddGroup
Group.h.v=00000002
Group.type=5001
Group.order=1
Group.name=sketch-in-plane
Group.activeWorkplane.v=80020000
Group.color=ff000000
Group.subtype=6000
Group.skipFirst=0
Group.predef.q.w=1.00000000000000000000
Group.predef.origin.v=00010001
Group.predef.swapUV=0
Group.predef.negateU=0
Group.predef.negateV=0
Group.visible=1
Group.suppress=0
Group.relaxConstraints=0
Group.allowRedundant=0
Group.allDimsReference=0
Group.scale=1.00000000000000000000
Group.remap={
}
AddGroup
Param.h.v.=00010010
AddParam
Param.h.v.=00010011
AddParam
Param.h.v.=00010012
AddParam
Param.h.v.=00010020
Param.val=1.00000000000000000000
AddParam
Param.h.v.=00010021
AddParam
Param.h.v.=00010022
AddParam
Param.h.v.=00010023
AddParam
Param.h.v.=00020010
AddParam
Param.h.v.=00020011
AddParam
Param.h.v.=00020012
AddParam
Param.h.v.=00020020
Param.val=0.50000000000000000000
AddParam
Param.h.v.=00020021
Param.val=0.50000000000000000000
AddParam
Param.h.v.=00020022
Param.val=0.50000000000000000000
AddParam
Param.h.v.=00020023
Param.val=0.50000000000000000000
AddParam
Param.h.v.=00030010
AddParam
Param.h.v.=00030011
AddParam
Param.h.v.=00030012
AddParam
Param.h.v.=00030020
Param.val=0.50000000000000000000
AddParam
Param.h.v.=00030021
Param.val=-0.50000000000000000000
AddParam
Param.h.v.=00030022
Param.val=-0.50000000000000000000
AddParam
Param.h.v.=00030023
Param.val=-0.50000000000000000000
AddParam
Param.h.v.=00040010
Param.val=-15.00000000000000000000
AddParam
Param.h.v.=00040011
Param.val=5.00000000000000000000
AddParam
Param.h.v.=00040013
Param.val=-5.00000000000000000000
AddParam
Param.h.v.=00040014
Param.val=5.00000000000000000000
AddParam
Param.h.v.=00050010
Param.val=-10.00000000000000000000
AddParam
Param.h.v.=00050011
Param.val=10.00000000000000000000
AddParam
Request.h.v=00000001
Request.type=100
Request.group.v=00000001
Request.construction=0
AddRequest
Request.h.v=00000002
Request.type=100
Request.group.v=00000001
Request.construction=0
AddRequest
Request.h.v=00000003
Request.type=100
Request.group.v=00000001
Request.construction=0
AddRequest
Request.h.v=00000004
Request.type=200
Request.workplane.v=80020000
Request.group.v=00000002
Request.construction=0
AddRequest
Request.h.v=00000005
Request.type=101
Request.workplane.v=80020000
Request.group.v=00000002
Request.construction=0
AddRequest
Entity.h.v=00010000
Entity.type=10000
Entity.construction=0
Entity.point[0].v=00010001
Entity.normal.v=00010020
Entity.actVisible=1
AddEntity
Entity.h.v=00010001
Entity.type=2000
Entity.construction=0
Entity.actVisible=1
AddEntity
Entity.h.v=00010020
Entity.type=3000
Entity.construction=0
Entity.point[0].v=00010001
Entity.actNormal.w=1.00000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=00020000
Entity.type=10000
Entity.construction=0
Entity.point[0].v=00020001
Entity.normal.v=00020020
Entity.actVisible=1
AddEntity
Entity.h.v=00020001
Entity.type=2000
Entity.construction=0
Entity.actVisible=1
AddEntity
Entity.h.v=00020020
Entity.type=3000
Entity.construction=0
Entity.point[0].v=00020001
Entity.actNormal.w=0.50000000000000000000
Entity.actNormal.vx=0.50000000000000000000
Entity.actNormal.vy=0.50000000000000000000
Entity.actNormal.vz=0.50000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=00030000
Entity.type=10000
Entity.construction=0
Entity.point[0].v=00030001
Entity.normal.v=00030020
Entity.actVisible=1
AddEntity
Entity.h.v=00030001
Entity.type=2000
Entity.construction=0
Entity.actVisible=1
AddEntity
Entity.h.v=00030020
Entity.type=3000
Entity.construction=0
Entity.point[0].v=00030001
Entity.actNormal.w=0.50000000000000000000
Entity.actNormal.vx=-0.50000000000000000000
Entity.actNormal.vy=-0.50000000000000000000
Entity.actNormal.vz=-0.50000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=00040000
Entity.type=11000
Entity.construction=0
Entity.point[0].v=00040001
Entity.point[1].v=00040002
Entity.workplane.v=80020000
Entity.actVisible=1
AddEntity
Entity.h.v=00040001
Entity.type=2001
Entity.construction=0
Entity.workplane.v=80020000
Entity.actPoint.x=-15.00000000000000000000
Entity.actPoint.y=5.00000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=00040002
Entity.type=2001
Entity.construction=0
Entity.workplane.v=80020000
Entity.actPoint.x=-5.00000000000000000000
Entity.actPoint.y=5.00000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=00050000
Entity.type=2001
Entity.construction=0
Entity.workplane.v=80020000
Entity.actPoint.x=-10.00000000000000000000
Entity.actPoint.y=10.00000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=80020000
Entity.type=10000
Entity.construction=0
Entity.point[0].v=80020002
Entity.normal.v=80020001
Entity.actVisible=1
AddEntity
Entity.h.v=80020001
Entity.type=3010
Entity.construction=0
Entity.point[0].v=80020002
Entity.actNormal.w=1.00000000000000000000
Entity.actVisible=1
AddEntity
Entity.h.v=80020002
Entity.type=2012
Entity.construction=0
Entity.actVisible=1
AddEntity
Constraint.h.v=00000001
Constraint.type=32
Constraint.group.v=00000002
Constraint.workplane.v=80020000
Constraint.valA=-5.00000000000000000000
Constraint.ptA.v=00050000
Constraint.entityA.v=00040000
Constraint.other=0
Constraint.other2=0
Constraint.reference=1
AddConstraint
| {
"pile_set_name": "Github"
} |
[
{ "name": "gles2-master-risky", "api": "gles2", "tests": "testlists/gles2-master-risky-PASS.txt" },
{ "name": "gles2-master", "api": "gles2", "tests": "testlists/gles2-master-PASS.txt" },
{ "name": "gles3-565-no-depth-no-stencil", "api": "gles3", "tests": "testlists/gles3-565-no-depth-no-stencil-PASS.txt" },
{ "name": "gles3-master-risky", "api": "gles3", "tests": "testlists/gles3-master-risky-PASS.txt" },
{ "name": "gles3-master", "api": "gles3", "tests": "testlists/gles3-master-PASS.txt" },
{ "name": "vulkan-wsi", "api": "vulkan", "tests": "testlists/vk-wsi-PASS.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-ABORT.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-ASSERT.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-COMPATIBILITY_WARNING.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-CRASH.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-FAIL.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-PASS.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-QUALITY_WARNING.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-TIMEOUT.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-UNIMPLEMENTED.txt" },
{ "name": "vulkan-master", "api": "vulkan", "tests": "testlists/vk-master-UNREACHABLE.txt" },
{ "name": "vulkan-master-risky", "api": "vulkan", "tests": "testlists/vk-master-risky-PASS.txt" }
]
| {
"pile_set_name": "Github"
} |
# Dynamic Programming - 动态规划(五)
# Interleaving String
## Question
- leetcode: [Interleaving String | LeetCode OJ](https://leetcode.com/problems/interleaving-string/)
- lintcode: [(29) Interleaving String](http://www.lintcode.com/en/problem/interleaving-string/)
```
Given three strings: s1, s2, s3,
determine whether s3 is formed by the interleaving of s1 and s2.
Example
For s1 = "aabcc", s2 = "dbbca"
When s3 = "aadbbcbcac", return true.
When s3 = "aadbbbaccc", return false.
Challenge
O(n2) time or better
```
## 题解1 - bug
题目意思是 s3 是否由 s1 和 s2 交叉构成,不允许跳着从 s1 和 s2 挑选字符。那么直觉上可以对三个字符串设置三个索引,首先从 s3 中依次取字符,然后进入内循环,依次从 s1 和 s2 中取首字符,若能匹配上则进入下一次循环,否则立即返回 false. 我们先看代码,再分析 bug 之处。
### Java
```java
public class Solution {
/**
* Determine whether s3 is formed by interleaving of s1 and s2.
* @param s1, s2, s3: As description.
* @return: true or false.
*/
public boolean isInterleave(String s1, String s2, String s3) {
int len1 = (s1 == null) ? 0 : s1.length();
int len2 = (s2 == null) ? 0 : s2.length();
int len3 = (s3 == null) ? 0 : s3.length();
if (len3 != len1 + len2) return false;
int i1 = 0, i2 = 0;
for (int i3 = 0; i3 < len3; i3++) {
boolean result = false;
if (i1 < len1 && s1.charAt(i1) == s3.charAt(i3)) {
i1++;
result = true;
continue;
}
if (i2 < len2 && s2.charAt(i2) == s3.charAt(i3)) {
i2++;
result = true;
continue;
}
// return instantly if both s1 and s2 can not pair with s3
if (!result) return false;
}
return true;
}
}
```
### 源码分析
异常处理部分:首先求得 s1, s2, s3 的字符串长度,随后用索引 i1, i2, i3 巧妙地避开了繁琐的 null 检测。这段代码能过前面的一部分数据,但在 lintcode 的第15个 test 跪了。不想马上看以下分析的可以自己先 debug 下。
我们可以注意到以上代码还有一种情况并未考虑到,那就是当 s1[i1] 和 s2[i2] 均和 s3[i3] 相等时,我们可以拿 s1 或者 s2 去匹配,那么问题来了,由于不允许跳着取,那么可能出现在取了 s1 中的字符后,接下来的 s1 和 s2 首字符都无法和 s3 匹配到,因此原本应该返回 true 而现在返回 false. 建议将以上代码贴到 OJ 上看看测试用例。
以上 bug 可以通过加入对 `(s1[i1] == s3[i3]) && (s2[i2] == s3[i3])` 这一特殊情形考虑,即分两种情况递归调用 isInterleave, 只不过 s1, s2, s3 为新生成的字符串。
### 复杂度分析
遍历一次 s3, 时间复杂度为 `$$O(n)$$`, 空间复杂度 `$$O(1)$$`.
## 题解2
在 `(s1[i1] == s3[i3]) && (s2[i2] == s3[i3])` 时分两种情况考虑,即让 s1[i1] 和 s3[i3] 配对或者 s2[i2] 和 s3[i3] 配对,那么嵌套调用时新生成的字符串则分别为 `s1[1+i1:], s2[i2], s3[1+i3:]` 和 `s1[i1:], s2[1+i2], s3[1+i3:]`. 嵌套调用结束后立即返回最终结果,因为递归调用时整个结果已经知晓,不立即返回则有可能会产生错误结果,递归调用并未影响到调用处的 i1 和 i2.
### Python
```python
class Solution:
    """
    @params s1, s2, s3: Three strings as description.
    @return: return True if s3 is formed by the interleaving of
             s1 and s2 or False if not.
    """
    def isInterleave(self, s1, s2, s3):
        """Return True when s3 can be formed by interleaving s1 and s2.

        Greedy scan with one level of branching: characters are consumed
        from whichever of s1/s2 matches the head of s3; when BOTH match,
        both choices are explored recursively on the remaining suffixes.
        None inputs are treated as empty strings.
        """
        len1 = 0 if s1 is None else len(s1)
        len2 = 0 if s2 is None else len(s2)
        len3 = 0 if s3 is None else len(s3)
        # An interleaving must use every character of s1 and s2 exactly once.
        if len3 != len1 + len2:
            return False

        i1, i2 = 0, 0
        # BUGFIX: iterate over len3 (not len(s3), which crashes on None) and
        # test s2 in the second clause below (the original re-tested s1).
        for i3 in range(len3):
            matched = False
            # Ambiguous case: both heads match s3[i3] -- branch on both.
            if (i1 < len1 and s1[i1] == s3[i3]) and \
                    (i2 < len2 and s2[i2] == s3[i3]):
                # s1[1+i1:], s2[i2:], s3[1+i3:]
                case1 = self.isInterleave(s1[1 + i1:], s2[i2:], s3[1 + i3:])
                # s1[i1:], s2[1+i2:], s3[1+i3:]
                case2 = self.isInterleave(s1[i1:], s2[1 + i2:], s3[1 + i3:])
                # Return immediately; the recursion decided the whole result.
                return case1 or case2
            if i1 < len1 and s1[i1] == s3[i3]:
                i1 += 1
                matched = True
                continue
            if i2 < len2 and s2[i2] == s3[i3]:
                i2 += 1
                matched = True
                continue
            # Neither s1 nor s2 can supply s3[i3].
            if not matched:
                return False
        return True
```
### C++
```c++
class Solution {
public:
/**
* Determine whether s3 is formed by interleaving of s1 and s2.
* @param s1, s2, s3: As description.
* @return: true of false.
*/
bool isInterleave(string s1, string s2, string s3) {
int len1 = s1.size();
int len2 = s2.size();
int len3 = s3.size();
if (len3 != len1 + len2) return false;
int i1 = 0, i2 = 0;
for (int i3 = 0; i3 < len3; ++i3) {
bool result = false;
if (i1 < len1 && s1[i1] == s3[i3] &&
i2 < len2 && s2[i2] == s3[i3]) {
// s1[1+i1:], s2[i2:], s3[1+i3:]
bool case1 = isInterleave(s1.substr(1 + i1), s2.substr(i2), s3.substr(1 + i3));
// s1[i1:], s2[1+i2:], s3[1+i3:]
bool case2 = isInterleave(s1.substr(i1), s2.substr(1 + i2), s3.substr(1 + i3));
// return instantly
return case1 || case2;
}
if (i1 < len1 && s1[i1] == s3[i3]) {
i1++;
result = true;
continue;
}
if (i2 < len2 && s2[i2] == s3[i3]) {
i2++;
result = true;
continue;
}
// return instantly if both s1 and s2 can not pair with s3
if (!result) return false;
}
return true;
}
};
```
### Java
```java
public class Solution {
/**
* Determine whether s3 is formed by interleaving of s1 and s2.
* @param s1, s2, s3: As description.
* @return: true or false.
*/
public boolean isInterleave(String s1, String s2, String s3) {
int len1 = (s1 == null) ? 0 : s1.length();
int len2 = (s2 == null) ? 0 : s2.length();
int len3 = (s3 == null) ? 0 : s3.length();
if (len3 != len1 + len2) return false;
int i1 = 0, i2 = 0;
for (int i3 = 0; i3 < len3; i3++) {
boolean result = false;
if (i1 < len1 && s1.charAt(i1) == s3.charAt(i3) &&
i2 < len2 && s2.charAt(i2) == s3.charAt(i3)) {
// s1[1+i1:], s2[i2:], s3[1+i3:]
boolean case1 = isInterleave(s1.substring(1 + i1), s2.substring(i2), s3.substring(1 + i3));
// s1[i1:], s2[1+i2:], s3[1+i3:]
boolean case2 = isInterleave(s1.substring(i1), s2.substring(1 + i2), s3.substring(1 + i3));
// return instantly
return case1 || case2;
}
if (i1 < len1 && s1.charAt(i1) == s3.charAt(i3)) {
i1++;
result = true;
continue;
}
if (i2 < len2 && s2.charAt(i2) == s3.charAt(i3)) {
i2++;
result = true;
continue;
}
// return instantly if both s1 and s2 can not pair with s3
if (!result) return false;
}
return true;
}
}
```
## 题解3 - 动态规划
看过题解1 和 题解2 的思路后动规的状态和状态方程应该就不难推出了。按照经典的序列规划,不妨假设状态 f[i1][i2][i3] 为 s1的前i1个字符和 s2的前 i2个字符是否能交叉构成 s3的前 i3个字符,那么根据 s1[i1], s2[i2], s3[i3]的匹配情况可以分为8种情况讨论。咋一看这似乎十分麻烦,但实际上我们注意到其实还有一个隐含条件:`len3 == len1 + len2`, 故状态转移方程得到大幅简化。
新的状态可定义为 f[i1][i2], 含义为s1的前`i1`个字符和 s2的前 `i2`个字符是否能交叉构成 s3的前 `i1 + i2` 个字符。根据 `s1[i1] == s3[i3]` 和 `s2[i2] == s3[i3]` 的匹配情况可建立状态转移方程为:
```
f[i1][i2] = (s1[i1 - 1] == s3[i1 + i2 - 1] && f[i1 - 1][i2]) ||
(s2[i2 - 1] == s3[i1 + i2 - 1] && f[i1][i2 - 1])
```
这道题的初始化有点 trick, 考虑到空串的可能,需要单独初始化 `f[*][0]` 和 `f[0][*]`.
### Python
```python
class Solution:
"""
@params s1, s2, s3: Three strings as description.
@return: return True if s3 is formed by the interleaving of
s1 and s2 or False if not.
@hint: you can use [[True] * m for i in range (n)] to allocate a n*m matrix.
"""
def isInterleave(self, s1, s2, s3):
len1 = 0 if s1 is None else len(s1)
len2 = 0 if s2 is None else len(s2)
len3 = 0 if s3 is None else len(s3)
if len3 != len1 + len2:
return False
f = [[True] * (1 + len2) for i in xrange (1 + len1)]
# s1[i1 - 1] == s3[i1 + i2 - 1] && f[i1 - 1][i2]
for i in xrange(1, 1 + len1):
f[i][0] = s1[i - 1] == s3[i - 1] and f[i - 1][0]
# s2[i2 - 1] == s3[i1 + i2 - 1] && f[i1][i2 - 1]
for i in xrange(1, 1 + len2):
f[0][i] = s2[i - 1] == s3[i - 1] and f[0][i - 1]
# i1 >= 1, i2 >= 1
for i1 in xrange(1, 1 + len1):
for i2 in xrange(1, 1 + len2):
case1 = s1[i1 - 1] == s3[i1 + i2 - 1] and f[i1 - 1][i2]
case2 = s2[i2 - 1] == s3[i1 + i2 - 1] and f[i1][i2 - 1]
f[i1][i2] = case1 or case2
return f[len1][len2]
```
### C++
```c++
class Solution {
public:
/**
* Determine whether s3 is formed by interleaving of s1 and s2.
* @param s1, s2, s3: As description.
* @return: true of false.
*/
bool isInterleave(string s1, string s2, string s3) {
int len1 = s1.size();
int len2 = s2.size();
int len3 = s3.size();
if (len3 != len1 + len2) return false;
vector<vector<bool> > f(1 + len1, vector<bool>(1 + len2, true));
// s1[i1 - 1] == s3[i1 + i2 - 1] && f[i1 - 1][i2]
for (int i = 1; i <= len1; ++i) {
f[i][0] = s1[i - 1] == s3[i - 1] && f[i - 1][0];
}
// s2[i2 - 1] == s3[i1 + i2 - 1] && f[i1][i2 - 1]
for (int i = 1; i <= len2; ++i) {
f[0][i] = s2[i - 1] == s3[i - 1] && f[0][i - 1];
}
// i1 >= 1, i2 >= 1
for (int i1 = 1; i1 <= len1; ++i1) {
for (int i2 = 1; i2 <= len2; ++i2) {
bool case1 = s1[i1 - 1] == s3[i1 + i2 - 1] && f[i1 - 1][i2];
bool case2 = s2[i2 - 1] == s3[i1 + i2 - 1] && f[i1][i2 - 1];
f[i1][i2] = case1 || case2;
}
}
return f[len1][len2];
}
};
```
### Java
```java
public class Solution {
/**
* Determine whether s3 is formed by interleaving of s1 and s2.
* @param s1, s2, s3: As description.
* @return: true or false.
*/
public boolean isInterleave(String s1, String s2, String s3) {
int len1 = (s1 == null) ? 0 : s1.length();
int len2 = (s2 == null) ? 0 : s2.length();
int len3 = (s3 == null) ? 0 : s3.length();
if (len3 != len1 + len2) return false;
boolean [][] f = new boolean[1 + len1][1 + len2];
f[0][0] = true;
// s1[i1 - 1] == s3[i1 + i2 - 1] && f[i1 - 1][i2]
for (int i = 1; i <= len1; i++) {
f[i][0] = s1.charAt(i - 1) == s3.charAt(i - 1) && f[i - 1][0];
}
// s2[i2 - 1] == s3[i1 + i2 - 1] && f[i1][i2 - 1]
for (int i = 1; i <= len2; i++) {
f[0][i] = s2.charAt(i - 1) == s3.charAt(i - 1) && f[0][i - 1];
}
// i1 >= 1, i2 >= 1
for (int i1 = 1; i1 <= len1; i1++) {
for (int i2 = 1; i2 <= len2; i2++) {
boolean case1 = s1.charAt(i1 - 1) == s3.charAt(i1 + i2 - 1) && f[i1 - 1][i2];
boolean case2 = s2.charAt(i2 - 1) == s3.charAt(i1 + i2 - 1) && f[i1][i2 - 1];
f[i1][i2] = case1 || case2;
}
}
return f[len1][len2];
}
}
```
### 源码分析
为后面递推方便,初始化时数组长度多加1,for 循环时需要注意边界(取到等号)。
### 复杂度分析
双重 for 循环,时间复杂度为 `$$O(n^2)$$`, 使用了二维矩阵,空间复杂度 `$$O(n^2)$$`. 其中空间复杂度可以优化。
## Reference
- soulmachine 的 Interleaving String 部分
- [Interleaving String 参考程序 Java/C++/Python](http://www.jiuzhang.com/solutions/interleaving-string/)
# Maximum Subarray
## Question
- leetcode: [Maximum Subarray | LeetCode OJ](https://leetcode.com/problems/maximum-subarray/)
- lintcode: [(41) Maximum Subarray](http://www.lintcode.com/en/problem/maximum-subarray/)
```
Given an array of integers,
find a contiguous subarray which has the largest sum.
Example
Given the array [−2,2,−3,4,−1,2,1,−5,3],
the contiguous subarray [4,−1,2,1] has the largest sum = 6.
Note
The subarray should contain at least one number.
Challenge
Can you do it in time complexity O(n)?
```
## 题解1 - 贪心
求最大子数组和,即求区间和的最大值,不同子区间共有约 `$$n^2$$` 中可能,遍历虽然可解,但是时间复杂度颇高。
这里首先介绍一种巧妙的贪心算法,用`sum`表示当前子数组和,`maxSum`表示求得的最大子数组和。当`sum <= 0`时,累加数组中的元素只会使得到的和更小,故此时应将此部分和丢弃,使用此时遍历到的数组元素替代。需要注意的是由于有`maxSum`更新`sum`, 故直接丢弃小于0的`sum`并不会对最终结果有影响。即不会漏掉前面的和比后面的元素大的情况。
### Java
```java
public class Solution {
/**
* @param nums: A list of integers
* @return: A integer indicate the sum of max subarray
*/
public int maxSubArray(ArrayList<Integer> nums) {
// -1 is not proper for illegal input
if (nums == null || nums.isEmpty()) return -1;
int sum = 0, maxSub = Integer.MIN_VALUE;
for (int num : nums) {
// drop negtive sum
sum = Math.max(sum, 0);
sum += num;
// update maxSub
maxSub = Math.max(maxSub, sum);
}
return maxSub;
}
}
```
### 源码分析
贪心的实现较为巧妙,需要`sum`和`maxSub`配合运作才能正常工作。
### 复杂度分析
遍历一次数组,时间复杂度 `$$O(n)$$`, 使用了几个额外变量,空间复杂度 `$$O(1)$$`.
## 题解2 - 动态规划1(区间和)
求最大/最小这种字眼往往都可以使用动态规划求解,此题为单序列动态规划。我们可以先求出到索引 i 的子数组和,然后用子数组和的最大值减去最小值,最后返回最大值即可。用这种动态规划需要注意初始化条件和求和顺序。
### Java
```java
public class Solution {
/**
* @param nums: A list of integers
* @return: A integer indicate the sum of max subarray
*/
public int maxSubArray(ArrayList<Integer> nums) {
// -1 is not proper for illegal input
if (nums == null || nums.isEmpty()) return -1;
int sum = 0, minSum = 0, maxSub = Integer.MIN_VALUE;
for (int num : nums) {
minSum = Math.min(minSum, sum);
sum += num;
maxSub = Math.max(maxSub, sum - minSum);
}
return maxSub;
}
}
```
### 源码分析
首先求得当前的最小子数组和,初始化为0,随后比较子数组和减掉最小子数组和的差值和最大区间和,并更新最大区间和。
### 复杂度分析
时间复杂度 `$$O(n)$$`, 使用了类似滚动数组的处理方式,空间复杂度 `$$O(1)$$`.
## 题解3 - 动态规划2(局部与全局)
这种动规的实现和题解1 的思想几乎一模一样,只不过这里用局部最大值和全局最大值两个数组来表示。
### Java
```java
public class Solution {
/**
* @param nums: A list of integers
* @return: A integer indicate the sum of max subarray
*/
public int maxSubArray(ArrayList<Integer> nums) {
// -1 is not proper for illegal input
if (nums == null || nums.isEmpty()) return -1;
int size = nums.size();
int[] local = new int[size];
int[] global = new int[size];
local[0] = nums.get(0);
global[0] = nums.get(0);
for (int i = 1; i < size; i++) {
// drop local[i - 1] < 0
local[i] = Math.max(nums.get(i), local[i - 1] + nums.get(i));
// update global with local
global[i] = Math.max(global[i - 1], local[i]);
}
return global[size - 1];
}
}
```
### 源码分析
由于局部最大值需要根据之前的局部值是否大于0进行更新,故方便起见初始化 local 和 global 数组的第一个元素为数组第一个元素。
### 复杂度分析
时间复杂度 `$$O(n)$$`, 空间复杂度也为 `$$O(n)$$`.
## Reference
- 《剑指 Offer》第五章
- [Maximum Subarray 参考程序 Java/C++/Python](http://www.jiuzhang.com/solutions/maximum-subarray/)
# Maximum Subarray II
## Question
- lintcode: [(42) Maximum Subarray II](http://www.lintcode.com/en/problem/maximum-subarray-ii/)
```
Given an array of integers,
find two non-overlapping subarrays which have the largest sum.
The number in each subarray should be contiguous.
Return the largest sum.
Example
For given [1, 3, -1, 2, -1, 2],
the two subarrays are [1, 3] and [2, -1, 2] or [1, 3, -1, 2] and [2],
they both have the largest sum 7.
Note
The subarray should contain at least one number
Challenge
Can you do it in time complexity O(n) ?
```
## 题解
严格来讲这道题这道题也可以不用动规来做,这里还是采用经典的动规解法。[Maximum Subarray](http://algorithm.yuanbin.me/zh-hans/dynamic_programming/maximum_subarray.html) 中要求的是数组中最大子数组和,这里是求不相重叠的两个子数组和的和最大值,做过买卖股票系列的题的话这道题就非常容易了,既然我们已经求出了单一子数组的最大和,那么我们使用隔板法将数组一分为二,分别求这两段的最大子数组和,求相加后的最大值即为最终结果。隔板前半部分的最大子数组和很容易求得,但是后半部分难道需要将索引从0开始依次计算吗?NO!!! 我们可以采用从后往前的方式进行遍历,这样时间复杂度就大大降低了。
### Java
```java
public class Solution {
/**
* @param nums: A list of integers
* @return: An integer denotes the sum of max two non-overlapping subarrays
*/
public int maxTwoSubArrays(ArrayList<Integer> nums) {
// -1 is not proper for illegal input
if (nums == null || nums.isEmpty()) return -1;
int size = nums.size();
// get max sub array forward
int[] maxSubArrayF = new int[size];
forwardTraversal(nums, maxSubArrayF);
// get max sub array backward
int[] maxSubArrayB = new int[size];
backwardTraversal(nums, maxSubArrayB);
// get maximum subarray by iteration
int maxTwoSub = Integer.MIN_VALUE;
for (int i = 0; i < size - 1; i++) {
// non-overlapping
maxTwoSub = Math.max(maxTwoSub, maxSubArrayF[i] + maxSubArrayB[i + 1]);
}
return maxTwoSub;
}
private void forwardTraversal(List<Integer> nums, int[] maxSubArray) {
int sum = 0, minSum = 0, maxSub = Integer.MIN_VALUE;
int size = nums.size();
for (int i = 0; i < size; i++) {
minSum = Math.min(minSum, sum);
sum += nums.get(i);
maxSub = Math.max(maxSub, sum - minSum);
maxSubArray[i] = maxSub;
}
}
private void backwardTraversal(List<Integer> nums, int[] maxSubArray) {
int sum = 0, minSum = 0, maxSub = Integer.MIN_VALUE;
int size = nums.size();
for (int i = size - 1; i >= 0; i--) {
minSum = Math.min(minSum, sum);
sum += nums.get(i);
maxSub = Math.max(maxSub, sum - minSum);
maxSubArray[i] = maxSub;
}
}
}
```
### 源码分析
前向搜索和逆向搜索我们使用私有方法实现,可读性更高。注意是求非重叠子数组和,故求`maxTwoSub`时i 的范围为`0, size - 2`, 前向数组索引为 i, 后向索引为 i + 1.
### 复杂度分析
前向和后向搜索求得最大子数组和,时间复杂度 `$$O(2n)=O(n)$$`, 空间复杂度 `$$O(n)$$`. 遍历子数组和的数组求最终两个子数组和的最大值,时间复杂度 `$$O(n)$$`. 故总的时间复杂度为 `$$O(n)$$`, 空间复杂度 `$$O(n)$$`.
# Longest Increasing Continuous subsequence
## Question
- lintcode: [(397) Longest Increasing Continuous subsequence](http://www.lintcode.com/en/problem/longest-increasing-continuous-subsequence/)
### Problem Statement
Give you an integer array (index from 0 to n-1, where n is the size of this array),find the longest increasing continuous subsequence in this array. (The definition of the longest increasing continuous subsequence here can be from right to left or from left to right)
#### Example
For `[5, 4, 2, 1, 3]`, the LICS is `[5, 4, 2, 1]`, return 4.
For `[5, 1, 2, 3, 4]`, the LICS is `[1, 2, 3, 4]`, return 4.
#### Note
O(n) time and O(1) extra space.
## 题解1
题目只要返回最大长度,注意此题中的连续递增指的是双向的,即可递增也可递减。简单点考虑可分两种情况,一种递增,另一种递减,跟踪最大递增长度,最后返回即可。也可以在一个 for 循环中搞定,只不过需要增加一布尔变量判断之前是递增还是递减。
### Java - two for loop
```java
public class Solution {
/**
* @param A an array of Integer
* @return an integer
*/
public int longestIncreasingContinuousSubsequence(int[] A) {
if (A == null || A.length == 0) return 0;
int lics = 1, licsMax = 1, prev = A[0];
// ascending order
for (int a : A) {
lics = (prev < a) ? lics + 1 : 1;
licsMax = Math.max(licsMax, lics);
prev = a;
}
// reset
lics = 1;
prev = A[0];
// descending order
for (int a : A) {
lics = (prev > a) ? lics + 1 : 1;
licsMax = Math.max(licsMax, lics);
prev = a;
}
return licsMax;
}
}
```
### Java - one for loop
```java
public class Solution {
/**
* @param A an array of Integer
* @return an integer
*/
public int longestIncreasingContinuousSubsequence(int[] A) {
if (A == null || A.length == 0) return 0;
int start = 0, licsMax = 1;
boolean ascending = false;
for (int i = 1; i < A.length; i++) {
// ascending order
if (A[i - 1] < A[i]) {
if (!ascending) {
ascending = true;
start = i - 1;
}
} else if (A[i - 1] > A[i]) {
// descending order
if (ascending) {
ascending = false;
start = i - 1;
}
} else {
start = i - 1;
}
licsMax = Math.max(licsMax, i - start + 1);
}
return licsMax;
}
}
```
### 源码分析
使用两个 for 循环时容易在第二次循环忘记重置。使用一个 for 循环时使用下标来计数较为方便。
### 复杂度分析
时间复杂度 `$$O(n)$$`, 空间复杂度 `$$O(1)$$`.
## 题解2 - 动态规划
除了题解1 中分两种情况讨论外,我们还可以使用动态规划求解。状态转移方程容易得到——要么向右增长,要么向左增长。相应的状态`dp[i]`即为从索引 i 出发所能得到的最长连续递增子序列。这样就避免了分两个循环处理了,这种思想对此题的 follow up 有特别大的帮助。
### Java
```java
public class Solution {
/**
* @param A an array of Integer
* @return an integer
*/
public int longestIncreasingContinuousSubsequence(int[] A) {
if (A == null || A.length == 0) return 0;
int lics = 0;
int[] dp = new int[A.length];
for (int i = 0; i < A.length; i++) {
if (dp[i] == 0) {
lics = Math.max(lics, dfs(A, i, dp));
}
}
return lics;
}
private int dfs(int[] A, int i, int[] dp) {
if (dp[i] != 0) return dp[i];
// increasing from xxx to left, right
int left = 0, right = 0;
// increasing from right to left
if (i > 0 && A[i - 1] > A[i]) left = dfs(A, i - 1, dp);
// increasing from left to right
if (i + 1 < A.length && A[i + 1] > A[i]) right = dfs(A, i + 1, dp);
dp[i] = 1 + Math.max(left, right);
return dp[i];
}
}
```
### 源码分析
dfs 中使用记忆化存储避免重复递归,分左右两个方向递增,最后取较大值。这种方法对于数组长度较长时栈会溢出。
### 复杂度分析
时间复杂度 `$$O(n)$$`, 空间复杂度 `$$(n)$$`.
## Reference
- [Lintcode: Longest Increasing Continuous subsequence | codesolutiony](https://codesolutiony.wordpress.com/2015/05/25/lintcode-longest-increasing-continuous-subsequence/)
# Longest Increasing Continuous subsequence II
## Question
- lintcode: [(398) Longest Increasing Continuous subsequence II](http://www.lintcode.com/en/problem/longest-increasing-continuous-subsequence-ii/)
### Problem Statement
Give you an integer matrix (with row size n, column size m),find the longest increasing continuous subsequence in this matrix. (The definition of the longest increasing continuous subsequence here can start at any row or column and go up/down/right/left any direction).
#### Example
Given a matrix:
```
[
[1 ,2 ,3 ,4 ,5],
[16,17,24,23,6],
[15,18,25,22,7],
[14,19,20,21,8],
[13,12,11,10,9]
]
```
return 25
#### Challenge
O(nm) time and memory.
## 题解
题 [Longest Increasing Continuous subsequence](http://algorithm.yuanbin.me/zh-hans/dynamic_programming/longest_increasing_continuous_subsequence.html) 的 follow up, 变成一道比较难的题了。从之前的一维 DP 变为现在的二维 DP,自增方向可从上下左右四个方向进行。需要结合 DFS 和动态规划两大重量级武器。
根据二维 DP 的通用方法,我们首先需要关注状态及状态转移方程,状态转移方程相对明显一点,即上下左右四个方向的元素值递增关系,根据此转移方程,**不难得到我们需要的状态为`dp[i][j]`——表示从坐标`(i, j)`出发所得到的最长连续递增子序列。**根据状态及转移方程我们不难得到初始化应该为1或者0,这要视具体情况而定。
这里我们可能会纠结的地方在于自增的方向,平时见到的二维 DP 自增方向都是从小到大,而这里的增长方向却不一定。**这里需要突破思维定势的地方在于我们可以不理会从哪个方向自增,只需要处理自增和边界条件即可。**根据转移方程可以知道使用递归来解决是比较好的方式,这里关键的地方就在于递归的终止条件。比较容易想到的一个递归终止条件自然是当前元素是整个矩阵中的最大元素,索引朝四个方向出发都无法自增,因此返回1. 另外可以预想到的是如果不进行记忆化存储,递归过程中自然会产生大量重复计算,根据记忆化存储的通用方法,这里可以以结果是否为0(初始化为0时)来进行区分。
### Java
```java
public class Solution {
/**
* @param A an integer matrix
* @return an integer
*/
public int longestIncreasingContinuousSubsequenceII(int[][] A) {
if (A == null || A.length == 0 || A[0].length == 0) return 0;
int lics = 0;
int[][] dp = new int[A.length][A[0].length];
for (int row = 0; row < A.length; row++) {
for (int col = 0; col < A[0].length; col++) {
if (dp[row][col] == 0) {
lics = Math.max(lics, dfs(A, row, col, dp));
}
}
}
return lics;
}
private int dfs(int[][] A, int row, int col, int[][] dp) {
if (dp[row][col] != 0) {
return dp[row][col];
}
// increasing from xxx to up, down, left, right
int up = 0, down = 0, left = 0, right = 0;
// increasing from down to up
if (row > 0 && A[row - 1][col] > A[row][col]) {
up = dfs(A, row - 1, col, dp);
}
// increasing from up to down
if (row + 1 < A.length && A[row + 1][col] > A[row][col]) {
down = dfs(A, row + 1, col, dp);
}
// increasing from right to left
if (col > 0 && A[row][col - 1] > A[row][col]) {
left = dfs(A, row, col - 1, dp);
}
// increasing from left to right
if (col + 1 < A[0].length && A[row][col + 1] > A[row][col]) {
right = dfs(A, row, col + 1, dp);
}
// return maximum of up, down, left, right
dp[row][col] = 1 + Math.max(Math.max(up, down), Math.max(left, right));
return dp[row][col];
}
}
```
### 源码分析
dfs 递归最深一层即矩阵中最大的元素处,然后逐层返回。这道题对状态`dp[i][j]`的理解很重要,否则会陷入对上下左右四个方向的迷雾中。
### 复杂度分析
由于引入了记忆化存储,时间复杂度逼近 `$$O(mn)$$`, 空间复杂度 `$$O(mn)$$`.
## Reference
- [Lintcode: Longest Increasing Continuous subsequence II | codesolutiony](https://codesolutiony.wordpress.com/2015/05/25/lintcode-longest-increasing-continuous-subsequence-ii/)
# Maximal Square
## Question
- leetcode: [Maximal Square | LeetCode OJ](https://leetcode.com/problems/maximal-square/)
- lintcode: [Maximal Square](http://www.lintcode.com/en/problem/maximal-square/)
### Problem Statement
Given a 2D binary matrix filled with 0's and 1's, find the largest square
containing all 1's and return its area.
#### Example
For example, given the following matrix:
1 0 1 0 0
1 0 1 1 1
1 1 1 1 1
1 0 0 1 0
Return `4`.
## 题解
第一次遇到这个题是在嘀嘀打车现场面试中,首先把题意理解错了,而且动态规划的状态定义错了,没搞出来... 所以说明确题意非常重要!
题意是问矩阵中子正方形(不是长方形)的最大面积。也就是说我们的思路应该是去判断正方形这一子状态以及相应的状态转移方程。正方形的可能有边长为1,2,3等等... 边长为2的可由边长为1 的转化而来,边长为3的可由边长为2的转化而来。那么问题来了,边长的转化是如何得到的?边长由1变为2容易得知,即左上、左边以及上边的值均为1,边长由2变为3这一状态转移方程不容易直接得到。直观上来讲,我们需要边长为3的小正方形内格子中的数均为1. **抽象来讲也可以认为边长为3的正方形是由若干个边长为2的正方形堆叠得到的,这就是这道题的核心状态转移方程。**
令状态`dp[i][j]`表示为从左上角(不一定是`(0,0)`)到矩阵中坐标`(i,j)`为止能构成正方形的最大边长。那么有如下状态转移方程:
```
dp[i][j] = min(dp[i-1][j-1], dp[i-1][j], dp[i][j-1]) + 1; if matrix[i][j] == 1
dp[i][j] = 0; if matrix[i][j] = 0
```
初始化直接用第一行和第一列即可。
### Java
```java
public class Solution {
    /**
     * Return the area of the largest all-ones square in a 0/1 matrix.
     *
     * dp[i][j] = side length of the largest all-ones square whose
     * bottom-right corner is at (i, j); when matrix[i][j] == 1 it is
     * 1 + min of the three neighbors (left, up, up-left).
     *
     * @param matrix: a matrix of 0 and 1
     * @return: area (side * side) of the largest square of 1s
     */
    public int maxSquare(int[][] matrix) {
        int side = 0;
        if (matrix == null || matrix.length == 0 || matrix[0].length == 0) {
            return side;
        }
        final int ROW = matrix.length, COL = matrix[0].length;
        int[][] dp = new int[ROW][COL];
        // First column / first row: a lone 1 forms a square of side 1.
        // BUGFIX: only count side 1 when the cell actually holds a 1;
        // the previous code set side = 1 unconditionally, returning 1
        // for an all-zero matrix.
        for (int i = 0; i < ROW; i++) {
            dp[i][0] = matrix[i][0];
            side = Math.max(side, dp[i][0]);
        }
        for (int j = 0; j < COL; j++) {
            dp[0][j] = matrix[0][j];
            side = Math.max(side, dp[0][j]);
        }
        for (int i = 1; i < ROW; i++) {
            for (int j = 1; j < COL; j++) {
                if (matrix[i][j] == 1) {
                    dp[i][j] = 1 + minTri(dp[i-1][j-1], dp[i-1][j], dp[i][j-1]);
                    side = Math.max(side, dp[i][j]);
                }
            }
        }
        return side * side;
    }

    // Minimum of three ints.
    private int minTri(int a, int b, int c) {
        return Math.min(a, Math.min(b, c));
    }
}
```
### 源码分析
经典的动规实现三步走。先初始化,后转移方程,最后对结果做必要的处理(边长 side 的更新)。
### 复杂度分析
使用了二维矩阵,空间复杂度 `$$O(mn)$$`. 遍历一次原矩阵,时间复杂度 `$$O(mn)$$`.
### Follow up
题目问的是子正方形,如果问的是矩形呢?
转移方程仍然可以不变,但是遍历完之后需要做进一步处理,比如如果不是正方形的话可能会出现多个相同的边长值,此时需要对相同的边长值递增(按行或者按列),相乘后保存,最后取最大输出。
## Reference
- [Maximum size square sub-matrix with all 1s - GeeksforGeeks](http://www.geeksforgeeks.org/maximum-size-sub-matrix-with-all-1s-in-a-binary-matrix/)
- [maximal-square/ 参考程序 Java/C++/Python](http://www.jiuzhang.com/solutions/maximal-square/) - 空间复杂度可进一步优化(只保存最近的两行即可)
| {
"pile_set_name": "Github"
} |
# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
#
# Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software
# Foundation, Inc.
# Written by Scott James Remnant, 2004.
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# serial 5 lt~obsolete.m4
# These exist entirely to fool aclocal when bootstrapping libtool.
#
# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN),
# which have later been changed to m4_define as they aren't part of the
# exported API, or moved to Autoconf or Automake where they belong.
#
# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
# using a macro with the same name in our local m4/libtool.m4 it'll
# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
# and doesn't know about Autoconf macros at all.)
#
# So we provide this file, which has a silly filename so it's always
# included after everything else. This provides aclocal with the
# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
# because those macros already exist, or will be overwritten later.
# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
#
# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
# Yes, that means every name once taken will need to remain here until
# we give up compatibility with versions before 1.7, at which point
# we need to keep only those names which we still refer to.
# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
# Each line below provides an empty AC_DEFUN stub for a withdrawn libtool
# macro, guarded by m4_ifndef so a real definition is never clobbered.
m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])])
m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])])
m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])])
m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])])
| {
"pile_set_name": "Github"
} |
/* annoStreamDbFactorSource -- factorSource track w/related tables */
#include "annoStreamDbFactorSource.h"
#include "annoStreamDb.h"
#include "factorSource.h"
#include "hdb.h"
#include "sqlNum.h"
/* AutoSql spec for the rows this streamer emits: the standard factorSource
 * columns plus per-experiment cellType and treatment list columns. */
static char *asfsAutoSqlString =
"table factorSourcePlus"
"\"factorSourcePlus: Peaks clustered by factor w/normalized scores plus cell type and treatment\""
" ("
" string chrom; \"Chromosome\""
" uint chromStart; \"Start position in chrom\""
" uint chromEnd; \"End position in chrom\""
" string name; \"factor that has a peak here\""
" uint score; \"Score from 0-1000\""
" uint expCount; \"Number of experiment values\""
" uint[expCount] expNums; \"Comma separated list of experiment numbers\""
" float[expCount] expScores; \"Comma separated list of experiment scores\""
" string[expCount] cellType; \"Comma separated list of experiment cell types\""
" string[expCount] treatment; \"Comma separated list of experiment treatments\""
" )";

/* Total column count of the autoSql above (factorSource's 8 + 2 added). */
#define FACTORSOURCEPLUS_NUM_COLS 10

struct annoStreamDbFactorSource
{
struct annoStreamer streamer;	// Parent class members & methods (external interface)
// Private members
struct annoStreamer *mySource;	// Internal source of track table rows
// Data from related tables
int expCount;		// Number of experiments whose results were clustered
char **expCellType;	// Array[expCount] of cellType used in each experiment
char **expTreatment;	// Array[expCount] of treatment used in each experiment
};
struct asObject *annoStreamDbFactorSourceAsObj()
/* Return an autoSql object that describes fields of a joining query on a factorSource table
 * and its inputs. */
{
return asParseText(asfsAutoSqlString);
}
static void asdfsSetAutoSqlObject(struct annoStreamer *self, struct asObject *asObj)
/* Abort if something external tries to change the autoSql object.
 * The output schema of this streamer is fixed (factorSourcePlus). */
{
errAbort("annoStreamDbFactorSource %s: can't change autoSqlObject.",
	 ((struct annoStreamer *)self)->name);
}
static void asdfsSetRegion(struct annoStreamer *sSelf, char *chrom, uint rStart, uint rEnd)
/* Record the query region on this streamer and propagate it to the wrapped source. */
{
struct annoStreamDbFactorSource *self = (struct annoStreamDbFactorSource *)sSelf;
struct annoStreamer *inner = self->mySource;
annoStreamerSetRegion(sSelf, chrom, rStart, rEnd);
inner->setRegion(inner, chrom, rStart, rEnd);
}
static char *commaSepFromExpData(char **expAttrs, int *expNums, uint expCount, struct lm *lm)
/* Look up experiment attribute strings by experiment numbers; return a comma-separated string
 * of experiment attributes, allocated using lm.
 * The result ends with a trailing comma, matching the style of the other
 * comma-separated list columns built by this streamer. */
{
int i;
int len = 0, offset = 0;
// First pass: total length of all attributes plus one comma each.
for (i = 0;  i < expCount;  i++)
    len += (strlen(expAttrs[expNums[i]]) + 1);
char *str = lmAlloc(lm, len + 1);
// Second pass: append each attribute followed by a comma.
for (i = 0;  i < expCount;  i++)
    {
    char *attr = expAttrs[expNums[i]];
    safef(str + offset, len + 1 - offset, "%s,", attr);
    offset += strlen(attr) + 1;
    }
return str;
}
INLINE void getCommaSepInts(char *commaSep, int *values, int expectedCount)
/* Parse comma-separated ints into values[].  This is like sqlSignedStaticArray,
 * but we give it an expected count and it's thread-safe because it doesn't use
 * static variables.
 * NOTE: destructive -- each ',' in commaSep is overwritten with '\0'.
 * Aborts unless exactly expectedCount values are found (a trailing comma
 * yields an empty final token, which terminates the loop and is not counted). */
{
char *s = commaSep, *e = NULL;
int count;
for (count = 0;  isNotEmpty(s);  count++, s = e)
    {
    e = strchr(s, ',');
    if (e != NULL)
	*e++ = 0;
    // Guard against writing past values[] if input has too many tokens;
    // the mismatch is reported below.
    if (count < expectedCount)
	values[count] = sqlSigned(s);
    }
if (count != expectedCount)
    errAbort("getCommaSepInts: expected %d values but found %d", expectedCount, count);
}
static void factorSourceToFactorSourcePlus(struct annoStreamDbFactorSource *self,
					   char **fsWords, char **fspWords, struct lm *lm)
/* Copy fsWords into fspWords and add columns for cellTypes and treatments corresponding to
 * expNums.  fspWords must have room for FACTORSOURCEPLUS_NUM_COLS entries. */
{
// Parse out experiment IDs from expNums column (column 6; count in column 5).
uint expCount = sqlUnsigned(fsWords[5]);
int expNums[expCount];
getCommaSepInts(fsWords[6], expNums, expCount);
// Copy factorSource columns, then add experiment attribute columns.
int i;
for (i = 0;  i < FACTORSOURCE_NUM_COLS;  i++)
    fspWords[i] = fsWords[i];
fspWords[i++] = commaSepFromExpData(self->expCellType, expNums, expCount, lm);
fspWords[i++] = commaSepFromExpData(self->expTreatment, expNums, expCount, lm);
// Sanity check that the column layout still matches the autoSql spec.
if (i != FACTORSOURCEPLUS_NUM_COLS)
    errAbort("annoStreamDbFactorSource %s: expected to make %d columns but made %d",
	     self->streamer.name, FACTORSOURCEPLUS_NUM_COLS, i);
}
static struct annoRow *asdfsNextRow(struct annoStreamer *sSelf, char *minChrom, uint minEnd,
				    struct lm *lm)
/* Join experiment data with expNums from track table and apply filters.
 * Returns the next passing row as factorSourcePlus words, or NULL at end of data. */
{
struct annoStreamDbFactorSource *self = (struct annoStreamDbFactorSource *)sSelf;
char **fspWords;
lmAllocArray(lm, fspWords, FACTORSOURCEPLUS_NUM_COLS);
struct annoRow *fsRow;
boolean rightJoinFail = FALSE;
while ((fsRow = self->mySource->nextRow(self->mySource, minChrom, minEnd, lm)) != NULL)
    {
    // Always build the joined factorSourcePlus row: the annoRow returned below
    // is made from fspWords, so it must be populated even when there are no
    // filters.  (BUGFIX: previously the no-filter path broke out of the loop
    // without filling fspWords, returning a row of empty columns.)
    char **fsWords = fsRow->data;
    factorSourceToFactorSourcePlus(self, fsWords, fspWords, lm);
    // If there are filters on experiment attributes, apply them, otherwise just use this row.
    if (sSelf->filters)
	{
	boolean fails = annoFilterRowFails(sSelf->filters, fspWords, FACTORSOURCEPLUS_NUM_COLS,
					   &rightJoinFail);
	// If this row passes the filter, or fails but is rightJoin, then we're done looking.
	if (!fails || rightJoinFail)
	    break;
	}
    else
	// no filtering to do, just use this row
	break;
    }
if (fsRow != NULL)
    return annoRowFromStringArray(fsRow->chrom, fsRow->start, fsRow->end, rightJoinFail,
				  fspWords, FACTORSOURCEPLUS_NUM_COLS, lm);
else
    return NULL;
}
static void getExperimentData(struct annoStreamDbFactorSource *self, char *db,
			      char *sourceTable, char *inputsTable)
/* Join two small tables to relate experiment IDs from the track table's expNums column
 * to experiment attributes cellType and treatment.
 * Fills in self->expCount, self->expCellType and self->expTreatment,
 * indexed by experiment id. */
{
struct sqlConnection *conn = hAllocConn(db);
// One entry per row of sourceTable; ids are expected to be 0..expCount-1.
self->expCount = sqlRowCount(conn, sourceTable);
AllocArray(self->expCellType, self->expCount);
AllocArray(self->expTreatment, self->expCount);
struct dyString *query = sqlDyStringCreate("select id, cellType, treatment "
					   "from %s, %s where %s.description = %s.source",
					   sourceTable, inputsTable, sourceTable, inputsTable);
struct sqlResult *sr = sqlGetResult(conn, query->string);
char **row;
while ((row = sqlNextRow(sr)) != NULL)
    {
    int id = sqlSigned(row[0]);
    // Guard against ids outside [0, expCount) before using them as indices.
    if (id < 0 || id >= self->expCount)
	errAbort("annoStreamDbFactorSource %s: found out-of-range id %d in %s (expected [0-%d])",
		 ((struct annoStreamer *)self)->name, id, sourceTable, self->expCount - 1);
    self->expCellType[id] = cloneString(row[1]);
    self->expTreatment[id] = cloneString(row[2]);
    }
sqlFreeResult(&sr);
hFreeConn(&conn);
}
static void asdfsClose(struct annoStreamer **pSSelf)
/* Free up state: close the wrapped source, free the per-experiment attribute
 * arrays, then free the streamer itself. */
{
if (pSSelf == NULL)
    return;
struct annoStreamDbFactorSource *self = *(struct annoStreamDbFactorSource **)pSSelf;
self->mySource->close(&(self->mySource));
int i;
for (i = 0;  i < self->expCount;  i++)
    {
    freeMem(self->expCellType[i]);
    freeMem(self->expTreatment[i]);
    }
freez(&self->expCellType);
freez(&self->expTreatment);
annoStreamerFree(pSSelf);
}
struct annoStreamer *annoStreamDbFactorSourceNew(char *db, char *trackTable, char *sourceTable,
						 char *inputsTable, struct annoAssembly *aa,
						 int maxOutRows)
/* Create an annoStreamer (subclass) object using three database tables:
 * trackTable: the table for a track with type factorSource (bed5 + exp{Count,Nums,Scores})
 * sourceTable: trackTable's tdb setting sourceTable; expNums -> source name "cellType+lab+antibody"
 * inputsTable: trackTable's tdb setting inputTrackTable; source name -> cellType, treatment, etc.
 */
{
struct annoStreamDbFactorSource *self;
AllocVar(self);
struct annoStreamer *streamer = &(self->streamer);
// Set up external streamer interface: emits factorSourcePlus word rows.
annoStreamerInit(streamer, aa, annoStreamDbFactorSourceAsObj(), trackTable);
streamer->rowType = arWords;
// Get internal streamer for trackTable (plain factorSource rows).
self->mySource = annoStreamDbNew(db, trackTable, aa, factorSourceAsObj(), maxOutRows);
// Slurp in data from small related tables
getExperimentData(self, db, sourceTable, inputsTable);
// Override methods that need to pass through to internal source:
streamer->setAutoSqlObject = asdfsSetAutoSqlObject;
streamer->setRegion = asdfsSetRegion;
streamer->nextRow = asdfsNextRow;
streamer->close = asdfsClose;
return (struct annoStreamer *)self;
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Loading screen: app toolbar plus a centered indeterminate circular
     progress spinner tinted with the theme's primary color. -->
<androidx.coordinatorlayout.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:layout_width="match_parent"
    android:layout_height="match_parent">
    <include layout="@layout/toolbar_layout" />
    <!-- Scrolling-view behavior keeps this content below the app bar. -->
    <RelativeLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        app:layout_behavior="@string/appbar_scrolling_view_behavior">
        <fr.castorflex.android.circularprogressbar.CircularProgressBar
            android:layout_width="48dp"
            android:layout_centerInParent="true"
            android:layout_height="48dp"
            android:indeterminate="true"
            app:cpb_color="?colorPrimary"
            app:cpb_max_sweep_angle="300"
            app:cpb_min_sweep_angle="10"
            app:cpb_rotation_speed="1.5"
            app:cpb_stroke_width="4dp"
            app:cpb_sweep_speed="1.0" />
    </RelativeLayout>
</androidx.coordinatorlayout.widget.CoordinatorLayout> | {
"pile_set_name": "Github"
} |
// Copyright 2010 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Implements 64-bit multiword CRC using MMX built-in functions.
#include "generic_crc.h"
#if CRCUTIL_USE_ASM && HAVE_I386 && HAVE_MMX
namespace crcutil {
template<> uint64 GenericCrc<uint64, uint64, uint64, 4>::CrcMultiwordI386Mmx(
const void *data, size_t bytes, const uint64 &start)
const GCC_OMIT_FRAME_POINTER;
#if !defined(_MSC_VER)
// Dispatcher: short inputs go byte-at-a-time; anything longer than 7 bytes
// uses the MMX multiword implementation.
template<> uint64 GenericCrc<uint64, uint64, uint64, 4>::CrcMultiword(
    const void *data,
    size_t bytes,
    const uint64 &start) const {
  if (bytes <= 7) {
    const uint8 *src = static_cast<const uint8 *>(data);
    // Canonize on entry and exit, per this library's CRC convention.
    uint64 crc = start ^ Base().Canonize();
    for (const uint8 *end = src + bytes; src < end; ++src) {
      CRC_BYTE(this, crc, *src);
    }
    return (crc ^ Base().Canonize());
  }
  return CrcMultiwordI386Mmx(data, bytes, start);
}
#else
#pragma warning(push)
// CL: uninitialized local variable 'crc1' used
// Wrong: crc1 = XOR(crc1, crc1) sets it to 0.
#pragma warning(disable: 4700)
#pragma warning(disable: 4619) // there is no warning number '592'
// ICL: variable "crc1" is used before its value is set
// Wrong: crc1 = XOR(crc1, crc1) sets it to 0.
#pragma warning(disable: 592)
#endif // !defined(_MSC_VER)
// Reinterpret a byte address as a pointer to 64-bit MMX words.
#define MM64(adr) reinterpret_cast<const __m64 *>(adr)
// Interleaved lookup table for the given input-byte position.
#define MM64_TABLE(byte) MM64(crc_word_interleaved_[byte])
// Fold one 64-bit input word "buf" into the running CRC "crc" using the
// eight per-byte crc_word_ tables (table-driven "slicing" update).
#define CRC_WORD_MMX(this, crc, buf) do { \
  buf = _mm_xor_si64(buf, crc); \
  uint32 tmp = static_cast<uint32>(_mm_cvtsi64_si32(buf)); \
  buf = _mm_srli_si64(buf, 32); \
  crc = MM64(crc_word_[0])[TO_BYTE(tmp)]; \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[1])[TO_BYTE(tmp)]); \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[2])[TO_BYTE(tmp)]); \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[3])[tmp]); \
  tmp = static_cast<uint32>(_mm_cvtsi64_si32(buf)); \
  crc = _mm_xor_si64(crc, MM64(crc_word_[4])[TO_BYTE(tmp)]); \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[5])[TO_BYTE(tmp)]); \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[6])[TO_BYTE(tmp)]); \
  tmp >>= 8; \
  crc = _mm_xor_si64(crc, MM64(crc_word_[7])[tmp]); \
} while (0)
template<> uint64 GenericCrc<uint64, uint64, uint64, 4>::CrcMultiwordI386Mmx(
    const void *data, size_t bytes, const uint64 &start) const {
  const uint8 *src = static_cast<const uint8 *>(data);
  const uint8 *end = src + bytes;
  uint64 crc = start ^ Base().Canonize();
  // Consume leading bytes one at a time until src is uint64-aligned
  // (macro may adjust src and crc in place).
  ALIGN_ON_WORD_BOUNDARY_IF_NEEDED(bytes, this, src, end, crc, uint64);
  if (src >= end) {
    return (crc ^ Base().Canonize());
  }
  // Process 4 registers of sizeof(uint64) bytes at once.
  // Four independent CRC lanes (crc0..crc3) are carried in parallel to hide
  // table-lookup latency; they are merged back into one CRC after the loop.
  bytes = static_cast<size_t>(end - src) & ~(4*8 - 1);
  if (bytes > 4*8) {
    const uint8 *stop = src + bytes - 4*8;
    union {
      __m64 m64;
      uint64 u64;
    } temp;
    __m64 crc0;
    __m64 crc1;
    __m64 crc2;
    __m64 crc3;
    __m64 buf0 = MM64(src)[0];
    __m64 buf1 = MM64(src)[1];
    __m64 buf2 = MM64(src)[2];
    __m64 buf3 = MM64(src)[3];
    // Lane 0 starts from the incoming CRC; lanes 1..3 start at zero.
    temp.u64 = crc;
    crc0 = temp.m64;
#if defined(__GNUC__) && !GCC_VERSION_AVAILABLE(4, 4)
    // There is no way to suppress a warning in GCC;
    // generate extra assignments.
    temp.u64 = 0;
    crc1 = temp.m64;
    crc2 = temp.m64;
    crc3 = temp.m64;
#else
    // Self-XOR zeroes the registers (triggers "used before set" warnings,
    // suppressed above for MSVC/ICL).
    crc1 = _mm_xor_si64(crc1, crc1);
    crc2 = _mm_xor_si64(crc2, crc2);
    crc3 = _mm_xor_si64(crc3, crc3);
#endif  // defined(__GNUC__) && !GCC_VERSION_AVAILABLE(4, 4)
    do {
      PREFETCH(src);
      src += 4*8;
      buf0 = _mm_xor_si64(buf0, crc0);
      buf1 = _mm_xor_si64(buf1, crc1);
      buf2 = _mm_xor_si64(buf2, crc2);
      buf3 = _mm_xor_si64(buf3, crc3);
      uint32 tmp0 = static_cast<uint32>(_mm_cvtsi64_si32(buf0));
      uint32 tmp1 = static_cast<uint32>(_mm_cvtsi64_si32(buf1));
      uint32 tmp2 = static_cast<uint32>(_mm_cvtsi64_si32(buf2));
      uint32 tmp3 = static_cast<uint32>(_mm_cvtsi64_si32(buf3));
      buf0 = _mm_srli_si64(buf0, 32);
      buf1 = _mm_srli_si64(buf1, 32);
      buf2 = _mm_srli_si64(buf2, 32);
      buf3 = _mm_srli_si64(buf3, 32);
      crc0 = MM64_TABLE(0)[TO_BYTE(tmp0)];
      tmp0 >>= 8;
      crc1 = MM64_TABLE(0)[TO_BYTE(tmp1)];
      tmp1 >>= 8;
      crc2 = MM64_TABLE(0)[TO_BYTE(tmp2)];
      tmp2 >>= 8;
      crc3 = MM64_TABLE(0)[TO_BYTE(tmp3)];
      tmp3 >>= 8;
// Process input byte "byte" of all four lanes through the interleaved tables.
#define XOR(byte) do { \
      crc0 = _mm_xor_si64(crc0, MM64_TABLE(byte)[TO_BYTE(tmp0)]); \
      tmp0 >>= 8; \
      crc1 = _mm_xor_si64(crc1, MM64_TABLE(byte)[TO_BYTE(tmp1)]); \
      tmp1 >>= 8; \
      crc2 = _mm_xor_si64(crc2, MM64_TABLE(byte)[TO_BYTE(tmp2)]); \
      tmp2 >>= 8; \
      crc3 = _mm_xor_si64(crc3, MM64_TABLE(byte)[TO_BYTE(tmp3)]); \
      tmp3 >>= 8; \
} while (0)
      XOR(1);
      XOR(2);
      crc0 = _mm_xor_si64(crc0, MM64_TABLE(3)[tmp0]);
      tmp0 = static_cast<uint32>(_mm_cvtsi64_si32(buf0));
      crc1 = _mm_xor_si64(crc1, MM64_TABLE(3)[tmp1]);
      tmp1 = static_cast<uint32>(_mm_cvtsi64_si32(buf1));
      crc2 = _mm_xor_si64(crc2, MM64_TABLE(3)[tmp2]);
      tmp2 = static_cast<uint32>(_mm_cvtsi64_si32(buf2));
      crc3 = _mm_xor_si64(crc3, MM64_TABLE(3)[tmp3]);
      tmp3 = static_cast<uint32>(_mm_cvtsi64_si32(buf3));
      XOR(4);
      XOR(5);
      XOR(6);
#undef XOR
      crc0 = _mm_xor_si64(crc0, MM64_TABLE(sizeof(uint64) - 1)[tmp0]);
      buf0 = MM64(src)[0];
      crc1 = _mm_xor_si64(crc1, MM64_TABLE(sizeof(uint64) - 1)[tmp1]);
      buf1 = MM64(src)[1];
      crc2 = _mm_xor_si64(crc2, MM64_TABLE(sizeof(uint64) - 1)[tmp2]);
      buf2 = MM64(src)[2];
      crc3 = _mm_xor_si64(crc3, MM64_TABLE(sizeof(uint64) - 1)[tmp3]);
      buf3 = MM64(src)[3];
    }
    while (src < stop);
    // Merge the four lanes: fold each lane's last buffered word (and its
    // carried CRC) into crc0 one word at a time.
    CRC_WORD_MMX(this, crc0, buf0);
    buf1 = _mm_xor_si64(buf1, crc1);
    CRC_WORD_MMX(this, crc0, buf1);
    buf2 = _mm_xor_si64(buf2, crc2);
    CRC_WORD_MMX(this, crc0, buf2);
    buf3 = _mm_xor_si64(buf3, crc3);
    CRC_WORD_MMX(this, crc0, buf3);
    temp.m64 = crc0;
    crc = temp.u64;
    // Clear MMX state so subsequent x87 floating-point code works.
    _mm_empty();
    src += 4*8;
  }
  // Process sizeof(uint64) bytes at once.
  bytes = static_cast<size_t>(end - src) & ~(sizeof(uint64) - 1);
  if (bytes > 0) {
    union {
      __m64 m64;
      uint64 u64;
    } temp;
    __m64 crc0;
    temp.u64 = crc;
    crc0 = temp.m64;
    for (const uint8 *stop = src + bytes; src < stop; src += sizeof(uint64)) {
      __m64 buf0 = MM64(src)[0];
      CRC_WORD_MMX(this, crc0, buf0);
    }
    temp.m64 = crc0;
    crc = temp.u64;
    _mm_empty();
  }
  // Compute CRC of remaining bytes.
  for (;src < end; ++src) {
    CRC_BYTE(this, crc, *src);
  }
  return (crc ^ Base().Canonize());
}
#if defined(_MSC_VER)
#pragma warning(pop)
#endif // defined(_MSC_VER)
} // namespace crcutil
#endif // CRCUTIL_USE_ASM && HAVE_I386 && HAVE_MMX
| {
"pile_set_name": "Github"
} |
.. include:: ../vpbx/rating_profiles.rst
| {
"pile_set_name": "Github"
} |
/*
* This class is distributed as part of the Psi Mod.
* Get the Source Code in github:
* https://github.com/Vazkii/Psi
*
* Psi is Open Source and distributed under the
* Psi License: https://psi.vazkii.net/license.php
*/
package vazkii.psi.common.spell.trick.block;
import net.minecraft.util.Direction;
import net.minecraft.util.math.BlockPos;
import vazkii.psi.api.internal.MathHelper;
import vazkii.psi.api.internal.Vector3;
import vazkii.psi.api.spell.EnumSpellStat;
import vazkii.psi.api.spell.Spell;
import vazkii.psi.api.spell.SpellCompilationException;
import vazkii.psi.api.spell.SpellContext;
import vazkii.psi.api.spell.SpellMetadata;
import vazkii.psi.api.spell.SpellParam;
import vazkii.psi.api.spell.SpellRuntimeException;
import vazkii.psi.api.spell.param.ParamNumber;
import vazkii.psi.api.spell.param.ParamVector;
import vazkii.psi.api.spell.piece.PieceTrick;
/**
 * Spell trick: places blocks from the target slot along a ray starting at
 * {@code position}, heading in the direction of {@code target}, up to
 * {@code maxBlocks} blocks, each placed facing the optional {@code direction}.
 */
public class PieceTrickPlaceInSequence extends PieceTrick {

	SpellParam<Vector3> position;
	SpellParam<Vector3> target;
	SpellParam<Number> maxBlocks;
	SpellParam<Vector3> direction;

	public PieceTrickPlaceInSequence(Spell spell) {
		super(spell);
	}

	@Override
	public void initParams() {
		addParam(position = new ParamVector(SpellParam.GENERIC_NAME_POSITION, SpellParam.BLUE, false, false));
		addParam(target = new ParamVector(SpellParam.GENERIC_NAME_TARGET, SpellParam.GREEN, false, false));
		addParam(maxBlocks = new ParamNumber(SpellParam.GENERIC_NAME_MAX, SpellParam.RED, false, true));
		addParam(direction = new ParamVector(SpellParam.GENERIC_NAME_DIRECTION, SpellParam.CYAN, true, false));
	}

	@Override
	public void addToMetadata(SpellMetadata meta) throws SpellCompilationException {
		super.addToMetadata(meta);
		Double maxBlocksVal = this.<Double>getParamEvaluation(maxBlocks);
		// maxBlocks must be a known positive constant at compile time.
		if (maxBlocksVal == null || maxBlocksVal <= 0) {
			throw new SpellCompilationException(SpellCompilationException.NON_POSITIVE_VALUE, x, y);
		}
		meta.addStat(EnumSpellStat.POTENCY, (int) (maxBlocksVal * 8));
		meta.addStat(EnumSpellStat.COST, (int) ((9.6 + (maxBlocksVal - 1) * 5.6)));
	}

	@Override
	public Object execute(SpellContext context) throws SpellRuntimeException {
		Vector3 positionVal = this.getParamValue(context, position);
		Vector3 targetVal = this.getParamValue(context, target);
		Number maxBlocksVal = this.getParamValue(context, maxBlocks);
		int maxBlocksInt = maxBlocksVal.intValue();
		Vector3 directionVal = this.getParamValue(context, direction);

		// BUGFIX: also reject a null target; the original only checked position
		// and would NPE below when normalizing a null target vector.
		if (positionVal == null || targetVal == null) {
			throw new SpellRuntimeException(SpellRuntimeException.NULL_VECTOR);
		}

		// Facing for each placed block; renamed from "direction" so it no
		// longer shadows the SpellParam field of the same name.
		Direction placeDir = Direction.UP;
		if (directionVal != null) {
			placeDir = Direction.getFacingFromVector(directionVal.x, directionVal.y, directionVal.z);
		}

		Vector3 targetNorm = targetVal.copy().normalize();
		for (BlockPos blockPos : MathHelper.getBlocksAlongRay(positionVal.toVec3D(), positionVal.copy().add(targetNorm.copy().multiply(maxBlocksInt)).toVec3D(), maxBlocksInt)) {
			// Every placement must stay inside the spell's allowed radius.
			if (!context.isInRadius(Vector3.fromBlockPos(blockPos))) {
				throw new SpellRuntimeException(SpellRuntimeException.OUTSIDE_RADIUS);
			}
			PieceTrickPlaceBlock.placeBlock(context.caster, context.caster.getEntityWorld(), blockPos, context.getTargetSlot(), false, placeDir);
		}
		return null;
	}
}
| {
"pile_set_name": "Github"
} |
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import logging
import signal
import time
import sys
from traitlets.config import catch_config_error
from traitlets import (
Instance, Dict, Unicode, Bool, List, CUnicode, Any, Float
)
from jupyter_core.application import (
JupyterApp, base_flags, base_aliases
)
from . import __version__
from .consoleapp import JupyterConsoleApp, app_aliases, app_flags
try:
    import queue
except ImportError:
    import Queue as queue  # Python 2 fallback

# Seconds to wait for the kernel to finish executing one piece of code.
OUTPUT_TIMEOUT = 10

# copy flags from mixin:
flags = dict(base_flags)
# start with mixin frontend flags:
frontend_flags = dict(app_flags)
# update full dict with frontend flags:
flags.update(frontend_flags)

# copy aliases from mixin
aliases = dict(base_aliases)
# start with mixin frontend aliases
frontend_aliases = dict(app_aliases)
# load updated frontend aliases into full dict
aliases.update(frontend_aliases)

# get flags&aliases into sets, and remove a couple that
# shouldn't be scrubbed from backend flags:
frontend_aliases = set(frontend_aliases.keys())
frontend_flags = set(frontend_flags.keys())
class RunApp(JupyterApp, JupyterConsoleApp):
    """Run files of code (or stdin) through a Jupyter kernel and exit.

    Filenames given on the command line are executed in order; with no
    filenames, code is read from stdin.  A non-``ok`` execution reply
    raises, so the process exits with an error.
    """

    version = __version__
    name = "jupyter run"
    description = """Run Jupyter kernel code."""

    flags = Dict(flags)
    aliases = Dict(aliases)
    frontend_aliases = Any(frontend_aliases)
    frontend_flags = Any(frontend_flags)

    kernel_timeout = Float(60, config=True,
        help="""Timeout for giving up on a kernel (in seconds).

        On first connect and restart, the console tests whether the
        kernel is running and responsive by sending kernel_info_requests.
        This sets the timeout in seconds for how long the kernel can take
        before being presumed dead.
        """
    )

    def parse_command_line(self, argv=None):
        """Parse argv; remaining positional args are filenames to run."""
        super(RunApp, self).parse_command_line(argv)
        self.build_kernel_argv(self.extra_args)
        self.filenames_to_run = self.extra_args[:]

    @catch_config_error
    def initialize(self, argv=None):
        """Initialize both parent apps and connect to (or start) a kernel."""
        self.log.debug("jupyter run: initialize...")
        super(RunApp, self).initialize(argv)
        JupyterConsoleApp.initialize(self)
        signal.signal(signal.SIGINT, self.handle_sigint)
        self.init_kernel_info()

    def handle_sigint(self, *args):
        """SIGINT handler: forward the interrupt to our kernel, if we own it."""
        if self.kernel_manager:
            self.kernel_manager.interrupt_kernel()
        else:
            print("", file=sys.stderr)
            # BUGFIX: `error(...)` was an undefined name (NameError at
            # runtime); log through the application logger instead.
            self.log.error("Cannot interrupt kernels we didn't start.\n")

    def init_kernel_info(self):
        """Wait for a kernel to be ready, and store kernel info"""
        timeout = self.kernel_timeout
        tic = time.time()
        self.kernel_client.hb_channel.unpause()
        msg_id = self.kernel_client.kernel_info()
        while True:
            try:
                reply = self.kernel_client.get_shell_msg(timeout=1)
            except queue.Empty:
                # Keep polling until the overall kernel_timeout elapses.
                if (time.time() - tic) > timeout:
                    raise RuntimeError("Kernel didn't respond to kernel_info_request")
            else:
                # Ignore unrelated replies; only accept ours.
                if reply['parent_header'].get('msg_id') == msg_id:
                    self.kernel_info = reply['content']
                    return

    def _run_code(self, code, label):
        """Execute *code* in the kernel; raise if the kernel reports an error.

        *label* identifies the source (a filename or 'stdin') in the error.
        """
        reply = self.kernel_client.execute_interactive(code, timeout=OUTPUT_TIMEOUT)
        if reply['content']['status'] != 'ok':
            raise Exception("jupyter-run error running '%s'" % label)

    def start(self):
        """Run each filename in order, or stdin if no filenames were given."""
        self.log.debug("jupyter run: starting...")
        super(RunApp, self).start()
        if self.filenames_to_run:
            for filename in self.filenames_to_run:
                self.log.debug("jupyter run: executing `%s`" % filename)
                with open(filename) as fp:
                    code = fp.read()
                self._run_code(code, filename)
        else:
            self._run_code(sys.stdin.read(), 'stdin')
# Entry points used by console_scripts and `python -m`.
main = launch_new_instance = RunApp.launch_instance

if __name__ == '__main__':
    main()
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2002-2016, DuraSpace. All rights reserved
Licensed under the DuraSpace License.
A copy of the DuraSpace License has been included in this
distribution and is available at: http://www.dspace.org/license
-->
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
                           http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">

    <!-- Central factory wiring the URI generator, triple store and
         converter used by DSpace's RDF support. -->
    <bean id="rdfFactory" class="org.dspace.rdf.factory.RDFFactoryImpl">
        <!-- This defines which URIGenerator will be used. The URIGenerator
             itself must be instantiated below.
        -->
        <property name="generator" ref="org.dspace.rdf.storage.LocalURIGenerator"/>
        <property name="storage" ref="org.dspace.rdf.storage.RDFStorage"/>
        <property name="converter" ref="org.dspace.rdf.conversion.RDFConverter"/>
    </bean>

    <!-- configure all plugins the converter should use. If you don't want to
         use a plugin, remove it here. -->
    <bean id="org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin" class="org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin"/>
    <bean id="org.dspace.rdf.conversion.MetadataConverterPlugin" class="org.dspace.rdf.conversion.MetadataConverterPlugin"/>
    <bean id="org.dspace.rdf.conversion.StaticDSOConverterPlugin" class="org.dspace.rdf.conversion.StaticDSOConverterPlugin"/>

    <!-- You do not need to change anything below this line, if you are not
         developing DSpace. -->

    <!-- Currently there is only one implementation of RDFConverter. This uses
         automatically all instantiated plugins. -->
    <bean id="org.dspace.rdf.conversion.RDFConverter" class="org.dspace.rdf.conversion.RDFConverterImpl" scope="singleton"/>

    <!-- We have multiple instances of URIGenerator using different Persistent
         Identifier. Some of them use others as fallback (e.g. generate a DOI,
         if there is no DOI fallback to handle, if there is no handle fallback
         to local URIs. -->
    <bean id="org.dspace.rdf.storage.LocalURIGenerator" class="org.dspace.rdf.storage.LocalURIGenerator"/>
    <bean id="org.dspace.rdf.storage.HandleURIGenerator" class="org.dspace.rdf.storage.HandleURIGenerator"/>
    <bean id="org.dspace.rdf.storage.DOIURIGenerator" class="org.dspace.rdf.storage.DOIURIGenerator">
        <property name="fallback" ref="org.dspace.rdf.storage.LocalURIGenerator"/>
    </bean>
    <bean id="org.dspace.rdf.storage.DOIHandleURIGenerator" class="org.dspace.rdf.storage.DOIHandleURIGenerator">
        <property name="fallback" ref="org.dspace.rdf.storage.HandleURIGenerator"/>
    </bean>

    <!-- Currently there is only one implementation of RDFStorage -->
    <bean id="org.dspace.rdf.storage.RDFStorage" class="org.dspace.rdf.storage.RDFStorageImpl" scope="singleton"/>
</beans>
| {
"pile_set_name": "Github"
} |
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ZtreeDemoComponent } from './ztree-demo.component';
// Unit tests for ZtreeDemoComponent (standard Angular CLI generated spec).
describe('ZtreeDemoComponent', () => {
  let component: ZtreeDemoComponent;
  let fixture: ComponentFixture<ZtreeDemoComponent>;

  // Configure the testing module and compile the component once per test.
  // compileComponents() is asynchronous because it may load external
  // templateUrl/styleUrls resources, hence the async() wrapper.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ZtreeDemoComponent ]
    })
    .compileComponents();
  }));

  // Create a fresh fixture/instance per test; detectChanges() triggers the
  // component's initial data binding and lifecycle hooks (ngOnInit).
  beforeEach(() => {
    fixture = TestBed.createComponent(ZtreeDemoComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  // Smoke test: the component can be constructed and initialized.
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
| {
"pile_set_name": "Github"
} |
# See www.openfst.org for extensive documentation on this weighted
# finite-state transducer library.
from libc.time cimport time
from libc.time cimport time_t
from libcpp cimport bool
from libcpp.memory cimport shared_ptr
from libcpp.memory cimport unique_ptr
from libcpp.utility cimport pair
from libcpp.vector cimport vector
from libcpp.string cimport string
from basictypes cimport int32
from basictypes cimport int64
from basictypes cimport uint32
from basictypes cimport uint64
cimport fst as fst
from ios cimport stringstream
# Exportable helper functions.
cdef string tostring(data, encoding=?) except *
cdef string weight_tostring(data, encoding=?) except *
cdef fst.ComposeFilter _get_compose_filter(
const string &compose_filter) except *
cdef fst.DeterminizeType _get_determinize_type(const string &det_type) except *
cdef fst.QueueType _get_queue_type(const string &queue_type) except *
cdef fst.RandArcSelection _get_rand_arc_selection(
const string &replace_label_type) except *
cdef fst.ReplaceLabelType _get_replace_label_type(
const string &replace_label_type, bool epsilon_on_replace) except *
# Weight.
cdef fst.WeightClass _get_WeightClass_or_One(const string &weight_type,
weight_string) except *
cdef fst.WeightClass _get_WeightClass_or_Zero(const string &weight_type,
weight_string) except *
cdef class Weight(object):
cdef unique_ptr[fst.WeightClass] _weight
cdef void _check_weight(self) except *
cpdef Weight copy(self)
cpdef string to_string(self)
cpdef string type(self)
cdef Weight _Zero(weight_type)
cdef Weight _One(weight_type)
cdef Weight _NoWeight(weight_type)
cdef Weight _plus(Weight lhs, Weight rhs)
cdef Weight _times(Weight lhs, Weight rhs)
cdef Weight _divide(Weight lhs, Weight rhs)
cdef Weight _power(Weight lhs, size_t n)
# SymbolTable.
ctypedef fst.SymbolTable * SymbolTable_ptr
cdef class _SymbolTable(object):
cdef fst.SymbolTable *_table
cpdef int64 available_key(self)
cpdef string checksum(self)
cpdef SymbolTable copy(self)
cpdef int64 get_nth_key(self, ssize_t pos) except *
cpdef string labeled_checksum(self)
cpdef bool member(self, key)
cpdef string name(self)
cpdef size_t num_symbols(self)
cpdef void write(self, filename) except *
cpdef void write_text(self, filename) except *
cdef class _EncodeMapperSymbolTable(_SymbolTable):
cdef shared_ptr[fst.EncodeMapperClass] _encoder
cdef class _FstSymbolTable(_SymbolTable):
cdef shared_ptr[fst.FstClass] _fst
cdef class _MutableSymbolTable(_SymbolTable):
cpdef int64 add_symbol(self, symbol, int64 key=?)
cpdef void add_table(self, _SymbolTable syms)
cpdef void set_name(self, new_name) except *
cdef class _MutableFstSymbolTable(_MutableSymbolTable):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef class SymbolTable(_MutableSymbolTable):
cdef unique_ptr[fst.SymbolTable] _smart_table
cdef _EncodeMapperSymbolTable _init_EncodeMapperSymbolTable(
fst.SymbolTable *table, shared_ptr[fst.EncodeMapperClass] encoder)
cdef _FstSymbolTable _init_FstSymbolTable(fst.SymbolTable *table,
shared_ptr[fst.FstClass] ifst)
cdef _MutableFstSymbolTable _init_MutableFstSymbolTable(fst.SymbolTable *table,
shared_ptr[fst.MutableFstClass] ifst)
cdef SymbolTable _init_SymbolTable(fst.SymbolTable *table)
cdef class SymbolTableIterator(object):
cdef shared_ptr[fst.SymbolTable] _table
cdef unique_ptr[fst.SymbolTableIterator] _siter
cpdef bool done(self)
cpdef void next(self)
cpdef void reset(self)
cpdef string symbol(self)
cpdef int64 value(self)
# EncodeMapper.
cdef class EncodeMapper(object):
cdef shared_ptr[fst.EncodeMapperClass] _encoder
cpdef string arc_type(self)
cpdef uint32 flags(self)
cpdef _EncodeMapperSymbolTable input_symbols(self)
cpdef _EncodeMapperSymbolTable output_symbols(self)
cpdef uint64 properties(self, uint64 mask)
cpdef void set_input_symbols(self, _SymbolTable syms) except *
cpdef void set_output_symbols(self, _SymbolTable syms) except *
cpdef string weight_type(self)
# Fst.
ctypedef fst.FstClass * FstClass_ptr
ctypedef fst.MutableFstClass * MutableFstClass_ptr
ctypedef fst.VectorFstClass * VectorFstClass_ptr
cdef class _Fst(object):
cdef shared_ptr[fst.FstClass] _fst
cpdef string arc_type(self)
cpdef ArcIterator arcs(self, int64 state)
cpdef _Fst copy(self)
cpdef void draw(self, filename, _SymbolTable isymbols=?,
_SymbolTable osymbols=?, SymbolTable ssymbols=?,
bool acceptor=?, title=?, double width=?,
double height=?, bool portrait=?, bool vertical=?,
double ranksep=?, double nodesep=?, int32 fontsize=?,
int32 precision=?, float_format=?,
bool show_weight_one=?)
cpdef Weight final(self, int64 state)
cpdef string fst_type(self)
cpdef _FstSymbolTable input_symbols(self)
cpdef size_t num_arcs(self, int64 state) except *
cpdef size_t num_input_epsilons(self, int64 state) except *
cpdef size_t num_output_epsilons(self, int64 state) except *
cpdef _FstSymbolTable output_symbols(self)
cpdef uint64 properties(self, uint64 mask, bool test)
cpdef int64 start(self)
cpdef StateIterator states(self)
cpdef string text(self, _SymbolTable isymbols=?, _SymbolTable osymbols=?,
_SymbolTable ssymbols=?, bool acceptor=?,
bool show_weight_one=?, missing_sym=?)
cpdef bool verify(self)
cpdef string weight_type(self)
cpdef void write(self, filename) except *
cpdef string write_to_string(self)
cdef class _MutableFst(_Fst):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef void _check_mutating_imethod(self) except *
cdef void _add_arc(self, int64 state, Arc arc) except *
cpdef int64 add_state(self) except *
cdef void _arcsort(self, sort_type=?) except *
cdef void _closure(self, bool closure_plus=?) except *
cdef void _concat(self, _Fst ifst) except *
cdef void _connect(self) except *
cdef void _decode(self, EncodeMapper) except *
cdef void _delete_arcs(self, int64 state, size_t n=?) except *
cdef void _delete_states(self, states=?) except *
cdef void _encode(self, EncodeMapper) except *
cdef void _invert(self) except *
cdef void _minimize(self, float delta=?, bool allow_nondet=?) except *
cpdef MutableArcIterator mutable_arcs(self, int64 state)
cpdef int64 num_states(self)
cdef void _project(self, bool project_output=?) except *
cdef void _prune(self, float delta=?, int64 nstate=?, weight=?) except *
cdef void _push(self, float delta=?, bool remove_total_weight=?,
bool to_final=?) except *
cdef void _relabel_pairs(self, ipairs=?, opairs=?) except *
cdef void _relabel_tables(self, _SymbolTable old_isymbols=?,
_SymbolTable new_isymbols=?, unknown_isymbol=?,
bool attach_new_isymbols=?,
_SymbolTable old_osymbols=?, _SymbolTable new_osymbols=?,
unknown_osymbol=?, bool attach_new_osymbols=?) except *
cdef void _reserve_arcs(self, int64 state, size_t n) except *
cdef void _reserve_states(self, int64 n) except *
cdef void _reweight(self, potentials, bool to_final=?) except *
cdef void _rmepsilon(self, queue_type=?, bool connect=?, weight=?,
int64 nstate=?, float delta=?) except *
cdef void _set_final(self, int64 state, weight=?) except *
cdef void _set_properties(self, uint64 props, uint64 mask)
cdef void _set_start(self, int64 state) except *
cdef void _set_input_symbols(self, _SymbolTable syms) except *
cdef void _set_output_symbols(self, _SymbolTable syms) except *
cdef void _topsort(self) except *
cdef void _union(self, _Fst ifst) except *
# Fst construction helpers.
cdef _Fst _init_Fst(FstClass_ptr tfst)
cdef _MutableFst _init_MutableFst(MutableFstClass_ptr tfst)
cdef _Fst _init_XFst(FstClass_ptr tfst)
cdef _MutableFst _create_Fst(arc_type=?)
cpdef _Fst _read(filename)
cpdef _Fst _read_from_string(State)
# Iterators.
cdef class Arc(object):
cdef unique_ptr[fst.ArcClass] _arc
cpdef Arc copy(self)
cdef Arc _init_Arc(const fst.ArcClass &arc)
cdef class ArcIterator(object):
cdef shared_ptr[fst.FstClass] _fst
cdef unique_ptr[fst.ArcIteratorClass] _aiter
cpdef bool done(self)
cpdef uint32 flags(self)
cpdef void next(self)
cpdef size_t position(self)
cpdef void reset(self)
cpdef void seek(self, size_t a)
cpdef void set_flags(self, uint32 flags, uint32 mask)
cpdef object value(self)
cdef class MutableArcIterator(object):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef unique_ptr[fst.MutableArcIteratorClass] _aiter
cpdef bool done(self)
cpdef uint32 flags(self)
cpdef void next(self)
cpdef size_t position(self)
cpdef void reset(self)
cpdef void seek(self, size_t a)
cpdef void set_flags(self, uint32 flags, uint32 mask)
cpdef void set_value(self, Arc arc)
cpdef object value(self)
cdef class StateIterator(object):
cdef shared_ptr[fst.FstClass] _fst
cdef unique_ptr[fst.StateIteratorClass] _siter
cpdef bool done(self)
cpdef void next(self)
cpdef void reset(self)
cpdef int64 value(self)
# Constructive operations on Fst.
cdef _Fst _map(_Fst ifst, float delta=?, map_type=?, double power=?, weight=?)
cpdef _Fst arcmap(_Fst ifst, float delta=?, map_type=?, double power=?, weight=?)
cpdef _MutableFst compose(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef _Fst convert(_Fst ifst, fst_type=?)
cpdef _MutableFst determinize(_Fst ifst, float delta=?, det_type=?,
int64 nstate=?, int64 subsequential_label=?,
weight=?, bool increment_subsequential_label=?)
cpdef _MutableFst difference(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef _MutableFst disambiguate(_Fst ifst, float delta=?, int64 nstate=?,
int64 subsequential_label=?, weight=?)
cpdef _MutableFst epsnormalize(_Fst ifst, bool eps_norm_output=?)
cpdef bool equal(_Fst ifst1, _Fst ifst2, float delta=?)
cpdef bool equivalent(_Fst ifst1, _Fst ifst2, float delta=?) except *
cpdef _MutableFst intersect(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef bool isomorphic(_Fst ifst1, _Fst ifst2, float delta=?)
cpdef _MutableFst prune(_Fst ifst, float delta=?, int64 nstate=?,
weight=?)
cpdef _MutableFst push(_Fst ifst, float delta=?, bool push_weights=?,
bool push_labels=?, bool remove_common_affix=?,
bool remove_total_weight=?, bool to_final=?)
cpdef bool randequivalent(_Fst ifst1, _Fst ifst2, int32 npath=?,
float delta=?, time_t seed=?, select=?,
int32 max_length=?) except *
cpdef _MutableFst randgen(_Fst ifst, int32 npath=?, time_t seed=?,
select=?, int32 max_length=?,
bool remove_total_weight=?, bool weighted=?)
# NOTE(fix): a second declaration of _get_replace_label_type lived here, with a
# signature (`string rlt`) conflicting with the canonical declaration near the
# top of this file (`const string &replace_label_type`). Cython does not allow
# conflicting re-declarations in a .pxd, so the duplicate has been removed;
# the single declaration above is the one implemented in the .pyx.
cpdef _MutableFst replace(pairs, call_arc_labeling=?, return_arc_labeling=?,
bool epsilon_on_replace=?, int64 return_label=?)
cpdef _MutableFst reverse(_Fst ifst, bool require_superinitial=?)
cdef vector[fst.WeightClass] *_shortestdistance(_Fst ifst, float delta=?,
int64 nstate=?, queue_type=?,
bool reverse=?) except *
cpdef _MutableFst shortestpath(_Fst ifst, float delta=?, int32 nshortest=?,
int64 nstate=?, queue_type=?, bool unique=?,
weight=?)
cpdef _Fst statemap(_Fst ifst, map_type)
cpdef _MutableFst synchronize(_Fst ifst)
# Compiler.
cdef class Compiler(object):
cdef unique_ptr[stringstream] _sstrm
cdef string _fst_type
cdef string _arc_type
cdef const fst.SymbolTable *_isymbols
cdef const fst.SymbolTable *_osymbols
cdef const fst.SymbolTable *_ssymbols
cdef bool _acceptor
cdef bool _keep_isymbols
cdef bool _keep_osymbols
cdef bool _keep_state_numbering
cdef bool _allow_negative_labels
cpdef _Fst compile(self)
cpdef void write(self, expression)
# FarReader.
cdef class FarReader(object):
cdef unique_ptr[fst.FarReaderClass] _reader
cpdef string arc_type(self)
cpdef bool done(self)
cpdef bool error(self)
cpdef string far_type(self)
cpdef bool find(self, key) except *
cpdef _Fst get_fst(self)
cpdef string get_key(self)
cpdef void next(self)
cpdef void reset(self)
# FarWriter.
cdef class FarWriter(object):
cdef unique_ptr[fst.FarWriterClass] _writer
cpdef string arc_type(self)
cdef void close(self)
cpdef void add(self, key, _Fst ifst) except *
cpdef bool error(self)
cpdef string far_type(self)
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!310 &1
UnityConnectSettings:
m_ObjectHideFlags: 0
serializedVersion: 1
m_Enabled: 1
m_TestMode: 0
m_EventOldUrl: https://api.uca.cloud.unity3d.com/v1/events
m_EventUrl: https://cdp.cloud.unity3d.com/v1/events
m_ConfigUrl: https://config.uca.cloud.unity3d.com
m_TestInitMode: 0
CrashReportingSettings:
m_EventUrl: https://perf-events.cloud.unity3d.com
m_Enabled: 0
m_LogBufferSize: 10
m_CaptureEditorExceptions: 1
UnityPurchasingSettings:
m_Enabled: 0
m_TestMode: 0
UnityAnalyticsSettings:
m_Enabled: 0
m_TestMode: 0
m_InitializeOnStartup: 1
UnityAdsSettings:
m_Enabled: 0
m_InitializeOnStartup: 1
m_TestMode: 0
m_IosGameId:
m_AndroidGameId:
m_GameIds: {}
m_GameId:
PerformanceReportingSettings:
m_Enabled: 0
| {
"pile_set_name": "Github"
} |
/*
* File: AbstractMarkovChainMonteCarlo.java
* Authors: Kevin R. Dixon
* Company: Sandia National Laboratories
* Project: Cognitive Foundry
*
* Copyright Sep 30, 2009, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government.
* Export of this program may require a license from the United States
* Government. See CopyrightHistory.txt for complete details.
*
*/
package gov.sandia.cognition.statistics.bayesian;
import gov.sandia.cognition.learning.algorithm.AbstractAnytimeBatchLearner;
import gov.sandia.cognition.statistics.DataDistribution;
import gov.sandia.cognition.statistics.distribution.DefaultDataDistribution;
import gov.sandia.cognition.util.ObjectUtil;
import java.util.Collection;
import java.util.Random;
/**
 * Partial abstract implementation of MarkovChainMonteCarlo.
 * @author Kevin R. Dixon
 * @since 3.0
 * @param <ObservationType>
 * Type of observations handled by the MCMC algorithm.
 * @param <ParameterType>
 * Type of parameters to infer.
 */
public abstract class AbstractMarkovChainMonteCarlo<ObservationType,ParameterType>
    extends AbstractAnytimeBatchLearner<Collection<? extends ObservationType>,DataDistribution<ParameterType>>
    implements MarkovChainMonteCarlo<ObservationType,ParameterType>
{

    /**
     * Default number of sample/iterations, {@value}.
     */
    public static final int DEFAULT_NUM_SAMPLES = 1000;

    /**
     * Random number generator.
     */
    protected Random random;

    /**
     * The number of iterations that must transpire before the algorithm
     * begins collection the samples.
     */
    private int burnInIterations;

    /**
     * The number of iterations that must transpire between capturing
     * samples from the distribution.
     */
    private int iterationsPerSample;

    /**
     * The current parameters in the random walk.
     */
    protected ParameterType currentParameter;

    /**
     * The previous parameter in the random walk.
     */
    protected ParameterType previousParameter;

    /**
     * Resulting parameters to return. Transient: rebuilt by
     * {@link #initializeAlgorithm()} on each run, never copied or serialized.
     */
    private transient DefaultDataDistribution<ParameterType> result;

    /**
     * Creates a new instance of AbstractMarkovChainMonteCarlo
     */
    public AbstractMarkovChainMonteCarlo()
    {
        super( DEFAULT_NUM_SAMPLES );
        this.setIterationsPerSample(1);
    }

    @Override
    @SuppressWarnings("unchecked")
    public AbstractMarkovChainMonteCarlo<ObservationType,ParameterType> clone()
    {
        AbstractMarkovChainMonteCarlo<ObservationType,ParameterType> clone =
            (AbstractMarkovChainMonteCarlo<ObservationType,ParameterType>) super.clone();
        clone.setRandom( ObjectUtil.cloneSmart( this.getRandom() ) );
        clone.setCurrentParameter(
            ObjectUtil.cloneSmart( this.getCurrentParameter() ) );
        // FIX: super.clone() copies references only, so without the next two
        // statements the clone would share this learner's mutable result
        // distribution (result.increment on either object would corrupt the
        // other) and its previousParameter state.
        clone.previousParameter =
            ObjectUtil.cloneSmart( this.previousParameter );
        clone.setResult( null );
        return clone;
    }

    @Override
    public int getBurnInIterations()
    {
        return this.burnInIterations;
    }

    @Override
    public void setBurnInIterations(
        final int burnInIterations)
    {
        if( burnInIterations < 0 )
        {
            throw new IllegalArgumentException( "burnInIterations must be >= 0" );
        }
        this.burnInIterations = burnInIterations;
    }

    @Override
    public int getIterationsPerSample()
    {
        return this.iterationsPerSample;
    }

    @Override
    public void setIterationsPerSample(
        final int iterationsPerSample)
    {
        if( iterationsPerSample < 1 )
        {
            throw new IllegalArgumentException( "iterationsPerSample must be >= 1" );
        }
        this.iterationsPerSample = iterationsPerSample;
    }

    @Override
    public DefaultDataDistribution<ParameterType> getResult()
    {
        return this.result;
    }

    /**
     * Setter for result
     * @param result
     * Results to return.
     */
    protected void setResult(
        final DefaultDataDistribution<ParameterType> result)
    {
        this.result = result;
    }

    @Override
    public ParameterType getCurrentParameter()
    {
        return this.currentParameter;
    }

    /**
     * Setter for currentParameter.
     * @param currentParameter
     * The current location in the random walk.
     */
    protected void setCurrentParameter(
        final ParameterType currentParameter )
    {
        this.currentParameter = currentParameter;
    }

    @Override
    public Random getRandom()
    {
        return this.random;
    }

    @Override
    public void setRandom(
        final Random random)
    {
        this.random = random;
    }

    /**
     * Performs a valid MCMC update step. That is, the function is expected to
     * modify the currentParameter member.
     */
    abstract protected void mcmcUpdate();

    /**
     * Creates the initial parameters from which to start the Markov chain.
     * @return
     * initial parameters from which to start the Markov chain.
     */
    abstract public ParameterType createInitialLearnedObject();

    @Override
    protected boolean initializeAlgorithm()
    {
        // previousParameter and currentParameter intentionally start as the
        // same reference; step() re-clones after each sampling interval.
        this.previousParameter =
            ObjectUtil.cloneSmart(this.createInitialLearnedObject());
        this.setCurrentParameter( this.previousParameter );

        // Burn-in: advance the chain without recording samples so that the
        // recorded distribution is (approximately) drawn from the stationary
        // distribution rather than the arbitrary starting point.
        for( int i = 0; i < this.getBurnInIterations(); i++ )
        {
            this.mcmcUpdate();
        }
        this.setResult( new DefaultDataDistribution<ParameterType>(
            this.getMaxIterations() ) );
        return true;
    }

    @Override
    protected boolean step()
    {
        // Thin the chain: only every iterationsPerSample-th state is kept,
        // reducing autocorrelation between recorded samples.
        for( int i = 0; i < this.iterationsPerSample; i++ )
        {
            this.mcmcUpdate();
        }

        // Put a clone of the current parameter into the result distribution;
        // cloning prevents later mcmcUpdate() calls from mutating the sample.
        this.previousParameter = ObjectUtil.cloneSmart(this.currentParameter);
        this.result.increment( this.previousParameter );
        return true;
    }

    @Override
    protected void cleanupAlgorithm()
    {
    }

    /**
     * Getter for previousParameter
     * @return
     * The previous parameter in the random walk.
     */
    public ParameterType getPreviousParameter()
    {
        return this.previousParameter;
    }

}
| {
"pile_set_name": "Github"
} |
Copyright (c) 2007 The Khronos Group Inc.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and/or associated documentation files (the
"Materials"), to deal in the Materials without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Materials, and to
permit persons to whom the Materials are furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Materials.
THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
| {
"pile_set_name": "Github"
} |
/***************************************************************************/
/* */
/* ftbitmap.h */
/* */
/* FreeType utility functions for bitmaps (specification). */
/* */
/* Copyright 2004, 2005, 2006, 2008 by */
/* David Turner, Robert Wilhelm, and Werner Lemberg. */
/* */
/* This file is part of the FreeType project, and may only be used, */
/* modified, and distributed under the terms of the FreeType project */
/* license, LICENSE.TXT. By continuing to use, modify, or distribute */
/* this file you indicate that you have read the license and */
/* understand and accept it fully. */
/* */
/***************************************************************************/
#ifndef __FTBITMAP_H__
#define __FTBITMAP_H__
#include <ft2build.h>
#include FT_FREETYPE_H
#ifdef FREETYPE_H
#error "freetype.h of FreeType 1 has been loaded!"
#error "Please fix the directory search order for header files"
#error "so that freetype.h of FreeType 2 is found first."
#endif
FT_BEGIN_HEADER
/*************************************************************************/
/* */
/* <Section> */
/* bitmap_handling */
/* */
/* <Title> */
/* Bitmap Handling */
/* */
/* <Abstract> */
/* Handling FT_Bitmap objects. */
/* */
/* <Description> */
/* This section contains functions for converting FT_Bitmap objects. */
/* */
/*************************************************************************/
/*************************************************************************/
/* */
/* <Function> */
/* FT_Bitmap_New */
/* */
/* <Description> */
/* Initialize a pointer to an @FT_Bitmap structure. */
/* */
/* <InOut> */
/* abitmap :: A pointer to the bitmap structure. */
/* */
FT_EXPORT( void )
FT_Bitmap_New( FT_Bitmap *abitmap );
/*************************************************************************/
/* */
/* <Function> */
/* FT_Bitmap_Copy */
/* */
/* <Description> */
/* Copy a bitmap into another one. */
/* */
/* <Input> */
/* library :: A handle to a library object. */
/* */
/* source :: A handle to the source bitmap. */
/* */
/* <Output> */
/* target :: A handle to the target bitmap. */
/* */
/* <Return> */
/* FreeType error code. 0~means success. */
/* */
FT_EXPORT( FT_Error )
FT_Bitmap_Copy( FT_Library library,
const FT_Bitmap *source,
FT_Bitmap *target);
/*************************************************************************/
/* */
/* <Function> */
/* FT_Bitmap_Embolden */
/* */
/* <Description> */
/* Embolden a bitmap. The new bitmap will be about `xStrength' */
/* pixels wider and `yStrength' pixels higher. The left and bottom */
/* borders are kept unchanged. */
/* */
/* <Input> */
/* library :: A handle to a library object. */
/* */
/* xStrength :: How strong the glyph is emboldened horizontally. */
/* Expressed in 26.6 pixel format. */
/* */
/* yStrength :: How strong the glyph is emboldened vertically. */
/* Expressed in 26.6 pixel format. */
/* */
/* <InOut> */
/* bitmap :: A handle to the target bitmap. */
/* */
/* <Return> */
/* FreeType error code. 0~means success. */
/* */
/* <Note> */
/* The current implementation restricts `xStrength' to be less than */
/* or equal to~8 if bitmap is of pixel_mode @FT_PIXEL_MODE_MONO. */
/* */
/* If you want to embolden the bitmap owned by a @FT_GlyphSlotRec, */
/* you should call @FT_GlyphSlot_Own_Bitmap on the slot first. */
/* */
FT_EXPORT( FT_Error )
FT_Bitmap_Embolden( FT_Library library,
FT_Bitmap* bitmap,
FT_Pos xStrength,
FT_Pos yStrength );
/*************************************************************************/
/* */
/* <Function> */
/* FT_Bitmap_Convert */
/* */
/* <Description> */
/* Convert a bitmap object with depth 1bpp, 2bpp, 4bpp, or 8bpp to a */
/* bitmap object with depth 8bpp, making the number of used bytes per */
/* line (a.k.a. the `pitch') a multiple of `alignment'. */
/* */
/* <Input> */
/* library :: A handle to a library object. */
/* */
/* source :: The source bitmap. */
/* */
/* alignment :: The pitch of the bitmap is a multiple of this */
/* parameter. Common values are 1, 2, or 4. */
/* */
/* <Output> */
/* target :: The target bitmap. */
/* */
/* <Return> */
/* FreeType error code. 0~means success. */
/* */
/* <Note> */
/* It is possible to call @FT_Bitmap_Convert multiple times without */
/* calling @FT_Bitmap_Done (the memory is simply reallocated). */
/* */
/* Use @FT_Bitmap_Done to finally remove the bitmap object. */
/* */
/* The `library' argument is taken to have access to FreeType's */
/* memory handling functions. */
/* */
FT_EXPORT( FT_Error )
FT_Bitmap_Convert( FT_Library library,
const FT_Bitmap *source,
FT_Bitmap *target,
FT_Int alignment );
/*************************************************************************/
/* */
/* <Function> */
/* FT_GlyphSlot_Own_Bitmap */
/* */
/* <Description> */
/* Make sure that a glyph slot owns `slot->bitmap'. */
/* */
/* <Input> */
/* slot :: The glyph slot. */
/* */
/* <Return> */
/* FreeType error code. 0~means success. */
/* */
/* <Note> */
/* This function is to be used in combination with */
/* @FT_Bitmap_Embolden. */
/* */
FT_EXPORT( FT_Error )
FT_GlyphSlot_Own_Bitmap( FT_GlyphSlot slot );
/*************************************************************************/
/* */
/* <Function> */
/* FT_Bitmap_Done */
/* */
/* <Description> */
/* Destroy a bitmap object created with @FT_Bitmap_New. */
/* */
/* <Input> */
/* library :: A handle to a library object. */
/* */
/* bitmap :: The bitmap object to be freed. */
/* */
/* <Return> */
/* FreeType error code. 0~means success. */
/* */
/* <Note> */
/* The `library' argument is taken to have access to FreeType's */
/* memory handling functions. */
/* */
FT_EXPORT( FT_Error )
FT_Bitmap_Done( FT_Library library,
FT_Bitmap *bitmap );
/* */
FT_END_HEADER
#endif /* __FTBITMAP_H__ */
/* END */
| {
"pile_set_name": "Github"
} |
using NHapi.Base.Parser;
using NHapi.Base;
using NHapi.Base.Log;
using System;
using System.Collections.Generic;
using NHapi.Model.V281.Segment;
using NHapi.Model.V281.Datatype;
using NHapi.Base.Model;
namespace NHapi.Model.V281.Group
{
///<summary>
///Represents the PEX_P07_STUDY Group. A Group is an ordered collection of message
/// segments that can repeat together or be optionally in/excluded together.
/// This Group contains the following elements:
///<ol>
///<li>0: CSR (Clinical Study Registration) </li>
///<li>1: CSP (Clinical Study Phase) optional repeating</li>
///</ol>
///</summary>
[Serializable]
public class PEX_P07_STUDY : AbstractGroup {

	///<summary>
	/// Creates a new PEX_P07_STUDY Group.
	///</summary>
	public PEX_P07_STUDY(IGroup parent, IModelClassFactory factory) : base(parent, factory){
	   try {
	      this.add(typeof(CSR), true, false);
	      this.add(typeof(CSP), false, true);
	   } catch(HL7Exception e) {
	      HapiLogFactory.GetHapiLog(GetType()).Error("Unexpected error creating PEX_P07_STUDY - this is probably a bug in the source code generator.", e);
	   }
	}

	///<summary>
	/// Returns CSR (Clinical Study Registration) - creates it if necessary
	///</summary>
	public CSR CSR {
get{
	   CSR ret = null;
	   try {
	      ret = (CSR)this.GetStructure("CSR");
	   } catch(HL7Exception e) {
	      HapiLogFactory.GetHapiLog(GetType()).Error("Unexpected error accessing data - this is probably a bug in the source code generator.", e);
	      // FIX: "ocurred" -> "occurred" in the rethrown message.
	      throw new System.Exception("An unexpected error occurred",e);
	   }
	   return ret;
	}
	}

	///<summary>
	/// Returns first repetition of CSP (Clinical Study Phase) - creates it if necessary
	///</summary>
	public CSP GetCSP() {
	   CSP ret = null;
	   try {
	      ret = (CSP)this.GetStructure("CSP");
	   } catch(HL7Exception e) {
	      HapiLogFactory.GetHapiLog(GetType()).Error("Unexpected error accessing data - this is probably a bug in the source code generator.", e);
	      throw new System.Exception("An unexpected error occurred",e);
	   }
	   return ret;
	}

	///<summary>
	///Returns a specific repetition of CSP
	/// * (Clinical Study Phase) - creates it if necessary
	/// throws HL7Exception if the repetition requested is more than one
	///     greater than the number of existing repetitions.
	///</summary>
	public CSP GetCSP(int rep) {
	   return (CSP)this.GetStructure("CSP", rep);
	}

	/**
	 * Returns the number of existing repetitions of CSP
	 */
	public int CSPRepetitionsUsed {
get{
	    int reps = -1;
	    try {
	        reps = this.GetAll("CSP").Length;
	    } catch (HL7Exception e) {
	        string message = "Unexpected error accessing data - this is probably a bug in the source code generator.";
	        HapiLogFactory.GetHapiLog(GetType()).Error(message, e);
	        // FIX: preserve the original HL7Exception as the inner exception
	        // (the original discarded it), consistent with the other handlers.
	        throw new System.Exception(message, e);
	    }
	    return reps;
	}
	}

	/**
	 * Enumerate over the CSP results
	 */
	public IEnumerable<CSP> CSPs
	{
		get
		{
			for (int rep = 0; rep < CSPRepetitionsUsed; rep++)
			{
				yield return (CSP)this.GetStructure("CSP", rep);
			}
		}
	}

	///<summary>
	///Adds a new CSP
	///</summary>
	public CSP AddCSP()
	{
		return this.AddStructure("CSP") as CSP;
	}

	///<summary>
	///Removes the given CSP
	///</summary>
	public void RemoveCSP(CSP toRemove)
	{
		this.RemoveStructure("CSP", toRemove);
	}

	///<summary>
	///Removes the CSP at the given index
	///</summary>
	public void RemoveCSPAt(int index)
	{
		this.RemoveRepetition("CSP", index);
	}

}
}
| {
"pile_set_name": "Github"
} |
%% f/0 always matches the single case clause and returns `ok`.
%% FIX: the original bound the case result to a never-used variable `Y`,
%% which triggers an "unused variable" compiler warning; the dead binding
%% has been dropped. The return value (`ok`) is unchanged.
f() ->
    case 1 of 1 -> ok end.
"pile_set_name": "Github"
} |
/*
* This file is part of Cleanflight.
*
* Cleanflight is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Cleanflight is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cleanflight. If not, see <http://www.gnu.org/licenses/>.
*/
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include "platform.h"
#include "common/axis.h"
#include "common/maths.h"
#include "system.h"
#include "exti.h"
#include "gpio.h"
#include "gyro_sync.h"
#include "sensor.h"
#include "debug.h"
#include "accgyro.h"
#include "accgyro_mpu.h"
#include "accgyro_mpu6500.h"
extern uint16_t acc_1G;
#define BIT_I2C_IF_DIS 0x10
// Issue a full device reset to the MPU by setting the DEVICE_RESET bit in
// PWR_MGMT_1, then wait for the sensor to finish rebooting before any
// further register access is attempted.
void resetGyro (void) {
    // Device Reset
    mpuConfiguration.write(MPU_RA_PWR_MGMT_1, MPU6500_BIT_RESET);
    // Give the device time to come back up (100 ms; datasheet-recommended
    // settling delay — TODO confirm exact minimum for this part).
    delay(100);
}
/*
 * Probe for an I2C MPU6500 accelerometer. When the shared MPU detection
 * result identifies a 65xx-family part on I2C, this driver's init/read
 * callbacks are installed into *acc. Returns true on successful detection;
 * otherwise *acc is left untouched and false is returned.
 */
bool mpu6500AccDetect(acc_t *acc)
{
    const bool isMpu6500 = (mpuDetectionResult.sensor == MPU_65xx_I2C);

    if (isMpu6500) {
        acc->init = mpu6500AccInit;
        acc->read = mpuAccRead;
    }

    return isMpu6500;
}
/*
 * Probe for an I2C MPU6500 gyroscope. When the shared MPU detection result
 * identifies a 65xx-family part on I2C, this driver's callbacks and the
 * gyro scale factor are installed into *gyro. Returns true on successful
 * detection; otherwise *gyro is left untouched and false is returned.
 */
bool mpu6500GyroDetect(gyro_t *gyro)
{
    const bool isMpu6500 = (mpuDetectionResult.sensor == MPU_65xx_I2C);

    if (isMpu6500) {
        gyro->init = mpu6500GyroInit;
        gyro->read = mpuGyroRead;
        gyro->intStatus = checkMPUDataReady;
        // 16.4 LSB per deg/s at the 2000 dps full-scale range selected in
        // mpu6500GyroInit, hence a 1/16.4 dps-per-LSB scale factor.
        gyro->scale = 1.0f / 16.4f;
    }

    return isMpu6500;
}
// Initialise the MPU6500 accelerometer path: arm the MPU data-ready
// interrupt line and publish the 1g scale used by the acc subsystem.
void mpu6500AccInit(void)
{
    mpuIntExtiInit();
    // The accelerometer is configured for +/-8g full scale (INV_FSR_8G,
    // written in mpu6500GyroInit), so 1g corresponds to 512 * 8 = 4096 LSB.
    acc_1G = 512 * 8;
}
/*
 * Initialise the MPU6500 gyro: configure the interrupt pin, reset the
 * device, select the PLL clock source, set full-scale ranges and the
 * digital low-pass filter, and program the sample-rate divider.
 *
 * lpf selects the gyro DLPF/sampling mode (values per the original comments):
 *   lpf == 4 -> 1 kHz sampling, 184 Hz DLPF (MPU_RA_CONFIG = 1)
 *   lpf <  4 -> 8 kHz sampling, 3600 Hz DLPF (MPU_RA_CONFIG = 7)
 *   lpf >  4 -> 8 kHz sampling, 250 Hz DLPF (MPU_RA_CONFIG = 0)
 *
 * FIX: the original called mpuIntExtiInit() twice (once before and once
 * after the NAZE GPIO setup). A single call after the interrupt pin has
 * been configured is sufficient; the duplicate has been removed. The dead
 * commented-out debug-readback block at the end has also been deleted.
 */
void mpu6500GyroInit(uint8_t lpf)
{
#ifdef NAZE
    // FIXME target specific code in driver code.
    gpio_config_t gpio;
    // MPU_INT output on rev5 hardware (PC13). rev4 was on PB13, conflicts with SPI devices
    if (hse_value == 12000000) {
        gpio.pin = Pin_13;
        gpio.speed = Speed_2MHz;
        gpio.mode = Mode_IN_FLOATING;
        gpioInit(GPIOC, &gpio);
    }
#endif
    mpuIntExtiInit();

    mpuConfiguration.write(MPU_RA_PWR_MGMT_1, MPU6500_BIT_RESET);
    delay(50);
    mpuConfiguration.write(MPU_RA_PWR_MGMT_1, INV_CLK_PLL);
    delayMicroseconds(1);
    mpuConfiguration.write(MPU_RA_GYRO_CONFIG, INV_FSR_2000DPS << 3 | FCB_DISABLED); //Fchoice_b defaults to 00 which makes fchoice 11
    delay(15);
    mpuConfiguration.write(MPU_RA_ACCEL_CONFIG, INV_FSR_8G << 3);
    delay(15);

    // Select the gyro digital low-pass filter / sample-rate mode.
    if (lpf == 4) {
        mpuConfiguration.write(MPU_RA_CONFIG, 1); //1KHz, 184DLPF
    } else if (lpf < 4) {
        mpuConfiguration.write(MPU_RA_CONFIG, 7); //8KHz, 3600DLPF
    } else {
        mpuConfiguration.write(MPU_RA_CONFIG, 0); //8KHz, 250DLPF
    }
    delay(15);

    mpuConfiguration.write(MPU_RA_SMPLRT_DIV, gyroMPU6xxxGetDividerDrops()); // Get Divider Drops
    delay(15);
    mpuConfiguration.write(MPU_RA_INT_PIN_CFG, 0 << 7 | 0 << 6 | 0 << 5 | 1 << 4 | 0 << 3 | 0 << 2 | 1 << 1 | 0 << 0); // INT_ANYRD_2CLEAR, BYPASS_EN
    delayMicroseconds(1);

#if defined(USE_MPU_DATA_READY_SIGNAL)
    mpuConfiguration.write(MPU_RA_INT_ENABLE, 0x01); //this resets register MPU_RA_PWR_MGMT_1 and won't read back correctly.
    delayMicroseconds(1);
#endif
}
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.