text (string, 89–104k chars) | code_tokens (list) | avg_line_len (float64, 7.91–980) | score (float64, 0–630)
---|---|---|---|
def _payload_to_dict(self):
"""When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
"""
if self.status != self.ERROR or not self.payload:
return self.payload
import traceback
return {
"error": self.payload.error,
"args": self.payload.args,
"traceback": traceback.format_exception(*self.payload.traceback)
}
|
[
"def",
"_payload_to_dict",
"(",
"self",
")",
":",
"if",
"self",
".",
"status",
"!=",
"self",
".",
"ERROR",
"or",
"not",
"self",
".",
"payload",
":",
"return",
"self",
".",
"payload",
"import",
"traceback",
"return",
"{",
"\"error\"",
":",
"self",
".",
"payload",
".",
"error",
",",
"\"args\"",
":",
"self",
".",
"payload",
".",
"args",
",",
"\"traceback\"",
":",
"traceback",
".",
"format_exception",
"(",
"*",
"self",
".",
"payload",
".",
"traceback",
")",
"}"
] | 32.142857 | 16.571429 |
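For context, a minimal, self-contained sketch of the serialization idiom this method relies on, assuming `payload.traceback` holds a `sys.exc_info()`-style `(type, value, traceback)` triple:

import sys
import traceback

try:
    raise ValueError("boom")
except ValueError:
    exc_info = sys.exc_info()  # (type, value, traceback) triple, as assumed stored on the payload

# format_exception(*triple) renders the traceback as a list of strings
print("".join(traceback.format_exception(*exc_info)))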
def update(self,
stats,
duration=3):
"""Display stats to stdout.
Refresh every `duration` seconds.
"""
for plugin, attribute in self.plugins_list:
# Check if the plugin exists and is enabled
if plugin in stats.getPluginsList() and \
stats.get_plugin(plugin).is_enable():
stat = stats.get_plugin(plugin).get_export()
else:
continue
# Display stats
if attribute is not None:
# With attribute
try:
printandflush("{}.{}: {}".format(plugin, attribute,
stat[attribute]))
except KeyError as err:
logger.error("Can not display stat {}.{} ({})".format(plugin, attribute, err))
else:
# Without attribute
printandflush("{}: {}".format(plugin, stat))
# Wait until next refresh
if duration > 0:
time.sleep(duration)
|
[
"def",
"update",
"(",
"self",
",",
"stats",
",",
"duration",
"=",
"3",
")",
":",
"for",
"plugin",
",",
"attribute",
"in",
"self",
".",
"plugins_list",
":",
"# Check if the plugin exist and is enable",
"if",
"plugin",
"in",
"stats",
".",
"getPluginsList",
"(",
")",
"and",
"stats",
".",
"get_plugin",
"(",
"plugin",
")",
".",
"is_enable",
"(",
")",
":",
"stat",
"=",
"stats",
".",
"get_plugin",
"(",
"plugin",
")",
".",
"get_export",
"(",
")",
"else",
":",
"continue",
"# Display stats",
"if",
"attribute",
"is",
"not",
"None",
":",
"# With attribute",
"try",
":",
"printandflush",
"(",
"\"{}.{}: {}\"",
".",
"format",
"(",
"plugin",
",",
"attribute",
",",
"stat",
"[",
"attribute",
"]",
")",
")",
"except",
"KeyError",
"as",
"err",
":",
"logger",
".",
"error",
"(",
"\"Can not display stat {}.{} ({})\"",
".",
"format",
"(",
"plugin",
",",
"attribute",
",",
"err",
")",
")",
"else",
":",
"# Without attribute",
"printandflush",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"plugin",
",",
"stat",
")",
")",
"# Wait until next refresh",
"if",
"duration",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"duration",
")"
] | 37.607143 | 16.035714 |
def subspaces(self, expressions_list=None, dimensions=None, exclude=None, **kwargs):
"""Generate a Subspaces object, based on a custom list of expressions or all possible combinations based on
dimension
:param expressions_list: list of list of expressions, where the inner list defines the subspace
:param dimensions: if given, generates a subspace with all possible combinations for that dimension
:param exclude: list of
"""
if dimensions is not None:
expressions_list = list(itertools.combinations(self.get_column_names(), dimensions))
if exclude is not None:
import six
def excluded(expressions):
if callable(exclude):
return exclude(expressions)
elif isinstance(exclude, six.string_types):
return exclude in expressions
elif isinstance(exclude, (list, tuple)):
# $#expressions = set(expressions)
for e in exclude:
if isinstance(e, six.string_types):
if e in expressions:
return True
elif isinstance(e, (list, tuple)):
if set(e).issubset(expressions):
return True
else:
raise ValueError("elements of exclude should contain a string or a sequence of strings")
else:
raise ValueError("exclude should contain a string, a sequence of strings, or should be a callable")
return False
# test if any of the elements of exclude are a subset of the expression
expressions_list = [expr for expr in expressions_list if not excluded(expr)]
logger.debug("expression list generated: %r", expressions_list)
import vaex.legacy
return vaex.legacy.Subspaces([self(*expressions, **kwargs) for expressions in expressions_list])
|
[
"def",
"subspaces",
"(",
"self",
",",
"expressions_list",
"=",
"None",
",",
"dimensions",
"=",
"None",
",",
"exclude",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"dimensions",
"is",
"not",
"None",
":",
"expressions_list",
"=",
"list",
"(",
"itertools",
".",
"combinations",
"(",
"self",
".",
"get_column_names",
"(",
")",
",",
"dimensions",
")",
")",
"if",
"exclude",
"is",
"not",
"None",
":",
"import",
"six",
"def",
"excluded",
"(",
"expressions",
")",
":",
"if",
"callable",
"(",
"exclude",
")",
":",
"return",
"exclude",
"(",
"expressions",
")",
"elif",
"isinstance",
"(",
"exclude",
",",
"six",
".",
"string_types",
")",
":",
"return",
"exclude",
"in",
"expressions",
"elif",
"isinstance",
"(",
"exclude",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"# $#expressions = set(expressions)",
"for",
"e",
"in",
"exclude",
":",
"if",
"isinstance",
"(",
"e",
",",
"six",
".",
"string_types",
")",
":",
"if",
"e",
"in",
"expressions",
":",
"return",
"True",
"elif",
"isinstance",
"(",
"e",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"if",
"set",
"(",
"e",
")",
".",
"issubset",
"(",
"expressions",
")",
":",
"return",
"True",
"else",
":",
"raise",
"ValueError",
"(",
"\"elements of exclude should contain a string or a sequence of strings\"",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"exclude should contain a string, a sequence of strings, or should be a callable\"",
")",
"return",
"False",
"# test if any of the elements of exclude are a subset of the expression",
"expressions_list",
"=",
"[",
"expr",
"for",
"expr",
"in",
"expressions_list",
"if",
"not",
"excluded",
"(",
"expr",
")",
"]",
"logger",
".",
"debug",
"(",
"\"expression list generated: %r\"",
",",
"expressions_list",
")",
"import",
"vaex",
".",
"legacy",
"return",
"vaex",
".",
"legacy",
".",
"Subspaces",
"(",
"[",
"self",
"(",
"*",
"expressions",
",",
"*",
"*",
"kwargs",
")",
"for",
"expressions",
"in",
"expressions_list",
"]",
")"
] | 57.297297 | 25.837838 |
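A runnable distillation of the `exclude` filtering for the simple string case (the column names here are hypothetical; the real method wraps this in vaex's Subspaces machinery):

import itertools

columns = ["x", "y", "z"]
expressions_list = list(itertools.combinations(columns, 2))

exclude = ["z"]  # any subspace containing "z" is dropped

def excluded(expressions):
    # string elements exclude every subspace that contains them
    return any(e in expressions for e in exclude)

print([expr for expr in expressions_list if not excluded(expr)])  # [('x', 'y')]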
def nn_allocmsg(size, type):
"allocate a message"
pointer = _nn_allocmsg(size, type)
if pointer is None:
return None
return _create_message(pointer, size)
|
[
"def",
"nn_allocmsg",
"(",
"size",
",",
"type",
")",
":",
"pointer",
"=",
"_nn_allocmsg",
"(",
"size",
",",
"type",
")",
"if",
"pointer",
"is",
"None",
":",
"return",
"None",
"return",
"_create_message",
"(",
"pointer",
",",
"size",
")"
] | 28.833333 | 11.5 |
def orcid_uri_to_orcid(value):
"Strip the uri schema from the start of ORCID URL strings"
if value is None:
return value
replace_values = ['http://orcid.org/', 'https://orcid.org/']
for replace_value in replace_values:
value = value.replace(replace_value, '')
return value
|
[
"def",
"orcid_uri_to_orcid",
"(",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"value",
"replace_values",
"=",
"[",
"'http://orcid.org/'",
",",
"'https://orcid.org/'",
"]",
"for",
"replace_value",
"in",
"replace_values",
":",
"value",
"=",
"value",
".",
"replace",
"(",
"replace_value",
",",
"''",
")",
"return",
"value"
] | 37.625 | 15.875 |
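Assuming the function above is in scope, usage is straightforward (the identifier below is ORCID's documented example ID):

print(orcid_uri_to_orcid("https://orcid.org/0000-0002-1825-0097"))  # 0000-0002-1825-0097
print(orcid_uri_to_orcid(None))                                     # None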
def run(engine, parameters, components_paths=None, requisite_components=None, visible_components=None):
"""
Starts the Application.
:param engine: Engine.
:type engine: QObject
:param parameters: Command line parameters.
:type parameters: tuple
:param components_paths: Components components_paths.
:type components_paths: tuple or list
:param requisite_components: Requisite components names.
:type requisite_components: tuple or list
:param visible_components: Visible components names.
:type visible_components: tuple or list
:return: Definition success.
:rtype: bool
"""
# Command line parameters handling.
RuntimeGlobals.parameters, RuntimeGlobals.arguments = parameters
foundations.trace.evaluate_trace_request(RuntimeGlobals.parameters.trace_modules, foundations.verbose.tracer)
if RuntimeGlobals.parameters.about:
for line in SESSION_HEADER_TEXT:
sys.stdout.write("{0}\n".format(line))
foundations.core.exit(1)
# Redirecting standard output and error messages.
sys.stdout = foundations.verbose.StandardOutputStreamer(LOGGER)
sys.stderr = foundations.verbose.StandardOutputStreamer(LOGGER)
# Setting application verbose level.
foundations.verbose.set_verbosity_level(4)
# Setting user application data directory.
if RuntimeGlobals.parameters.user_application_data_directory:
user_application_data_directory = RuntimeGlobals.user_application_data_directory = \
RuntimeGlobals.parameters.user_application_data_directory
else:
user_application_data_directory = RuntimeGlobals.user_application_data_directory = \
foundations.environment.get_user_application_data_directory()
if not set_user_application_data_directory(user_application_data_directory):
raise umbra.exceptions.EngineConfigurationError(
"{0} | '{1}' user Application data directory is not available, '{2}' will now close!".format(
__name__, RuntimeGlobals.user_application_data_directory, Constants.application_name))
if foundations.environment.get_temporary_directory() in user_application_data_directory:
umbra.ui.widgets.message_box.message_box("Error",
"Error",
"{0} failed to use the default user Application data directory to store its preferences \
and has defaulted to the following directory:\n\n\t'{1}'.\n\nReasons for this are various:\n\
\t- Undefined 'APPDATA' ( Windows ) or 'HOME' ( Mac Os X, Linux ) environment variables.\n\
\t- User name with non 'UTF-8' encoding compliant characters.\n\
\t- Non 'UTF-8' encoding compliant characters in the preferences directory path.\n\n\
You will have to define your own preferences directory by launching {0} with the \
'-u \"path\\to\\the\\custom\\preferences\\directory\"' command line parameter.".format(
Constants.application_name,
user_application_data_directory))
LOGGER.debug("> Application Python interpreter: '{0}'".format(sys.executable))
LOGGER.debug("> Application PyQt version: '{0}'".format(PYQT_VERSION_STR))
LOGGER.debug("> Application startup location: '{0}'".format(os.getcwd()))
LOGGER.debug("> Session user Application data directory: '{0}'".format(
RuntimeGlobals.user_application_data_directory))
LOGGER.debug("> Initializing '{0}'!".format(Constants.application_name))
# Getting the logging file path.
RuntimeGlobals.logging_file = get_logging_file()
RuntimeGlobals.logging_file_handler = foundations.verbose.get_logging_file_handler(
file=RuntimeGlobals.logging_file)
# Getting the patches file path.
RuntimeGlobals.patches_file = os.path.join(RuntimeGlobals.user_application_data_directory,
Constants.patches_directory,
Constants.patches_file)
# Initializing the patches manager.
RuntimeGlobals.patches_manager = umbra.managers.patches_manager.PatchesManager(RuntimeGlobals.patches_file,
[os.path.join(path,
Constants.patches_directory)
for path in
RuntimeGlobals.resources_directories])
RuntimeGlobals.patches_manager.register_patches() and RuntimeGlobals.patches_manager.apply_patches()
# Retrieving settings file.
RuntimeGlobals.settings_file = os.path.join(RuntimeGlobals.user_application_data_directory,
Constants.settings_directory,
Constants.settings_file)
RuntimeGlobals.settings = Preferences(RuntimeGlobals.settings_file)
LOGGER.debug("> Retrieving default layouts.")
RuntimeGlobals.settings.set_default_layouts(("startup_centric",))
foundations.common.path_exists(RuntimeGlobals.settings_file) or RuntimeGlobals.settings.set_default_preferences()
LOGGER.debug("> Retrieving stored verbose level.")
RuntimeGlobals.verbosity_level = RuntimeGlobals.parameters.verbosity_level \
if RuntimeGlobals.parameters.verbosity_level is not None else \
foundations.common.get_first_item(RuntimeGlobals.settings.get_key("Settings", "verbosity_level").toInt())
LOGGER.debug("> Setting logger verbosity level to: '{0}'.".format(RuntimeGlobals.verbosity_level))
foundations.verbose.set_verbosity_level(RuntimeGlobals.verbosity_level)
RuntimeGlobals.settings.set_key("Settings", "verbosity_level", RuntimeGlobals.verbosity_level)
LOGGER.debug("> Retrieving stored logging formatter.")
logging_formatter = RuntimeGlobals.parameters.logging_formatter if RuntimeGlobals.parameters.logging_formatter is not None else \
foundations.strings.to_string(RuntimeGlobals.settings.get_key("Settings", "logging_formatter").toString())
logging_formatter = logging_formatter if logging_formatter in RuntimeGlobals.logging_formatters else None
RuntimeGlobals.logging_active_formatter = logging_formatter if logging_formatter is not None else Constants.logging_default_formatter
LOGGER.debug("> Setting logging formatter: '{0}'.".format(RuntimeGlobals.logging_active_formatter))
for handler in (RuntimeGlobals.logging_console_handler, RuntimeGlobals.logging_file_handler):
handler and handler.setFormatter(RuntimeGlobals.logging_formatters[RuntimeGlobals.logging_active_formatter])
# Starting the session handler.
RuntimeGlobals.logging_session_handler = foundations.verbose.get_logging_stream_handler()
RuntimeGlobals.logging_session_handler_stream = RuntimeGlobals.logging_session_handler.stream
LOGGER.info(Constants.logging_separators)
for line in SESSION_HEADER_TEXT:
LOGGER.info(line)
LOGGER.info("{0} | Session started at: {1}".format(Constants.application_name, time.strftime('%X - %x')))
LOGGER.info(Constants.logging_separators)
LOGGER.info("{0} | Starting Interface!".format(Constants.application_name))
# Initializing splashscreen.
if RuntimeGlobals.parameters.hide_splash_screen:
LOGGER.debug("> SplashScreen skipped by 'hide_splash_screen' command line parameter.")
else:
LOGGER.debug("> Initializing splashscreen.")
RuntimeGlobals.splashscreen_image = QPixmap(umbra.ui.common.get_resource_path(UiConstants.splash_screen_image))
RuntimeGlobals.splashscreen = Delayed_QSplashScreen(RuntimeGlobals.splashscreen_image, text_color=Qt.white)
RuntimeGlobals.splashscreen.show_message(
"{0} - {1} | Initializing {0}.".format(Constants.application_name, Constants.version))
RuntimeGlobals.splashscreen.show()
# Initializing requests stack.
RuntimeGlobals.requests_stack = collections.deque()
# Initializing engine.
RuntimeGlobals.engine = engine(parent=None,
components_paths=components_paths,
requisite_components=requisite_components,
visible_components=visible_components,
splashscreen=RuntimeGlobals.splashscreen,
requests_stack=RuntimeGlobals.requests_stack,
patches_manager=RuntimeGlobals.patches_manager,
user_application_data_directory=RuntimeGlobals.user_application_data_directory,
logging_session_handler=RuntimeGlobals.logging_session_handler,
logging_file_handler=RuntimeGlobals.logging_file_handler,
logging_console_handler=RuntimeGlobals.logging_console_handler,
logging_session_handler_stream=RuntimeGlobals.logging_session_handler_stream,
logging_active_formatter=RuntimeGlobals.logging_active_formatter,
settings=RuntimeGlobals.settings,
verbosity_level=RuntimeGlobals.verbosity_level,
parameters=RuntimeGlobals.parameters,
arguments=RuntimeGlobals.arguments)
RuntimeGlobals.engine.show()
RuntimeGlobals.engine.raise_()
return sys.exit(RuntimeGlobals.application.exec_())
|
[
"def",
"run",
"(",
"engine",
",",
"parameters",
",",
"components_paths",
"=",
"None",
",",
"requisite_components",
"=",
"None",
",",
"visible_components",
"=",
"None",
")",
":",
"# Command line parameters handling.",
"RuntimeGlobals",
".",
"parameters",
",",
"RuntimeGlobals",
".",
"arguments",
"=",
"parameters",
"foundations",
".",
"trace",
".",
"evaluate_trace_request",
"(",
"RuntimeGlobals",
".",
"parameters",
".",
"trace_modules",
",",
"foundations",
".",
"verbose",
".",
"tracer",
")",
"if",
"RuntimeGlobals",
".",
"parameters",
".",
"about",
":",
"for",
"line",
"in",
"SESSION_HEADER_TEXT",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"{0}\\n\"",
".",
"format",
"(",
"line",
")",
")",
"foundations",
".",
"core",
".",
"exit",
"(",
"1",
")",
"# Redirecting standard output and error messages.",
"sys",
".",
"stdout",
"=",
"foundations",
".",
"verbose",
".",
"StandardOutputStreamer",
"(",
"LOGGER",
")",
"sys",
".",
"stderr",
"=",
"foundations",
".",
"verbose",
".",
"StandardOutputStreamer",
"(",
"LOGGER",
")",
"# Setting application verbose level.",
"foundations",
".",
"verbose",
".",
"set_verbosity_level",
"(",
"4",
")",
"# Setting user application data directory.",
"if",
"RuntimeGlobals",
".",
"parameters",
".",
"user_application_data_directory",
":",
"user_application_data_directory",
"=",
"RuntimeGlobals",
".",
"user_application_data_directory",
"=",
"RuntimeGlobals",
".",
"parameters",
".",
"user_application_data_directory",
"else",
":",
"user_application_data_directory",
"=",
"RuntimeGlobals",
".",
"user_application_data_directory",
"=",
"foundations",
".",
"environment",
".",
"get_user_application_data_directory",
"(",
")",
"if",
"not",
"set_user_application_data_directory",
"(",
"user_application_data_directory",
")",
":",
"raise",
"umbra",
".",
"exceptions",
".",
"EngineConfigurationError",
"(",
"\"{0} | '{1}' user Application data directory is not available, '{2}' will now close!\"",
".",
"format",
"(",
"__name__",
",",
"RuntimeGlobals",
".",
"user_application_data_directory",
",",
"Constants",
".",
"application_name",
")",
")",
"if",
"foundations",
".",
"environment",
".",
"get_temporary_directory",
"(",
")",
"in",
"user_application_data_directory",
":",
"umbra",
".",
"ui",
".",
"widgets",
".",
"message_box",
".",
"message_box",
"(",
"\"Error\"",
",",
"\"Error\"",
",",
"\"{0} failed to use the default user Application data directory to store its preferences \\\nand has defaulted to the following directory:\\n\\n\\t'{1}'.\\n\\nReasons for this are various:\\n\\\n\\t- Undefined 'APPDATA' ( Windows ) or 'HOME' ( Mac Os X, Linux ) environment variables.\\n\\\n\\t- User name with non 'UTF-8' encoding compliant characters.\\n\\\n\\t- Non 'UTF-8' encoding compliant characters in the preferences directory path.\\n\\n\\\nYou will have to define your own preferences directory by launching {0} with the \\\n'-u \\\"path\\\\to\\\\the\\\\custom\\\\preferences\\\\directory\\\"' command line parameter.\"",
".",
"format",
"(",
"Constants",
".",
"application_name",
",",
"user_application_data_directory",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Application Python interpreter: '{0}'\"",
".",
"format",
"(",
"sys",
".",
"executable",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Application PyQt version: '{0}'\"",
".",
"format",
"(",
"PYQT_VERSION_STR",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Application startup location: '{0}'\"",
".",
"format",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Session user Application data directory: '{0}'\"",
".",
"format",
"(",
"RuntimeGlobals",
".",
"user_application_data_directory",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Initializing '{0}'!\"",
".",
"format",
"(",
"Constants",
".",
"application_name",
")",
")",
"# Getting the logging file path.",
"RuntimeGlobals",
".",
"logging_file",
"=",
"get_logging_file",
"(",
")",
"RuntimeGlobals",
".",
"logging_file_handler",
"=",
"foundations",
".",
"verbose",
".",
"get_logging_file_handler",
"(",
"file",
"=",
"RuntimeGlobals",
".",
"logging_file",
")",
"# Getting the patches file path.",
"RuntimeGlobals",
".",
"patches_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"RuntimeGlobals",
".",
"user_application_data_directory",
",",
"Constants",
".",
"patches_directory",
",",
"Constants",
".",
"patches_file",
")",
"# Initializing the patches manager.",
"RuntimeGlobals",
".",
"patches_manager",
"=",
"umbra",
".",
"managers",
".",
"patches_manager",
".",
"PatchesManager",
"(",
"RuntimeGlobals",
".",
"patches_file",
",",
"[",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"Constants",
".",
"patches_directory",
")",
"for",
"path",
"in",
"RuntimeGlobals",
".",
"resources_directories",
"]",
")",
"RuntimeGlobals",
".",
"patches_manager",
".",
"register_patches",
"(",
")",
"and",
"RuntimeGlobals",
".",
"patches_manager",
".",
"apply_patches",
"(",
")",
"# Retrieving settings file.",
"RuntimeGlobals",
".",
"settings_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"RuntimeGlobals",
".",
"user_application_data_directory",
",",
"Constants",
".",
"settings_directory",
",",
"Constants",
".",
"settings_file",
")",
"RuntimeGlobals",
".",
"settings",
"=",
"Preferences",
"(",
"RuntimeGlobals",
".",
"settings_file",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Retrieving default layouts.\"",
")",
"RuntimeGlobals",
".",
"settings",
".",
"set_default_layouts",
"(",
"(",
"\"startup_centric\"",
",",
")",
")",
"foundations",
".",
"common",
".",
"path_exists",
"(",
"RuntimeGlobals",
".",
"settings_file",
")",
"or",
"RuntimeGlobals",
".",
"settings",
".",
"set_default_preferences",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Retrieving stored verbose level.\"",
")",
"RuntimeGlobals",
".",
"verbosity_level",
"=",
"RuntimeGlobals",
".",
"parameters",
".",
"verbosity_level",
"if",
"RuntimeGlobals",
".",
"parameters",
".",
"verbosity_level",
"is",
"not",
"None",
"else",
"foundations",
".",
"common",
".",
"get_first_item",
"(",
"RuntimeGlobals",
".",
"settings",
".",
"get_key",
"(",
"\"Settings\"",
",",
"\"verbosity_level\"",
")",
".",
"toInt",
"(",
")",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Setting logger verbosity level to: '{0}'.\"",
".",
"format",
"(",
"RuntimeGlobals",
".",
"verbosity_level",
")",
")",
"foundations",
".",
"verbose",
".",
"set_verbosity_level",
"(",
"RuntimeGlobals",
".",
"verbosity_level",
")",
"RuntimeGlobals",
".",
"settings",
".",
"set_key",
"(",
"\"Settings\"",
",",
"\"verbosity_level\"",
",",
"RuntimeGlobals",
".",
"verbosity_level",
")",
"LOGGER",
".",
"debug",
"(",
"\"> Retrieving stored logging formatter.\"",
")",
"logging_formatter",
"=",
"RuntimeGlobals",
".",
"parameters",
".",
"logging_formatter",
"if",
"RuntimeGlobals",
".",
"parameters",
".",
"logging_formatter",
"is",
"not",
"None",
"else",
"foundations",
".",
"strings",
".",
"to_string",
"(",
"RuntimeGlobals",
".",
"settings",
".",
"get_key",
"(",
"\"Settings\"",
",",
"\"logging_formatter\"",
")",
".",
"toString",
"(",
")",
")",
"logging_formatter",
"=",
"logging_formatter",
"if",
"logging_formatter",
"in",
"RuntimeGlobals",
".",
"logging_formatters",
"else",
"None",
"RuntimeGlobals",
".",
"logging_active_formatter",
"=",
"logging_formatter",
"if",
"logging_formatter",
"is",
"not",
"None",
"else",
"Constants",
".",
"logging_default_formatter",
"LOGGER",
".",
"debug",
"(",
"\"> Setting logging formatter: '{0}'.\"",
".",
"format",
"(",
"RuntimeGlobals",
".",
"logging_active_formatter",
")",
")",
"for",
"handler",
"in",
"(",
"RuntimeGlobals",
".",
"logging_console_handler",
",",
"RuntimeGlobals",
".",
"logging_file_handler",
")",
":",
"handler",
"and",
"handler",
".",
"setFormatter",
"(",
"RuntimeGlobals",
".",
"logging_formatters",
"[",
"RuntimeGlobals",
".",
"logging_active_formatter",
"]",
")",
"# Starting the session handler.",
"RuntimeGlobals",
".",
"logging_session_handler",
"=",
"foundations",
".",
"verbose",
".",
"get_logging_stream_handler",
"(",
")",
"RuntimeGlobals",
".",
"logging_session_handler_stream",
"=",
"RuntimeGlobals",
".",
"logging_session_handler",
".",
"stream",
"LOGGER",
".",
"info",
"(",
"Constants",
".",
"logging_separators",
")",
"for",
"line",
"in",
"SESSION_HEADER_TEXT",
":",
"LOGGER",
".",
"info",
"(",
"line",
")",
"LOGGER",
".",
"info",
"(",
"\"{0} | Session started at: {1}\"",
".",
"format",
"(",
"Constants",
".",
"application_name",
",",
"time",
".",
"strftime",
"(",
"'%X - %x'",
")",
")",
")",
"LOGGER",
".",
"info",
"(",
"Constants",
".",
"logging_separators",
")",
"LOGGER",
".",
"info",
"(",
"\"{0} | Starting Interface!\"",
".",
"format",
"(",
"Constants",
".",
"application_name",
")",
")",
"# Initializing splashscreen.",
"if",
"RuntimeGlobals",
".",
"parameters",
".",
"hide_splash_screen",
":",
"LOGGER",
".",
"debug",
"(",
"\"> SplashScreen skipped by 'hide_splash_screen' command line parameter.\"",
")",
"else",
":",
"LOGGER",
".",
"debug",
"(",
"\"> Initializing splashscreen.\"",
")",
"RuntimeGlobals",
".",
"splashscreen_image",
"=",
"QPixmap",
"(",
"umbra",
".",
"ui",
".",
"common",
".",
"get_resource_path",
"(",
"UiConstants",
".",
"splash_screen_image",
")",
")",
"RuntimeGlobals",
".",
"splashscreen",
"=",
"Delayed_QSplashScreen",
"(",
"RuntimeGlobals",
".",
"splashscreen_image",
",",
"text_color",
"=",
"Qt",
".",
"white",
")",
"RuntimeGlobals",
".",
"splashscreen",
".",
"show_message",
"(",
"\"{0} - {1} | Initializing {0}.\"",
".",
"format",
"(",
"Constants",
".",
"application_name",
",",
"Constants",
".",
"version",
")",
")",
"RuntimeGlobals",
".",
"splashscreen",
".",
"show",
"(",
")",
"# Initializing requests stack.",
"RuntimeGlobals",
".",
"requests_stack",
"=",
"collections",
".",
"deque",
"(",
")",
"# Initializing engine.",
"RuntimeGlobals",
".",
"engine",
"=",
"engine",
"(",
"parent",
"=",
"None",
",",
"components_paths",
"=",
"components_paths",
",",
"requisite_components",
"=",
"requisite_components",
",",
"visible_components",
"=",
"visible_components",
",",
"splashscreen",
"=",
"RuntimeGlobals",
".",
"splashscreen",
",",
"requests_stack",
"=",
"RuntimeGlobals",
".",
"requests_stack",
",",
"patches_manager",
"=",
"RuntimeGlobals",
".",
"patches_manager",
",",
"user_application_data_directory",
"=",
"RuntimeGlobals",
".",
"user_application_data_directory",
",",
"logging_session_handler",
"=",
"RuntimeGlobals",
".",
"logging_session_handler",
",",
"logging_file_handler",
"=",
"RuntimeGlobals",
".",
"logging_file_handler",
",",
"logging_console_handler",
"=",
"RuntimeGlobals",
".",
"logging_console_handler",
",",
"logging_session_handler_stream",
"=",
"RuntimeGlobals",
".",
"logging_session_handler_stream",
",",
"logging_active_formatter",
"=",
"RuntimeGlobals",
".",
"logging_active_formatter",
",",
"settings",
"=",
"RuntimeGlobals",
".",
"settings",
",",
"verbosity_level",
"=",
"RuntimeGlobals",
".",
"verbosity_level",
",",
"parameters",
"=",
"RuntimeGlobals",
".",
"parameters",
",",
"arguments",
"=",
"RuntimeGlobals",
".",
"arguments",
")",
"RuntimeGlobals",
".",
"engine",
".",
"show",
"(",
")",
"RuntimeGlobals",
".",
"engine",
".",
"raise_",
"(",
")",
"return",
"sys",
".",
"exit",
"(",
"RuntimeGlobals",
".",
"application",
".",
"exec_",
"(",
")",
")"
] | 59.018405 | 35.165644 |
def reduce_vertex(self, name1, *names):
"""treat name1, name2, ... as same point.
name2.alias, name3.alias, ... are merged with name1.alias
the key name2, name3, ... in self.vertices are kept and mapped to
same Vertex instance as name1
"""
v = self.vertices[name1]
for n in names:
w = self.vertices[n]
v.alias.update(w.alias)
# remap n from w to v
self.vertices[n] = v
|
[
"def",
"reduce_vertex",
"(",
"self",
",",
"name1",
",",
"*",
"names",
")",
":",
"v",
"=",
"self",
".",
"vertices",
"[",
"name1",
"]",
"for",
"n",
"in",
"names",
":",
"w",
"=",
"self",
".",
"vertices",
"[",
"n",
"]",
"v",
".",
"alias",
".",
"update",
"(",
"w",
".",
"alias",
")",
"# replace mapping from n w by to v",
"self",
".",
"vertices",
"[",
"n",
"]",
"=",
"v"
] | 36.461538 | 11.846154 |
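A minimal stand-in sketch of the merge, assuming `alias` is a set (the `Vertex` class below is hypothetical):

class Vertex:
    def __init__(self, name):
        self.alias = {name}

vertices = {name: Vertex(name) for name in ("a", "b")}

# merge "b" into "a": union the aliases, then remap the key to the same instance
v, w = vertices["a"], vertices["b"]
v.alias.update(w.alias)
vertices["b"] = v

print(vertices["a"] is vertices["b"], sorted(vertices["a"].alias))  # True ['a', 'b']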
def create_prefetch(self, addresses):
"""Create futures needed before starting the process of reading the
address's value from the merkle tree.
Args:
addresses (list of str): addresses in the txn's inputs that
aren't in any base context (or any in the chain).
"""
with self._lock:
for add in addresses:
self._state[add] = _ContextFuture(address=add,
wait_for_tree=True)
|
[
"def",
"create_prefetch",
"(",
"self",
",",
"addresses",
")",
":",
"with",
"self",
".",
"_lock",
":",
"for",
"add",
"in",
"addresses",
":",
"self",
".",
"_state",
"[",
"add",
"]",
"=",
"_ContextFuture",
"(",
"address",
"=",
"add",
",",
"wait_for_tree",
"=",
"True",
")"
] | 38.846154 | 18.846154 |
def link_curves(*args, **kwargs):
""" Links the input curves together.
The end control point of curve k has to be the same as the start control point of curve k + 1.
Keyword Arguments:
* ``tol``: tolerance value for checking equality. *Default: 10e-8*
* ``validate``: flag to enable input validation. *Default: False*
:return: a tuple containing knot vector, control points, weights vector and knots
"""
# Get keyword arguments
tol = kwargs.get('tol', 10e-8)
validate = kwargs.get('validate', False)
# Validate input
if validate:
for idx in range(len(args) - 1):
if linalg.point_distance(args[idx].ctrlpts[-1], args[idx + 1].ctrlpts[0]) > tol:
raise GeomdlException("Curve #" + str(idx) + " and Curve #" + str(idx + 1) + " don't touch each other")
kv = [] # new knot vector
cpts = [] # new control points array
wgts = [] # new weights array
kv_connected = [] # superfluous knots to be removed
pdomain_end = 0
# Loop through the curves
for arg in args:
# Process knot vectors
if not kv:
kv += list(arg.knotvector[:-(arg.degree + 1)]) # get rid of the last superfluous knot to maintain split curve notation
cpts += list(arg.ctrlpts)
# Process control points
if arg.rational:
wgts += list(arg.weights)
else:
tmp_w = [1.0 for _ in range(arg.ctrlpts_size)]
wgts += tmp_w
else:
tmp_kv = [pdomain_end + k for k in arg.knotvector[1:-(arg.degree + 1)]]
kv += tmp_kv
cpts += list(arg.ctrlpts[1:])
# Process control points
if arg.rational:
wgts += list(arg.weights[1:])
else:
tmp_w = [1.0 for _ in range(arg.ctrlpts_size - 1)]
wgts += tmp_w
pdomain_end += arg.knotvector[-1]
kv_connected.append(pdomain_end)
# Fix curve by appending the last knot to the end
kv += [pdomain_end for _ in range(arg.degree + 1)]
# Remove the last knot from knot insertion list
kv_connected.pop()
return kv, cpts, wgts, kv_connected
|
[
"def",
"link_curves",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get keyword arguments",
"tol",
"=",
"kwargs",
".",
"get",
"(",
"'tol'",
",",
"10e-8",
")",
"validate",
"=",
"kwargs",
".",
"get",
"(",
"'validate'",
",",
"False",
")",
"# Validate input",
"if",
"validate",
":",
"for",
"idx",
"in",
"range",
"(",
"len",
"(",
"args",
")",
"-",
"1",
")",
":",
"if",
"linalg",
".",
"point_distance",
"(",
"args",
"[",
"idx",
"]",
".",
"ctrlpts",
"[",
"-",
"1",
"]",
",",
"args",
"[",
"idx",
"+",
"1",
"]",
".",
"ctrlpts",
"[",
"0",
"]",
")",
">",
"tol",
":",
"raise",
"GeomdlException",
"(",
"\"Curve #\"",
"+",
"str",
"(",
"idx",
")",
"+",
"\" and Curve #\"",
"+",
"str",
"(",
"idx",
"+",
"1",
")",
"+",
"\" don't touch each other\"",
")",
"kv",
"=",
"[",
"]",
"# new knot vector",
"cpts",
"=",
"[",
"]",
"# new control points array",
"wgts",
"=",
"[",
"]",
"# new weights array",
"kv_connected",
"=",
"[",
"]",
"# superfluous knots to be removed",
"pdomain_end",
"=",
"0",
"# Loop though the curves",
"for",
"arg",
"in",
"args",
":",
"# Process knot vectors",
"if",
"not",
"kv",
":",
"kv",
"+=",
"list",
"(",
"arg",
".",
"knotvector",
"[",
":",
"-",
"(",
"arg",
".",
"degree",
"+",
"1",
")",
"]",
")",
"# get rid of the last superfluous knot to maintain split curve notation",
"cpts",
"+=",
"list",
"(",
"arg",
".",
"ctrlpts",
")",
"# Process control points",
"if",
"arg",
".",
"rational",
":",
"wgts",
"+=",
"list",
"(",
"arg",
".",
"weights",
")",
"else",
":",
"tmp_w",
"=",
"[",
"1.0",
"for",
"_",
"in",
"range",
"(",
"arg",
".",
"ctrlpts_size",
")",
"]",
"wgts",
"+=",
"tmp_w",
"else",
":",
"tmp_kv",
"=",
"[",
"pdomain_end",
"+",
"k",
"for",
"k",
"in",
"arg",
".",
"knotvector",
"[",
"1",
":",
"-",
"(",
"arg",
".",
"degree",
"+",
"1",
")",
"]",
"]",
"kv",
"+=",
"tmp_kv",
"cpts",
"+=",
"list",
"(",
"arg",
".",
"ctrlpts",
"[",
"1",
":",
"]",
")",
"# Process control points",
"if",
"arg",
".",
"rational",
":",
"wgts",
"+=",
"list",
"(",
"arg",
".",
"weights",
"[",
"1",
":",
"]",
")",
"else",
":",
"tmp_w",
"=",
"[",
"1.0",
"for",
"_",
"in",
"range",
"(",
"arg",
".",
"ctrlpts_size",
"-",
"1",
")",
"]",
"wgts",
"+=",
"tmp_w",
"pdomain_end",
"+=",
"arg",
".",
"knotvector",
"[",
"-",
"1",
"]",
"kv_connected",
".",
"append",
"(",
"pdomain_end",
")",
"# Fix curve by appending the last knot to the end",
"kv",
"+=",
"[",
"pdomain_end",
"for",
"_",
"in",
"range",
"(",
"arg",
".",
"degree",
"+",
"1",
")",
"]",
"# Remove the last knot from knot insertion list",
"kv_connected",
".",
"pop",
"(",
")",
"return",
"kv",
",",
"cpts",
",",
"wgts",
",",
"kv_connected"
] | 36.830508 | 21.59322 |
def execute(cls, cmd, stdin_payload=None, **kwargs):
"""Execute a command via subprocess.Popen and returns the stdio.
:param string|list cmd: A list or string representing the command to run.
:param string stdin_payload: A string representing the stdin payload, if any, to send.
:param **kwargs: Additional kwargs to pass through to subprocess.Popen.
:return: A tuple of strings representing (stdout, stderr), pre-decoded for utf-8.
:raises: `Executor.ExecutableNotFound` when the executable requested to run does not exist.
`Executor.NonZeroExit` when the execution fails with a non-zero exit code.
"""
process = cls.open_process(cmd=cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
**kwargs)
stdout_raw, stderr_raw = process.communicate(input=stdin_payload)
# N.B. In cases where `stdout` or `stderr` is passed as parameters, these can be None.
stdout = stdout_raw.decode('utf-8') if stdout_raw is not None else stdout_raw
stderr = stderr_raw.decode('utf-8') if stderr_raw is not None else stderr_raw
if process.returncode != 0:
raise cls.NonZeroExit(cmd, process.returncode, stdout, stderr)
return stdout, stderr
|
[
"def",
"execute",
"(",
"cls",
",",
"cmd",
",",
"stdin_payload",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"process",
"=",
"cls",
".",
"open_process",
"(",
"cmd",
"=",
"cmd",
",",
"stdin",
"=",
"subprocess",
".",
"PIPE",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"*",
"*",
"kwargs",
")",
"stdout_raw",
",",
"stderr_raw",
"=",
"process",
".",
"communicate",
"(",
"input",
"=",
"stdin_payload",
")",
"# N.B. In cases where `stdout` or `stderr` is passed as parameters, these can be None.",
"stdout",
"=",
"stdout_raw",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"stdout_raw",
"is",
"not",
"None",
"else",
"stdout_raw",
"stderr",
"=",
"stderr_raw",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"stderr_raw",
"is",
"not",
"None",
"else",
"stderr_raw",
"if",
"process",
".",
"returncode",
"!=",
"0",
":",
"raise",
"cls",
".",
"NonZeroExit",
"(",
"cmd",
",",
"process",
".",
"returncode",
",",
"stdout",
",",
"stderr",
")",
"return",
"stdout",
",",
"stderr"
] | 55.041667 | 27.333333 |
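The underlying Popen/communicate idiom, as a standalone sketch (a POSIX `echo` is assumed to be on the PATH):

import subprocess

process = subprocess.Popen(["echo", "hello"],
                           stdin=subprocess.PIPE,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
stdout_raw, stderr_raw = process.communicate(input=None)

# decode exactly as the method above does, guarding against None
stdout = stdout_raw.decode("utf-8") if stdout_raw is not None else stdout_raw
print(process.returncode, stdout.strip())  # 0 hello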
def current_position(self):
"""Return a tuple of (start, end)."""
token = self.tokenizer.peek(0)
if token:
return token.start, token.end
return self.tokenizer.position, self.tokenizer.position + 1
|
[
"def",
"current_position",
"(",
"self",
")",
":",
"token",
"=",
"self",
".",
"tokenizer",
".",
"peek",
"(",
"0",
")",
"if",
"token",
":",
"return",
"token",
".",
"start",
",",
"token",
".",
"end",
"return",
"self",
".",
"tokenizer",
".",
"position",
",",
"self",
".",
"tokenizer",
".",
"position",
"+",
"1"
] | 33.571429 | 15.142857 |
def count_consonants(text):
"""Count number of occurrences of consonants in a given string"""
count = 0
for i in text:
if i.lower() in config.AVRO_CONSONANTS:
count += 1
return count
|
[
"def",
"count_consonants",
"(",
"text",
")",
":",
"count",
"=",
"0",
"for",
"i",
"in",
"text",
":",
"if",
"i",
".",
"lower",
"(",
")",
"in",
"config",
".",
"AVRO_CONSONANTS",
":",
"count",
"+=",
"1",
"return",
"count"
] | 30.285714 | 15.857143 |
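A standalone version with a hypothetical consonant set standing in for `config.AVRO_CONSONANTS` (the real constant lives in the Avro phonetic library's config):

AVRO_CONSONANTS = set("bcdfghjklmnpqrstvwxyz")  # stand-in for config.AVRO_CONSONANTS

print(sum(1 for ch in "Hello world" if ch.lower() in AVRO_CONSONANTS))  # 7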
def load_template_help(builtin):
"""Loads the help for a given template"""
help_file = "templates/%s-help.yml" % builtin
help_file = resource_filename(__name__, help_file)
help_obj = {}
if os.path.exists(help_file):
help_data = yaml.safe_load(open(help_file))
if 'name' in help_data:
help_obj['name'] = help_data['name']
if 'help' in help_data:
help_obj['help'] = help_data['help']
if 'args' in help_data:
help_obj['args'] = help_data['args']
return help_obj
|
[
"def",
"load_template_help",
"(",
"builtin",
")",
":",
"help_file",
"=",
"\"templates/%s-help.yml\"",
"%",
"builtin",
"help_file",
"=",
"resource_filename",
"(",
"__name__",
",",
"help_file",
")",
"help_obj",
"=",
"{",
"}",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"help_file",
")",
":",
"help_data",
"=",
"yaml",
".",
"safe_load",
"(",
"open",
"(",
"help_file",
")",
")",
"if",
"'name'",
"in",
"help_data",
":",
"help_obj",
"[",
"'name'",
"]",
"=",
"help_data",
"[",
"'name'",
"]",
"if",
"'help'",
"in",
"help_data",
":",
"help_obj",
"[",
"'help'",
"]",
"=",
"help_data",
"[",
"'help'",
"]",
"if",
"'args'",
"in",
"help_data",
":",
"help_obj",
"[",
"'args'",
"]",
"=",
"help_data",
"[",
"'args'",
"]",
"return",
"help_obj"
] | 29.833333 | 16.888889 |
def _default_interface(self, route_output=None):
"""
:param route_output: For mocking actual output
"""
if not route_output:
out, __, __ = exec_cmd('/sbin/ip route')
lines = out.splitlines()
else:
lines = route_output.split("\n")
for line in lines:
line = line.split()
if 'default' in line:
iface = line[4]
return self.interfaces.get(iface, None)
|
[
"def",
"_default_interface",
"(",
"self",
",",
"route_output",
"=",
"None",
")",
":",
"if",
"not",
"route_output",
":",
"out",
",",
"__",
",",
"__",
"=",
"exec_cmd",
"(",
"'/sbin/ip route'",
")",
"lines",
"=",
"out",
".",
"splitlines",
"(",
")",
"else",
":",
"lines",
"=",
"route_output",
".",
"split",
"(",
"\"\\n\"",
")",
"for",
"line",
"in",
"lines",
":",
"line",
"=",
"line",
".",
"split",
"(",
")",
"if",
"'default'",
"in",
"line",
":",
"iface",
"=",
"line",
"[",
"4",
"]",
"return",
"self",
".",
"interfaces",
".",
"get",
"(",
"iface",
",",
"None",
")"
] | 31.533333 | 11.666667 |
def expanding_stdize(obj, **kwargs):
"""Standardize a pandas object column-wise on expanding window.
**kwargs -> passed to `obj.expanding`
Example
-------
df = pd.DataFrame(np.random.randn(10, 3))
print(expanding_stdize(df, min_periods=5))
0 1 2
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 NaN NaN NaN
4 0.67639 -1.03507 0.96610
5 0.95008 -0.26067 0.27761
6 1.67793 -0.50816 0.19293
7 1.50364 -1.10035 -0.87859
8 -0.64949 0.08028 -0.51354
9 0.15280 -0.73283 -0.84907
"""
return (obj - obj.expanding(**kwargs).mean()) / (
obj.expanding(**kwargs).std()
)
|
[
"def",
"expanding_stdize",
"(",
"obj",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"obj",
"-",
"obj",
".",
"expanding",
"(",
"*",
"*",
"kwargs",
")",
".",
"mean",
"(",
")",
")",
"/",
"(",
"obj",
".",
"expanding",
"(",
"*",
"*",
"kwargs",
")",
".",
"std",
"(",
")",
")"
] | 29.4 | 12.76 |
def parse_arguments(kwargs):
"""Function that parses PrettyPrinter arguments.
Detects which aesthetics are mapped to which layers
and collects user-provided values.
Parameters
----------
kwargs: dict
The keyword arguments to PrettyPrinter.
Returns
-------
dict, dict
First dictionary is aesthetic to layer mapping.
Second dictionary is aesthetic to user value mapping.
"""
aesthetics = {}
values = {}
for aes in AESTHETICS:
if aes in kwargs:
aesthetics[aes] = kwargs[aes]
val_name = AES_VALUE_MAP[aes]
# map the user-provided CSS value or use the default
values[aes] = kwargs.get(val_name, DEFAULT_VALUE_MAP[aes])
return aesthetics, values
|
[
"def",
"parse_arguments",
"(",
"kwargs",
")",
":",
"aesthetics",
"=",
"{",
"}",
"values",
"=",
"{",
"}",
"for",
"aes",
"in",
"AESTHETICS",
":",
"if",
"aes",
"in",
"kwargs",
":",
"aesthetics",
"[",
"aes",
"]",
"=",
"kwargs",
"[",
"aes",
"]",
"val_name",
"=",
"AES_VALUE_MAP",
"[",
"aes",
"]",
"# map the user-provided CSS value or use the default",
"values",
"[",
"aes",
"]",
"=",
"kwargs",
".",
"get",
"(",
"val_name",
",",
"DEFAULT_VALUE_MAP",
"[",
"aes",
"]",
")",
"return",
"aesthetics",
",",
"values"
] | 30.12 | 18.16 |
def create(cls, *args, **kwargs) -> 'Entity':
"""Create a new record in the repository.
Also performs unique validations before creating the entity
:param args: positional arguments for the entity
:param kwargs: keyword arguments for the entity
"""
logger.debug(
f'Creating new `{cls.__name__}` object using data {kwargs}')
model_cls = repo_factory.get_model(cls)
repository = repo_factory.get_repository(cls)
try:
# Build the entity from the input arguments
# Raises validation errors, if any, at this point
entity = cls(*args, **kwargs)
# Do unique checks, create this object and return it
entity._validate_unique()
# Perform Pre-Save Actions
entity.pre_save()
# Build the model object and create it
model_obj = repository.create(model_cls.from_entity(entity))
# Update the auto fields of the entity
for field_name, field_obj in entity.meta_.declared_fields.items():
if isinstance(field_obj, Auto):
if isinstance(model_obj, dict):
field_val = model_obj[field_name]
else:
field_val = getattr(model_obj, field_name)
setattr(entity, field_name, field_val)
# Set Entity status to saved
entity.state_.mark_saved()
# Perform Post-Save Actions
entity.post_save()
return entity
except ValidationError:
# FIXME Log Exception
raise
|
[
"def",
"create",
"(",
"cls",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"'Entity'",
":",
"logger",
".",
"debug",
"(",
"f'Creating new `{cls.__name__}` object using data {kwargs}'",
")",
"model_cls",
"=",
"repo_factory",
".",
"get_model",
"(",
"cls",
")",
"repository",
"=",
"repo_factory",
".",
"get_repository",
"(",
"cls",
")",
"try",
":",
"# Build the entity from the input arguments",
"# Raises validation errors, if any, at this point",
"entity",
"=",
"cls",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Do unique checks, create this object and return it",
"entity",
".",
"_validate_unique",
"(",
")",
"# Perform Pre-Save Actions",
"entity",
".",
"pre_save",
"(",
")",
"# Build the model object and create it",
"model_obj",
"=",
"repository",
".",
"create",
"(",
"model_cls",
".",
"from_entity",
"(",
"entity",
")",
")",
"# Update the auto fields of the entity",
"for",
"field_name",
",",
"field_obj",
"in",
"entity",
".",
"meta_",
".",
"declared_fields",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"field_obj",
",",
"Auto",
")",
":",
"if",
"isinstance",
"(",
"model_obj",
",",
"dict",
")",
":",
"field_val",
"=",
"model_obj",
"[",
"field_name",
"]",
"else",
":",
"field_val",
"=",
"getattr",
"(",
"model_obj",
",",
"field_name",
")",
"setattr",
"(",
"entity",
",",
"field_name",
",",
"field_val",
")",
"# Set Entity status to saved",
"entity",
".",
"state_",
".",
"mark_saved",
"(",
")",
"# Perform Post-Save Actions",
"entity",
".",
"post_save",
"(",
")",
"return",
"entity",
"except",
"ValidationError",
":",
"# FIXME Log Exception",
"raise"
] | 34.468085 | 19.787234 |
def merge(self, df: pd.DataFrame, on: str, how: str="outer", **kwargs):
"""
Set the main dataframe from the current dataframe and the passed
dataframe
:param df: the pandas dataframe to merge
:type df: pd.DataFrame
:param on: param for ``pd.merge``
:type on: str
:param how: param for ``pd.merge``, defaults to "outer"
:type how: str, optional
:param kwargs: keyword arguments for ``pd.merge``
"""
try:
df = pd.merge(self.df, df, on=on, how=how, **kwargs)
self.df = df
except Exception as e:
self.err(e, self.merge, "Can not merge dataframes")
|
[
"def",
"merge",
"(",
"self",
",",
"df",
":",
"pd",
".",
"DataFrame",
",",
"on",
":",
"str",
",",
"how",
":",
"str",
"=",
"\"outer\"",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"df",
"=",
"pd",
".",
"merge",
"(",
"self",
".",
"df",
",",
"df",
",",
"on",
"=",
"on",
",",
"how",
"=",
"how",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"df",
"=",
"df",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"err",
"(",
"e",
",",
"self",
".",
"merge",
",",
"\"Can not merge dataframes\"",
")"
] | 37.055556 | 17.388889 |
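The wrapped `pd.merge` call, shown standalone:

import pandas as pd

left = pd.DataFrame({"id": [1, 2], "a": ["x", "y"]})
right = pd.DataFrame({"id": [2, 3], "b": ["u", "v"]})

# an outer merge keeps unmatched rows from both sides, filling gaps with NaN
print(pd.merge(left, right, on="id", how="outer"))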
def receive_trial_result(self, parameter_id, parameters, value):
""" Record an observation of the objective function.
Parameters
----------
parameter_id : int
parameters : dict
value : dict/float
if value is dict, it should have "default" key.
"""
reward = extract_scalar_reward(value)
if parameter_id not in self.total_data:
raise RuntimeError("Received parameter_id not in total_data.")
(_, father_id, model_id) = self.total_data[parameter_id]
graph = self.bo.searcher.load_model_by_id(model_id)
# to use the value and graph
self.add_model(reward, model_id)
self.update(father_id, graph, reward, model_id)
|
[
"def",
"receive_trial_result",
"(",
"self",
",",
"parameter_id",
",",
"parameters",
",",
"value",
")",
":",
"reward",
"=",
"extract_scalar_reward",
"(",
"value",
")",
"if",
"parameter_id",
"not",
"in",
"self",
".",
"total_data",
":",
"raise",
"RuntimeError",
"(",
"\"Received parameter_id not in total_data.\"",
")",
"(",
"_",
",",
"father_id",
",",
"model_id",
")",
"=",
"self",
".",
"total_data",
"[",
"parameter_id",
"]",
"graph",
"=",
"self",
".",
"bo",
".",
"searcher",
".",
"load_model_by_id",
"(",
"model_id",
")",
"# to use the value and graph",
"self",
".",
"add_model",
"(",
"reward",
",",
"model_id",
")",
"self",
".",
"update",
"(",
"father_id",
",",
"graph",
",",
"reward",
",",
"model_id",
")"
] | 33.227273 | 19.727273 |
def _get_stack_info_for_trace(
self,
frames,
library_frame_context_lines=None,
in_app_frame_context_lines=None,
with_locals=True,
locals_processor_func=None,
):
"""If the stacktrace originates within the elasticapm module, it will skip
frames until some other module comes up."""
return list(
iterate_with_template_sources(
frames,
with_locals=with_locals,
library_frame_context_lines=library_frame_context_lines,
in_app_frame_context_lines=in_app_frame_context_lines,
include_paths_re=self.include_paths_re,
exclude_paths_re=self.exclude_paths_re,
locals_processor_func=locals_processor_func,
)
)
|
[
"def",
"_get_stack_info_for_trace",
"(",
"self",
",",
"frames",
",",
"library_frame_context_lines",
"=",
"None",
",",
"in_app_frame_context_lines",
"=",
"None",
",",
"with_locals",
"=",
"True",
",",
"locals_processor_func",
"=",
"None",
",",
")",
":",
"return",
"list",
"(",
"iterate_with_template_sources",
"(",
"frames",
",",
"with_locals",
"=",
"with_locals",
",",
"library_frame_context_lines",
"=",
"library_frame_context_lines",
",",
"in_app_frame_context_lines",
"=",
"in_app_frame_context_lines",
",",
"include_paths_re",
"=",
"self",
".",
"include_paths_re",
",",
"exclude_paths_re",
"=",
"self",
".",
"exclude_paths_re",
",",
"locals_processor_func",
"=",
"locals_processor_func",
",",
")",
")"
] | 37.952381 | 15.52381 |
def _base_signup_form_class():
"""
Currently, we inherit from the custom form, if any. This is all
not very elegant, though it serves a purpose:
- There are two signup forms: one for local accounts, and one for
social accounts
- Both share a common base (BaseSignupForm)
- Given the above, how to put in a custom signup form? Which form
would your custom form derive from, the local or the social one?
"""
if not app_settings.SIGNUP_FORM_CLASS:
return _DummyCustomSignupForm
try:
fc_module, fc_classname = app_settings.SIGNUP_FORM_CLASS.rsplit('.', 1)
except ValueError:
raise exceptions.ImproperlyConfigured('%s does not point to a form'
' class'
% app_settings.SIGNUP_FORM_CLASS)
try:
mod = import_module(fc_module)
except ImportError as e:
raise exceptions.ImproperlyConfigured('Error importing form class %s:'
' "%s"' % (fc_module, e))
try:
fc_class = getattr(mod, fc_classname)
except AttributeError:
raise exceptions.ImproperlyConfigured('Module "%s" does not define a'
' "%s" class' % (fc_module,
fc_classname))
if not hasattr(fc_class, 'signup'):
if hasattr(fc_class, 'save'):
warnings.warn("The custom signup form must offer"
" a `def signup(self, request, user)` method",
DeprecationWarning)
else:
raise exceptions.ImproperlyConfigured(
'The custom signup form must implement a "signup" method')
return fc_class
|
[
"def",
"_base_signup_form_class",
"(",
")",
":",
"if",
"not",
"app_settings",
".",
"SIGNUP_FORM_CLASS",
":",
"return",
"_DummyCustomSignupForm",
"try",
":",
"fc_module",
",",
"fc_classname",
"=",
"app_settings",
".",
"SIGNUP_FORM_CLASS",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"except",
"ValueError",
":",
"raise",
"exceptions",
".",
"ImproperlyConfigured",
"(",
"'%s does not point to a form'",
"' class'",
"%",
"app_settings",
".",
"SIGNUP_FORM_CLASS",
")",
"try",
":",
"mod",
"=",
"import_module",
"(",
"fc_module",
")",
"except",
"ImportError",
"as",
"e",
":",
"raise",
"exceptions",
".",
"ImproperlyConfigured",
"(",
"'Error importing form class %s:'",
"' \"%s\"'",
"%",
"(",
"fc_module",
",",
"e",
")",
")",
"try",
":",
"fc_class",
"=",
"getattr",
"(",
"mod",
",",
"fc_classname",
")",
"except",
"AttributeError",
":",
"raise",
"exceptions",
".",
"ImproperlyConfigured",
"(",
"'Module \"%s\" does not define a'",
"' \"%s\" class'",
"%",
"(",
"fc_module",
",",
"fc_classname",
")",
")",
"if",
"not",
"hasattr",
"(",
"fc_class",
",",
"'signup'",
")",
":",
"if",
"hasattr",
"(",
"fc_class",
",",
"'save'",
")",
":",
"warnings",
".",
"warn",
"(",
"\"The custom signup form must offer\"",
"\" a `def signup(self, request, user)` method\"",
",",
"DeprecationWarning",
")",
"else",
":",
"raise",
"exceptions",
".",
"ImproperlyConfigured",
"(",
"'The custom signup form must implement a \"signup\" method'",
")",
"return",
"fc_class"
] | 44.275 | 21.225 |
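The dotted-path resolution idiom at the heart of this helper, distilled to a runnable sketch (the dotted path below is an illustrative stand-in for SIGNUP_FORM_CLASS):

from importlib import import_module

dotted_path = "os.path.join"
module_name, attr_name = dotted_path.rsplit(".", 1)
obj = getattr(import_module(module_name), attr_name)
print(obj("a", "b"))  # a/b (on POSIX)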
def list(self):
"""
Run the FTP LIST command, and update the state.
"""
logger.debug('Sending FTP list command.')
self.state['file_list'] = []
self.state['dir_list'] = []
self.client.retrlines('LIST', self._process_list)
|
[
"def",
"list",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"'Sending FTP list command.'",
")",
"self",
".",
"state",
"[",
"'file_list'",
"]",
"=",
"[",
"]",
"self",
".",
"state",
"[",
"'dir_list'",
"]",
"=",
"[",
"]",
"self",
".",
"client",
".",
"retrlines",
"(",
"'LIST'",
",",
"self",
".",
"_process_list",
")"
] | 34.125 | 9.875 |
def attribute(element, attribute, default=None):
"""
Returns the value of an attribute, or a default if it's not defined
:param element: The XML Element object
:type element: etree._Element
:param attribute: The name of the attribute to evaluate
:type attribute: basestring
:param default: The default value to return if the attribute is not defined
"""
attribute_value = element.get(attribute)
return attribute_value if attribute_value is not None else default
|
[
"def",
"attribute",
"(",
"element",
",",
"attribute",
",",
"default",
"=",
"None",
")",
":",
"attribute_value",
"=",
"element",
".",
"get",
"(",
"attribute",
")",
"return",
"attribute_value",
"if",
"attribute_value",
"is",
"not",
"None",
"else",
"default"
] | 35.214286 | 19.071429 |
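Assuming the function above is in scope, a quick check with the stdlib's ElementTree (a stand-in for `lxml.etree`):

import xml.etree.ElementTree as etree

element = etree.fromstring('<node color="red"/>')
print(attribute(element, "color"))        # red
print(attribute(element, "size", "n/a"))  # n/a

Note that `element.get(attribute, default)` appears equivalent in one call, since both lxml and ElementTree accept a default argument.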
def libvlc_vlm_set_enabled(p_instance, psz_name, b_enabled):
'''Enable or disable a media (VOD or broadcast).
@param p_instance: the instance.
@param psz_name: the media to work on.
@param b_enabled: the new status.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_enabled', None) or \
_Cfunction('libvlc_vlm_set_enabled', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, b_enabled)
|
[
"def",
"libvlc_vlm_set_enabled",
"(",
"p_instance",
",",
"psz_name",
",",
"b_enabled",
")",
":",
"f",
"=",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_vlm_set_enabled'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_vlm_set_enabled'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
")",
",",
"None",
",",
"ctypes",
".",
"c_int",
",",
"Instance",
",",
"ctypes",
".",
"c_char_p",
",",
"ctypes",
".",
"c_int",
")",
"return",
"f",
"(",
"p_instance",
",",
"psz_name",
",",
"b_enabled",
")"
] | 47.545455 | 15 |
def get_element_mass_dictionary(self):
"""
Determine the masses of elements in the package and return as a
dictionary.
:returns: Dictionary of element symbols and masses. [kg]
"""
element_symbols = self.material.elements
element_masses = self.get_element_masses()
return {s: m for s, m in zip(element_symbols, element_masses)}
|
[
"def",
"get_element_mass_dictionary",
"(",
"self",
")",
":",
"element_symbols",
"=",
"self",
".",
"material",
".",
"elements",
"element_masses",
"=",
"self",
".",
"get_element_masses",
"(",
")",
"return",
"{",
"s",
":",
"m",
"for",
"s",
",",
"m",
"in",
"zip",
"(",
"element_symbols",
",",
"element_masses",
")",
"}"
] | 31.833333 | 20.5 |
def apply_tag_sets(tag_sets, selection):
"""All servers match a list of tag sets.
tag_sets is a list of dicts. The empty tag set {} matches any server,
and may be provided at the end of the list as a fallback. So
[{'a': 'value'}, {}] expresses a preference for servers tagged
{'a': 'value'}, but accepts any server if none matches the first
preference.
"""
for tag_set in tag_sets:
with_tag_set = apply_single_tag_set(tag_set, selection)
if with_tag_set:
return with_tag_set
return selection.with_server_descriptions([])
|
[
"def",
"apply_tag_sets",
"(",
"tag_sets",
",",
"selection",
")",
":",
"for",
"tag_set",
"in",
"tag_sets",
":",
"with_tag_set",
"=",
"apply_single_tag_set",
"(",
"tag_set",
",",
"selection",
")",
"if",
"with_tag_set",
":",
"return",
"with_tag_set",
"return",
"selection",
".",
"with_server_descriptions",
"(",
"[",
"]",
")"
] | 38.133333 | 19 |
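A toy model of the fallback behaviour, with plain dicts standing in for server descriptions and a simplified `apply_single_tag_set` (the real one lives alongside this function in pymongo's read-preference internals):

def apply_single_tag_set(tag_set, servers):
    # stand-in: a server matches if it carries every requested tag
    return [s for s in servers if all(s.get(k) == v for k, v in tag_set.items())]

servers = [{"dc": "sf"}, {"dc": "ny"}]
for tag_set in [{"dc": "berlin"}, {}]:  # first preference fails, {} matches anything
    matched = apply_single_tag_set(tag_set, servers)
    if matched:
        print(matched)  # [{'dc': 'sf'}, {'dc': 'ny'}]
        break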
def get_destinations(cls, domain, source):
"""Retrieve forward information."""
forwards = cls.list(domain, {'items_per_page': 500})
for fwd in forwards:
if fwd['source'] == source:
return fwd['destinations']
return []
|
[
"def",
"get_destinations",
"(",
"cls",
",",
"domain",
",",
"source",
")",
":",
"forwards",
"=",
"cls",
".",
"list",
"(",
"domain",
",",
"{",
"'items_per_page'",
":",
"500",
"}",
")",
"for",
"fwd",
"in",
"forwards",
":",
"if",
"fwd",
"[",
"'source'",
"]",
"==",
"source",
":",
"return",
"fwd",
"[",
"'destinations'",
"]",
"return",
"[",
"]"
] | 33.875 | 12.5 |
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if self.dev is None: return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf)
|
[
"def",
"savePattern",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"dev",
"==",
"None",
")",
":",
"return",
"''",
"buf",
"=",
"[",
"REPORT_ID",
",",
"ord",
"(",
"'W'",
")",
",",
"0xBE",
",",
"0xEF",
",",
"0xCA",
",",
"0xFE",
",",
"0",
",",
"0",
",",
"0",
"]",
"return",
"self",
".",
"write",
"(",
"buf",
")"
] | 36.666667 | 9.5 |
def duplicate(self):
'''
Returns a copy of the current Line, including its taxes and discounts
@returns: Line.
'''
instance = self.__class__(name=self.name, description=self.description,
unit=self.unit, quantity=self.quantity,
date=self.date, unit_price=self.unit_price,
gin=self.gin, gtin=self.gtin, sscc=self.sscc)
for tax in self.taxes:
instance.taxes.append(tax.duplicate())
for discount in self.discounts:
instance.discounts.append(discount.duplicate())
return instance
|
[
"def",
"duplicate",
"(",
"self",
")",
":",
"instance",
"=",
"self",
".",
"__class__",
"(",
"name",
"=",
"self",
".",
"name",
",",
"description",
"=",
"self",
".",
"description",
",",
"unit",
"=",
"self",
".",
"unit",
",",
"quantity",
"=",
"self",
".",
"quantity",
",",
"date",
"=",
"self",
".",
"date",
",",
"unit_price",
"=",
"self",
".",
"unit_price",
",",
"gin",
"=",
"self",
".",
"gin",
",",
"gtin",
"=",
"self",
".",
"gtin",
",",
"sscc",
"=",
"self",
".",
"sscc",
")",
"for",
"tax",
"in",
"self",
".",
"taxes",
":",
"instance",
".",
"taxes",
".",
"append",
"(",
"tax",
".",
"duplicate",
"(",
")",
")",
"for",
"discount",
"in",
"self",
".",
"discounts",
":",
"instance",
".",
"discounts",
".",
"append",
"(",
"discount",
".",
"duplicate",
"(",
")",
")",
"return",
"instance"
] | 46.5 | 24.071429 |
def setInitialCenters(self, centers, weights):
"""
Set initial centers. Should be set before calling trainOn.
"""
self._model = StreamingKMeansModel(centers, weights)
return self
|
[
"def",
"setInitialCenters",
"(",
"self",
",",
"centers",
",",
"weights",
")",
":",
"self",
".",
"_model",
"=",
"StreamingKMeansModel",
"(",
"centers",
",",
"weights",
")",
"return",
"self"
] | 35.5 | 12.166667 |
def subproductPath(self, ext):
"""Makes a subproduct filename by appending 'ext' to the subproduct directory.
Returns a (filename,fullpath) tuple."""
ext = ext.lstrip("-")
basename = os.path.join(os.path.basename(self.dp.subproduct_dir()), ext)
fullpath = os.path.join(self.dp.subproduct_dir(), ext)
return basename, fullpath
|
[
"def",
"subproductPath",
"(",
"self",
",",
"ext",
")",
":",
"ext",
"=",
"ext",
".",
"lstrip",
"(",
"\"-\"",
")",
"basename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"dp",
".",
"subproduct_dir",
"(",
")",
")",
",",
"ext",
")",
"fullpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dp",
".",
"subproduct_dir",
"(",
")",
",",
"ext",
")",
"return",
"basename",
",",
"fullpath"
] | 52.428571 | 12.857143 |
def doc_to_html(doc, doc_format="ROBOT"):
"""Convert documentation to HTML"""
from robot.libdocpkg.htmlwriter import DocToHtml
return DocToHtml(doc_format)(doc)
|
[
"def",
"doc_to_html",
"(",
"doc",
",",
"doc_format",
"=",
"\"ROBOT\"",
")",
":",
"from",
"robot",
".",
"libdocpkg",
".",
"htmlwriter",
"import",
"DocToHtml",
"return",
"DocToHtml",
"(",
"doc_format",
")",
"(",
"doc",
")"
] | 42.25 | 4 |
def interruptible_sleep(t, event=None):
"""
Sleeps for a specified duration, optionally stopping early for event.
Returns True if interrupted
"""
log.info("sleeping %s", t)
if event is None:
time.sleep(t)
return False
else:
        return event.wait(t)
|
[
"def",
"interruptible_sleep",
"(",
"t",
",",
"event",
"=",
"None",
")",
":",
"log",
".",
"info",
"(",
"\"sleeping %s\"",
",",
"t",
")",
"if",
"event",
"is",
"None",
":",
"time",
".",
"sleep",
"(",
"t",
")",
"return",
"False",
"else",
":",
"return",
"not",
"event",
".",
"wait",
"(",
"t",
")"
] | 22.307692 | 17.692308 |
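The event argument is a standard threading.Event; a sketch of both call styles, where a True return means the wait was cut short by the event:

import threading

stop = threading.Event()
interruptible_sleep(2.0)                      # plain sleep, always returns False
# a second thread calling stop.set() would end this wait early:
interrupted = interruptible_sleep(30.0, stop)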
def getTaskHelp(_Task):
r"""Gets help on the given task member.
"""
Ret = []
for k in ['name', 'desc']:
v = _Task.Config.get(k)
if v is not None:
Ret.append('%s: %s' % (k, v))
Args = _Task.Args
if Args:
Ret.append('\nArgs:')
for argName, Arg in Args.items():
Ret.append(' %s: %s' % (argName, Arg.get('desc', Arg['type_str'])))
Ret.append('')
return '\n'.join(Ret).rstrip()
|
[
"def",
"getTaskHelp",
"(",
"_Task",
")",
":",
"Ret",
"=",
"[",
"]",
"for",
"k",
"in",
"[",
"'name'",
",",
"'desc'",
"]",
":",
"v",
"=",
"_Task",
".",
"Config",
".",
"get",
"(",
"k",
")",
"if",
"v",
"is",
"not",
"None",
":",
"Ret",
".",
"append",
"(",
"'%s: %s'",
"%",
"(",
"k",
",",
"v",
")",
")",
"Args",
"=",
"_Task",
".",
"Args",
"if",
"Args",
":",
"Ret",
".",
"append",
"(",
"'\\nArgs:'",
")",
"for",
"argName",
",",
"Arg",
"in",
"Args",
".",
"items",
"(",
")",
":",
"Ret",
".",
"append",
"(",
"' %s: %s'",
"%",
"(",
"argName",
",",
"Arg",
".",
"get",
"(",
"'desc'",
",",
"Arg",
"[",
"'type_str'",
"]",
")",
")",
")",
"Ret",
".",
"append",
"(",
"''",
")",
"return",
"'\\n'",
".",
"join",
"(",
"Ret",
")",
".",
"rstrip",
"(",
")"
] | 18.409091 | 23.136364 |
def write(self, filename=None, io=None,
coors=None, igs=None, out=None, float_format=None, **kwargs):
"""
Write mesh + optional results in `out` to a file.
Parameters
----------
filename : str, optional
The file name. If None, the mesh name is used instead.
io : MeshIO instance or 'auto', optional
Passing 'auto' respects the extension of `filename`.
coors : array, optional
The coordinates that can be used instead of the mesh coordinates.
igs : array_like, optional
Passing a list of group ids selects only those groups for writing.
out : dict, optional
The output data attached to the mesh vertices and/or cells.
float_format : str, optional
The format string used to print floats in case of a text file
format.
**kwargs : dict, optional
Additional arguments that can be passed to the `MeshIO` instance.
"""
if filename is None:
filename = self.name + '.mesh'
if io is None:
io = self.io
if io is None:
io = 'auto'
if io == 'auto':
io = MeshIO.any_from_filename( filename )
if coors is None:
coors = self.coors
if igs is None:
igs = range( len( self.conns ) )
aux_mesh = Mesh.from_data( self.name, coors, self.ngroups,
self.conns, self.mat_ids, self.descs, igs )
io.set_float_format( float_format )
io.write( filename, aux_mesh, out, **kwargs )
|
[
"def",
"write",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"io",
"=",
"None",
",",
"coors",
"=",
"None",
",",
"igs",
"=",
"None",
",",
"out",
"=",
"None",
",",
"float_format",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"filename",
"is",
"None",
":",
"filename",
"=",
"self",
".",
"name",
"+",
"'.mesh'",
"if",
"io",
"is",
"None",
":",
"io",
"=",
"self",
".",
"io",
"if",
"io",
"is",
"None",
":",
"io",
"=",
"'auto'",
"if",
"io",
"==",
"'auto'",
":",
"io",
"=",
"MeshIO",
".",
"any_from_filename",
"(",
"filename",
")",
"if",
"coors",
"is",
"None",
":",
"coors",
"=",
"self",
".",
"coors",
"if",
"igs",
"is",
"None",
":",
"igs",
"=",
"range",
"(",
"len",
"(",
"self",
".",
"conns",
")",
")",
"aux_mesh",
"=",
"Mesh",
".",
"from_data",
"(",
"self",
".",
"name",
",",
"coors",
",",
"self",
".",
"ngroups",
",",
"self",
".",
"conns",
",",
"self",
".",
"mat_ids",
",",
"self",
".",
"descs",
",",
"igs",
")",
"io",
".",
"set_float_format",
"(",
"float_format",
")",
"io",
".",
"write",
"(",
"filename",
",",
"aux_mesh",
",",
"out",
",",
"*",
"*",
"kwargs",
")"
] | 36.454545 | 19.727273 |
def alert_policy_path(cls, project, alert_policy):
"""Return a fully-qualified alert_policy string."""
return google.api_core.path_template.expand(
"projects/{project}/alertPolicies/{alert_policy}",
project=project,
alert_policy=alert_policy,
)
|
[
"def",
"alert_policy_path",
"(",
"cls",
",",
"project",
",",
"alert_policy",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/alertPolicies/{alert_policy}\"",
",",
"project",
"=",
"project",
",",
"alert_policy",
"=",
"alert_policy",
",",
")"
] | 42.571429 | 12.714286 |
def run(file, access_key, secret_key, **kwargs):
"""命令行运行huobitrade"""
if file:
import sys
file_path, file_name = os.path.split(file)
sys.path.append(file_path)
strategy_module = importlib.import_module(os.path.splitext(file_name)[0])
init = getattr(strategy_module, 'init', None)
handle_func = getattr(strategy_module, 'handle_func', None)
schedule = getattr(strategy_module, 'schedule', None)
else:
        init, handle_func, schedule = [None] * 3
setKey(access_key, secret_key)
url = kwargs.get('url')
hostname = 'api.huobi.br.com'
if url:
hostname = urlparse(url).hostname
setUrl('https://' + hostname, 'https://' + hostname)
reconn = kwargs.get('reconn', -1)
from huobitrade import HBWebsocket, HBRestAPI
from huobitrade.datatype import HBMarket, HBAccount, HBMargin
restapi = HBRestAPI(get_acc=True)
ws = HBWebsocket(host=hostname, reconn=reconn)
auth_ws = HBWebsocket(host=hostname, auth=True, reconn=reconn)
data = HBMarket()
account = HBAccount()
margin = HBMargin()
ws_open = False
ws_auth = False
@ws.after_open
def _open():
nonlocal ws_open
        click.echo('Market websocket connected')
ws_open = True
@auth_ws.after_auth
def _auth():
nonlocal ws_auth
        click.echo('Auth websocket authenticated')
ws_auth = True
ws.run()
auth_ws.run()
for i in range(10):
time.sleep(3)
        click.echo(f'Connecting: attempt {i+1}')
        if ws_open & ws_auth:
break
else:
ws.stop()
auth_ws.stop()
        raise Exception('Connection failed')
if init:
init(restapi, ws, auth_ws)
if handle_func:
for k, v in handle_func.items():
if k.split('.')[0].lower() == 'market':
ws.register_handle_func(k)(v)
else:
auth_ws.register_handle_func(k)(v)
if schedule:
print('testing')
from huobitrade.handler import TimeHandler
        interval = schedule.__kwdefaults__['interval']
timerhandler = TimeHandler('scheduler', interval)
timerhandler.handle = lambda msg: schedule(restapi, ws, auth_ws)
timerhandler.start()
while True:
try:
code = click.prompt('huobitrade>>')
if code == 'exit':
                if click.confirm('Quit huobitrade?'):
break
else:
continue
else:
result = eval(code)
click.echo(result)
except Exception as e:
click.echo(traceback.format_exc())
ws.stop()
auth_ws.stop()
|
[
"def",
"run",
"(",
"file",
",",
"access_key",
",",
"secret_key",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"file",
":",
"import",
"sys",
"file_path",
",",
"file_name",
"=",
"os",
".",
"path",
".",
"split",
"(",
"file",
")",
"sys",
".",
"path",
".",
"append",
"(",
"file_path",
")",
"strategy_module",
"=",
"importlib",
".",
"import_module",
"(",
"os",
".",
"path",
".",
"splitext",
"(",
"file_name",
")",
"[",
"0",
"]",
")",
"init",
"=",
"getattr",
"(",
"strategy_module",
",",
"'init'",
",",
"None",
")",
"handle_func",
"=",
"getattr",
"(",
"strategy_module",
",",
"'handle_func'",
",",
"None",
")",
"schedule",
"=",
"getattr",
"(",
"strategy_module",
",",
"'schedule'",
",",
"None",
")",
"else",
":",
"init",
",",
"handle_func",
",",
"scedule",
"=",
"[",
"None",
"]",
"*",
"3",
"setKey",
"(",
"access_key",
",",
"secret_key",
")",
"url",
"=",
"kwargs",
".",
"get",
"(",
"'url'",
")",
"hostname",
"=",
"'api.huobi.br.com'",
"if",
"url",
":",
"hostname",
"=",
"urlparse",
"(",
"url",
")",
".",
"hostname",
"setUrl",
"(",
"'https://'",
"+",
"hostname",
",",
"'https://'",
"+",
"hostname",
")",
"reconn",
"=",
"kwargs",
".",
"get",
"(",
"'reconn'",
",",
"-",
"1",
")",
"from",
"huobitrade",
"import",
"HBWebsocket",
",",
"HBRestAPI",
"from",
"huobitrade",
".",
"datatype",
"import",
"HBMarket",
",",
"HBAccount",
",",
"HBMargin",
"restapi",
"=",
"HBRestAPI",
"(",
"get_acc",
"=",
"True",
")",
"ws",
"=",
"HBWebsocket",
"(",
"host",
"=",
"hostname",
",",
"reconn",
"=",
"reconn",
")",
"auth_ws",
"=",
"HBWebsocket",
"(",
"host",
"=",
"hostname",
",",
"auth",
"=",
"True",
",",
"reconn",
"=",
"reconn",
")",
"data",
"=",
"HBMarket",
"(",
")",
"account",
"=",
"HBAccount",
"(",
")",
"margin",
"=",
"HBMargin",
"(",
")",
"ws_open",
"=",
"False",
"ws_auth",
"=",
"False",
"@",
"ws",
".",
"after_open",
"def",
"_open",
"(",
")",
":",
"nonlocal",
"ws_open",
"click",
".",
"echo",
"(",
"'行情接口连接成功')",
"",
"ws_open",
"=",
"True",
"@",
"auth_ws",
".",
"after_auth",
"def",
"_auth",
"(",
")",
":",
"nonlocal",
"ws_auth",
"click",
".",
"echo",
"(",
"'鉴权接口鉴权成功')",
"",
"ws_auth",
"=",
"True",
"ws",
".",
"run",
"(",
")",
"auth_ws",
".",
"run",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"10",
")",
":",
"time",
".",
"sleep",
"(",
"3",
")",
"click",
".",
"echo",
"(",
"f'连接:第{i+1}次连接')",
"",
"if",
"ws_open",
"&",
"ws_auth",
":",
"break",
"else",
":",
"ws",
".",
"stop",
"(",
")",
"auth_ws",
".",
"stop",
"(",
")",
"raise",
"Exception",
"(",
"'连接失败')",
"",
"if",
"init",
":",
"init",
"(",
"restapi",
",",
"ws",
",",
"auth_ws",
")",
"if",
"handle_func",
":",
"for",
"k",
",",
"v",
"in",
"handle_func",
".",
"items",
"(",
")",
":",
"if",
"k",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
"==",
"'market'",
":",
"ws",
".",
"register_handle_func",
"(",
"k",
")",
"(",
"v",
")",
"else",
":",
"auth_ws",
".",
"register_handle_func",
"(",
"k",
")",
"(",
"v",
")",
"if",
"schedule",
":",
"print",
"(",
"'testing'",
")",
"from",
"huobitrade",
".",
"handler",
"import",
"TimeHandler",
"interval",
"=",
"scedule",
".",
"__kwdefaults__",
"[",
"'interval'",
"]",
"timerhandler",
"=",
"TimeHandler",
"(",
"'scheduler'",
",",
"interval",
")",
"timerhandler",
".",
"handle",
"=",
"lambda",
"msg",
":",
"schedule",
"(",
"restapi",
",",
"ws",
",",
"auth_ws",
")",
"timerhandler",
".",
"start",
"(",
")",
"while",
"True",
":",
"try",
":",
"code",
"=",
"click",
".",
"prompt",
"(",
"'huobitrade>>'",
")",
"if",
"code",
"==",
"'exit'",
":",
"if",
"click",
".",
"confirm",
"(",
"'是否要退出huobitrade'):",
"",
"",
"break",
"else",
":",
"continue",
"else",
":",
"result",
"=",
"eval",
"(",
"code",
")",
"click",
".",
"echo",
"(",
"result",
")",
"except",
"Exception",
"as",
"e",
":",
"click",
".",
"echo",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"ws",
".",
"stop",
"(",
")",
"auth_ws",
".",
"stop",
"(",
")"
] | 28.230769 | 19.098901 |
def get_chirp_params(mass1, mass2, spin1z, spin2z, f0, order,
quadparam1=None, quadparam2=None, lambda1=None,
lambda2=None):
"""
Take a set of masses and spins and convert to the various lambda
coordinates that describe the orbital phase. Accepted PN orders are:
{}
Parameters
----------
mass1 : float or array
Mass1 of input(s).
mass2 : float or array
Mass2 of input(s).
spin1z : float or array
Parallel spin component(s) of body 1.
spin2z : float or array
Parallel spin component(s) of body 2.
f0 : float
This is an arbitrary scaling factor introduced to avoid the potential
for numerical overflow when calculating this. Generally the default
value (70) is safe here. **IMPORTANT, if you want to calculate the
ethinca metric components later this MUST be set equal to f_low.**
        This value must also be used consistently (i.e. don't change its value
when calling different functions!).
order : string
The Post-Newtonian order that is used to translate from masses and
spins to the lambda_i coordinate system. Valid orders given above.
Returns
--------
lambdas : list of floats or numpy.arrays
The lambda coordinates for the input system(s)
"""
# Determine whether array or single value input
sngl_inp = False
try:
num_points = len(mass1)
except TypeError:
sngl_inp = True
# If you care about speed, you aren't calling this function one entry
# at a time.
mass1 = numpy.array([mass1])
mass2 = numpy.array([mass2])
spin1z = numpy.array([spin1z])
spin2z = numpy.array([spin2z])
if quadparam1 is not None:
quadparam1 = numpy.array([quadparam1])
if quadparam2 is not None:
quadparam2 = numpy.array([quadparam2])
if lambda1 is not None:
lambda1 = numpy.array([lambda1])
if lambda2 is not None:
lambda2 = numpy.array([lambda2])
num_points = 1
if quadparam1 is None:
quadparam1 = numpy.ones(len(mass1), dtype=float)
if quadparam2 is None:
quadparam2 = numpy.ones(len(mass1), dtype=float)
if lambda1 is None:
lambda1 = numpy.zeros(len(mass1), dtype=float)
if lambda2 is None:
lambda2 = numpy.zeros(len(mass1), dtype=float)
mass1_v = CreateREAL8Vector(len(mass1))
mass1_v.data[:] = mass1[:]
mass2_v = CreateREAL8Vector(len(mass1))
mass2_v.data[:] = mass2[:]
spin1z_v = CreateREAL8Vector(len(mass1))
spin1z_v.data[:] = spin1z[:]
spin2z_v = CreateREAL8Vector(len(mass1))
spin2z_v.data[:] = spin2z[:]
lambda1_v = CreateREAL8Vector(len(mass1))
lambda1_v.data[:] = lambda1[:]
lambda2_v = CreateREAL8Vector(len(mass1))
lambda2_v.data[:] = lambda2[:]
dquadparam1_v = CreateREAL8Vector(len(mass1))
dquadparam1_v.data[:] = quadparam1[:] - 1.
dquadparam2_v = CreateREAL8Vector(len(mass1))
dquadparam2_v.data[:] = quadparam2[:] - 1.
phasing_arr = lalsimulation.SimInspiralTaylorF2AlignedPhasingArray\
(mass1_v, mass2_v, spin1z_v, spin2z_v, lambda1_v, lambda2_v,
dquadparam1_v, dquadparam2_v)
    vec_len = lalsimulation.PN_PHASING_SERIES_MAX_ORDER + 1
phasing_vs = numpy.zeros([num_points, vec_len])
phasing_vlogvs = numpy.zeros([num_points, vec_len])
phasing_vlogvsqs = numpy.zeros([num_points, vec_len])
lng = len(mass1)
jmp = lng * vec_len
for idx in range(vec_len):
phasing_vs[:,idx] = phasing_arr.data[lng*idx : lng*(idx+1)]
phasing_vlogvs[:,idx] = \
phasing_arr.data[jmp + lng*idx : jmp + lng*(idx+1)]
phasing_vlogvsqs[:,idx] = \
phasing_arr.data[2*jmp + lng*idx : 2*jmp + lng*(idx+1)]
pim = PI * (mass1 + mass2)*MTSUN_SI
pmf = pim * f0
pmf13 = pmf**(1./3.)
logpim13 = numpy.log((pim)**(1./3.))
mapping = generate_inverse_mapping(order)
lambdas = []
lambda_str = '^Lambda([0-9]+)'
loglambda_str = '^LogLambda([0-9]+)'
logloglambda_str = '^LogLogLambda([0-9]+)'
for idx in range(len(mapping.keys())):
# RE magic engage!
rematch = re.match(lambda_str, mapping[idx])
if rematch:
pn_order = int(rematch.groups()[0])
term = phasing_vs[:,pn_order]
term = term + logpim13 * phasing_vlogvs[:,pn_order]
lambdas.append(term * pmf13**(-5+pn_order))
continue
rematch = re.match(loglambda_str, mapping[idx])
if rematch:
pn_order = int(rematch.groups()[0])
lambdas.append((phasing_vlogvs[:,pn_order]) * pmf13**(-5+pn_order))
continue
rematch = re.match(logloglambda_str, mapping[idx])
if rematch:
raise ValueError("LOGLOG terms are not implemented")
#pn_order = int(rematch.groups()[0])
#lambdas.append(phasing_vlogvsqs[:,pn_order] * pmf13**(-5+pn_order))
#continue
err_msg = "Failed to parse " + mapping[idx]
raise ValueError(err_msg)
if sngl_inp:
return [l[0] for l in lambdas]
else:
return lambdas
|
[
"def",
"get_chirp_params",
"(",
"mass1",
",",
"mass2",
",",
"spin1z",
",",
"spin2z",
",",
"f0",
",",
"order",
",",
"quadparam1",
"=",
"None",
",",
"quadparam2",
"=",
"None",
",",
"lambda1",
"=",
"None",
",",
"lambda2",
"=",
"None",
")",
":",
"# Determine whether array or single value input",
"sngl_inp",
"=",
"False",
"try",
":",
"num_points",
"=",
"len",
"(",
"mass1",
")",
"except",
"TypeError",
":",
"sngl_inp",
"=",
"True",
"# If you care about speed, you aren't calling this function one entry",
"# at a time.",
"mass1",
"=",
"numpy",
".",
"array",
"(",
"[",
"mass1",
"]",
")",
"mass2",
"=",
"numpy",
".",
"array",
"(",
"[",
"mass2",
"]",
")",
"spin1z",
"=",
"numpy",
".",
"array",
"(",
"[",
"spin1z",
"]",
")",
"spin2z",
"=",
"numpy",
".",
"array",
"(",
"[",
"spin2z",
"]",
")",
"if",
"quadparam1",
"is",
"not",
"None",
":",
"quadparam1",
"=",
"numpy",
".",
"array",
"(",
"[",
"quadparam1",
"]",
")",
"if",
"quadparam2",
"is",
"not",
"None",
":",
"quadparam2",
"=",
"numpy",
".",
"array",
"(",
"[",
"quadparam2",
"]",
")",
"if",
"lambda1",
"is",
"not",
"None",
":",
"lambda1",
"=",
"numpy",
".",
"array",
"(",
"[",
"lambda1",
"]",
")",
"if",
"lambda2",
"is",
"not",
"None",
":",
"lambda2",
"=",
"numpy",
".",
"array",
"(",
"[",
"lambda2",
"]",
")",
"num_points",
"=",
"1",
"if",
"quadparam1",
"is",
"None",
":",
"quadparam1",
"=",
"numpy",
".",
"ones",
"(",
"len",
"(",
"mass1",
")",
",",
"dtype",
"=",
"float",
")",
"if",
"quadparam2",
"is",
"None",
":",
"quadparam2",
"=",
"numpy",
".",
"ones",
"(",
"len",
"(",
"mass1",
")",
",",
"dtype",
"=",
"float",
")",
"if",
"lambda1",
"is",
"None",
":",
"lambda1",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"mass1",
")",
",",
"dtype",
"=",
"float",
")",
"if",
"lambda2",
"is",
"None",
":",
"lambda2",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"mass1",
")",
",",
"dtype",
"=",
"float",
")",
"mass1_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"mass1_v",
".",
"data",
"[",
":",
"]",
"=",
"mass1",
"[",
":",
"]",
"mass2_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"mass2_v",
".",
"data",
"[",
":",
"]",
"=",
"mass2",
"[",
":",
"]",
"spin1z_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"spin1z_v",
".",
"data",
"[",
":",
"]",
"=",
"spin1z",
"[",
":",
"]",
"spin2z_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"spin2z_v",
".",
"data",
"[",
":",
"]",
"=",
"spin2z",
"[",
":",
"]",
"lambda1_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"lambda1_v",
".",
"data",
"[",
":",
"]",
"=",
"lambda1",
"[",
":",
"]",
"lambda2_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"lambda2_v",
".",
"data",
"[",
":",
"]",
"=",
"lambda2",
"[",
":",
"]",
"dquadparam1_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"dquadparam1_v",
".",
"data",
"[",
":",
"]",
"=",
"quadparam1",
"[",
":",
"]",
"-",
"1.",
"dquadparam2_v",
"=",
"CreateREAL8Vector",
"(",
"len",
"(",
"mass1",
")",
")",
"dquadparam2_v",
".",
"data",
"[",
":",
"]",
"=",
"quadparam2",
"[",
":",
"]",
"-",
"1.",
"phasing_arr",
"=",
"lalsimulation",
".",
"SimInspiralTaylorF2AlignedPhasingArray",
"(",
"mass1_v",
",",
"mass2_v",
",",
"spin1z_v",
",",
"spin2z_v",
",",
"lambda1_v",
",",
"lambda2_v",
",",
"dquadparam1_v",
",",
"dquadparam2_v",
")",
"vec_len",
"=",
"lalsimulation",
".",
"PN_PHASING_SERIES_MAX_ORDER",
"+",
"1",
"phasing_vs",
"=",
"numpy",
".",
"zeros",
"(",
"[",
"num_points",
",",
"vec_len",
"]",
")",
"phasing_vlogvs",
"=",
"numpy",
".",
"zeros",
"(",
"[",
"num_points",
",",
"vec_len",
"]",
")",
"phasing_vlogvsqs",
"=",
"numpy",
".",
"zeros",
"(",
"[",
"num_points",
",",
"vec_len",
"]",
")",
"lng",
"=",
"len",
"(",
"mass1",
")",
"jmp",
"=",
"lng",
"*",
"vec_len",
"for",
"idx",
"in",
"range",
"(",
"vec_len",
")",
":",
"phasing_vs",
"[",
":",
",",
"idx",
"]",
"=",
"phasing_arr",
".",
"data",
"[",
"lng",
"*",
"idx",
":",
"lng",
"*",
"(",
"idx",
"+",
"1",
")",
"]",
"phasing_vlogvs",
"[",
":",
",",
"idx",
"]",
"=",
"phasing_arr",
".",
"data",
"[",
"jmp",
"+",
"lng",
"*",
"idx",
":",
"jmp",
"+",
"lng",
"*",
"(",
"idx",
"+",
"1",
")",
"]",
"phasing_vlogvsqs",
"[",
":",
",",
"idx",
"]",
"=",
"phasing_arr",
".",
"data",
"[",
"2",
"*",
"jmp",
"+",
"lng",
"*",
"idx",
":",
"2",
"*",
"jmp",
"+",
"lng",
"*",
"(",
"idx",
"+",
"1",
")",
"]",
"pim",
"=",
"PI",
"*",
"(",
"mass1",
"+",
"mass2",
")",
"*",
"MTSUN_SI",
"pmf",
"=",
"pim",
"*",
"f0",
"pmf13",
"=",
"pmf",
"**",
"(",
"1.",
"/",
"3.",
")",
"logpim13",
"=",
"numpy",
".",
"log",
"(",
"(",
"pim",
")",
"**",
"(",
"1.",
"/",
"3.",
")",
")",
"mapping",
"=",
"generate_inverse_mapping",
"(",
"order",
")",
"lambdas",
"=",
"[",
"]",
"lambda_str",
"=",
"'^Lambda([0-9]+)'",
"loglambda_str",
"=",
"'^LogLambda([0-9]+)'",
"logloglambda_str",
"=",
"'^LogLogLambda([0-9]+)'",
"for",
"idx",
"in",
"range",
"(",
"len",
"(",
"mapping",
".",
"keys",
"(",
")",
")",
")",
":",
"# RE magic engage!",
"rematch",
"=",
"re",
".",
"match",
"(",
"lambda_str",
",",
"mapping",
"[",
"idx",
"]",
")",
"if",
"rematch",
":",
"pn_order",
"=",
"int",
"(",
"rematch",
".",
"groups",
"(",
")",
"[",
"0",
"]",
")",
"term",
"=",
"phasing_vs",
"[",
":",
",",
"pn_order",
"]",
"term",
"=",
"term",
"+",
"logpim13",
"*",
"phasing_vlogvs",
"[",
":",
",",
"pn_order",
"]",
"lambdas",
".",
"append",
"(",
"term",
"*",
"pmf13",
"**",
"(",
"-",
"5",
"+",
"pn_order",
")",
")",
"continue",
"rematch",
"=",
"re",
".",
"match",
"(",
"loglambda_str",
",",
"mapping",
"[",
"idx",
"]",
")",
"if",
"rematch",
":",
"pn_order",
"=",
"int",
"(",
"rematch",
".",
"groups",
"(",
")",
"[",
"0",
"]",
")",
"lambdas",
".",
"append",
"(",
"(",
"phasing_vlogvs",
"[",
":",
",",
"pn_order",
"]",
")",
"*",
"pmf13",
"**",
"(",
"-",
"5",
"+",
"pn_order",
")",
")",
"continue",
"rematch",
"=",
"re",
".",
"match",
"(",
"logloglambda_str",
",",
"mapping",
"[",
"idx",
"]",
")",
"if",
"rematch",
":",
"raise",
"ValueError",
"(",
"\"LOGLOG terms are not implemented\"",
")",
"#pn_order = int(rematch.groups()[0])",
"#lambdas.append(phasing_vlogvsqs[:,pn_order] * pmf13**(-5+pn_order))",
"#continue",
"err_msg",
"=",
"\"Failed to parse \"",
"+",
"mapping",
"[",
"idx",
"]",
"raise",
"ValueError",
"(",
"err_msg",
")",
"if",
"sngl_inp",
":",
"return",
"[",
"l",
"[",
"0",
"]",
"for",
"l",
"in",
"lambdas",
"]",
"else",
":",
"return",
"lambdas"
] | 37.021739 | 17.108696 |
def register_date_conversion_handler(date_specifier_patterns):
"""Decorator for registering handlers that convert text dates to dates.
Args:
date_specifier_patterns (str): the date specifier (in regex pattern format) for which the handler is registered
"""
def _decorator(func):
global DATE_SPECIFIERS_CONVERSION_HANDLERS
DATE_SPECIFIERS_CONVERSION_HANDLERS[DATE_SPECIFIERS_REGEXES[date_specifier_patterns]] = func
return func
return _decorator
|
[
"def",
"register_date_conversion_handler",
"(",
"date_specifier_patterns",
")",
":",
"def",
"_decorator",
"(",
"func",
")",
":",
"global",
"DATE_SPECIFIERS_CONVERSION_HANDLERS",
"DATE_SPECIFIERS_CONVERSION_HANDLERS",
"[",
"DATE_SPECIFIERS_REGEXES",
"[",
"date_specifier_patterns",
"]",
"]",
"=",
"func",
"return",
"func",
"return",
"_decorator"
] | 37.461538 | 29 |
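A registration sketch; the key must already exist in DATE_SPECIFIERS_REGEXES, so 'begin' below is a hypothetical stand-in:

@register_date_conversion_handler('begin')    # hypothetical pattern key
def convert_begin_date(date_value):
    # turn the matched text date into a concrete date here
    return date_value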
def _get_hit_nearest_ref_start(self, hits):
'''Returns the hit nearest to the start of the ref sequence from the input list of hits'''
nearest_to_start = hits[0]
for hit in hits[1:]:
if hit.ref_coords().start < nearest_to_start.ref_coords().start:
nearest_to_start = hit
return nearest_to_start
|
[
"def",
"_get_hit_nearest_ref_start",
"(",
"self",
",",
"hits",
")",
":",
"nearest_to_start",
"=",
"hits",
"[",
"0",
"]",
"for",
"hit",
"in",
"hits",
"[",
"1",
":",
"]",
":",
"if",
"hit",
".",
"ref_coords",
"(",
")",
".",
"start",
"<",
"nearest_to_start",
".",
"ref_coords",
"(",
")",
".",
"start",
":",
"nearest_to_start",
"=",
"hit",
"return",
"nearest_to_start"
] | 49.714286 | 18 |
def get_publications(gene_names, save_json_name=None):
"""Return evidence publications for interaction between the given genes.
Parameters
----------
gene_names : list[str]
A list of gene names (HGNC symbols) to query interactions between.
Currently supports exactly two genes only.
save_json_name : Optional[str]
A file name to save the raw BioGRID web service output in. By default,
the raw output is not saved.
Return
------
publications : list[Publication]
A list of Publication objects that provide evidence for interactions
between the given list of genes.
"""
if len(gene_names) != 2:
logger.warning('Other than 2 gene names given.')
return []
res_dict = _send_request(gene_names)
if not res_dict:
return []
if save_json_name is not None:
# The json module produces strings, not bytes, so the file should be
# opened in text mode
with open(save_json_name, 'wt') as fh:
json.dump(res_dict, fh, indent=1)
publications = _extract_publications(res_dict, gene_names)
return publications
|
[
"def",
"get_publications",
"(",
"gene_names",
",",
"save_json_name",
"=",
"None",
")",
":",
"if",
"len",
"(",
"gene_names",
")",
"!=",
"2",
":",
"logger",
".",
"warning",
"(",
"'Other than 2 gene names given.'",
")",
"return",
"[",
"]",
"res_dict",
"=",
"_send_request",
"(",
"gene_names",
")",
"if",
"not",
"res_dict",
":",
"return",
"[",
"]",
"if",
"save_json_name",
"is",
"not",
"None",
":",
"# The json module produces strings, not bytes, so the file should be",
"# opened in text mode",
"with",
"open",
"(",
"save_json_name",
",",
"'wt'",
")",
"as",
"fh",
":",
"json",
".",
"dump",
"(",
"res_dict",
",",
"fh",
",",
"indent",
"=",
"1",
")",
"publications",
"=",
"_extract_publications",
"(",
"res_dict",
",",
"gene_names",
")",
"return",
"publications"
] | 36.387097 | 17.709677 |
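Exactly two HGNC symbols are required; a sketch of a typical call, optionally dumping the raw BioGRID response:

pubs = get_publications(['TP53', 'MDM2'], save_json_name='tp53_mdm2.json')
for pub in pubs:
    print(pub)    # Publication objects backing the interaction evidence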
def interface_temperature(Ts, Tatm, **kwargs):
'''Compute temperature at model layer interfaces.'''
# Actually it's not clear to me how the RRTM code uses these values
lev = Tatm.domain.axes['lev'].points
lev_bounds = Tatm.domain.axes['lev'].bounds
# Interpolate to layer interfaces
f = interp1d(lev, Tatm, axis=-1) # interpolation function
Tinterp = f(lev_bounds[1:-1])
# add TOA value, Assume surface temperature at bottom boundary
Ttoa = Tatm[...,0]
Tinterp = np.concatenate((Ttoa[..., np.newaxis], Tinterp, Ts), axis=-1)
return Tinterp
|
[
"def",
"interface_temperature",
"(",
"Ts",
",",
"Tatm",
",",
"*",
"*",
"kwargs",
")",
":",
"# Actually it's not clear to me how the RRTM code uses these values",
"lev",
"=",
"Tatm",
".",
"domain",
".",
"axes",
"[",
"'lev'",
"]",
".",
"points",
"lev_bounds",
"=",
"Tatm",
".",
"domain",
".",
"axes",
"[",
"'lev'",
"]",
".",
"bounds",
"# Interpolate to layer interfaces",
"f",
"=",
"interp1d",
"(",
"lev",
",",
"Tatm",
",",
"axis",
"=",
"-",
"1",
")",
"# interpolation function",
"Tinterp",
"=",
"f",
"(",
"lev_bounds",
"[",
"1",
":",
"-",
"1",
"]",
")",
"# add TOA value, Assume surface temperature at bottom boundary",
"Ttoa",
"=",
"Tatm",
"[",
"...",
",",
"0",
"]",
"Tinterp",
"=",
"np",
".",
"concatenate",
"(",
"(",
"Ttoa",
"[",
"...",
",",
"np",
".",
"newaxis",
"]",
",",
"Tinterp",
",",
"Ts",
")",
",",
"axis",
"=",
"-",
"1",
")",
"return",
"Tinterp"
] | 48 | 16.166667 |
def coroutine(
func: Callable[..., "Generator[Any, Any, _T]"]
) -> Callable[..., "Future[_T]"]:
"""Decorator for asynchronous generators.
For compatibility with older versions of Python, coroutines may
also "return" by raising the special exception `Return(value)
<Return>`.
Functions with this decorator return a `.Future`.
.. warning::
When exceptions occur inside a coroutine, the exception
information will be stored in the `.Future` object. You must
examine the result of the `.Future` object, or the exception
may go unnoticed by your code. This means yielding the function
if called from another coroutine, using something like
`.IOLoop.run_sync` for top-level calls, or passing the `.Future`
to `.IOLoop.add_future`.
.. versionchanged:: 6.0
The ``callback`` argument was removed. Use the returned
awaitable object instead.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
# type: (*Any, **Any) -> Future[_T]
# This function is type-annotated with a comment to work around
# https://bitbucket.org/pypy/pypy/issues/2868/segfault-with-args-type-annotation-in
future = _create_future()
try:
result = func(*args, **kwargs)
except (Return, StopIteration) as e:
result = _value_from_stopiteration(e)
except Exception:
future_set_exc_info(future, sys.exc_info())
try:
return future
finally:
# Avoid circular references
future = None # type: ignore
else:
if isinstance(result, Generator):
# Inline the first iteration of Runner.run. This lets us
# avoid the cost of creating a Runner when the coroutine
# never actually yields, which in turn allows us to
# use "optional" coroutines in critical path code without
# performance penalty for the synchronous case.
try:
yielded = next(result)
except (StopIteration, Return) as e:
future_set_result_unless_cancelled(
future, _value_from_stopiteration(e)
)
except Exception:
future_set_exc_info(future, sys.exc_info())
else:
# Provide strong references to Runner objects as long
# as their result future objects also have strong
# references (typically from the parent coroutine's
# Runner). This keeps the coroutine's Runner alive.
# We do this by exploiting the public API
# add_done_callback() instead of putting a private
# attribute on the Future.
# (Github issues #1769, #2229).
runner = Runner(result, future, yielded)
future.add_done_callback(lambda _: runner)
yielded = None
try:
return future
finally:
# Subtle memory optimization: if next() raised an exception,
# the future's exc_info contains a traceback which
# includes this stack frame. This creates a cycle,
# which will be collected at the next full GC but has
# been shown to greatly increase memory usage of
# benchmarks (relative to the refcount-based scheme
# used in the absence of cycles). We can avoid the
# cycle by clearing the local variable after we return it.
future = None # type: ignore
future_set_result_unless_cancelled(future, result)
return future
wrapper.__wrapped__ = func # type: ignore
wrapper.__tornado_coroutine__ = True # type: ignore
return wrapper
|
[
"def",
"coroutine",
"(",
"func",
":",
"Callable",
"[",
"...",
",",
"\"Generator[Any, Any, _T]\"",
"]",
")",
"->",
"Callable",
"[",
"...",
",",
"\"Future[_T]\"",
"]",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: (*Any, **Any) -> Future[_T]",
"# This function is type-annotated with a comment to work around",
"# https://bitbucket.org/pypy/pypy/issues/2868/segfault-with-args-type-annotation-in",
"future",
"=",
"_create_future",
"(",
")",
"try",
":",
"result",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"(",
"Return",
",",
"StopIteration",
")",
"as",
"e",
":",
"result",
"=",
"_value_from_stopiteration",
"(",
"e",
")",
"except",
"Exception",
":",
"future_set_exc_info",
"(",
"future",
",",
"sys",
".",
"exc_info",
"(",
")",
")",
"try",
":",
"return",
"future",
"finally",
":",
"# Avoid circular references",
"future",
"=",
"None",
"# type: ignore",
"else",
":",
"if",
"isinstance",
"(",
"result",
",",
"Generator",
")",
":",
"# Inline the first iteration of Runner.run. This lets us",
"# avoid the cost of creating a Runner when the coroutine",
"# never actually yields, which in turn allows us to",
"# use \"optional\" coroutines in critical path code without",
"# performance penalty for the synchronous case.",
"try",
":",
"yielded",
"=",
"next",
"(",
"result",
")",
"except",
"(",
"StopIteration",
",",
"Return",
")",
"as",
"e",
":",
"future_set_result_unless_cancelled",
"(",
"future",
",",
"_value_from_stopiteration",
"(",
"e",
")",
")",
"except",
"Exception",
":",
"future_set_exc_info",
"(",
"future",
",",
"sys",
".",
"exc_info",
"(",
")",
")",
"else",
":",
"# Provide strong references to Runner objects as long",
"# as their result future objects also have strong",
"# references (typically from the parent coroutine's",
"# Runner). This keeps the coroutine's Runner alive.",
"# We do this by exploiting the public API",
"# add_done_callback() instead of putting a private",
"# attribute on the Future.",
"# (Github issues #1769, #2229).",
"runner",
"=",
"Runner",
"(",
"result",
",",
"future",
",",
"yielded",
")",
"future",
".",
"add_done_callback",
"(",
"lambda",
"_",
":",
"runner",
")",
"yielded",
"=",
"None",
"try",
":",
"return",
"future",
"finally",
":",
"# Subtle memory optimization: if next() raised an exception,",
"# the future's exc_info contains a traceback which",
"# includes this stack frame. This creates a cycle,",
"# which will be collected at the next full GC but has",
"# been shown to greatly increase memory usage of",
"# benchmarks (relative to the refcount-based scheme",
"# used in the absence of cycles). We can avoid the",
"# cycle by clearing the local variable after we return it.",
"future",
"=",
"None",
"# type: ignore",
"future_set_result_unless_cancelled",
"(",
"future",
",",
"result",
")",
"return",
"future",
"wrapper",
".",
"__wrapped__",
"=",
"func",
"# type: ignore",
"wrapper",
".",
"__tornado_coroutine__",
"=",
"True",
"# type: ignore",
"return",
"wrapper"
] | 43.722222 | 21.477778 |
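A sketch of the fast path described in the inline comments: a decorated function that never yields still hands back a resolved Future (reading .result() this way assumes the result has already been set, as it is here):

@coroutine
def add_one(x):
    return x + 1   # no yield, so no Runner is ever created

fut = add_one(41)
print(fut.result())   # 42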
def get_one_required(self, locator):
"""
Gets a required component reference that matches specified locator.
:param locator: the locator to find a reference by.
:return: a matching component reference.
        :raises: a [[ReferenceException]] when no references are found.
"""
components = self.find(locator, True)
return components[0] if len(components) > 0 else None
|
[
"def",
"get_one_required",
"(",
"self",
",",
"locator",
")",
":",
"components",
"=",
"self",
".",
"find",
"(",
"locator",
",",
"True",
")",
"return",
"components",
"[",
"0",
"]",
"if",
"len",
"(",
"components",
")",
">",
"0",
"else",
"None"
] | 34.833333 | 20.25 |
def to_camel_case(snake_case_name):
"""
Converts snake_cased_names to CamelCaseNames.
:param snake_case_name: The name you'd like to convert from.
:type snake_case_name: string
:returns: A converted string
:rtype: string
"""
bits = snake_case_name.split('_')
return ''.join([bit.capitalize() for bit in bits])
|
[
"def",
"to_camel_case",
"(",
"snake_case_name",
")",
":",
"bits",
"=",
"snake_case_name",
".",
"split",
"(",
"'_'",
")",
"return",
"''",
".",
"join",
"(",
"[",
"bit",
".",
"capitalize",
"(",
")",
"for",
"bit",
"in",
"bits",
"]",
")"
] | 28 | 14.333333 |
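Behavior follows directly from the split/capitalize pipeline; note that capitalize() also lowercases the rest of each bit:

to_camel_case('snake_case_name')   # -> 'SnakeCaseName'
to_camel_case('HTTP_server')       # -> 'HttpServer', not 'HTTPServer'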
def get_loadbalancer_members(self, datacenter_id, loadbalancer_id,
depth=1):
"""
Retrieves the list of NICs that are associated with a load balancer.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param loadbalancer_id: The unique ID of the load balancer.
:type loadbalancer_id: ``str``
:param depth: The depth of the response data.
:type depth: ``int``
"""
response = self._perform_request(
'/datacenters/%s/loadbalancers/%s/balancednics?depth=%s' % (
datacenter_id, loadbalancer_id, str(depth)))
return response
|
[
"def",
"get_loadbalancer_members",
"(",
"self",
",",
"datacenter_id",
",",
"loadbalancer_id",
",",
"depth",
"=",
"1",
")",
":",
"response",
"=",
"self",
".",
"_perform_request",
"(",
"'/datacenters/%s/loadbalancers/%s/balancednics?depth=%s'",
"%",
"(",
"datacenter_id",
",",
"loadbalancer_id",
",",
"str",
"(",
"depth",
")",
")",
")",
"return",
"response"
] | 36 | 21.2 |
def get_reverse_index(self, base_index):
"""Get index into this segment's data given the index into the base data
Raises IndexError if the base index doesn't map to anything in this
segment's data
"""
r = self.reverse_index_mapping[base_index]
if r < 0:
raise IndexError("index %d not mapped in this segment" % base_index)
return r
|
[
"def",
"get_reverse_index",
"(",
"self",
",",
"base_index",
")",
":",
"r",
"=",
"self",
".",
"reverse_index_mapping",
"[",
"base_index",
"]",
"if",
"r",
"<",
"0",
":",
"raise",
"IndexError",
"(",
"\"index %d not mapped in this segment\"",
"%",
"base_index",
")",
"return",
"r"
] | 39.1 | 19 |
def secant2(value_and_gradients_function,
val_0,
search_interval,
f_lim,
sufficient_decrease_param=0.1,
curvature_param=0.9,
name=None):
"""Performs the secant square procedure of Hager Zhang.
Given an interval that brackets a root, this procedure performs an update of
both end points using two intermediate points generated using the secant
interpolation. For details see the steps S1-S4 in [Hager and Zhang (2006)][2].
The interval [a, b] must satisfy the opposite slope conditions described in
the documentation for `update`.
Args:
value_and_gradients_function: A Python callable that accepts a real scalar
tensor and returns an object that can be converted to a namedtuple.
The namedtuple should have fields 'f' and 'df' that correspond to scalar
tensors of real dtype containing the value of the function and its
derivative at that point. The other namedtuple fields, if present,
should be tensors or sequences (possibly nested) of tensors.
In usual optimization application, this function would be generated by
projecting the multivariate objective function along some specific
direction. The direction is determined by some other procedure but should
be a descent direction (i.e. the derivative of the projected univariate
function must be negative at 0.).
Alternatively, the function may represent the batching of `n` such line
functions (e.g. projecting a single multivariate objective function along
`n` distinct directions at once) accepting n points as input, i.e. a
tensor of shape [n], and the fields 'f' and 'df' in the returned
namedtuple should each be a tensor of shape [n], with the corresponding
function values and derivatives at the input points.
val_0: A namedtuple, as returned by value_and_gradients_function evaluated
at `0.`.
search_interval: A namedtuple describing the current search interval,
must include the fields:
- converged: Boolean `Tensor` of shape [n], indicating batch members
where search has already converged. Interval for these batch members
won't be modified.
- failed: Boolean `Tensor` of shape [n], indicating batch members
where search has already failed. Interval for these batch members
        won't be modified.
- iterations: Scalar int32 `Tensor`. Number of line search iterations
so far.
- func_evals: Scalar int32 `Tensor`. Number of function evaluations
so far.
- left: A namedtuple, as returned by value_and_gradients_function,
of the left end point of the current search interval.
- right: A namedtuple, as returned by value_and_gradients_function,
of the right end point of the current search interval.
f_lim: Scalar `Tensor` of real dtype. The function value threshold for
the approximate Wolfe conditions to be checked.
sufficient_decrease_param: Positive scalar `Tensor` of real dtype.
Bounded above by the curvature param. Corresponds to 'delta' in the
terminology of [Hager and Zhang (2006)][2].
curvature_param: Positive scalar `Tensor` of real dtype. Bounded above
by `1.`. Corresponds to 'sigma' in the terminology of
[Hager and Zhang (2006)][2].
name: (Optional) Python str. The name prefixed to the ops created by this
function. If not supplied, the default name 'secant2' is used.
Returns:
A namedtuple containing the following fields.
active: A boolean `Tensor` of shape [n]. Used internally by the procedure
to indicate batch members on which there is work left to do.
converged: A boolean `Tensor` of shape [n]. Indicates whether a point
satisfying the Wolfe conditions has been found. If this is True, the
interval will be degenerate (i.e. `left` and `right` below will be
identical).
failed: A boolean `Tensor` of shape [n]. Indicates if invalid function or
gradient values were encountered (i.e. infinity or NaNs).
num_evals: A scalar int32 `Tensor`. The total number of function
evaluations made.
left: Return value of value_and_gradients_function at the updated left
end point of the interval.
right: Return value of value_and_gradients_function at the updated right
end point of the interval.
"""
with tf.compat.v1.name_scope(name, 'secant2', [
val_0, search_interval, f_lim, sufficient_decrease_param,
curvature_param]):
# This will always be s.t. left <= c <= right
val_c = value_and_gradients_function(
_secant(search_interval.left, search_interval.right))
failed = search_interval.failed | ~is_finite(val_c)
converged = search_interval.converged | (~failed & _satisfies_wolfe(
val_0, val_c, f_lim, sufficient_decrease_param, curvature_param))
new_converged = converged & ~search_interval.converged
val_left = val_where(new_converged, val_c, search_interval.left)
val_right = val_where(new_converged, val_c, search_interval.right)
initial_args = _Secant2Result(
active=~failed & ~converged,
converged=converged,
failed=failed,
num_evals=search_interval.func_evals + 1,
left=val_left,
right=val_right)
def _apply_secant2_inner():
return _secant2_inner(
value_and_gradients_function,
initial_args,
val_0,
val_c,
f_lim,
sufficient_decrease_param,
curvature_param)
return prefer_static.cond(
tf.reduce_any(input_tensor=initial_args.active),
_apply_secant2_inner,
lambda: initial_args)
|
[
"def",
"secant2",
"(",
"value_and_gradients_function",
",",
"val_0",
",",
"search_interval",
",",
"f_lim",
",",
"sufficient_decrease_param",
"=",
"0.1",
",",
"curvature_param",
"=",
"0.9",
",",
"name",
"=",
"None",
")",
":",
"with",
"tf",
".",
"compat",
".",
"v1",
".",
"name_scope",
"(",
"name",
",",
"'secant2'",
",",
"[",
"val_0",
",",
"search_interval",
",",
"f_lim",
",",
"sufficient_decrease_param",
",",
"curvature_param",
"]",
")",
":",
"# This will always be s.t. left <= c <= right",
"val_c",
"=",
"value_and_gradients_function",
"(",
"_secant",
"(",
"search_interval",
".",
"left",
",",
"search_interval",
".",
"right",
")",
")",
"failed",
"=",
"search_interval",
".",
"failed",
"|",
"~",
"is_finite",
"(",
"val_c",
")",
"converged",
"=",
"search_interval",
".",
"converged",
"|",
"(",
"~",
"failed",
"&",
"_satisfies_wolfe",
"(",
"val_0",
",",
"val_c",
",",
"f_lim",
",",
"sufficient_decrease_param",
",",
"curvature_param",
")",
")",
"new_converged",
"=",
"converged",
"&",
"~",
"search_interval",
".",
"converged",
"val_left",
"=",
"val_where",
"(",
"new_converged",
",",
"val_c",
",",
"search_interval",
".",
"left",
")",
"val_right",
"=",
"val_where",
"(",
"new_converged",
",",
"val_c",
",",
"search_interval",
".",
"right",
")",
"initial_args",
"=",
"_Secant2Result",
"(",
"active",
"=",
"~",
"failed",
"&",
"~",
"converged",
",",
"converged",
"=",
"converged",
",",
"failed",
"=",
"failed",
",",
"num_evals",
"=",
"search_interval",
".",
"func_evals",
"+",
"1",
",",
"left",
"=",
"val_left",
",",
"right",
"=",
"val_right",
")",
"def",
"_apply_secant2_inner",
"(",
")",
":",
"return",
"_secant2_inner",
"(",
"value_and_gradients_function",
",",
"initial_args",
",",
"val_0",
",",
"val_c",
",",
"f_lim",
",",
"sufficient_decrease_param",
",",
"curvature_param",
")",
"return",
"prefer_static",
".",
"cond",
"(",
"tf",
".",
"reduce_any",
"(",
"input_tensor",
"=",
"initial_args",
".",
"active",
")",
",",
"_apply_secant2_inner",
",",
"lambda",
":",
"initial_args",
")"
] | 49.104348 | 23.93913 |
def load_cufflinks(self, filter_ok=True):
"""
    Load Cufflinks gene expression data for a cohort
Parameters
----------
filter_ok : bool, optional
If true, filter Cufflinks data to row with FPKM_status == "OK"
Returns
-------
cufflinks_data : Pandas dataframe
Pandas dataframe with Cufflinks data for all patients
columns include patient_id, gene_id, gene_short_name, FPKM, FPKM_conf_lo, FPKM_conf_hi
"""
return \
pd.concat(
[self._load_single_patient_cufflinks(patient, filter_ok) for patient in self],
copy=False
)
|
[
"def",
"load_cufflinks",
"(",
"self",
",",
"filter_ok",
"=",
"True",
")",
":",
"return",
"pd",
".",
"concat",
"(",
"[",
"self",
".",
"_load_single_patient_cufflinks",
"(",
"patient",
",",
"filter_ok",
")",
"for",
"patient",
"in",
"self",
"]",
",",
"copy",
"=",
"False",
")"
] | 33.3 | 22.9 |
def make_back_matter(self):
"""
The <back> element may have 0 or 1 <label> elements and 0 or 1 <title>
elements. Then it may have any combination of the following: <ack>,
<app-group>, <bio>, <fn-group>, <glossary>, <ref-list>, <notes>, and
<sec>. <sec> is employed here as a catch-all for material that does not
fall under the other categories.
The Back should generally be thought of as a non-linear element, though
some of its content will be parsed to the linear flow of the document.
This can be thought of as critically important meta-information that
should accompany the main text (e.g. Acknowledgments and Contributions)
Because the content of <back> contains a set of tags that intersects
with that of the Body, this method should always be called before the
general post-processing steps; keep in mind that this is also the
opportunity to permit special handling of content in the Back
"""
#Back is technically metadata content that needs to be interpreted to
#presentable content
body = self.main.getroot().find('body')
if self.article.root.find('back') is None:
return
#The following things are ordered in such a way to adhere to what
#appears to be a consistent presentation order for PLoS
#Acknowledgments
back_ack = self.make_back_acknowledgments()
if back_ack is not None:
body.append(back_ack)
#Author Contributions
self.make_back_author_contributions(body)
#Glossaries
self.make_back_glossary(body)
#Notes
self.make_back_notes(body)
|
[
"def",
"make_back_matter",
"(",
"self",
")",
":",
"#Back is technically metadata content that needs to be interpreted to",
"#presentable content",
"body",
"=",
"self",
".",
"main",
".",
"getroot",
"(",
")",
".",
"find",
"(",
"'body'",
")",
"if",
"self",
".",
"article",
".",
"root",
".",
"find",
"(",
"'back'",
")",
"is",
"None",
":",
"return",
"#The following things are ordered in such a way to adhere to what",
"#appears to be a consistent presentation order for PLoS",
"#Acknowledgments",
"back_ack",
"=",
"self",
".",
"make_back_acknowledgments",
"(",
")",
"if",
"back_ack",
"is",
"not",
"None",
":",
"body",
".",
"append",
"(",
"back_ack",
")",
"#Author Contributions",
"self",
".",
"make_back_author_contributions",
"(",
"body",
")",
"#Glossaries",
"self",
".",
"make_back_glossary",
"(",
"body",
")",
"#Notes",
"self",
".",
"make_back_notes",
"(",
"body",
")"
] | 46.722222 | 23.055556 |
def _encrypt(cipher, key, data, iv, padding):
"""
Encrypts plaintext
:param cipher:
A unicode string of "aes", "des", "tripledes_2key", "tripledes_3key",
"rc2", "rc4"
:param key:
The encryption key - a byte string 5-16 bytes long
:param data:
The plaintext - a byte string
:param iv:
The initialization vector - a byte string - unused for RC4
:param padding:
Boolean, if padding should be used - unused for RC4
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A byte string of the ciphertext
"""
if not isinstance(key, byte_cls):
raise TypeError(pretty_message(
'''
key must be a byte string, not %s
''',
type_name(key)
))
if not isinstance(data, byte_cls):
raise TypeError(pretty_message(
'''
data must be a byte string, not %s
''',
type_name(data)
))
if cipher != 'rc4' and not isinstance(iv, byte_cls):
raise TypeError(pretty_message(
'''
iv must be a byte string, not %s
''',
type_name(iv)
))
if cipher != 'rc4' and not padding:
raise ValueError('padding must be specified')
if _backend == 'winlegacy':
return _advapi32_encrypt(cipher, key, data, iv, padding)
return _bcrypt_encrypt(cipher, key, data, iv, padding)
|
[
"def",
"_encrypt",
"(",
"cipher",
",",
"key",
",",
"data",
",",
"iv",
",",
"padding",
")",
":",
"if",
"not",
"isinstance",
"(",
"key",
",",
"byte_cls",
")",
":",
"raise",
"TypeError",
"(",
"pretty_message",
"(",
"'''\n key must be a byte string, not %s\n '''",
",",
"type_name",
"(",
"key",
")",
")",
")",
"if",
"not",
"isinstance",
"(",
"data",
",",
"byte_cls",
")",
":",
"raise",
"TypeError",
"(",
"pretty_message",
"(",
"'''\n data must be a byte string, not %s\n '''",
",",
"type_name",
"(",
"data",
")",
")",
")",
"if",
"cipher",
"!=",
"'rc4'",
"and",
"not",
"isinstance",
"(",
"iv",
",",
"byte_cls",
")",
":",
"raise",
"TypeError",
"(",
"pretty_message",
"(",
"'''\n iv must be a byte string, not %s\n '''",
",",
"type_name",
"(",
"iv",
")",
")",
")",
"if",
"cipher",
"!=",
"'rc4'",
"and",
"not",
"padding",
":",
"raise",
"ValueError",
"(",
"'padding must be specified'",
")",
"if",
"_backend",
"==",
"'winlegacy'",
":",
"return",
"_advapi32_encrypt",
"(",
"cipher",
",",
"key",
",",
"data",
",",
"iv",
",",
"padding",
")",
"return",
"_bcrypt_encrypt",
"(",
"cipher",
",",
"key",
",",
"data",
",",
"iv",
",",
"padding",
")"
] | 26.779661 | 21.559322 |
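A sketch of two calls under the parameter rules above, with dummy key and IV bytes; RC4 ignores both iv and padding:

key = b'\x00' * 16    # dummy 16-byte AES-128 key
iv = b'\x01' * 16     # dummy 16-byte IV
ct = _encrypt('aes', key, b'attack at dawn', iv, True)        # padding required
ct_rc4 = _encrypt('rc4', key, b'attack at dawn', None, None)  # iv/padding unused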
def filter(configs, settings):
"""Main entry function to filtering configuration types
Parameters
----------
configs: Nx4 array
array containing A-B-M-N configurations
settings: dict
'only_types': ['dd', 'other'], # filter only for those types
Returns
-------
dict
results dict containing filter results (indices) for all registered
filter functions. All remaining configs are stored under the keywords
'remaining'
"""
if isinstance(configs, pd.DataFrame):
configs = configs[['a', 'b', 'm', 'n']].values
# assign short labels to Python functions
filter_funcs = {
'dd': _filter_dipole_dipole,
'schlumberger': _filter_schlumberger,
}
# we need a list to fix the call order of filter functions
keys = ['dd', 'schlumberger', ]
allowed_keys = settings.get('only_types', filter_funcs.keys())
results = {}
# we operate iteratively on the configs, set the first round here
# rows are iteratively set to nan when filters remove them!
configs_filtered = configs.copy().astype(float)
for key in keys:
if key in allowed_keys:
configs_filtered, indices_filtered = filter_funcs[key](
configs_filtered,
)
if len(indices_filtered) > 0:
results[key] = indices_filtered
# add all remaining indices to the results dict
results['not_sorted'] = np.where(
~np.all(np.isnan(configs_filtered), axis=1)
)[0]
return results
|
[
"def",
"filter",
"(",
"configs",
",",
"settings",
")",
":",
"if",
"isinstance",
"(",
"configs",
",",
"pd",
".",
"DataFrame",
")",
":",
"configs",
"=",
"configs",
"[",
"[",
"'a'",
",",
"'b'",
",",
"'m'",
",",
"'n'",
"]",
"]",
".",
"values",
"# assign short labels to Python functions",
"filter_funcs",
"=",
"{",
"'dd'",
":",
"_filter_dipole_dipole",
",",
"'schlumberger'",
":",
"_filter_schlumberger",
",",
"}",
"# we need a list to fix the call order of filter functions",
"keys",
"=",
"[",
"'dd'",
",",
"'schlumberger'",
",",
"]",
"allowed_keys",
"=",
"settings",
".",
"get",
"(",
"'only_types'",
",",
"filter_funcs",
".",
"keys",
"(",
")",
")",
"results",
"=",
"{",
"}",
"# we operate iteratively on the configs, set the first round here",
"# rows are iteratively set to nan when filters remove them!",
"configs_filtered",
"=",
"configs",
".",
"copy",
"(",
")",
".",
"astype",
"(",
"float",
")",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
"in",
"allowed_keys",
":",
"configs_filtered",
",",
"indices_filtered",
"=",
"filter_funcs",
"[",
"key",
"]",
"(",
"configs_filtered",
",",
")",
"if",
"len",
"(",
"indices_filtered",
")",
">",
"0",
":",
"results",
"[",
"key",
"]",
"=",
"indices_filtered",
"# add all remaining indices to the results dict",
"results",
"[",
"'not_sorted'",
"]",
"=",
"np",
".",
"where",
"(",
"~",
"np",
".",
"all",
"(",
"np",
".",
"isnan",
"(",
"configs_filtered",
")",
",",
"axis",
"=",
"1",
")",
")",
"[",
"0",
"]",
"return",
"results"
] | 30.14 | 21.66 |
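A sketch with a tiny ABMN array, restricting the run to the dipole-dipole filter; whether 'dd' appears in the result depends on what the filter matches:

import numpy as np

configs = np.array([[1, 2, 4, 3],
                    [2, 3, 5, 6]])
result = filter(configs, {'only_types': ['dd']})
print(result.get('dd'), result['not_sorted'])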
def is_able_to_convert_detailed(self, strict: bool, from_type: Type[Any], to_type: Type[Any]):
"""
Overrides the parent method to delegate left check to the first (left) converter of the chain and right check
        to the last (right) converter of the chain, including any custom checks they define.
        See Converter.is_able_to_convert for details.
:param strict:
:param from_type:
:param to_type:
:return:
"""
# check if first and last converters are happy
if not self._converters_list[0].is_able_to_convert(strict, from_type=from_type, to_type=JOKER):
return False, None, None
elif not self._converters_list[-1].is_able_to_convert(strict, from_type=JOKER, to_type=to_type):
return False, None, None
else:
            # behave as usual. This is probably useless but let's be sure.
return super(ConversionChain, self).is_able_to_convert_detailed(strict, from_type, to_type)
|
[
"def",
"is_able_to_convert_detailed",
"(",
"self",
",",
"strict",
":",
"bool",
",",
"from_type",
":",
"Type",
"[",
"Any",
"]",
",",
"to_type",
":",
"Type",
"[",
"Any",
"]",
")",
":",
"# check if first and last converters are happy",
"if",
"not",
"self",
".",
"_converters_list",
"[",
"0",
"]",
".",
"is_able_to_convert",
"(",
"strict",
",",
"from_type",
"=",
"from_type",
",",
"to_type",
"=",
"JOKER",
")",
":",
"return",
"False",
",",
"None",
",",
"None",
"elif",
"not",
"self",
".",
"_converters_list",
"[",
"-",
"1",
"]",
".",
"is_able_to_convert",
"(",
"strict",
",",
"from_type",
"=",
"JOKER",
",",
"to_type",
"=",
"to_type",
")",
":",
"return",
"False",
",",
"None",
",",
"None",
"else",
":",
"# behave as usual. This is probably useless but lets be sure.",
"return",
"super",
"(",
"ConversionChain",
",",
"self",
")",
".",
"is_able_to_convert_detailed",
"(",
"strict",
",",
"from_type",
",",
"to_type",
")"
] | 52.315789 | 31.052632 |
def human_uuid():
"""Returns a good UUID for using as a human readable string."""
    return base64.b32encode(
        hashlib.sha1(uuid.uuid4().bytes).digest()).decode('ascii').lower().strip('=')
|
[
"def",
"human_uuid",
"(",
")",
":",
"return",
"base64",
".",
"b32encode",
"(",
"hashlib",
".",
"sha1",
"(",
"uuid",
".",
"uuid4",
"(",
")",
".",
"bytes",
")",
".",
"digest",
"(",
")",
")",
".",
"lower",
"(",
")",
".",
"strip",
"(",
"'='",
")"
] | 45.25 | 16 |
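The helper yields a lowercase base32 string; a 20-byte SHA-1 digest encodes to exactly 32 base32 characters, so the '=' strip removes nothing here:

uid = human_uuid()
print(len(uid), uid)   # 32, e.g. 'vq7dk3...'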
def to_pandas(self):
"""Return a Pandas dataframe of the minimum spanning tree.
Each row is an edge in the tree; the columns are `from`,
`to`, and `distance` giving the two vertices of the edge
which are indices into the dataset, and the distance
between those datapoints.
"""
try:
from pandas import DataFrame
except ImportError:
raise ImportError('You must have pandas installed to export pandas DataFrames')
result = DataFrame({'from': self._mst.T[0].astype(int),
'to': self._mst.T[1].astype(int),
'distance': self._mst.T[2]})
return result
|
[
"def",
"to_pandas",
"(",
"self",
")",
":",
"try",
":",
"from",
"pandas",
"import",
"DataFrame",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"'You must have pandas installed to export pandas DataFrames'",
")",
"result",
"=",
"DataFrame",
"(",
"{",
"'from'",
":",
"self",
".",
"_mst",
".",
"T",
"[",
"0",
"]",
".",
"astype",
"(",
"int",
")",
",",
"'to'",
":",
"self",
".",
"_mst",
".",
"T",
"[",
"1",
"]",
".",
"astype",
"(",
"int",
")",
",",
"'distance'",
":",
"self",
".",
"_mst",
".",
"T",
"[",
"2",
"]",
"}",
")",
"return",
"result"
] | 40.529412 | 20.352941 |
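Downstream use is plain pandas; assuming mst is an hdbscan-style minimum spanning tree object exposing the method above:

df = mst.to_pandas()
longest = df.sort_values('distance', ascending=False).head(5)
print(longest[['from', 'to', 'distance']])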
def _set_init_system(self, client):
"""Determine the init system of distribution."""
if not self.init_system:
try:
out = ipa_utils.execute_ssh_command(
client,
'ps -p 1 -o comm='
)
except Exception as e:
raise IpaDistroException(
'An error occurred while retrieving'
' the distro init system: %s' % e
)
if out:
self.init_system = out.strip()
|
[
"def",
"_set_init_system",
"(",
"self",
",",
"client",
")",
":",
"if",
"not",
"self",
".",
"init_system",
":",
"try",
":",
"out",
"=",
"ipa_utils",
".",
"execute_ssh_command",
"(",
"client",
",",
"'ps -p 1 -o comm='",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"IpaDistroException",
"(",
"'An error occurred while retrieving'",
"' the distro init system: %s'",
"%",
"e",
")",
"if",
"out",
":",
"self",
".",
"init_system",
"=",
"out",
".",
"strip",
"(",
")"
] | 35.933333 | 11.533333 |
def update_cache_for_instance(
model_name, instance_pk, instance=None, version=None):
"""Update the cache for an instance, with cascading updates."""
cache = SampleCache()
invalid = cache.update_instance(model_name, instance_pk, instance, version)
for invalid_name, invalid_pk, invalid_version in invalid:
update_cache_for_instance.delay(
invalid_name, invalid_pk, version=invalid_version)
|
[
"def",
"update_cache_for_instance",
"(",
"model_name",
",",
"instance_pk",
",",
"instance",
"=",
"None",
",",
"version",
"=",
"None",
")",
":",
"cache",
"=",
"SampleCache",
"(",
")",
"invalid",
"=",
"cache",
".",
"update_instance",
"(",
"model_name",
",",
"instance_pk",
",",
"instance",
",",
"version",
")",
"for",
"invalid_name",
",",
"invalid_pk",
",",
"invalid_version",
"in",
"invalid",
":",
"update_cache_for_instance",
".",
"delay",
"(",
"invalid_name",
",",
"invalid_pk",
",",
"version",
"=",
"invalid_version",
")"
] | 53.25 | 16.125 |
def get_chunk_information(self, chk, lun, chunk_name):
"""Get chunk information"""
cmd = ["nvm_cmd rprt_lun", self.envs,
"%d %d > %s" % (chk, lun, chunk_name)]
status, _, _ = cij.ssh.command(cmd, shell=True)
return status
|
[
"def",
"get_chunk_information",
"(",
"self",
",",
"chk",
",",
"lun",
",",
"chunk_name",
")",
":",
"cmd",
"=",
"[",
"\"nvm_cmd rprt_lun\"",
",",
"self",
".",
"envs",
",",
"\"%d %d > %s\"",
"%",
"(",
"chk",
",",
"lun",
",",
"chunk_name",
")",
"]",
"status",
",",
"_",
",",
"_",
"=",
"cij",
".",
"ssh",
".",
"command",
"(",
"cmd",
",",
"shell",
"=",
"True",
")",
"return",
"status"
] | 44.666667 | 11.666667 |
def to_labeled_point(sc, features, labels, categorical=False):
"""Convert numpy arrays of features and labels into
a LabeledPoint RDD for MLlib and ML integration.
:param sc: Spark context
:param features: numpy array with features
:param labels: numpy array with labels
:param categorical: boolean, whether labels are already one-hot encoded or not
:return: LabeledPoint RDD with features and labels
"""
labeled_points = []
for x, y in zip(features, labels):
if categorical:
lp = LabeledPoint(np.argmax(y), to_vector(x))
else:
lp = LabeledPoint(y, to_vector(x))
labeled_points.append(lp)
return sc.parallelize(labeled_points)
|
[
"def",
"to_labeled_point",
"(",
"sc",
",",
"features",
",",
"labels",
",",
"categorical",
"=",
"False",
")",
":",
"labeled_points",
"=",
"[",
"]",
"for",
"x",
",",
"y",
"in",
"zip",
"(",
"features",
",",
"labels",
")",
":",
"if",
"categorical",
":",
"lp",
"=",
"LabeledPoint",
"(",
"np",
".",
"argmax",
"(",
"y",
")",
",",
"to_vector",
"(",
"x",
")",
")",
"else",
":",
"lp",
"=",
"LabeledPoint",
"(",
"y",
",",
"to_vector",
"(",
"x",
")",
")",
"labeled_points",
".",
"append",
"(",
"lp",
")",
"return",
"sc",
".",
"parallelize",
"(",
"labeled_points",
")"
] | 39 | 13.555556 |
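A minimal driver sketch for the converter above. It assumes a live SparkContext named `sc` and the module's own `to_vector` helper, both of which come from the surrounding code rather than this snippet:

import numpy as np
# Hypothetical toy data: 100 samples, 10 features, binary labels.
features = np.random.rand(100, 10)
labels = np.random.randint(0, 2, size=100)
rdd = to_labeled_point(sc, features, labels, categorical=False)
print(rdd.first())  # LabeledPoint(label, DenseVector([...]))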
def find(self, key):
"Exact matching (returns value)"
index = self.follow_bytes(key, self.ROOT)
if index is None:
return -1
if not self.has_value(index):
return -1
return self.value(index)
|
[
"def",
"find",
"(",
"self",
",",
"key",
")",
":",
"index",
"=",
"self",
".",
"follow_bytes",
"(",
"key",
",",
"self",
".",
"ROOT",
")",
"if",
"index",
"is",
"None",
":",
"return",
"-",
"1",
"if",
"not",
"self",
".",
"has_value",
"(",
"index",
")",
":",
"return",
"-",
"1",
"return",
"self",
".",
"value",
"(",
"index",
")"
] | 30.625 | 11.625 |
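For orientation, a hedged sketch of how such an exact-match lookup is typically driven; the `Dictionary` class and its loader are hypothetical stand-ins for whatever container exposes `find()`:

d = Dictionary.load('words.dawg')  # hypothetical DAWG container
value = d.find(b'key')
if value == -1:
    print('key is absent or carries no payload')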
def clone(client):
'''Clone the redis client to be slowlog-compatible'''
kwargs = client.redis.connection_pool.connection_kwargs
kwargs['parser_class'] = redis.connection.PythonParser
pool = redis.connection.ConnectionPool(**kwargs)
return redis.Redis(connection_pool=pool)
|
[
"def",
"clone",
"(",
"client",
")",
":",
"kwargs",
"=",
"client",
".",
"redis",
".",
"connection_pool",
".",
"connection_kwargs",
"kwargs",
"[",
"'parser_class'",
"]",
"=",
"redis",
".",
"connection",
".",
"PythonParser",
"pool",
"=",
"redis",
".",
"connection",
".",
"ConnectionPool",
"(",
"*",
"*",
"kwargs",
")",
"return",
"redis",
".",
"Redis",
"(",
"connection_pool",
"=",
"pool",
")"
] | 51.333333 | 18.666667 |
def close_list(ctx, root):
"""Close already opened list if needed.
    It checks whether any currently open list needs to be closed before new content is added.
:Args:
- ctx (:class:`Context`): Context object
- root (Element): lxml element representing current position.
:Returns:
lxml element where future content should be placed.
"""
try:
n = len(ctx.in_list)
if n <= 0:
return root
elem = root
while n > 0:
while True:
if elem.tag in ['ul', 'ol', 'td']:
elem = elem.getparent()
break
elem = elem.getparent()
n -= 1
ctx.in_list = []
return elem
except:
return None
|
[
"def",
"close_list",
"(",
"ctx",
",",
"root",
")",
":",
"try",
":",
"n",
"=",
"len",
"(",
"ctx",
".",
"in_list",
")",
"if",
"n",
"<=",
"0",
":",
"return",
"root",
"elem",
"=",
"root",
"while",
"n",
">",
"0",
":",
"while",
"True",
":",
"if",
"elem",
".",
"tag",
"in",
"[",
"'ul'",
",",
"'ol'",
",",
"'td'",
"]",
":",
"elem",
"=",
"elem",
".",
"getparent",
"(",
")",
"break",
"elem",
"=",
"elem",
".",
"getparent",
"(",
")",
"n",
"-=",
"1",
"ctx",
".",
"in_list",
"=",
"[",
"]",
"return",
"elem",
"except",
":",
"return",
"None"
] | 21.342857 | 24.314286 |
def start_accepting_passive_svc_checks(self):
"""Enable passive service check submission (globally)
Format of the line that triggers function call::
START_ACCEPTING_PASSIVE_SVC_CHECKS
:return: None
"""
# todo: #783 create a dedicated brok for global parameters
if not self.my_conf.accept_passive_service_checks:
self.my_conf.modified_attributes |= DICT_MODATTR["MODATTR_PASSIVE_CHECKS_ENABLED"].value
self.my_conf.accept_passive_service_checks = True
self.my_conf.explode_global_conf()
self.daemon.update_program_status()
|
[
"def",
"start_accepting_passive_svc_checks",
"(",
"self",
")",
":",
"# todo: #783 create a dedicated brok for global parameters",
"if",
"not",
"self",
".",
"my_conf",
".",
"accept_passive_service_checks",
":",
"self",
".",
"my_conf",
".",
"modified_attributes",
"|=",
"DICT_MODATTR",
"[",
"\"MODATTR_PASSIVE_CHECKS_ENABLED\"",
"]",
".",
"value",
"self",
".",
"my_conf",
".",
"accept_passive_service_checks",
"=",
"True",
"self",
".",
"my_conf",
".",
"explode_global_conf",
"(",
")",
"self",
".",
"daemon",
".",
"update_program_status",
"(",
")"
] | 43.857143 | 18.571429 |
def lowstate_file_refs(chunks, extras=''):
'''
Create a list of file ref objects to reconcile
'''
refs = {}
for chunk in chunks:
if not isinstance(chunk, dict):
continue
saltenv = 'base'
crefs = []
for state in chunk:
if state == '__env__':
saltenv = chunk[state]
elif state.startswith('__'):
continue
crefs.extend(salt_refs(chunk[state]))
if saltenv not in refs:
refs[saltenv] = []
if crefs:
refs[saltenv].append(crefs)
if extras:
extra_refs = extras.split(',')
if extra_refs:
for env in refs:
for x in extra_refs:
refs[env].append([x])
return refs
|
[
"def",
"lowstate_file_refs",
"(",
"chunks",
",",
"extras",
"=",
"''",
")",
":",
"refs",
"=",
"{",
"}",
"for",
"chunk",
"in",
"chunks",
":",
"if",
"not",
"isinstance",
"(",
"chunk",
",",
"dict",
")",
":",
"continue",
"saltenv",
"=",
"'base'",
"crefs",
"=",
"[",
"]",
"for",
"state",
"in",
"chunk",
":",
"if",
"state",
"==",
"'__env__'",
":",
"saltenv",
"=",
"chunk",
"[",
"state",
"]",
"elif",
"state",
".",
"startswith",
"(",
"'__'",
")",
":",
"continue",
"crefs",
".",
"extend",
"(",
"salt_refs",
"(",
"chunk",
"[",
"state",
"]",
")",
")",
"if",
"saltenv",
"not",
"in",
"refs",
":",
"refs",
"[",
"saltenv",
"]",
"=",
"[",
"]",
"if",
"crefs",
":",
"refs",
"[",
"saltenv",
"]",
".",
"append",
"(",
"crefs",
")",
"if",
"extras",
":",
"extra_refs",
"=",
"extras",
".",
"split",
"(",
"','",
")",
"if",
"extra_refs",
":",
"for",
"env",
"in",
"refs",
":",
"for",
"x",
"in",
"extra_refs",
":",
"refs",
"[",
"env",
"]",
".",
"append",
"(",
"[",
"x",
"]",
")",
"return",
"refs"
] | 28.407407 | 13.222222 |
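An illustrative call, based only on the logic above and on the assumption that `salt_refs()` (defined elsewhere in this module) extracts `salt://` URLs from a chunk:

chunks = [
    {'__env__': 'dev', 'file': {'source': 'salt://app/conf'}},
    {'cmd': {'name': 'echo hi'}},
]
refs = lowstate_file_refs(chunks, extras='salt://extra/file')
# Expected shape, roughly:
# {'dev': [['salt://app/conf'], ['salt://extra/file']],
#  'base': [['salt://extra/file']]}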
def _ProcessLockAcquired(self):
"""Context manager for process locks with timeout."""
try:
is_locked = self._process_lock.acquire(timeout=self._lock_timeout)
yield is_locked
finally:
if is_locked:
self._process_lock.release()
|
[
"def",
"_ProcessLockAcquired",
"(",
"self",
")",
":",
"try",
":",
"is_locked",
"=",
"self",
".",
"_process_lock",
".",
"acquire",
"(",
"timeout",
"=",
"self",
".",
"_lock_timeout",
")",
"yield",
"is_locked",
"finally",
":",
"if",
"is_locked",
":",
"self",
".",
"_process_lock",
".",
"release",
"(",
")"
] | 36.75 | 16.375 |
def _field_value_html(self, field):
"""Return the html representation of the value of the given field"""
if field in self.fields:
return unicode(self.get(field))
else:
return self.get_timemachine_instance(field)._object_name_html()
|
[
"def",
"_field_value_html",
"(",
"self",
",",
"field",
")",
":",
"if",
"field",
"in",
"self",
".",
"fields",
":",
"return",
"unicode",
"(",
"self",
".",
"get",
"(",
"field",
")",
")",
"else",
":",
"return",
"self",
".",
"get_timemachine_instance",
"(",
"field",
")",
".",
"_object_name_html",
"(",
")"
] | 45.666667 | 13 |
def expr_str(expr, sc_expr_str_fn=standard_sc_expr_str):
"""
Returns the string representation of the expression 'expr', as in a Kconfig
file.
Passing subexpressions of expressions to this function works as expected.
sc_expr_str_fn (default: standard_sc_expr_str):
This function is called for every symbol/choice (hence "sc") appearing in
the expression, with the symbol/choice as the argument. It is expected to
return a string to be used for the symbol/choice.
This can be used e.g. to turn symbols/choices into links when generating
documentation, or for printing the value of each symbol/choice after it.
Note that quoted values are represented as constants symbols
(Symbol.is_constant == True).
"""
if expr.__class__ is not tuple:
return sc_expr_str_fn(expr)
if expr[0] is AND:
return "{} && {}".format(_parenthesize(expr[1], OR, sc_expr_str_fn),
_parenthesize(expr[2], OR, sc_expr_str_fn))
if expr[0] is OR:
# This turns A && B || C && D into "(A && B) || (C && D)", which is
# redundant, but more readable
return "{} || {}".format(_parenthesize(expr[1], AND, sc_expr_str_fn),
_parenthesize(expr[2], AND, sc_expr_str_fn))
if expr[0] is NOT:
if expr[1].__class__ is tuple:
return "!({})".format(expr_str(expr[1], sc_expr_str_fn))
return "!" + sc_expr_str_fn(expr[1]) # Symbol
# Relation
#
# Relation operands are always symbols (quoted strings are constant
# symbols)
return "{} {} {}".format(sc_expr_str_fn(expr[1]), _REL_TO_STR[expr[0]],
sc_expr_str_fn(expr[2]))
|
[
"def",
"expr_str",
"(",
"expr",
",",
"sc_expr_str_fn",
"=",
"standard_sc_expr_str",
")",
":",
"if",
"expr",
".",
"__class__",
"is",
"not",
"tuple",
":",
"return",
"sc_expr_str_fn",
"(",
"expr",
")",
"if",
"expr",
"[",
"0",
"]",
"is",
"AND",
":",
"return",
"\"{} && {}\"",
".",
"format",
"(",
"_parenthesize",
"(",
"expr",
"[",
"1",
"]",
",",
"OR",
",",
"sc_expr_str_fn",
")",
",",
"_parenthesize",
"(",
"expr",
"[",
"2",
"]",
",",
"OR",
",",
"sc_expr_str_fn",
")",
")",
"if",
"expr",
"[",
"0",
"]",
"is",
"OR",
":",
"# This turns A && B || C && D into \"(A && B) || (C && D)\", which is",
"# redundant, but more readable",
"return",
"\"{} || {}\"",
".",
"format",
"(",
"_parenthesize",
"(",
"expr",
"[",
"1",
"]",
",",
"AND",
",",
"sc_expr_str_fn",
")",
",",
"_parenthesize",
"(",
"expr",
"[",
"2",
"]",
",",
"AND",
",",
"sc_expr_str_fn",
")",
")",
"if",
"expr",
"[",
"0",
"]",
"is",
"NOT",
":",
"if",
"expr",
"[",
"1",
"]",
".",
"__class__",
"is",
"tuple",
":",
"return",
"\"!({})\"",
".",
"format",
"(",
"expr_str",
"(",
"expr",
"[",
"1",
"]",
",",
"sc_expr_str_fn",
")",
")",
"return",
"\"!\"",
"+",
"sc_expr_str_fn",
"(",
"expr",
"[",
"1",
"]",
")",
"# Symbol",
"# Relation",
"#",
"# Relation operands are always symbols (quoted strings are constant",
"# symbols)",
"return",
"\"{} {} {}\"",
".",
"format",
"(",
"sc_expr_str_fn",
"(",
"expr",
"[",
"1",
"]",
")",
",",
"_REL_TO_STR",
"[",
"expr",
"[",
"0",
"]",
"]",
",",
"sc_expr_str_fn",
"(",
"expr",
"[",
"2",
"]",
")",
")"
] | 40.52381 | 26.47619 |
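A short sketch against Kconfiglib's public API, where `AND` and `NOT` are module-level constants; the Kconfig file name and symbol names below are placeholders:

import kconfiglib
kconf = kconfiglib.Kconfig('Kconfig')             # placeholder tree
foo, bar = kconf.syms['FOO'], kconf.syms['BAR']   # placeholder symbols
expr = (kconfiglib.AND, foo, (kconfiglib.NOT, bar))
print(kconfiglib.expr_str(expr))                  # -> FOO && !BAR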
def _configure(configuration_details):
"""Adds alias to shell config."""
path = Path(configuration_details.path).expanduser()
with path.open('a') as shell_config:
shell_config.write(u'\n')
shell_config.write(configuration_details.content)
shell_config.write(u'\n')
|
[
"def",
"_configure",
"(",
"configuration_details",
")",
":",
"path",
"=",
"Path",
"(",
"configuration_details",
".",
"path",
")",
".",
"expanduser",
"(",
")",
"with",
"path",
".",
"open",
"(",
"'a'",
")",
"as",
"shell_config",
":",
"shell_config",
".",
"write",
"(",
"u'\\n'",
")",
"shell_config",
".",
"write",
"(",
"configuration_details",
".",
"content",
")",
"shell_config",
".",
"write",
"(",
"u'\\n'",
")"
] | 42 | 7 |
def get_return_line_item_by_id(cls, return_line_item_id, **kwargs):
"""Find ReturnLineItem
Return single instance of ReturnLineItem by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_return_line_item_by_id(return_line_item_id, async=True)
>>> result = thread.get()
:param async bool
:param str return_line_item_id: ID of returnLineItem to return (required)
:return: ReturnLineItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._get_return_line_item_by_id_with_http_info(return_line_item_id, **kwargs)
else:
(data) = cls._get_return_line_item_by_id_with_http_info(return_line_item_id, **kwargs)
return data
|
[
"def",
"get_return_line_item_by_id",
"(",
"cls",
",",
"return_line_item_id",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_get_return_line_item_by_id_with_http_info",
"(",
"return_line_item_id",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_get_return_line_item_by_id_with_http_info",
"(",
"return_line_item_id",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 45.761905 | 23.238095 |
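A usage sketch for the generated client method above; the class name is inferred from the docstring, and because `async` is a reserved word on Python 3.7+, the flag has to be passed through a dict there:

item = ReturnLineItem.get_return_line_item_by_id('12345')               # blocking
thread = ReturnLineItem.get_return_line_item_by_id('12345', **{'async': True})
item = thread.get()                                                     # join the worker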
def tokenize_arabic_words(text):
"""
Tokenize text into words
@param text: the input text.
@type text: unicode.
@return: list of words.
@rtype: list.
"""
specific_tokens = []
if not text:
return specific_tokens
else:
specific_tokens = araby.tokenize(text)
return specific_tokens
|
[
"def",
"tokenize_arabic_words",
"(",
"text",
")",
":",
"specific_tokens",
"=",
"[",
"]",
"if",
"not",
"text",
":",
"return",
"specific_tokens",
"else",
":",
"specific_tokens",
"=",
"araby",
".",
"tokenize",
"(",
"text",
")",
"return",
"specific_tokens"
] | 23.266667 | 13.133333 |
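Minimal usage sketch, assuming pyarabic's `araby` module is importable as in the function body:

text = u'السلام عليكم'
print(tokenize_arabic_words(text))  # ['السلام', 'عليكم']
print(tokenize_arabic_words(''))    # []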
def transpose(self, *dims):
"""Return a new Dataset object with all array dimensions transposed.
Although the order of dimensions on each array will change, the dataset
dimensions themselves will remain in fixed (sorted) order.
Parameters
----------
*dims : str, optional
By default, reverse the dimensions on each array. Otherwise,
reorder the dimensions to this order.
Returns
-------
transposed : Dataset
            Each array in the dataset (including coordinates) will be
transposed to the given order.
Notes
-----
This operation returns a view of each array's data. It is
lazy for dask-backed DataArrays but not for numpy-backed DataArrays
-- the data will be fully loaded into memory.
See Also
--------
numpy.transpose
DataArray.transpose
"""
if dims:
if set(dims) ^ set(self.dims):
raise ValueError('arguments to transpose (%s) must be '
'permuted dataset dimensions (%s)'
% (dims, tuple(self.dims)))
ds = self.copy()
for name, var in self._variables.items():
var_dims = tuple(dim for dim in dims if dim in var.dims)
ds._variables[name] = var.transpose(*var_dims)
return ds
|
[
"def",
"transpose",
"(",
"self",
",",
"*",
"dims",
")",
":",
"if",
"dims",
":",
"if",
"set",
"(",
"dims",
")",
"^",
"set",
"(",
"self",
".",
"dims",
")",
":",
"raise",
"ValueError",
"(",
"'arguments to transpose (%s) must be '",
"'permuted dataset dimensions (%s)'",
"%",
"(",
"dims",
",",
"tuple",
"(",
"self",
".",
"dims",
")",
")",
")",
"ds",
"=",
"self",
".",
"copy",
"(",
")",
"for",
"name",
",",
"var",
"in",
"self",
".",
"_variables",
".",
"items",
"(",
")",
":",
"var_dims",
"=",
"tuple",
"(",
"dim",
"for",
"dim",
"in",
"dims",
"if",
"dim",
"in",
"var",
".",
"dims",
")",
"ds",
".",
"_variables",
"[",
"name",
"]",
"=",
"var",
".",
"transpose",
"(",
"*",
"var_dims",
")",
"return",
"ds"
] | 35.564103 | 22.307692 |
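A short example against the public xarray API this method implements:

import numpy as np
import xarray as xr
ds = xr.Dataset({'t': (('x', 'y'), np.zeros((2, 3)))})
print(ds.transpose('y', 'x')['t'].dims)  # ('y', 'x')
# ds.transpose('y', 'z') would raise ValueError: not a permutation of dims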
def calculateRange(self, values):
"""
Calculates the range of values for this axis based on the dataset
value amount.
:param values | [<variant>, ..]
"""
vals = filter(lambda x: x is not None, values)
try:
min_val = min(min(vals), 0)
except ValueError:
min_val = 0
try:
max_val = max(max(vals), 0)
except ValueError:
max_val = 10
ndigits = max(len(nativestring(abs(int(min_val)))), len(nativestring(abs(int(max_val)))))
rounding = 10 ** (ndigits - 1)
self.setRoundTo(rounding)
self.setMinimum(self.rounded(min_val, rounding))
self.setMaximum(self.rounded(max_val, rounding))
self.reset()
|
[
"def",
"calculateRange",
"(",
"self",
",",
"values",
")",
":",
"vals",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
"is",
"not",
"None",
",",
"values",
")",
"try",
":",
"min_val",
"=",
"min",
"(",
"min",
"(",
"vals",
")",
",",
"0",
")",
"except",
"ValueError",
":",
"min_val",
"=",
"0",
"try",
":",
"max_val",
"=",
"max",
"(",
"max",
"(",
"vals",
")",
",",
"0",
")",
"except",
"ValueError",
":",
"max_val",
"=",
"10",
"ndigits",
"=",
"max",
"(",
"len",
"(",
"nativestring",
"(",
"abs",
"(",
"int",
"(",
"min_val",
")",
")",
")",
")",
",",
"len",
"(",
"nativestring",
"(",
"abs",
"(",
"int",
"(",
"max_val",
")",
")",
")",
")",
")",
"rounding",
"=",
"10",
"**",
"(",
"ndigits",
"-",
"1",
")",
"self",
".",
"setRoundTo",
"(",
"rounding",
")",
"self",
".",
"setMinimum",
"(",
"self",
".",
"rounded",
"(",
"min_val",
",",
"rounding",
")",
")",
"self",
".",
"setMaximum",
"(",
"self",
".",
"rounded",
"(",
"max_val",
",",
"rounding",
")",
")",
"self",
".",
"reset",
"(",
")"
] | 31.307692 | 17.846154 |
def get_zonefile_instance(new_instance=False):
"""
This is a convenience function which provides a :class:`ZoneInfoFile`
instance using the data provided by the ``dateutil`` package. By default, it
caches a single instance of the ZoneInfoFile object and returns that.
:param new_instance:
If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and
used as the cached instance for the next call. Otherwise, new instances
are created only as necessary.
:return:
Returns a :class:`ZoneInfoFile` object.
.. versionadded:: 2.6
"""
if new_instance:
zif = None
else:
zif = getattr(get_zonefile_instance, '_cached_instance', None)
if zif is None:
zif = ZoneInfoFile(getzoneinfofile_stream())
get_zonefile_instance._cached_instance = zif
return zif
|
[
"def",
"get_zonefile_instance",
"(",
"new_instance",
"=",
"False",
")",
":",
"if",
"new_instance",
":",
"zif",
"=",
"None",
"else",
":",
"zif",
"=",
"getattr",
"(",
"get_zonefile_instance",
",",
"'_cached_instance'",
",",
"None",
")",
"if",
"zif",
"is",
"None",
":",
"zif",
"=",
"ZoneInfoFile",
"(",
"getzoneinfofile_stream",
"(",
")",
")",
"get_zonefile_instance",
".",
"_cached_instance",
"=",
"zif",
"return",
"zif"
] | 31.296296 | 24.925926 |
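A usage sketch for the cached accessor (dateutil >= 2.6); `ZoneInfoFile` exposes a `zones` mapping and a `get()` helper:

from dateutil.zoneinfo import get_zonefile_instance
zif = get_zonefile_instance()
tz = zif.get('Europe/Paris')   # tzfile instance, or None if unknown
print(sorted(zif.zones)[:3])   # a few bundled zone names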
def sync_label_shape(self, it, verbose=False):
"""Synchronize label shape with the input iterator. This is useful when
train/validation iterators have different label padding.
Parameters
----------
it : ImageDetIter
The other iterator to synchronize
verbose : bool
Print verbose log if true
Returns
-------
ImageDetIter
The synchronized other iterator, the internal label shape is updated as well.
Examples
--------
>>> train_iter = mx.image.ImageDetIter(32, (3, 300, 300), path_imgrec='train.rec')
>>> val_iter = mx.image.ImageDetIter(32, (3, 300, 300), path.imgrec='val.rec')
>>> train_iter.label_shape
(30, 6)
>>> val_iter.label_shape
(25, 6)
>>> val_iter = train_iter.sync_label_shape(val_iter, verbose=False)
>>> train_iter.label_shape
(30, 6)
>>> val_iter.label_shape
(30, 6)
"""
assert isinstance(it, ImageDetIter), 'Synchronize with invalid iterator.'
train_label_shape = self.label_shape
val_label_shape = it.label_shape
assert train_label_shape[1] == val_label_shape[1], "object width mismatch."
max_count = max(train_label_shape[0], val_label_shape[0])
if max_count > train_label_shape[0]:
self.reshape(None, (max_count, train_label_shape[1]))
if max_count > val_label_shape[0]:
it.reshape(None, (max_count, val_label_shape[1]))
if verbose and max_count > min(train_label_shape[0], val_label_shape[0]):
logging.info('Resized label_shape to (%d, %d).', max_count, train_label_shape[1])
return it
|
[
"def",
"sync_label_shape",
"(",
"self",
",",
"it",
",",
"verbose",
"=",
"False",
")",
":",
"assert",
"isinstance",
"(",
"it",
",",
"ImageDetIter",
")",
",",
"'Synchronize with invalid iterator.'",
"train_label_shape",
"=",
"self",
".",
"label_shape",
"val_label_shape",
"=",
"it",
".",
"label_shape",
"assert",
"train_label_shape",
"[",
"1",
"]",
"==",
"val_label_shape",
"[",
"1",
"]",
",",
"\"object width mismatch.\"",
"max_count",
"=",
"max",
"(",
"train_label_shape",
"[",
"0",
"]",
",",
"val_label_shape",
"[",
"0",
"]",
")",
"if",
"max_count",
">",
"train_label_shape",
"[",
"0",
"]",
":",
"self",
".",
"reshape",
"(",
"None",
",",
"(",
"max_count",
",",
"train_label_shape",
"[",
"1",
"]",
")",
")",
"if",
"max_count",
">",
"val_label_shape",
"[",
"0",
"]",
":",
"it",
".",
"reshape",
"(",
"None",
",",
"(",
"max_count",
",",
"val_label_shape",
"[",
"1",
"]",
")",
")",
"if",
"verbose",
"and",
"max_count",
">",
"min",
"(",
"train_label_shape",
"[",
"0",
"]",
",",
"val_label_shape",
"[",
"0",
"]",
")",
":",
"logging",
".",
"info",
"(",
"'Resized label_shape to (%d, %d).'",
",",
"max_count",
",",
"train_label_shape",
"[",
"1",
"]",
")",
"return",
"it"
] | 40.357143 | 22.452381 |
def simBirth(self,which_agents):
'''
Makes new consumers for the given indices. Initialized variables include aNrm and pLvl, as
well as time variables t_age and t_cycle. Normalized assets and permanent income levels
are drawn from lognormal distributions given by aNrmInitMean and aNrmInitStd (etc).
Parameters
----------
which_agents : np.array(Bool)
Boolean array of size self.AgentCount indicating which agents should be "born".
Returns
-------
None
'''
IndShockConsumerType.simBirth(self,which_agents)
if hasattr(self,'aLvlNow'):
self.aLvlNow[which_agents] = self.aNrmNow[which_agents]*self.pLvlNow[which_agents]
else:
self.aLvlNow = self.aNrmNow*self.pLvlNow
|
[
"def",
"simBirth",
"(",
"self",
",",
"which_agents",
")",
":",
"IndShockConsumerType",
".",
"simBirth",
"(",
"self",
",",
"which_agents",
")",
"if",
"hasattr",
"(",
"self",
",",
"'aLvlNow'",
")",
":",
"self",
".",
"aLvlNow",
"[",
"which_agents",
"]",
"=",
"self",
".",
"aNrmNow",
"[",
"which_agents",
"]",
"*",
"self",
".",
"pLvlNow",
"[",
"which_agents",
"]",
"else",
":",
"self",
".",
"aLvlNow",
"=",
"self",
".",
"aNrmNow",
"*",
"self",
".",
"pLvlNow"
] | 39.8 | 30.1 |
def git_tag_list(pattern=None):
"""
Return a list of all the tags in the git repo matching a regex passed in
`pattern`. If `pattern` is None, return all the tags.
"""
with chdir(get_root()):
result = run_command('git tag', capture=True).stdout
result = result.splitlines()
if not pattern:
return result
regex = re.compile(pattern)
return list(filter(regex.search, result))
|
[
"def",
"git_tag_list",
"(",
"pattern",
"=",
"None",
")",
":",
"with",
"chdir",
"(",
"get_root",
"(",
")",
")",
":",
"result",
"=",
"run_command",
"(",
"'git tag'",
",",
"capture",
"=",
"True",
")",
".",
"stdout",
"result",
"=",
"result",
".",
"splitlines",
"(",
")",
"if",
"not",
"pattern",
":",
"return",
"result",
"regex",
"=",
"re",
".",
"compile",
"(",
"pattern",
")",
"return",
"list",
"(",
"filter",
"(",
"regex",
".",
"search",
",",
"result",
")",
")"
] | 29.785714 | 16.642857 |
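Hedged usage; `chdir`, `get_root` and `run_command` come from the surrounding package, and the pattern below is only an illustrative regex:

all_tags = git_tag_list()
rc_tags = git_tag_list(r'-rc\d+$')  # tags ending in an -rcN suffix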
def get_gewest_by_id(self, id):
'''
Get a `gewest` by id.
:param integer id: The id of a `gewest`.
:rtype: A :class:`Gewest`.
'''
def creator():
nl = crab_gateway_request(
self.client, 'GetGewestByGewestIdAndTaalCode', id, 'nl'
)
fr = crab_gateway_request(
self.client, 'GetGewestByGewestIdAndTaalCode', id, 'fr'
)
de = crab_gateway_request(
self.client, 'GetGewestByGewestIdAndTaalCode', id, 'de'
)
if nl == None:
raise GatewayResourceNotFoundException()
return Gewest(
nl.GewestId,
{
'nl': nl.GewestNaam,
'fr': fr.GewestNaam,
'de': de.GewestNaam
},
(nl.CenterX, nl.CenterY),
(nl.MinimumX, nl.MinimumY, nl.MaximumX, nl.MaximumY),
)
if self.caches['permanent'].is_configured:
key = 'GetGewestByGewestId#%s' % id
gewest = self.caches['long'].get_or_create(key, creator)
else:
gewest = creator()
gewest.set_gateway(self)
return gewest
|
[
"def",
"get_gewest_by_id",
"(",
"self",
",",
"id",
")",
":",
"def",
"creator",
"(",
")",
":",
"nl",
"=",
"crab_gateway_request",
"(",
"self",
".",
"client",
",",
"'GetGewestByGewestIdAndTaalCode'",
",",
"id",
",",
"'nl'",
")",
"fr",
"=",
"crab_gateway_request",
"(",
"self",
".",
"client",
",",
"'GetGewestByGewestIdAndTaalCode'",
",",
"id",
",",
"'fr'",
")",
"de",
"=",
"crab_gateway_request",
"(",
"self",
".",
"client",
",",
"'GetGewestByGewestIdAndTaalCode'",
",",
"id",
",",
"'de'",
")",
"if",
"nl",
"==",
"None",
":",
"raise",
"GatewayResourceNotFoundException",
"(",
")",
"return",
"Gewest",
"(",
"nl",
".",
"GewestId",
",",
"{",
"'nl'",
":",
"nl",
".",
"GewestNaam",
",",
"'fr'",
":",
"fr",
".",
"GewestNaam",
",",
"'de'",
":",
"de",
".",
"GewestNaam",
"}",
",",
"(",
"nl",
".",
"CenterX",
",",
"nl",
".",
"CenterY",
")",
",",
"(",
"nl",
".",
"MinimumX",
",",
"nl",
".",
"MinimumY",
",",
"nl",
".",
"MaximumX",
",",
"nl",
".",
"MaximumY",
")",
",",
")",
"if",
"self",
".",
"caches",
"[",
"'permanent'",
"]",
".",
"is_configured",
":",
"key",
"=",
"'GetGewestByGewestId#%s'",
"%",
"id",
"gewest",
"=",
"self",
".",
"caches",
"[",
"'long'",
"]",
".",
"get_or_create",
"(",
"key",
",",
"creator",
")",
"else",
":",
"gewest",
"=",
"creator",
"(",
")",
"gewest",
".",
"set_gateway",
"(",
"self",
")",
"return",
"gewest"
] | 34.055556 | 16.611111 |
def coarseMaximum(arr, shape):
'''
return an array of [shape]
where every cell equals the localised maximum of the given array [arr]
    at the same (scaled) position
'''
ss0, ss1 = shape
s0, s1 = arr.shape
pos0 = linspace2(0, s0, ss0, dtype=int)
pos1 = linspace2(0, s1, ss1, dtype=int)
k0 = pos0[0]
k1 = pos1[0]
out = np.empty(shape, dtype=arr.dtype)
_calc(arr, out, pos0, pos1, k0, k1, ss0, ss1)
return out
|
[
"def",
"coarseMaximum",
"(",
"arr",
",",
"shape",
")",
":",
"ss0",
",",
"ss1",
"=",
"shape",
"s0",
",",
"s1",
"=",
"arr",
".",
"shape",
"pos0",
"=",
"linspace2",
"(",
"0",
",",
"s0",
",",
"ss0",
",",
"dtype",
"=",
"int",
")",
"pos1",
"=",
"linspace2",
"(",
"0",
",",
"s1",
",",
"ss1",
",",
"dtype",
"=",
"int",
")",
"k0",
"=",
"pos0",
"[",
"0",
"]",
"k1",
"=",
"pos1",
"[",
"0",
"]",
"out",
"=",
"np",
".",
"empty",
"(",
"shape",
",",
"dtype",
"=",
"arr",
".",
"dtype",
")",
"_calc",
"(",
"arr",
",",
"out",
",",
"pos0",
",",
"pos1",
",",
"k0",
",",
"k1",
",",
"ss0",
",",
"ss1",
")",
"return",
"out"
] | 25.777778 | 20.444444 |
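Because `_calc` and `linspace2` live elsewhere in the module, here is a pure-NumPy reference of the same block-maximum idea, under the simplifying assumption that the input shape divides evenly into the output grid (the original handles ragged edges itself):

import numpy as np

def coarse_maximum_ref(arr, shape):
    ss0, ss1 = shape
    s0, s1 = arr.shape
    # Trim to a multiple of the block grid, then take block-wise maxima.
    trimmed = arr[:s0 - s0 % ss0, :s1 - s1 % ss1]
    return trimmed.reshape(ss0, -1, ss1, trimmed.shape[1] // ss1).max(axis=(1, 3))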
def generate(env):
"""Add Builders and construction variables for gfortran to an
Environment."""
fortran.generate(env)
for dialect in ['F77', 'F90', 'FORTRAN', 'F95', 'F03', 'F08']:
env['%s' % dialect] = 'gfortran'
env['SH%s' % dialect] = '$%s' % dialect
if env['PLATFORM'] in ['cygwin', 'win32']:
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
else:
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
env['INC%sPREFIX' % dialect] = "-I"
env['INC%sSUFFIX' % dialect] = ""
|
[
"def",
"generate",
"(",
"env",
")",
":",
"fortran",
".",
"generate",
"(",
"env",
")",
"for",
"dialect",
"in",
"[",
"'F77'",
",",
"'F90'",
",",
"'FORTRAN'",
",",
"'F95'",
",",
"'F03'",
",",
"'F08'",
"]",
":",
"env",
"[",
"'%s'",
"%",
"dialect",
"]",
"=",
"'gfortran'",
"env",
"[",
"'SH%s'",
"%",
"dialect",
"]",
"=",
"'$%s'",
"%",
"dialect",
"if",
"env",
"[",
"'PLATFORM'",
"]",
"in",
"[",
"'cygwin'",
",",
"'win32'",
"]",
":",
"env",
"[",
"'SH%sFLAGS'",
"%",
"dialect",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$%sFLAGS'",
"%",
"dialect",
")",
"else",
":",
"env",
"[",
"'SH%sFLAGS'",
"%",
"dialect",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$%sFLAGS -fPIC'",
"%",
"dialect",
")",
"env",
"[",
"'INC%sPREFIX'",
"%",
"dialect",
"]",
"=",
"\"-I\"",
"env",
"[",
"'INC%sSUFFIX'",
"%",
"dialect",
"]",
"=",
"\"\""
] | 39.4 | 18.333333 |
def _add_parameterized_validator_internal(param_validator, base_tag):
"with builtin tag prefixing"
add_parameterized_validator(param_validator, base_tag, tag_prefix=u'!~~%s(' % param_validator.__name__)
|
[
"def",
"_add_parameterized_validator_internal",
"(",
"param_validator",
",",
"base_tag",
")",
":",
"add_parameterized_validator",
"(",
"param_validator",
",",
"base_tag",
",",
"tag_prefix",
"=",
"u'!~~%s('",
"%",
"param_validator",
".",
"__name__",
")"
] | 69.333333 | 34.666667 |
def _check_public_functions(self, primary_header, all_headers):
"""Verify all the public functions are also declared in a header
file."""
public_symbols = {}
declared_only_symbols = {}
if primary_header:
for name, symbol in primary_header.public_symbols.items():
if isinstance(symbol, ast.Function):
public_symbols[name] = symbol
declared_only_symbols = dict.fromkeys(public_symbols, True)
for node in self.ast_list:
# Make sure we have a function that should be exported.
if not isinstance(node, ast.Function):
continue
if isinstance(node, ast.Method):
# Ensure that for Foo::Bar, Foo is *not* a namespace.
# If Foo is a namespace, we have a function and not a method.
names = [n.name for n in node.in_class]
if names != self.symbol_table.get_namespace(names):
continue
if not (node.is_definition() and node.is_exportable()):
continue
# This function should be declared in a header file.
name = node.name
if name in public_symbols:
declared_only_symbols[name] = False
else:
self._find_public_function_warnings(node,
name,
primary_header,
all_headers)
for name, declared_only in declared_only_symbols.items():
if declared_only:
node = public_symbols[name]
if node.templated_types is None:
msg = "'{}' declared but not defined".format(name)
self._add_warning(msg, node, primary_header.filename)
|
[
"def",
"_check_public_functions",
"(",
"self",
",",
"primary_header",
",",
"all_headers",
")",
":",
"public_symbols",
"=",
"{",
"}",
"declared_only_symbols",
"=",
"{",
"}",
"if",
"primary_header",
":",
"for",
"name",
",",
"symbol",
"in",
"primary_header",
".",
"public_symbols",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"symbol",
",",
"ast",
".",
"Function",
")",
":",
"public_symbols",
"[",
"name",
"]",
"=",
"symbol",
"declared_only_symbols",
"=",
"dict",
".",
"fromkeys",
"(",
"public_symbols",
",",
"True",
")",
"for",
"node",
"in",
"self",
".",
"ast_list",
":",
"# Make sure we have a function that should be exported.",
"if",
"not",
"isinstance",
"(",
"node",
",",
"ast",
".",
"Function",
")",
":",
"continue",
"if",
"isinstance",
"(",
"node",
",",
"ast",
".",
"Method",
")",
":",
"# Ensure that for Foo::Bar, Foo is *not* a namespace.",
"# If Foo is a namespace, we have a function and not a method.",
"names",
"=",
"[",
"n",
".",
"name",
"for",
"n",
"in",
"node",
".",
"in_class",
"]",
"if",
"names",
"!=",
"self",
".",
"symbol_table",
".",
"get_namespace",
"(",
"names",
")",
":",
"continue",
"if",
"not",
"(",
"node",
".",
"is_definition",
"(",
")",
"and",
"node",
".",
"is_exportable",
"(",
")",
")",
":",
"continue",
"# This function should be declared in a header file.",
"name",
"=",
"node",
".",
"name",
"if",
"name",
"in",
"public_symbols",
":",
"declared_only_symbols",
"[",
"name",
"]",
"=",
"False",
"else",
":",
"self",
".",
"_find_public_function_warnings",
"(",
"node",
",",
"name",
",",
"primary_header",
",",
"all_headers",
")",
"for",
"name",
",",
"declared_only",
"in",
"declared_only_symbols",
".",
"items",
"(",
")",
":",
"if",
"declared_only",
":",
"node",
"=",
"public_symbols",
"[",
"name",
"]",
"if",
"node",
".",
"templated_types",
"is",
"None",
":",
"msg",
"=",
"\"'{}' declared but not defined\"",
".",
"format",
"(",
"name",
")",
"self",
".",
"_add_warning",
"(",
"msg",
",",
"node",
",",
"primary_header",
".",
"filename",
")"
] | 46.425 | 18.775 |
def prox_max_entropy(X, step, gamma=1):
"""Proximal operator for maximum entropy regularization.
g(x) = gamma \sum_i x_i ln(x_i)
has the analytical solution of gamma W(1/gamma exp((X-gamma)/gamma)), where
W is the Lambert W function.
"""
from scipy.special import lambertw
gamma_ = _step_gamma(step, gamma)
# minimize entropy: return gamma_ * np.real(lambertw(np.exp((X - gamma_) / gamma_) / gamma_))
above = X > 0
X[above] = gamma_ * np.real(lambertw(np.exp(X[above]/gamma_ - 1) / gamma_))
return X
|
[
"def",
"prox_max_entropy",
"(",
"X",
",",
"step",
",",
"gamma",
"=",
"1",
")",
":",
"from",
"scipy",
".",
"special",
"import",
"lambertw",
"gamma_",
"=",
"_step_gamma",
"(",
"step",
",",
"gamma",
")",
"# minimize entropy: return gamma_ * np.real(lambertw(np.exp((X - gamma_) / gamma_) / gamma_))",
"above",
"=",
"X",
">",
"0",
"X",
"[",
"above",
"]",
"=",
"gamma_",
"*",
"np",
".",
"real",
"(",
"lambertw",
"(",
"np",
".",
"exp",
"(",
"X",
"[",
"above",
"]",
"/",
"gamma_",
"-",
"1",
")",
"/",
"gamma_",
")",
")",
"return",
"X"
] | 38 | 20.357143 |
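A numerical sanity check of the closed form: the proximal point of g(x) = gamma * x * ln(x) at v minimizes 0.5*(x - v)**2 + step*gamma*x*ln(x). This sketch assumes `_step_gamma(step, gamma)` simply returns `step * gamma`:

import numpy as np
from scipy.optimize import minimize_scalar

v, step, gamma = 2.0, 0.5, 1.0
obj = lambda x: 0.5 * (x - v) ** 2 + step * gamma * x * np.log(x)
brute = minimize_scalar(obj, bounds=(1e-9, 10.0), method='bounded').x
closed = prox_max_entropy(np.array([v]), step, gamma=gamma)[0]
# brute and closed should agree to several decimal places.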
def from_representation(self, data):
"""Convert representation value to ``bool`` if it has expected form."""
if data in self._TRUE_VALUES:
return True
elif data in self._FALSE_VALUES:
return False
else:
raise ValueError(
"{type} type value must be one of {values}".format(
type=self.type,
values=self._TRUE_VALUES.union(self._FALSE_VALUES)
)
)
|
[
"def",
"from_representation",
"(",
"self",
",",
"data",
")",
":",
"if",
"data",
"in",
"self",
".",
"_TRUE_VALUES",
":",
"return",
"True",
"elif",
"data",
"in",
"self",
".",
"_FALSE_VALUES",
":",
"return",
"False",
"else",
":",
"raise",
"ValueError",
"(",
"\"{type} type value must be one of {values}\"",
".",
"format",
"(",
"type",
"=",
"self",
".",
"type",
",",
"values",
"=",
"self",
".",
"_TRUE_VALUES",
".",
"union",
"(",
"self",
".",
"_FALSE_VALUES",
")",
")",
")"
] | 37.153846 | 14.615385 |
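A hedged usage sketch; the concrete `BoolField` class is hypothetical and stands in for any subclass that defines the `_TRUE_VALUES` and `_FALSE_VALUES` sets the method reads:

field = BoolField()  # hypothetical subclass with _TRUE_VALUES/_FALSE_VALUES
assert field.from_representation('true') is True
assert field.from_representation('0') is False
# field.from_representation('maybe') would raise ValueError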
def use_in(ContentHandler):
"""
Modify ContentHandler, a sub-class of
pycbc_glue.ligolw.LIGOLWContentHandler, to cause it to use the Array and
ArrayStream classes defined in this module when parsing XML
documents.
Example:
>>> from pycbc_glue.ligolw import ligolw
>>> class MyContentHandler(ligolw.LIGOLWContentHandler):
... pass
...
>>> use_in(MyContentHandler)
<class 'pycbc_glue.ligolw.array.MyContentHandler'>
"""
def startStream(self, parent, attrs, __orig_startStream = ContentHandler.startStream):
if parent.tagName == ligolw.Array.tagName:
return ArrayStream(attrs).config(parent)
return __orig_startStream(self, parent, attrs)
def startArray(self, parent, attrs):
return Array(attrs)
ContentHandler.startStream = startStream
ContentHandler.startArray = startArray
return ContentHandler
|
[
"def",
"use_in",
"(",
"ContentHandler",
")",
":",
"def",
"startStream",
"(",
"self",
",",
"parent",
",",
"attrs",
",",
"__orig_startStream",
"=",
"ContentHandler",
".",
"startStream",
")",
":",
"if",
"parent",
".",
"tagName",
"==",
"ligolw",
".",
"Array",
".",
"tagName",
":",
"return",
"ArrayStream",
"(",
"attrs",
")",
".",
"config",
"(",
"parent",
")",
"return",
"__orig_startStream",
"(",
"self",
",",
"parent",
",",
"attrs",
")",
"def",
"startArray",
"(",
"self",
",",
"parent",
",",
"attrs",
")",
":",
"return",
"Array",
"(",
"attrs",
")",
"ContentHandler",
".",
"startStream",
"=",
"startStream",
"ContentHandler",
".",
"startArray",
"=",
"startArray",
"return",
"ContentHandler"
] | 28.535714 | 19.25 |
def _pull_out_unaffected_blocks_rhs(rest, rhs, out_port, in_port):
"""Similar to :func:`_pull_out_unaffected_blocks_lhs` but on the RHS of a
series product self-feedback.
"""
_, block_index = rhs.index_in_block(in_port)
rest = tuple(rest)
bs = rhs.block_structure
(nbefore, nblock, nafter) = (sum(bs[:block_index]),
bs[block_index],
sum(bs[block_index + 1:]))
before, block, after = rhs.get_blocks((nbefore, nblock, nafter))
if before != cid(nbefore) or after != cid(nafter):
outer_rhs = before + cid(nblock - 1) + after
inner_rhs = cid(nbefore) + block + cid(nafter)
return Feedback.create(SeriesProduct.create(*(rest + (inner_rhs,))),
out_port=out_port, in_port=in_port) << outer_rhs
elif block == cid(nblock):
outer_rhs = before + cid(nblock - 1) + after
return Feedback.create(SeriesProduct.create(*rest),
out_port=out_port, in_port=in_port) << outer_rhs
raise CannotSimplify()
|
[
"def",
"_pull_out_unaffected_blocks_rhs",
"(",
"rest",
",",
"rhs",
",",
"out_port",
",",
"in_port",
")",
":",
"_",
",",
"block_index",
"=",
"rhs",
".",
"index_in_block",
"(",
"in_port",
")",
"rest",
"=",
"tuple",
"(",
"rest",
")",
"bs",
"=",
"rhs",
".",
"block_structure",
"(",
"nbefore",
",",
"nblock",
",",
"nafter",
")",
"=",
"(",
"sum",
"(",
"bs",
"[",
":",
"block_index",
"]",
")",
",",
"bs",
"[",
"block_index",
"]",
",",
"sum",
"(",
"bs",
"[",
"block_index",
"+",
"1",
":",
"]",
")",
")",
"before",
",",
"block",
",",
"after",
"=",
"rhs",
".",
"get_blocks",
"(",
"(",
"nbefore",
",",
"nblock",
",",
"nafter",
")",
")",
"if",
"before",
"!=",
"cid",
"(",
"nbefore",
")",
"or",
"after",
"!=",
"cid",
"(",
"nafter",
")",
":",
"outer_rhs",
"=",
"before",
"+",
"cid",
"(",
"nblock",
"-",
"1",
")",
"+",
"after",
"inner_rhs",
"=",
"cid",
"(",
"nbefore",
")",
"+",
"block",
"+",
"cid",
"(",
"nafter",
")",
"return",
"Feedback",
".",
"create",
"(",
"SeriesProduct",
".",
"create",
"(",
"*",
"(",
"rest",
"+",
"(",
"inner_rhs",
",",
")",
")",
")",
",",
"out_port",
"=",
"out_port",
",",
"in_port",
"=",
"in_port",
")",
"<<",
"outer_rhs",
"elif",
"block",
"==",
"cid",
"(",
"nblock",
")",
":",
"outer_rhs",
"=",
"before",
"+",
"cid",
"(",
"nblock",
"-",
"1",
")",
"+",
"after",
"return",
"Feedback",
".",
"create",
"(",
"SeriesProduct",
".",
"create",
"(",
"*",
"rest",
")",
",",
"out_port",
"=",
"out_port",
",",
"in_port",
"=",
"in_port",
")",
"<<",
"outer_rhs",
"raise",
"CannotSimplify",
"(",
")"
] | 51.095238 | 16.714286 |
def _TableFactory(tag, stream, offset, length):
"""
Return an instance of |Table| appropriate to *tag*, loaded from
    *stream* with content of *length* starting at *offset*.
"""
TableClass = {
'head': _HeadTable,
'name': _NameTable,
}.get(tag, _BaseTable)
return TableClass(tag, stream, offset, length)
|
[
"def",
"_TableFactory",
"(",
"tag",
",",
"stream",
",",
"offset",
",",
"length",
")",
":",
"TableClass",
"=",
"{",
"'head'",
":",
"_HeadTable",
",",
"'name'",
":",
"_NameTable",
",",
"}",
".",
"get",
"(",
"tag",
",",
"_BaseTable",
")",
"return",
"TableClass",
"(",
"tag",
",",
"stream",
",",
"offset",
",",
"length",
")"
] | 33.8 | 12.8 |
def Plot_Impact_PolProjPoly(lS, Leg="", ax=None, Ang='theta', AngUnit='rad',
Sketch=True, dP=None,
dLeg=_def.TorLegd, draw=True, fs=None,
wintit=None, tit=None, Test=True):
""" Plotting the toroidal projection of a Ves instance
D. VEZINET, Aug. 2014
Inputs :
        lS A Struct subclass instance, or a list of such
        Leg A str (the legend label to be used if lS is not a Ves instance)
ax A plt.Axes instance (if given) on which to plot the projection space, otherwise ('None') a new figure/axes is created
Dict A dictionnary specifying the style of the boundary polygon plot
dLeg A dictionnary specifying the style of the legend box
Outputs :
ax The plt.Axes instance on which the poloidal plot was performed
"""
if Test:
Lax, C0, C1, C2 = _check_Lax(ax,n=1)
assert C0 or C1, 'Arg ax should a plt.Axes instance !'
assert dP is None or type(dP) is dict, "Arg dP must be a dictionary !"
assert dLeg is None or type(dLeg) is dict, "Arg dLeg must be a dictionary !"
assert Ang in ['theta','xi'], "Arg Ang must be in ['theta','xi'] !"
assert AngUnit in ['rad','deg'], "Arg AngUnit must be in ['rad','deg'] !"
C0 = issubclass(lS.__class__, utils.ToFuObject)
C1 = (isinstance(lS,list)
and all([issubclass(ss.__class__, utils.ToFuObject) for ss in lS]))
msg = "Arg lves must be a Struct subclass or a list of such !"
assert C0 or C1, msg
if C0:
lS = [lS]
nS = len(lS)
# Get Sketch
if ax is None:
if wintit is None:
wintit = _wintit
ax, axsketch = _def.Plot_Impact_DefAxes('Cross', fs=fs, wintit=wintit,
Ang=Ang, AngUnit=AngUnit,
Sketch=Sketch)
if dP is not None:
dp = dP
# Get up/down limits
pPmax, pPmin = 0, 0
for ss in lS:
pmax = np.max(ss.dsino['EnvMinMax'])
if pmax>pPmax:
pPmax = pmax
pmin = np.min(ss.dsino['EnvMinMax'])
if pmin<pPmin:
pPmin = pmin
if nS>0:
DoUp = (pPmin,pPmax)
nP = pmax.size
handles, labels = ax.get_legend_handles_labels()
for ii in range(0,nS):
Theta, pP = lS[ii].dsino['EnvTheta'], lS[ii].dsino['EnvMinMax'][0,:]
pN = lS[ii].dsino['EnvMinMax'][1,:]
if Ang=='xi':
Theta, pP, pN = _GG.ConvertImpact_Theta2Xi(Theta, pP, pN)
Theta = Theta.ravel()
if dP is None:
dp = {'facecolor':lS[ii].get_color(), 'edgecolor':'k',
'linewidth':1., 'linestyle':'-'}
if lS[ii]._InOut=='in':
ax.fill_between(Theta, pP, DoUp[1]*np.ones((nP,)),**dp)
ax.fill_between(Theta, DoUp[0]*np.ones((nP,)), pN,**dp)
elif lS[ii]._InOut=='out':
ax.fill_between(Theta, pP, pN, **dp)
else:
msg = "self._InOut not defined for {0}".format(lS[ii].Id.Cls)
raise Exception(msg)
proxy = plt.Rectangle((0,0),1,1, fc=dp['facecolor'])
handles.append(proxy)
labels.append(lS[ii].Id.Cls+' '+lS[ii].Id.Name)
if nS>0:
ax.set_ylim(DoUp)
if not dLeg is None:
ax.legend(handles,labels,**dLeg)
if draw:
ax.figure.canvas.draw()
return ax
|
[
"def",
"Plot_Impact_PolProjPoly",
"(",
"lS",
",",
"Leg",
"=",
"\"\"",
",",
"ax",
"=",
"None",
",",
"Ang",
"=",
"'theta'",
",",
"AngUnit",
"=",
"'rad'",
",",
"Sketch",
"=",
"True",
",",
"dP",
"=",
"None",
",",
"dLeg",
"=",
"_def",
".",
"TorLegd",
",",
"draw",
"=",
"True",
",",
"fs",
"=",
"None",
",",
"wintit",
"=",
"None",
",",
"tit",
"=",
"None",
",",
"Test",
"=",
"True",
")",
":",
"if",
"Test",
":",
"Lax",
",",
"C0",
",",
"C1",
",",
"C2",
"=",
"_check_Lax",
"(",
"ax",
",",
"n",
"=",
"1",
")",
"assert",
"C0",
"or",
"C1",
",",
"'Arg ax should a plt.Axes instance !'",
"assert",
"dP",
"is",
"None",
"or",
"type",
"(",
"dP",
")",
"is",
"dict",
",",
"\"Arg dP must be a dictionary !\"",
"assert",
"dLeg",
"is",
"None",
"or",
"type",
"(",
"dLeg",
")",
"is",
"dict",
",",
"\"Arg dLeg must be a dictionary !\"",
"assert",
"Ang",
"in",
"[",
"'theta'",
",",
"'xi'",
"]",
",",
"\"Arg Ang must be in ['theta','xi'] !\"",
"assert",
"AngUnit",
"in",
"[",
"'rad'",
",",
"'deg'",
"]",
",",
"\"Arg AngUnit must be in ['rad','deg'] !\"",
"C0",
"=",
"issubclass",
"(",
"lS",
".",
"__class__",
",",
"utils",
".",
"ToFuObject",
")",
"C1",
"=",
"(",
"isinstance",
"(",
"lS",
",",
"list",
")",
"and",
"all",
"(",
"[",
"issubclass",
"(",
"ss",
".",
"__class__",
",",
"utils",
".",
"ToFuObject",
")",
"for",
"ss",
"in",
"lS",
"]",
")",
")",
"msg",
"=",
"\"Arg lves must be a Struct subclass or a list of such !\"",
"assert",
"C0",
"or",
"C1",
",",
"msg",
"if",
"C0",
":",
"lS",
"=",
"[",
"lS",
"]",
"nS",
"=",
"len",
"(",
"lS",
")",
"# Get Sketch",
"if",
"ax",
"is",
"None",
":",
"if",
"wintit",
"is",
"None",
":",
"wintit",
"=",
"_wintit",
"ax",
",",
"axsketch",
"=",
"_def",
".",
"Plot_Impact_DefAxes",
"(",
"'Cross'",
",",
"fs",
"=",
"fs",
",",
"wintit",
"=",
"wintit",
",",
"Ang",
"=",
"Ang",
",",
"AngUnit",
"=",
"AngUnit",
",",
"Sketch",
"=",
"Sketch",
")",
"if",
"dP",
"is",
"not",
"None",
":",
"dp",
"=",
"dP",
"# Get up/down limits",
"pPmax",
",",
"pPmin",
"=",
"0",
",",
"0",
"for",
"ss",
"in",
"lS",
":",
"pmax",
"=",
"np",
".",
"max",
"(",
"ss",
".",
"dsino",
"[",
"'EnvMinMax'",
"]",
")",
"if",
"pmax",
">",
"pPmax",
":",
"pPmax",
"=",
"pmax",
"pmin",
"=",
"np",
".",
"min",
"(",
"ss",
".",
"dsino",
"[",
"'EnvMinMax'",
"]",
")",
"if",
"pmin",
"<",
"pPmin",
":",
"pPmin",
"=",
"pmin",
"if",
"nS",
">",
"0",
":",
"DoUp",
"=",
"(",
"pPmin",
",",
"pPmax",
")",
"nP",
"=",
"pmax",
".",
"size",
"handles",
",",
"labels",
"=",
"ax",
".",
"get_legend_handles_labels",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"nS",
")",
":",
"Theta",
",",
"pP",
"=",
"lS",
"[",
"ii",
"]",
".",
"dsino",
"[",
"'EnvTheta'",
"]",
",",
"lS",
"[",
"ii",
"]",
".",
"dsino",
"[",
"'EnvMinMax'",
"]",
"[",
"0",
",",
":",
"]",
"pN",
"=",
"lS",
"[",
"ii",
"]",
".",
"dsino",
"[",
"'EnvMinMax'",
"]",
"[",
"1",
",",
":",
"]",
"if",
"Ang",
"==",
"'xi'",
":",
"Theta",
",",
"pP",
",",
"pN",
"=",
"_GG",
".",
"ConvertImpact_Theta2Xi",
"(",
"Theta",
",",
"pP",
",",
"pN",
")",
"Theta",
"=",
"Theta",
".",
"ravel",
"(",
")",
"if",
"dP",
"is",
"None",
":",
"dp",
"=",
"{",
"'facecolor'",
":",
"lS",
"[",
"ii",
"]",
".",
"get_color",
"(",
")",
",",
"'edgecolor'",
":",
"'k'",
",",
"'linewidth'",
":",
"1.",
",",
"'linestyle'",
":",
"'-'",
"}",
"if",
"lS",
"[",
"ii",
"]",
".",
"_InOut",
"==",
"'in'",
":",
"ax",
".",
"fill_between",
"(",
"Theta",
",",
"pP",
",",
"DoUp",
"[",
"1",
"]",
"*",
"np",
".",
"ones",
"(",
"(",
"nP",
",",
")",
")",
",",
"*",
"*",
"dp",
")",
"ax",
".",
"fill_between",
"(",
"Theta",
",",
"DoUp",
"[",
"0",
"]",
"*",
"np",
".",
"ones",
"(",
"(",
"nP",
",",
")",
")",
",",
"pN",
",",
"*",
"*",
"dp",
")",
"elif",
"lS",
"[",
"ii",
"]",
".",
"_InOut",
"==",
"'out'",
":",
"ax",
".",
"fill_between",
"(",
"Theta",
",",
"pP",
",",
"pN",
",",
"*",
"*",
"dp",
")",
"else",
":",
"msg",
"=",
"\"self._InOut not defined for {0}\"",
".",
"format",
"(",
"lS",
"[",
"ii",
"]",
".",
"Id",
".",
"Cls",
")",
"raise",
"Exception",
"(",
"msg",
")",
"proxy",
"=",
"plt",
".",
"Rectangle",
"(",
"(",
"0",
",",
"0",
")",
",",
"1",
",",
"1",
",",
"fc",
"=",
"dp",
"[",
"'facecolor'",
"]",
")",
"handles",
".",
"append",
"(",
"proxy",
")",
"labels",
".",
"append",
"(",
"lS",
"[",
"ii",
"]",
".",
"Id",
".",
"Cls",
"+",
"' '",
"+",
"lS",
"[",
"ii",
"]",
".",
"Id",
".",
"Name",
")",
"if",
"nS",
">",
"0",
":",
"ax",
".",
"set_ylim",
"(",
"DoUp",
")",
"if",
"not",
"dLeg",
"is",
"None",
":",
"ax",
".",
"legend",
"(",
"handles",
",",
"labels",
",",
"*",
"*",
"dLeg",
")",
"if",
"draw",
":",
"ax",
".",
"figure",
".",
"canvas",
".",
"draw",
"(",
")",
"return",
"ax"
] | 38.045455 | 23.465909 |
def _parse_gcs_uri(self, raw_uri):
"""Return a valid docker_path for a GCS bucket."""
# Assume URI is a directory path.
raw_uri = directory_fmt(raw_uri)
_, docker_path = _gcs_uri_rewriter(raw_uri)
docker_uri = os.path.join(self._relative_path, docker_path)
return docker_uri
|
[
"def",
"_parse_gcs_uri",
"(",
"self",
",",
"raw_uri",
")",
":",
"# Assume URI is a directory path.",
"raw_uri",
"=",
"directory_fmt",
"(",
"raw_uri",
")",
"_",
",",
"docker_path",
"=",
"_gcs_uri_rewriter",
"(",
"raw_uri",
")",
"docker_uri",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_relative_path",
",",
"docker_path",
")",
"return",
"docker_uri"
] | 41.714286 | 8.857143 |
def get_netconf_client_capabilities_input_session_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_netconf_client_capabilities = ET.Element("get_netconf_client_capabilities")
config = get_netconf_client_capabilities
input = ET.SubElement(get_netconf_client_capabilities, "input")
session_id = ET.SubElement(input, "session-id")
session_id.text = kwargs.pop('session_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
[
"def",
"get_netconf_client_capabilities_input_session_id",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"get_netconf_client_capabilities",
"=",
"ET",
".",
"Element",
"(",
"\"get_netconf_client_capabilities\"",
")",
"config",
"=",
"get_netconf_client_capabilities",
"input",
"=",
"ET",
".",
"SubElement",
"(",
"get_netconf_client_capabilities",
",",
"\"input\"",
")",
"session_id",
"=",
"ET",
".",
"SubElement",
"(",
"input",
",",
"\"session-id\"",
")",
"session_id",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'session_id'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 45.5 | 17.416667 |
def read_gsc_sfr(self):
"""Read the gain-scaling-coefficient and sample flow rate.
:returns: dictionary containing GSC and SFR
"""
config = []
data = {}
# Send the command byte and sleep for 10 ms
self.cnxn.xfer([0x33])
sleep(10e-3)
        # Read the config variables by sending 8 empty bytes
for i in range(8):
resp = self.cnxn.xfer([0x00])[0]
config.append(resp)
data["GSC"] = self._calculate_float(config[0:4])
data["SFR"] = self._calculate_float(config[4:])
return data
|
[
    "def",
    "read_gsc_sfr",
    "(",
    "self",
    ")",
    ":",
    "config",
    "=",
    "[",
    "]",
    "data",
    "=",
    "{",
    "}",
    "# Send the command byte and sleep for 10 ms",
    "self",
    ".",
    "cnxn",
    ".",
    "xfer",
    "(",
    "[",
    "0x33",
    "]",
    ")",
    "sleep",
    "(",
    "10e-3",
    ")",
    "# Read the config variables by sending 8 empty bytes",
"for",
"i",
"in",
"range",
"(",
"8",
")",
":",
"resp",
"=",
"self",
".",
"cnxn",
".",
"xfer",
"(",
"[",
"0x00",
"]",
")",
"[",
"0",
"]",
"config",
".",
"append",
"(",
"resp",
")",
"data",
"[",
"\"GSC\"",
"]",
"=",
"self",
".",
"_calculate_float",
"(",
"config",
"[",
"0",
":",
"4",
"]",
")",
"data",
"[",
"\"SFR\"",
"]",
"=",
"self",
".",
"_calculate_float",
"(",
"config",
"[",
"4",
":",
"]",
")",
"return",
"data"
] | 27.857143 | 19.52381 |
def _getattrs(self, obj, *attrs):
""" Return dictionary of given attrs on given object, if they exist,
processing through _filter_value().
"""
filtered_attrs = {}
for attr in attrs:
if hasattr(obj, attr):
filtered_attrs[attr] = obj_to_string(
self._filter_value(getattr(obj, attr))
)
return filtered_attrs
|
[
"def",
"_getattrs",
"(",
"self",
",",
"obj",
",",
"*",
"attrs",
")",
":",
"filtered_attrs",
"=",
"{",
"}",
"for",
"attr",
"in",
"attrs",
":",
"if",
"hasattr",
"(",
"obj",
",",
"attr",
")",
":",
"filtered_attrs",
"[",
"attr",
"]",
"=",
"obj_to_string",
"(",
"self",
".",
"_filter_value",
"(",
"getattr",
"(",
"obj",
",",
"attr",
")",
")",
")",
"return",
"filtered_attrs"
] | 37.909091 | 9.636364 |
def _special_value_rows(em):
'''
_special_value_rows - Handle "rows" special attribute, which differs if tagName is a textarea or frameset
'''
if em.tagName == 'textarea':
return convertToIntRange(em.getAttribute('rows', 2), minValue=1, maxValue=None, invalidDefault=2)
else:
# frameset
return em.getAttribute('rows', '')
|
[
"def",
"_special_value_rows",
"(",
"em",
")",
":",
"if",
"em",
".",
"tagName",
"==",
"'textarea'",
":",
"return",
"convertToIntRange",
"(",
"em",
".",
"getAttribute",
"(",
"'rows'",
",",
"2",
")",
",",
"minValue",
"=",
"1",
",",
"maxValue",
"=",
"None",
",",
"invalidDefault",
"=",
"2",
")",
"else",
":",
"# frameset",
"return",
"em",
".",
"getAttribute",
"(",
"'rows'",
",",
"''",
")"
] | 40.111111 | 31 |
def file_reader(self):
"""Generator process to read file"""
self.update_config()
path = self.config['path']
# open file to get dimensions
with self.subprocess(
['ffmpeg', '-v', 'info', '-y', '-an', '-vn', '-i', path, '-'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=open(os.devnull), universal_newlines=True) as sp:
for line in sp.stderr.read().splitlines():
match = re.search('(\d{2,})x(\d{2,})', line)
if match:
xlen, ylen = map(int, match.groups())
break
else:
self.logger.critical('Failed to open %s', path)
return
# read file repeatedly to allow looping
while True:
# can change config once per outer loop
self.update_config()
bit16 = self.config['16bit']
self.frame_type = self.config['type']
self.metadata = Metadata().from_file(path)
audit = self.metadata.get('audit')
audit += 'data = %s\n' % path
audit += ' type: %s, 16bit: %s\n' % (self.frame_type, bit16)
self.metadata.set('audit', audit)
bps = {'RGB': 3, 'Y': 1}[self.frame_type]
pix_fmt = {'RGB': ('rgb24', 'rgb48le'),
'Y': ('gray', 'gray16le')}[self.frame_type][bit16]
bytes_per_line = xlen * ylen * bps
if bit16:
bytes_per_line *= 2
# open file to read data
with self.subprocess(
['ffmpeg', '-v', 'warning', '-an', '-i', path,
'-f', 'image2pipe', '-pix_fmt', pix_fmt,
'-c:v', 'rawvideo', '-'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=open(os.devnull), bufsize=bytes_per_line) as sp:
while True:
try:
raw_data = sp.stdout.read(bytes_per_line)
except Exception as ex:
self.logger.exception(ex)
return
if len(raw_data) < bytes_per_line:
break
if bit16:
image = numpy.fromstring(raw_data, dtype=numpy.uint16)
image = image.astype(pt_float) / pt_float(256.0)
else:
image = numpy.fromstring(raw_data, dtype=numpy.uint8)
yield image.reshape((ylen, xlen, bps))
self.update_config()
if self.frame_no == 0 or self.config['looping'] == 'off':
return
|
[
"def",
"file_reader",
"(",
"self",
")",
":",
"self",
".",
"update_config",
"(",
")",
"path",
"=",
"self",
".",
"config",
"[",
"'path'",
"]",
"# open file to get dimensions",
"with",
"self",
".",
"subprocess",
"(",
"[",
"'ffmpeg'",
",",
"'-v'",
",",
"'info'",
",",
"'-y'",
",",
"'-an'",
",",
"'-vn'",
",",
"'-i'",
",",
"path",
",",
"'-'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"stdin",
"=",
"open",
"(",
"os",
".",
"devnull",
")",
",",
"universal_newlines",
"=",
"True",
")",
"as",
"sp",
":",
"for",
"line",
"in",
"sp",
".",
"stderr",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"'(\\d{2,})x(\\d{2,})'",
",",
"line",
")",
"if",
"match",
":",
"xlen",
",",
"ylen",
"=",
"map",
"(",
"int",
",",
"match",
".",
"groups",
"(",
")",
")",
"break",
"else",
":",
"self",
".",
"logger",
".",
"critical",
"(",
"'Failed to open %s'",
",",
"path",
")",
"return",
"# read file repeatedly to allow looping",
"while",
"True",
":",
"# can change config once per outer loop",
"self",
".",
"update_config",
"(",
")",
"bit16",
"=",
"self",
".",
"config",
"[",
"'16bit'",
"]",
"self",
".",
"frame_type",
"=",
"self",
".",
"config",
"[",
"'type'",
"]",
"self",
".",
"metadata",
"=",
"Metadata",
"(",
")",
".",
"from_file",
"(",
"path",
")",
"audit",
"=",
"self",
".",
"metadata",
".",
"get",
"(",
"'audit'",
")",
"audit",
"+=",
"'data = %s\\n'",
"%",
"path",
"audit",
"+=",
"' type: %s, 16bit: %s\\n'",
"%",
"(",
"self",
".",
"frame_type",
",",
"bit16",
")",
"self",
".",
"metadata",
".",
"set",
"(",
"'audit'",
",",
"audit",
")",
"bps",
"=",
"{",
"'RGB'",
":",
"3",
",",
"'Y'",
":",
"1",
"}",
"[",
"self",
".",
"frame_type",
"]",
"pix_fmt",
"=",
"{",
"'RGB'",
":",
"(",
"'rgb24'",
",",
"'rgb48le'",
")",
",",
"'Y'",
":",
"(",
"'gray'",
",",
"'gray16le'",
")",
"}",
"[",
"self",
".",
"frame_type",
"]",
"[",
"bit16",
"]",
"bytes_per_line",
"=",
"xlen",
"*",
"ylen",
"*",
"bps",
"if",
"bit16",
":",
"bytes_per_line",
"*=",
"2",
"# open file to read data",
"with",
"self",
".",
"subprocess",
"(",
"[",
"'ffmpeg'",
",",
"'-v'",
",",
"'warning'",
",",
"'-an'",
",",
"'-i'",
",",
"path",
",",
"'-f'",
",",
"'image2pipe'",
",",
"'-pix_fmt'",
",",
"pix_fmt",
",",
"'-c:v'",
",",
"'rawvideo'",
",",
"'-'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"stdin",
"=",
"open",
"(",
"os",
".",
"devnull",
")",
",",
"bufsize",
"=",
"bytes_per_line",
")",
"as",
"sp",
":",
"while",
"True",
":",
"try",
":",
"raw_data",
"=",
"sp",
".",
"stdout",
".",
"read",
"(",
"bytes_per_line",
")",
"except",
"Exception",
"as",
"ex",
":",
"self",
".",
"logger",
".",
"exception",
"(",
"ex",
")",
"return",
"if",
"len",
"(",
"raw_data",
")",
"<",
"bytes_per_line",
":",
"break",
"if",
"bit16",
":",
"image",
"=",
"numpy",
".",
"fromstring",
"(",
"raw_data",
",",
"dtype",
"=",
"numpy",
".",
"uint16",
")",
"image",
"=",
"image",
".",
"astype",
"(",
"pt_float",
")",
"/",
"pt_float",
"(",
"256.0",
")",
"else",
":",
"image",
"=",
"numpy",
".",
"fromstring",
"(",
"raw_data",
",",
"dtype",
"=",
"numpy",
".",
"uint8",
")",
"yield",
"image",
".",
"reshape",
"(",
"(",
"ylen",
",",
"xlen",
",",
"bps",
")",
")",
"self",
".",
"update_config",
"(",
")",
"if",
"self",
".",
"frame_no",
"==",
"0",
"or",
"self",
".",
"config",
"[",
"'looping'",
"]",
"==",
"'off'",
":",
"return"
] | 46.12069 | 15.948276 |
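The core of the generator above is the ffmpeg rawvideo pipe; a stripped-down sketch of just that pattern, assuming ffmpeg is on PATH and the frame size is already known (the original probes it first):

import subprocess
import numpy as np

def iter_rgb_frames(path, xlen, ylen):
    cmd = ['ffmpeg', '-v', 'warning', '-an', '-i', path,
           '-f', 'image2pipe', '-pix_fmt', 'rgb24', '-c:v', 'rawvideo', '-']
    frame_bytes = xlen * ylen * 3
    with subprocess.Popen(cmd, stdout=subprocess.PIPE) as sp:
        while True:
            raw = sp.stdout.read(frame_bytes)
            if len(raw) < frame_bytes:
                return  # EOF or short read
            yield np.frombuffer(raw, dtype=np.uint8).reshape(ylen, xlen, 3)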
def login_required(view_function):
""" This decorator ensures that the current user is logged in.
Example::
@route('/member_page')
@login_required
def member_page(): # User must be logged in
...
If USER_ENABLE_EMAIL is True and USER_ENABLE_CONFIRM_EMAIL is True,
this view decorator also ensures that the user has a confirmed email address.
    | Calls unauthenticated_view() when the user is not logged in
or when the user has not confirmed their email address.
| Calls the decorated view otherwise.
"""
    @wraps(view_function)    # Tells debuggers that this is a function wrapper
def decorator(*args, **kwargs):
user_manager = current_app.user_manager
# User must be logged in with a confirmed email address
allowed = _is_logged_in_with_confirmed_email(user_manager)
if not allowed:
# Redirect to unauthenticated page
return user_manager.unauthenticated_view()
# It's OK to call the view
return view_function(*args, **kwargs)
return decorator
|
[
    "def",
    "login_required",
    "(",
    "view_function",
    ")",
    ":",
    "@",
    "wraps",
    "(",
    "view_function",
    ")",
    "# Tells debuggers that this is a function wrapper",
"def",
"decorator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"user_manager",
"=",
"current_app",
".",
"user_manager",
"# User must be logged in with a confirmed email address",
"allowed",
"=",
"_is_logged_in_with_confirmed_email",
"(",
"user_manager",
")",
"if",
"not",
"allowed",
":",
"# Redirect to unauthenticated page",
"return",
"user_manager",
".",
"unauthenticated_view",
"(",
")",
"# It's OK to call the view",
"return",
"view_function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"decorator"
] | 34.677419 | 21.032258 |
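A usage sketch mirroring the docstring's example, in a bare Flask app; Flask-User must have attached a `user_manager` to the app for the decorator to resolve at request time:

from flask import Flask

app = Flask(__name__)

@app.route('/member_page')
@login_required
def member_page():
    return 'members only'  # reached only by a confirmed, logged-in user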
def on_download_to_activated(self, menu_item):
        '''Download the selected files/directories to a chosen folder.'''
tree_paths = self.iconview.get_selected_items()
if not tree_paths:
return
dialog = Gtk.FileChooserDialog(_('Save to...'), self.app.window,
Gtk.FileChooserAction.SELECT_FOLDER,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OK, Gtk.ResponseType.OK))
response = dialog.run()
if response != Gtk.ResponseType.OK:
dialog.destroy()
return
dirname = dialog.get_filename()
dialog.destroy()
pcs_files = [self.get_pcs_file(p) for p in tree_paths]
self.app.blink_page(self.app.download_page)
self.app.download_page.add_tasks(pcs_files, dirname)
|
[
"def",
"on_download_to_activated",
"(",
"self",
",",
"menu_item",
")",
":",
"tree_paths",
"=",
"self",
".",
"iconview",
".",
"get_selected_items",
"(",
")",
"if",
"not",
"tree_paths",
":",
"return",
"dialog",
"=",
"Gtk",
".",
"FileChooserDialog",
"(",
"_",
"(",
"'Save to...'",
")",
",",
"self",
".",
"app",
".",
"window",
",",
"Gtk",
".",
"FileChooserAction",
".",
"SELECT_FOLDER",
",",
"(",
"Gtk",
".",
"STOCK_CANCEL",
",",
"Gtk",
".",
"ResponseType",
".",
"CANCEL",
",",
"Gtk",
".",
"STOCK_OK",
",",
"Gtk",
".",
"ResponseType",
".",
"OK",
")",
")",
"response",
"=",
"dialog",
".",
"run",
"(",
")",
"if",
"response",
"!=",
"Gtk",
".",
"ResponseType",
".",
"OK",
":",
"dialog",
".",
"destroy",
"(",
")",
"return",
"dirname",
"=",
"dialog",
".",
"get_filename",
"(",
")",
"dialog",
".",
"destroy",
"(",
")",
"pcs_files",
"=",
"[",
"self",
".",
"get_pcs_file",
"(",
"p",
")",
"for",
"p",
"in",
"tree_paths",
"]",
"self",
".",
"app",
".",
"blink_page",
"(",
"self",
".",
"app",
".",
"download_page",
")",
"self",
".",
"app",
".",
"download_page",
".",
"add_tasks",
"(",
"pcs_files",
",",
"dirname",
")"
] | 38.3 | 16.9 |
def _full_axis_reduce_along_select_indices(self, func, axis, index):
        """Reduce Manager along select indices using a function that needs the full axis.
Args:
func: Callable that reduces the dimension of the object and requires full
knowledge of the entire axis.
axis: 0 for columns and 1 for rows. Defaults to 0.
index: Index of the resulting QueryCompiler.
Returns:
A new QueryCompiler object with index or BaseFrameManager object.
"""
# Convert indices to numeric indices
old_index = self.index if axis else self.columns
numeric_indices = [i for i, name in enumerate(old_index) if name in index]
result = self.data.apply_func_to_select_indices_along_full_axis(
axis, func, numeric_indices
)
return result
|
[
"def",
"_full_axis_reduce_along_select_indices",
"(",
"self",
",",
"func",
",",
"axis",
",",
"index",
")",
":",
"# Convert indices to numeric indices",
"old_index",
"=",
"self",
".",
"index",
"if",
"axis",
"else",
"self",
".",
"columns",
"numeric_indices",
"=",
"[",
"i",
"for",
"i",
",",
"name",
"in",
"enumerate",
"(",
"old_index",
")",
"if",
"name",
"in",
"index",
"]",
"result",
"=",
"self",
".",
"data",
".",
"apply_func_to_select_indices_along_full_axis",
"(",
"axis",
",",
"func",
",",
"numeric_indices",
")",
"return",
"result"
] | 44.105263 | 22.578947 |
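
The core of the row above is the conversion from axis labels to positional indices before dispatching to the block manager. A standalone illustration of that step, with made-up label values:

```python
old_index = ['a', 'b', 'c', 'd']  # labels along the reduced axis
index = ['b', 'd']                # labels requested by the caller

# Keep only the positions whose label appears in the requested index.
numeric_indices = [i for i, name in enumerate(old_index) if name in index]
print(numeric_indices)  # [1, 3]
```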
def tenant_forgot_password_login(self, data, tenant_id=None, api_version="v2.0"):
"""
Forgot password API
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/login/password/forgot".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data, sensitive=True)
|
[
"def",
"tenant_forgot_password_login",
"(",
"self",
",",
"data",
",",
"tenant_id",
"=",
"None",
",",
"api_version",
"=",
"\"v2.0\"",
")",
":",
"if",
"tenant_id",
"is",
"None",
"and",
"self",
".",
"_parent_class",
".",
"tenant_id",
":",
"# Pull tenant_id from parent namespace cache.",
"tenant_id",
"=",
"self",
".",
"_parent_class",
".",
"tenant_id",
"elif",
"not",
"tenant_id",
":",
"# No value for tenant_id.",
"raise",
"TypeError",
"(",
"\"tenant_id is required but not set or cached.\"",
")",
"cur_ctlr",
"=",
"self",
".",
"_parent_class",
".",
"controller",
"url",
"=",
"str",
"(",
"cur_ctlr",
")",
"+",
"\"/{}/api/tenants/{}/login/password/forgot\"",
".",
"format",
"(",
"api_version",
",",
"tenant_id",
")",
"api_logger",
".",
"debug",
"(",
"\"URL = %s\"",
",",
"url",
")",
"return",
"self",
".",
"_parent_class",
".",
"rest_call",
"(",
"url",
",",
"\"post\"",
",",
"data",
"=",
"data",
",",
"sensitive",
"=",
"True",
")"
] | 41.538462 | 25.846154 |
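
Most of the row above is tenant-ID resolution and URL assembly. A sketch of the URL construction step, with a hypothetical controller host and tenant ID:

```python
api_version = "v2.0"
tenant_id = "1234567890"              # made-up tenant ID
cur_ctlr = "https://api.example.com"  # hypothetical controller URL

url = str(cur_ctlr) + "/{}/api/tenants/{}/login/password/forgot".format(
    api_version, tenant_id)
print(url)
# https://api.example.com/v2.0/api/tenants/1234567890/login/password/forgot
```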
def check_unused(intersection, duplicates, intersections):
"""Check if a "valid" ``intersection`` is already in ``intersections``.
This assumes that
* ``intersection`` will have at least one of ``s == 0.0`` or ``t == 0.0``
* At least one of the intersections in ``intersections`` is classified as
``COINCIDENT_UNUSED``.
Args:
intersection (.Intersection): An intersection to be added.
duplicates (List[.Intersection]): List of duplicate intersections.
intersections (List[.Intersection]): List of "accepted" (i.e.
non-duplicate) intersections.
Returns:
bool: Indicates if the ``intersection`` is a duplicate.
"""
for other in intersections:
if (
other.interior_curve == UNUSED_T
and intersection.index_first == other.index_first
and intersection.index_second == other.index_second
):
if intersection.s == 0.0 and other.s == 0.0:
duplicates.append(intersection)
return True
if intersection.t == 0.0 and other.t == 0.0:
duplicates.append(intersection)
return True
return False
|
[
"def",
"check_unused",
"(",
"intersection",
",",
"duplicates",
",",
"intersections",
")",
":",
"for",
"other",
"in",
"intersections",
":",
"if",
"(",
"other",
".",
"interior_curve",
"==",
"UNUSED_T",
"and",
"intersection",
".",
"index_first",
"==",
"other",
".",
"index_first",
"and",
"intersection",
".",
"index_second",
"==",
"other",
".",
"index_second",
")",
":",
"if",
"intersection",
".",
"s",
"==",
"0.0",
"and",
"other",
".",
"s",
"==",
"0.0",
":",
"duplicates",
".",
"append",
"(",
"intersection",
")",
"return",
"True",
"if",
"intersection",
".",
"t",
"==",
"0.0",
"and",
"other",
".",
"t",
"==",
"0.0",
":",
"duplicates",
".",
"append",
"(",
"intersection",
")",
"return",
"True",
"return",
"False"
] | 35.575758 | 22.606061 |
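
To see the duplicate check in action, here is a self-contained sketch; `Intersection` is a simplified namedtuple stand-in for the library's class, and `UNUSED_T` is assumed to be the sentinel marking a coincident-unused intersection.

```python
from collections import namedtuple

Intersection = namedtuple(
    'Intersection', 'index_first index_second s t interior_curve')
UNUSED_T = 'COINCIDENT_UNUSED'  # assumed sentinel value

def check_unused(intersection, duplicates, intersections):
    for other in intersections:
        if (other.interior_curve == UNUSED_T
                and intersection.index_first == other.index_first
                and intersection.index_second == other.index_second):
            if intersection.s == 0.0 and other.s == 0.0:
                duplicates.append(intersection)
                return True
            if intersection.t == 0.0 and other.t == 0.0:
                duplicates.append(intersection)
                return True
    return False

accepted = [Intersection(0, 1, 0.0, 0.5, UNUSED_T)]
duplicates = []
candidate = Intersection(0, 1, 0.0, 0.25, None)
print(check_unused(candidate, duplicates, accepted))  # True: both have s == 0.0
```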
def prepare_url_list(urlresolver, namespace_path='', namespace=''):
"""
returns list of tuples [(<url_name>, <url_patern_tuple> ), ...]
"""
exclude_ns = getattr(settings, 'JS_REVERSE_EXCLUDE_NAMESPACES', JS_EXCLUDE_NAMESPACES)
include_only_ns = getattr(settings, 'JS_REVERSE_INCLUDE_ONLY_NAMESPACES', JS_INCLUDE_ONLY_NAMESPACES)
if exclude_ns and include_only_ns:
raise ImproperlyConfigured(
'Neither use JS_REVERSE_EXCLUDE_NAMESPACES nor JS_REVERSE_INCLUDE_ONLY_NAMESPACES setting')
if namespace[:-1] in exclude_ns:
return
    include_only_allow = True  # include_only state variable
if include_only_ns != []:
        # True means that the ns passed the test
in_on_empty_ns = False
in_on_is_in_list = False
in_on_null = False
# Test urls without ns
if namespace == '' and '' in include_only_ns:
in_on_empty_ns = True
        # check whether the nested ns is a sub-ns of an include_only ns
# e.g. ns = "foo:bar" include_only = ["foo"] -> this ns will be used
# works for ns = "lorem:ipsum:dolor" include_only = ["lorem:ipsum"]
# ns "lorem" will be ignored but "lorem:ipsum" & "lorem:ipsum:.." won't
for ns in include_only_ns:
if ns != "" and namespace[:-1].startswith(ns):
in_on_is_in_list = True
break
        # Test whether the "\0" flag is used
# use "foo\0" to add urls just from "foo" not from subns "foo:bar"
if namespace[:-1] + '\0' in include_only_ns:
in_on_null = True
include_only_allow = in_on_empty_ns or in_on_is_in_list or in_on_null
if include_only_allow:
for url_name in urlresolver.reverse_dict.keys():
if isinstance(url_name, (text_type, str)):
url_patterns = []
for url_pattern in urlresolver.reverse_dict.getlist(url_name):
url_patterns += [
[namespace_path + pat[0], pat[1]] for pat in url_pattern[0]]
yield [namespace + url_name, url_patterns]
for inner_ns, (inner_ns_path, inner_urlresolver) in \
urlresolver.namespace_dict.items():
inner_ns_path = namespace_path + inner_ns_path
inner_ns = namespace + inner_ns + ':'
# if we have inner_ns_path, reconstruct a new resolver so that we can
# handle regex substitutions within the regex of a namespace.
if inner_ns_path:
args = [inner_ns_path, inner_urlresolver]
# https://github.com/ierror/django-js-reverse/issues/65
if StrictVersion(django.get_version()) >= StrictVersion("2.0.6"):
args.append(tuple(urlresolver.pattern.converters.items()))
inner_urlresolver = urlresolvers.get_ns_resolver(*args)
inner_ns_path = ''
for x in prepare_url_list(inner_urlresolver, inner_ns_path, inner_ns):
yield x
|
[
"def",
"prepare_url_list",
"(",
"urlresolver",
",",
"namespace_path",
"=",
"''",
",",
"namespace",
"=",
"''",
")",
":",
"exclude_ns",
"=",
"getattr",
"(",
"settings",
",",
"'JS_REVERSE_EXCLUDE_NAMESPACES'",
",",
"JS_EXCLUDE_NAMESPACES",
")",
"include_only_ns",
"=",
"getattr",
"(",
"settings",
",",
"'JS_REVERSE_INCLUDE_ONLY_NAMESPACES'",
",",
"JS_INCLUDE_ONLY_NAMESPACES",
")",
"if",
"exclude_ns",
"and",
"include_only_ns",
":",
"raise",
"ImproperlyConfigured",
"(",
"'Neither use JS_REVERSE_EXCLUDE_NAMESPACES nor JS_REVERSE_INCLUDE_ONLY_NAMESPACES setting'",
")",
"if",
"namespace",
"[",
":",
"-",
"1",
"]",
"in",
"exclude_ns",
":",
"return",
"include_only_allow",
"=",
"True",
"# include_only state varible",
"if",
"include_only_ns",
"!=",
"[",
"]",
":",
"# True mean that ns passed the test",
"in_on_empty_ns",
"=",
"False",
"in_on_is_in_list",
"=",
"False",
"in_on_null",
"=",
"False",
"# Test urls without ns",
"if",
"namespace",
"==",
"''",
"and",
"''",
"in",
"include_only_ns",
":",
"in_on_empty_ns",
"=",
"True",
"# check if nestead ns isn't subns of include_only ns",
"# e.g. ns = \"foo:bar\" include_only = [\"foo\"] -> this ns will be used",
"# works for ns = \"lorem:ipsum:dolor\" include_only = [\"lorem:ipsum\"]",
"# ns \"lorem\" will be ignored but \"lorem:ipsum\" & \"lorem:ipsum:..\" won't",
"for",
"ns",
"in",
"include_only_ns",
":",
"if",
"ns",
"!=",
"\"\"",
"and",
"namespace",
"[",
":",
"-",
"1",
"]",
".",
"startswith",
"(",
"ns",
")",
":",
"in_on_is_in_list",
"=",
"True",
"break",
"# Test if isn't used \"\\0\" flag",
"# use \"foo\\0\" to add urls just from \"foo\" not from subns \"foo:bar\"",
"if",
"namespace",
"[",
":",
"-",
"1",
"]",
"+",
"'\\0'",
"in",
"include_only_ns",
":",
"in_on_null",
"=",
"True",
"include_only_allow",
"=",
"in_on_empty_ns",
"or",
"in_on_is_in_list",
"or",
"in_on_null",
"if",
"include_only_allow",
":",
"for",
"url_name",
"in",
"urlresolver",
".",
"reverse_dict",
".",
"keys",
"(",
")",
":",
"if",
"isinstance",
"(",
"url_name",
",",
"(",
"text_type",
",",
"str",
")",
")",
":",
"url_patterns",
"=",
"[",
"]",
"for",
"url_pattern",
"in",
"urlresolver",
".",
"reverse_dict",
".",
"getlist",
"(",
"url_name",
")",
":",
"url_patterns",
"+=",
"[",
"[",
"namespace_path",
"+",
"pat",
"[",
"0",
"]",
",",
"pat",
"[",
"1",
"]",
"]",
"for",
"pat",
"in",
"url_pattern",
"[",
"0",
"]",
"]",
"yield",
"[",
"namespace",
"+",
"url_name",
",",
"url_patterns",
"]",
"for",
"inner_ns",
",",
"(",
"inner_ns_path",
",",
"inner_urlresolver",
")",
"in",
"urlresolver",
".",
"namespace_dict",
".",
"items",
"(",
")",
":",
"inner_ns_path",
"=",
"namespace_path",
"+",
"inner_ns_path",
"inner_ns",
"=",
"namespace",
"+",
"inner_ns",
"+",
"':'",
"# if we have inner_ns_path, reconstruct a new resolver so that we can",
"# handle regex substitutions within the regex of a namespace.",
"if",
"inner_ns_path",
":",
"args",
"=",
"[",
"inner_ns_path",
",",
"inner_urlresolver",
"]",
"# https://github.com/ierror/django-js-reverse/issues/65",
"if",
"StrictVersion",
"(",
"django",
".",
"get_version",
"(",
")",
")",
">=",
"StrictVersion",
"(",
"\"2.0.6\"",
")",
":",
"args",
".",
"append",
"(",
"tuple",
"(",
"urlresolver",
".",
"pattern",
".",
"converters",
".",
"items",
"(",
")",
")",
")",
"inner_urlresolver",
"=",
"urlresolvers",
".",
"get_ns_resolver",
"(",
"*",
"args",
")",
"inner_ns_path",
"=",
"''",
"for",
"x",
"in",
"prepare_url_list",
"(",
"inner_urlresolver",
",",
"inner_ns_path",
",",
"inner_ns",
")",
":",
"yield",
"x"
] | 41.157143 | 23.985714 |
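
The trickiest part of the row above is the include-only namespace filter. Here is that logic isolated into a small predicate, with made-up namespace strings:

```python
include_only_ns = ['foo', 'lorem:ipsum']

def ns_allowed(namespace):
    # In the real code `namespace` carries a trailing ':', hence the [:-1].
    if namespace == '' and '' in include_only_ns:
        return True
    stripped = namespace[:-1]
    if any(ns != '' and stripped.startswith(ns) for ns in include_only_ns):
        return True
    # The "\0" suffix includes a namespace without its sub-namespaces.
    return stripped + '\0' in include_only_ns

print(ns_allowed('foo:bar:'))      # True: sub-namespace of 'foo'
print(ns_allowed('lorem:'))        # False: 'lorem' alone is not included
print(ns_allowed('lorem:ipsum:'))  # True
```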
def select_multi_directory_dialog():
""" Opens a directory selection dialog
Style - specifies style of dialog (read wx documentation for information)
"""
import wx.lib.agw.multidirdialog as MDD
app = wx.App(0)
dlg = MDD.MultiDirDialog(None, title="Select directories", defaultPath=os.getcwd(),
agwStyle=MDD.DD_MULTIPLE | MDD.DD_DIR_MUST_EXIST)
if dlg.ShowModal() != wx.ID_OK:
dlg.Destroy()
return
paths = dlg.GetPaths()
dlg.Destroy()
app.MainLoop()
return paths
|
[
"def",
"select_multi_directory_dialog",
"(",
")",
":",
"import",
"wx",
".",
"lib",
".",
"agw",
".",
"multidirdialog",
"as",
"MDD",
"app",
"=",
"wx",
".",
"App",
"(",
"0",
")",
"dlg",
"=",
"MDD",
".",
"MultiDirDialog",
"(",
"None",
",",
"title",
"=",
"\"Select directories\"",
",",
"defaultPath",
"=",
"os",
".",
"getcwd",
"(",
")",
",",
"agwStyle",
"=",
"MDD",
".",
"DD_MULTIPLE",
"|",
"MDD",
".",
"DD_DIR_MUST_EXIST",
")",
"if",
"dlg",
".",
"ShowModal",
"(",
")",
"!=",
"wx",
".",
"ID_OK",
":",
"dlg",
".",
"Destroy",
"(",
")",
"return",
"paths",
"=",
"dlg",
".",
"GetPaths",
"(",
")",
"dlg",
".",
"Destroy",
"(",
")",
"app",
".",
"MainLoop",
"(",
")",
"return",
"paths"
] | 27 | 24.35 |
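
A hypothetical call site for the row above; it assumes wxPython with the AGW widgets is installed and that the function is importable:

```python
paths = select_multi_directory_dialog()
if paths:
    for p in paths:
        print('Selected:', p)
else:
    print('Selection cancelled.')
```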