Dataset schema (column, type, length/value range):

column                  type           min  max
body_hash               stringlengths  64   64
body                    stringlengths  23   109k
docstring               stringlengths  1    57k
path                    stringlengths  4    198
name                    stringlengths  1    115
repository_name         stringlengths  7    111
repository_stars        float64        0    191k
lang                    stringclasses  1 value
body_without_docstring  stringlengths  14   108k
unified                 stringlengths  45   133k
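The columns above describe a corpus of Python functions paired with their extracted docstrings, one record per function. As a rough sketch of how a dataset with this schema could be inspected (the dataset path below is a placeholder, since the source does not name the dataset; load_dataset is the Hugging Face datasets API):

from datasets import load_dataset  # pip install datasets

ds = load_dataset('org/python-docstring-corpus', split='train')  # placeholder path
row = ds[0]
print(row['name'], row['path'], row['repository_name'])  # function, file, repo
print(row['docstring'])                                  # extracted docstring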
3f39bdfa40f579218f6997b3c6c388ac8cba5c5b059c8d62181d6f3987c72c0d
def to_dict(self):
    'Returns the model properties as a dict'
    result = {}
    for (attr, _) in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                (lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)),
                value))
        elif hasattr(value, 'to_dict'):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                (lambda item: ((item[0], item[1].to_dict())
                               if hasattr(item[1], 'to_dict') else item)),
                value.items()))
        else:
            result[attr] = value
    if issubclass(CertificateUsageResource, dict):
        for (key, value) in self.items():
            result[key] = value
    return result
Returns the model properties as a dict
octopus_deploy_swagger_client/models/certificate_usage_resource.py
to_dict
cvent/octopus-deploy-api-client
0
python
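This record shows the standard swagger-codegen to_dict pattern: serialize every attribute listed in swagger_types, recursing into nested models. A minimal self-contained sketch of the same pattern (the Color model below is hypothetical, not part of the record; assumes six is installed):

import six  # pip install six

class Color(object):
    # hypothetical minimal model following the swagger-codegen convention
    swagger_types = {'name': 'str', 'children': 'list[Color]'}

    def __init__(self, name, children=None):
        self.name = name
        self.children = children or []

    def to_dict(self):
        'Recursively serialize attributes declared in swagger_types'
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, 'to_dict') else x for x in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result

print(Color('red', [Color('pink')]).to_dict())
# {'name': 'red', 'children': [{'name': 'pink', 'children': []}]}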
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99
def to_str(self):
    'Returns the string representation of the model'
    return pprint.pformat(self.to_dict())
Returns the string representation of the model
octopus_deploy_swagger_client/models/certificate_usage_resource.py
to_str
cvent/octopus-deploy-api-client
0
python
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703
def __repr__(self):
    'For `print` and `pprint`'
    return self.to_str()
For `print` and `pprint`
octopus_deploy_swagger_client/models/certificate_usage_resource.py
__repr__
cvent/octopus-deploy-api-client
0
python
d51cf462cb88a2785206d8cc04443be4682f2eab1935e1bff060cfc04b2213d5
def __eq__(self, other):
    'Returns true if both objects are equal'
    if (not isinstance(other, CertificateUsageResource)):
        return False
    return (self.__dict__ == other.__dict__)
Returns true if both objects are equal
octopus_deploy_swagger_client/models/certificate_usage_resource.py
__eq__
cvent/octopus-deploy-api-client
0
python
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42
def __ne__(self, other):
    'Returns true if both objects are not equal'
    return (not (self == other))
Returns true if both objects are not equal
octopus_deploy_swagger_client/models/certificate_usage_resource.py
__ne__
cvent/octopus-deploy-api-client
0
python
f7d96aace381a0065dac017fe03f036dcb9874e5b97b366f52fd908b8fdcdbb6
@requires_scipy_version('0.11')
def test_module_nesting():
    'Test that module imports are necessary'
    proc = Popen([sys.executable, '-c', run_script], stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = proc.communicate()
    if proc.returncode:
        raise AssertionError(stdout)
Test that module imports are necessary
mne/tests/test_import_nesting.py
test_module_nesting
fraimondo/mne-python
1
python
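The test above relies on a common pattern: run a script in a fresh interpreter via Popen and fail if the child exits non-zero (run_script, defined elsewhere in the module, holds the script text). A minimal sketch of the pattern, with an inline snippet standing in for run_script:

import sys
from subprocess import PIPE, Popen

snippet = "import sys; sys.exit(0)"  # stands in for the module's run_script
proc = Popen([sys.executable, '-c', snippet], stdout=PIPE, stderr=PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
    # surface the child's output as the assertion message
    raise AssertionError(stdout)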
709c9475c387102906f242be753c4d414bda8d2fd0f4b65924af64869fdc3591
def __init__(self, plugin: ProjectPlugin, ancestor: ProjectPlugin):
    """Initialize cyclic inheritance error.

    Args:
        plugin: A ProjectPlugin
        ancestor: The given ProjectPlugins' ancestor.
    """
    super().__init__()
    self.plugin = plugin
    self.ancestor = ancestor
Initialize cyclic inheritance error. Args: plugin: A ProjectPlugin ancestor: The given ProjectPlugins' ancestor.
src/meltano/core/plugin/project_plugin.py
__init__
Mu-L/meltano
0
python
80a7611bc285e6c35d0bbdfb4bff9cf5dfadd5aae3d7dcb555405fe226945752
def __str__(self):
    """Return error message.

    Returns:
        A formatted error message string.
    """
    return ("{type} '{name}' cannot inherit from '{ancestor}', "
            + "which itself inherits from '{name}'").format(
        type=self.plugin.type.descriptor.capitalize(),
        name=self.plugin.name,
        ancestor=self.ancestor.name)
Return error message. Returns: A formatted error message string.
src/meltano/core/plugin/project_plugin.py
__str__
Mu-L/meltano
0
python
563cdd88ee2b003c63192674486c986446474c75238295938ab6e4e1524e0ecf
def __init__(self, plugin_type: PluginType, name: str,
             inherit_from: (str | None) = None,
             namespace: (str | None) = None,
             variant: (str | None) = None,
             pip_url: (str | None) = None,
             executable: (str | None) = None,
             config: (dict | None) = None,
             commands: (dict | None) = None,
             default_variant=Variant.ORIGINAL_NAME,
             **extras):
    """ProjectPlugin.

    Args:
        plugin_type: PluginType instance.
        name: Plugin name.
        inherit_from: (optional) Name of plugin to inherit from.
        namespace: (optional) Plugin namespace.
        variant: (optional) Plugin variant.
        pip_url: (optional) Plugin install pip url.
        executable: (optional) Executable name.
        config: (optional) Plugin configuration.
        commands: (optional) Plugin commands.
        default_variant: (optional) Default variant for this plugin.
        extras: Extra keyword arguments.
    """
    super().__init__(plugin_type, name)
    self.inherit_from = (inherit_from if (inherit_from and (inherit_from != name)) else None)
    self.custom_definition = None
    self._flattened.add('custom_definition')
    self._parent = None
    if ((not self.inherit_from) and namespace):
        self.custom_definition = PluginDefinition(
            plugin_type, name, namespace, variant=variant,
            pip_url=pip_url, executable=executable, **extras)
        extras = self.custom_definition.extras
        self.custom_definition.extras = {}
        self.parent = base_plugin_factory(self.custom_definition, variant)
    self.namespace = namespace
    self.set_presentation_attrs(extras)
    self.variant = variant
    self.pip_url = pip_url
    self.executable = executable
    self.commands = Command.parse_all(commands)
    self._fallbacks.update(['logo_url', 'description', self.VARIANT_ATTR, 'pip_url', 'executable'])
    self._defaults[self.VARIANT_ATTR] = (lambda _: default_variant)
    if self.inherit_from:
        self._defaults['namespace'] = (lambda plugin: plugin.name.replace('-', '_'))
        self._defaults['label'] = (lambda plugin: (f'{plugin.parent.label}: {plugin.name}'
                                                   if plugin.parent else plugin.name))
    else:
        self._fallbacks.update(['namespace', 'label'])
    self.config = copy.deepcopy((config or {}))
    self.extras = extras
    if ('profiles' in extras):
        logger.warning(('Plugin configuration profiles are no longer supported, ignoring '
                        + f"`profiles` in '{name}' {plugin_type.descriptor} definition."))
ProjectPlugin. Args: plugin_type: PluginType instance. name: Plugin name. inherit_from: (optional) Name of plugin to inherit from. namespace: (optional) Plugin namespace. variant: (optional) Plugin variant. pip_url: (optional) Plugin install pip url. executable: (optional) Executable name. config: (optional) Plugin configuration. commands: (optional) Plugin commands. default_variant: (optional) Default variant for this plugin. extras: Extra keyword arguments.
src/meltano/core/plugin/project_plugin.py
__init__
Mu-L/meltano
0
python
7f63dd78fe98ee7fff1eaee7170c2f4fde56b66728ebcbf45e265b20c51996e6
@property
def parent(self) -> ProjectPlugin:
    """Plugins parent.

    Returns:
        Parent ProjectPlugin instance, or None if no parent.
    """
    return self._parent
Plugins parent. Returns: Parent ProjectPlugin instance, or None if no parent.
src/meltano/core/plugin/project_plugin.py
parent
Mu-L/meltano
0
python
971baefebf1fd7b30e22b0b3045032670a7bd3baefc9cab27141aeee81f20702
@property
def is_variant_set(self) -> bool:
    """Check if variant is set explicitly.

    Returns:
        'True' if variant is set explicitly.
    """
    return self.is_attr_set(self.VARIANT_ATTR)
Check if variant is set explicitly. Returns: 'True' if variant is set explicitly.
src/meltano/core/plugin/project_plugin.py
is_variant_set
Mu-L/meltano
0
python
1e727e5c4873a591db97b7099074fe232c23a345f585e734e13c173b85ab4ad5
@property
def info(self) -> dict[(str, str)]:
    """Plugin info dict.

    Returns:
        Dictionary of plugin info (name, namespace and variant)
    """
    return {'name': self.name, 'namespace': self.namespace, 'variant': self.variant}
Plugin info dict. Returns: Dictionary of plugin info (name, namespace and variant)
src/meltano/core/plugin/project_plugin.py
info
Mu-L/meltano
0
python
584e4890dc87abe16ed6f1442cea0a600a1368092b425b0e9f30b97b12215ebd
@property
def info_env(self) -> dict[(str, str)]:
    """Plugin environment info.

    Returns:
        Dictionary of plugin info formatted as Meltano environment variables.
    """
    return flatten({'meltano': {self.type.singular: self.info}}, 'env_var')
Plugin environment info. Returns: Dictionary of plugin info formatted as Meltano environment variables.
src/meltano/core/plugin/project_plugin.py
info_env
Mu-L/meltano
0
python
eac08ce2e17c40c31d1411c6d9bfb57ae012be40113297e37c2f2e6e4e999b3f
@property
def all_commands(self) -> dict[(str, Command)]:
    """Return all commands for this plugin.

    Returns:
        Dictionary of supported commands, including those inherited from the parent plugin.
    """
    return {**self._parent.all_commands, **self.commands}
Return all commands for this plugin. Returns: Dictionary of supported commands, including those inherited from the parent plugin.
src/meltano/core/plugin/project_plugin.py
all_commands
Mu-L/meltano
0
python
1fa7490c47b4c6f2253b780955852f9e766c89d0f6284f29421e67a0428acb6d
@property
def test_commands(self) -> dict[(str, Command)]:
    """Return the test commands for this plugin.

    Returns:
        Dictionary of supported test commands, including those inherited from the parent plugin.
    """
    return {name: command for (name, command) in self.all_commands.items()
            if name.startswith('test')}
Return the test commands for this plugin. Returns: Dictionary of supported test commands, including those inherited from the parent plugin.
src/meltano/core/plugin/project_plugin.py
test_commands
Mu-L/meltano
0
python
b010a86349ecd00625757519f1f79f72dc1a427e00fdd120fd0f18bf25e24c96
@property
def supported_commands(self) -> list[str]:
    """Return supported command names.

    Returns:
        All defined command names for the plugin.
    """
    return list(self.all_commands.keys())
Return supported command names. Returns: All defined command names for the plugin.
src/meltano/core/plugin/project_plugin.py
supported_commands
Mu-L/meltano
0
python
6f5520522c22cd7da6a47edc51d09e178e628114930b38bd24974511242e6427
def env_prefixes(self, for_writing=False) -> list[str]:
    """Return environment variable prefixes.

    Args:
        for_writing: Include parent prefix (used when writing to env vars)

    Returns:
        A list of env prefixes.
    """
    prefixes = [self.name, self.namespace]
    if for_writing:
        prefixes.extend(self._parent.env_prefixes(for_writing=True))
        prefixes.append(f'meltano_{self.type.verb}')
    return uniques_in(prefixes)
Return environment variable prefixes. Args: for_writing: Include parent prefix (used when writing to env vars) Returns: A list of env prefixes.
src/meltano/core/plugin/project_plugin.py
env_prefixes
Mu-L/meltano
0
python
6bfecb6dfd5c154f930595825708be1fa58268ec5150b2ee6b2d662bdd020a65
@property
def extra_config(self) -> list[(str, Any)]:
    """Return plugin extra config.

    Returns:
        Dictionary of extra config.
    """
    return {f'_{key}': value for (key, value) in self.extras.items()}
Return plugin extra config. Returns: Dictionary of extra config.
src/meltano/core/plugin/project_plugin.py
extra_config
Mu-L/meltano
0
python
c83ea5fe5395f87b0ed851d5f63d900538f5f3e5467219306ce98594376dbaa3
@property
def config_with_extras(self) -> list[(str, Any)]:
    """Return config with extras.

    Returns:
        Complete config dictionary, including config extras.
    """
    return {**self.config, **self.extra_config}
Return config with extras. Returns: Complete config dictionary, including config extras.
src/meltano/core/plugin/project_plugin.py
config_with_extras
Mu-L/meltano
0
python
ed765e9208dd701f756a8b2221a390c888b9c7c1b1e4d7388f0e23d6ff78da96
@property
def settings(self) -> list[SettingDefinition]:
    """Return plugin settings.

    Returns:
        A list of Plugin settings, including those defined by the parent.
    """
    existing_settings = self._parent.settings
    return [*existing_settings,
            *SettingDefinition.from_missing(existing_settings, self.config)]
Return plugin settings. Returns: A list of Plugin settings, including those defined by the parent.
src/meltano/core/plugin/project_plugin.py
settings
Mu-L/meltano
0
python
99c48470f080660719dfb70af14f0901bb5d526ad33feedb1de40343821801a2
@property
def extra_settings(self) -> list[SettingDefinition]:
    """Return extra settings.

    Returns:
        A list of extra SettingDefinitions, including those defined by the parent.
    """
    existing_settings = self._parent.extra_settings
    return [*existing_settings,
            *SettingDefinition.from_missing(existing_settings, self.extra_config)]
Return extra settings. Returns: A list of extra SettingDefinitions, including those defined by the parent.
src/meltano/core/plugin/project_plugin.py
extra_settings
Mu-L/meltano
0
python
35f6cadbae7ea574e72e2fa57d2f600aa9a3b54dfde5027dbc56d54785f71558
@property
def settings_with_extras(self) -> list[SettingDefinition]:
    """Return all settings.

    Returns:
        A complete list of SettingDefinitions, including extras.
    """
    return [*self.settings, *self.extra_settings]
Return all settings. Returns: A complete list of SettingDefinitions, including extras.
src/meltano/core/plugin/project_plugin.py
settings_with_extras
Mu-L/meltano
0
python
2bc5a790281dc49b97544d0c92b005ea929b02340200c43a2561a06e23705cc7
def is_custom(self) -> bool:
    """Return if plugin is custom.

    Returns:
        'True' is plugin is custom.
    """
    return (self.custom_definition is not None)
Return if plugin is custom. Returns: 'True' is plugin is custom.
src/meltano/core/plugin/project_plugin.py
is_custom
Mu-L/meltano
0
python
31942189a43564349968e2edfd1d637ee5fb33d30ae20901f32c9fcec6d8654d
@property
def is_shadowing(self) -> bool:
    """Return whether this plugin is shadowing a base plugin with the same name.

    Returns:
        'True' if this plugin is shadowing a base plugin with the same name.
    """
    return (not self.inherit_from)
Return whether this plugin is shadowing a base plugin with the same name. Returns: 'True' if this plugin is shadowing a base plugin with the same name.
src/meltano/core/plugin/project_plugin.py
is_shadowing
Mu-L/meltano
0
python
bc83e2b0730ecb313734cd2359e39e28b1e345858079fb60bd79d1070762c6ee
@property
def formatted_pip_url(self) -> str:
    """Return the formatted version of the pip_url.

    Expands ${MELTANO__PYTHON_VERSION} to the major.minor version string of the current runtime.

    Returns:
        Expanded pip url string.
    """
    return expand_env_vars(
        self.pip_url,
        {'MELTANO__PYTHON_VERSION': f'{sys.version_info.major}.{sys.version_info.minor}'})
Return the formatted version of the pip_url. Expands ${MELTANO__PYTHON_VERSION} to the major.minor version string of the current runtime. Returns: Expanded pip url string.
src/meltano/core/plugin/project_plugin.py
formatted_pip_url
Mu-L/meltano
0
python
45019dc77e260f406b70e5d89227d93657a9c7035d0c5e785757772bec0c52a5
@property
def venv_name(self) -> str:
    """Return the venv name this plugin should use.

    Returns:
        The name of this plugins parent if both pip urls are the same, else this plugins name.
    """
    if (not self.inherit_from):
        return self.name
    if ((not self.pip_url) or (self.parent.pip_url == self.pip_url)):
        return self.parent.name
    return self.name
Return the venv name this plugin should use. Returns: The name of this plugins parent if both pip urls are the same, else this plugins name.
src/meltano/core/plugin/project_plugin.py
venv_name
Mu-L/meltano
0
python
8b3458f48e86b52a898d5f2a0c468e0faf89bc61abc28eececf5eeae6e9f7279
def fixed_platt_calibrator(slope=-1.0, offset=0.0, **params):
    """
    **Description**
    None

    :param slope: The slope parameter of f(x) = 1 / (1 + exp(slope *
        x + offset) (settings).
    :param offset: The offset parameter of f(x) = 1 / (1 + exp(slope
        * x + offset) (settings).
    """
    entrypoint_name = 'FixedPlattCalibrator'
    settings = {}
    if (slope is not None):
        settings['Slope'] = try_set(obj=slope, none_acceptable=True, is_of_type=numbers.Real)
    if (offset is not None):
        settings['Offset'] = try_set(obj=offset, none_acceptable=True, is_of_type=numbers.Real)
    component = Component(name=entrypoint_name, settings=settings, kind='CalibratorTrainer')
    return component
**Description** None :param slope: The slope parameter of f(x) = 1 / (1 + exp(slope * x + offset) (settings). :param offset: The offset parameter of f(x) = 1 / (1 + exp(slope * x + offset) (settings).
src/python/nimbusml/internal/entrypoints/_calibratortrainer_fixedplattcalibrator.py
fixed_platt_calibrator
montehoover/NimbusML
134
python
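For intuition, the component configures a fixed Platt calibration that maps a raw classifier score x to a probability with f(x) = 1 / (1 + exp(slope * x + offset)), per the docstring above. A standalone sketch (not NimbusML code) using the defaults slope=-1.0, offset=0.0:

import math

def platt(x, slope=-1.0, offset=0.0):
    # f(x) = 1 / (1 + exp(slope * x + offset))
    return 1.0 / (1.0 + math.exp(slope * x + offset))

print(platt(0.0))  # 0.5: a score of zero maps to an even probability
print(platt(4.0))  # ~0.982: with slope=-1, large positive scores approach 1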
9901dff9794872b4d99fbbf40d16d14bc2b7c9709bb9610c244e77c694b792f1
def conversion_types(val):
    """Convert the type of val to a string and insert it into the array"""
    if isinstance(val, datetime):
        val = val.strftime('%Y-%m-%d %H:%M:%S')
    return val
Convert the type of val to a string and insert it into the array
aestate/libs/utils.py
conversion_types
canotf/aestate
0
python
dc47c6ebbc76a016e4893ad510b8d7432a71d6c60d593cd08b0f67bc2b911d41
def date_format(time_obj=time, fmt='%Y-%m-%d %H:%M:%S') -> str:
    """Convert a time to a string.

    :param time_obj:
    :param fmt:
    :return:
    """
    _tm = time_obj.time()
    _t = time.localtime(_tm)
    return time.strftime(fmt, _t)
Convert a time to a string :param time_obj: :param fmt: :return:
aestate/libs/utils.py
date_format
canotf/aestate
0
python
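Usage note for date_format above: the default time_obj is the time module itself, so time_obj.time() is time.time(), i.e. the current epoch seconds. A small sketch (the function is repeated so the example is self-contained):

import time

def date_format(time_obj=time, fmt='%Y-%m-%d %H:%M:%S') -> str:
    _tm = time_obj.time()      # epoch seconds; time.time() by default
    _t = time.localtime(_tm)   # convert to local struct_time
    return time.strftime(fmt, _t)

print(date_format())                # e.g. '2024-05-01 09:30:00'
print(date_format(fmt='%Y/%m/%d'))  # date-only format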
6c5b42a89a9532d3adc72f7dc0ae054281eac0488496a8db1a6ef6322153be23
def time_to_datetime(t_time):
    """Convert a timestamp to a datetime"""
    try:
        d_time = datetime.fromtimestamp(t_time)
    except OSError:
        return None
    return d_time
Convert a timestamp to a datetime
aestate/libs/utils.py
time_to_datetime
canotf/aestate
0
python
618f5c3b72106281c7cbded92690fd2012658572a4c9fba35d4cd80526783e86
def get_static_fields(cls):
    """Get the non-default global variables of a class"""
    retD = list(set(dir(cls)).difference(set(BASE_ATTR)))
    return retD
Get the non-default global variables of a class
aestate/libs/utils.py
get_static_fields
canotf/aestate
0
python
9092526a0ec3a5d31bd84a8e809c7d36eb23a2f3a1969d91933f1ac9ed5076d0
def fullname(o):
    """Get the class name of an object"""
    module = o.__class__.__module__
    if ((module is None) or (module == str.__class__.__module__)):
        cls_name = o.__class__.__name__
    else:
        cls_name = ((module + '.') + o.__class__.__name__)
    if (cls_name == 'type'):
        cls_name = ((o.__base__.__module__ + '.') + o.__base__.__name__)
    return cls_name
Get the class name of an object
aestate/libs/utils.py
fullname
canotf/aestate
0
python
b01a65413ac57474517dabb8495775eea28f983262e34434f9f602835f8dc308
@pytest.fixture
def mock_click() -> CliRunner:
    'Fixture for invoking command-line interfaces.'
    return CliRunner()
Fixture for invoking command-line interfaces.
tests/cli/conftest.py
mock_click
aidanmelen/website_checker
1
python
1d081ba177fd415825cc473c49959ba4f32df6977a7c27fd530803064bc49bbb
@pytest.fixture
def mock_checks_network(mocker: MockFixture):
    'Fixture for mocking checks.network.'
    return mocker.patch('website_checker.checks.network')
Fixture for mocking checks.network.
tests/cli/conftest.py
mock_checks_network
aidanmelen/website_checker
1
python
7d6456589febe1e2c49e322e79028ed0b0bfb4298a43087cdd901c28530a8b78
@pytest.fixture
def mock_checks_health(mocker: MockFixture):
    'Fixture for mocking checks.health.'
    return mocker.patch('website_checker.checks.health')
Fixture for mocking checks.health.
tests/cli/conftest.py
mock_checks_health
aidanmelen/website_checker
1
python
c2c883bc2f7bce8153e7e30fd9490946080712c27f80b716fd7c12af66e4789c
@pytest.fixture
def mock_checks_latency(mocker: MockFixture):
    'Fixture for mocking checks.latency.'
    return mocker.patch('website_checker.checks.latency')
Fixture for mocking checks.latency.
tests/cli/conftest.py
mock_checks_latency
aidanmelen/website_checker
1
python
2c56a029df0ab243a96d62f1d1c1b499551a1bf7f419996ea8343461c75470e2
@pytest.fixture(autouse=True, scope='session')
def mock_envvars():
    'Mock environment variables.'
    os.environ['CHECK_NETWORK_URLS'] = 'https://google.com https://wikipedia.org'
    os.environ['CHECK_HEALTH_URLS'] = 'https://google.com'
    os.environ['CHECK_LATENCY_URLS'] = 'https://google.com'
    yield
    del os.environ['CHECK_NETWORK_URLS']
    del os.environ['CHECK_HEALTH_URLS']
    del os.environ['CHECK_LATENCY_URLS']
Mock environment variables.
tests/cli/conftest.py
mock_envvars
aidanmelen/website_checker
1
python
77f08cee950792237901490a6a9e90bfcafc179c5126f6cf7991b37ffdce286b
def get_fieldsets(self, request, obj=None):
    'Remove the is_superuser checkbox from the Admin page, if user is MooringLicensingAdmin and NOT superuser'
    fieldsets = super(UserAdmin, self).get_fieldsets(request, obj)
    if request.user.is_superuser:
        return fieldsets
    fieldsets = deepcopy(fieldsets)
    for fieldset in fieldsets:
        if ('is_superuser' in fieldset[1]['fields']):
            if (type(fieldset[1]['fields']) == tuple):
                fieldset[1]['fields'] = list(fieldset[1]['fields'])
            fieldset[1]['fields'].remove('is_superuser')
            break
    return fieldsets
Remove the is_superuser checkbox from the Admin page, if user is MooringLicensingAdmin and NOT superuser
mooringlicensing/admin.py
get_fieldsets
jawaidm/mooringlicensing
0
python
ead55a8103da8457d19c9246968ec780f3d3d10802bc5904515b8335121ea006
@task
def clean(c):
    'Remove generated files'
    if os.path.isdir(CONFIG['deploy_path']):
        shutil.rmtree(CONFIG['deploy_path'])
        os.makedirs(CONFIG['deploy_path'])
Remove generated files
tasks.py
clean
amelrod/forktest
1
python
24095d77dff402c274dd9f42e19393f029363b5ed89e75467f9ded5057fb59c3
@task
def build(c):
    'Build local version of site'
    pelican_run('-s {settings_base}'.format(**CONFIG))
Build local version of site
tasks.py
build
amelrod/forktest
1
python
70230cd6f7b1fecefa178f8fa577cc16439c41ead91f5cd7549146f9370356cb
@task
def rebuild(c):
    '`build` with the delete switch'
    pelican_run('-d -s {settings_base}'.format(**CONFIG))
`build` with the delete switch
tasks.py
rebuild
amelrod/forktest
1
python
3926b38e5cb7183e82a12622de5725f80885a9d9a35faf7a42da9aaff6449b10
@task
def regenerate(c):
    'Automatically regenerate site upon file modification'
    pelican_run('-r -s {settings_base}'.format(**CONFIG))
Automatically regenerate site upon file modification
tasks.py
regenerate
amelrod/forktest
1
python
56f88a63f3724fa65425588bde81a8ecdd5df76e22b65442a5ee86f5d62aab59
@task
def serve(c):
    'Serve site at http://$HOST:$PORT/ (default is localhost:8000)'
    class AddressReuseTCPServer(RootedHTTPServer):
        allow_reuse_address = True
    server = AddressReuseTCPServer(
        CONFIG['deploy_path'],
        (CONFIG['host'], CONFIG['port']),
        ComplexHTTPRequestHandler)
    sys.stderr.write('Serving at {host}:{port} ...\n'.format(**CONFIG))
    server.serve_forever()
Serve site at http://$HOST:$PORT/ (default is localhost:8000)
tasks.py
serve
amelrod/forktest
1
python
db45921abfa7e68e21e894c185b45d0bca9d4ae4eb35173fd054cb6076195f0b
@task
def reserve(c):
    '`build`, then `serve`'
    build(c)
    serve(c)
`build`, then `serve`
tasks.py
reserve
amelrod/forktest
1
python
00b2f99be88ad2ceacd6d908ae324c45da57543b1d16848de014da637b10ec48
@task
def preview(c):
    'Build production version of site'
    pelican_run('-s {settings_publish}'.format(**CONFIG))
Build production version of site
tasks.py
preview
amelrod/forktest
1
python
f21ee4b0be110f0b5fbcb3b0579c6aca1582dd1b959aed057d4ceab9eee386d8
@task
def livereload(c):
    'Automatically reload browser tab upon file modification.'
    from livereload import Server
    build(c)
    server = Server()
    server.watch(CONFIG['settings_base'], (lambda: build(c)))
    content_file_extensions = ['.md', '.rst']
    for extension in content_file_extensions:
        content_blob = '{0}/**/*{1}'.format(SETTINGS['PATH'], extension)
        server.watch(content_blob, (lambda: build(c)))
    theme_path = SETTINGS['THEME']
    server.watch('{}/templates/*.html'.format(theme_path), (lambda: build(c)))
    static_file_extensions = ['.css', '.js']
    for extension in static_file_extensions:
        static_file = '{0}/static/**/*{1}'.format(theme_path, extension)
        server.watch(static_file, (lambda: build(c)))
    server.serve(host=CONFIG['host'], port=CONFIG['port'], root=CONFIG['deploy_path'])
Automatically reload browser tab upon file modification.
tasks.py
livereload
amelrod/forktest
1
python
fb4eab289811558e6db41a1c5537d34396a58246d05721a14b3d4adda8abc3ef
@task
def publish(c):
    'Publish to production via rsync'
    pelican_run('-s {settings_publish}'.format(**CONFIG))
    c.run(
        'rsync --delete --exclude ".DS_Store" -pthrvz -c '
        '-e "ssh -p {ssh_port}" {} {ssh_user}@{ssh_host}:{ssh_path}'.format(
            (CONFIG['deploy_path'].rstrip('/') + '/'), **CONFIG))
Publish to production via rsync
tasks.py
publish
amelrod/forktest
1
python
aa1d6992878bfbdec0b358ecf658a6ad755da836a2e4846ea44aa83738fe9196
@task
def gh_pages(c):
    'Publish to GitHub Pages'
    preview(c)
    c.run('ghp-import -b {github_pages_branch} -m {commit_message} {deploy_path} -p'.format(**CONFIG))
Publish to GitHub Pages
tasks.py
gh_pages
amelrod/forktest
1
python
d55a47322826027d14846301de3009452305fdcbb3709a6cac8755f946bd10f4
def visitMultiElementOneOf(self, ctx: ShExDocParser.MultiElementOneOfContext):
    "multiElementOneOf: groupTripleExpr ('|' groupTripleExpr)+"
    expressions = []
    for gs in ctx.groupTripleExpr():
        parser = ShexTripleExpressionParser(self.context)
        parser.visit(gs)
        expressions.append(parser.expression)
    self.expression = OneOf(expressions=expressions)
multiElementOneOf: groupTripleExpr ('|' groupTripleExpr)+
pyshexc/parser_impl/shex_oneofshape_parser.py
visitMultiElementOneOf
ericprud/grammar-python-antlr
1
python
d28c8b5086c9a579acc290ac8578411c2d6b4d59a99b7b215bdeab72fd19dc5d
def visitMultiElementGroup(self, ctx: ShExDocParser.MultiElementGroupContext):
    "multiElementGroup: unaryTripleExpr (';' unaryTripleExpr)+ ';'?"
    expressions = []
    for us in ctx.unaryTripleExpr():
        parser = ShexTripleExpressionParser(self.context)
        parser.visit(us)
        expressions.append(parser.expression)
    self.expression = EachOf(expressions=expressions)
multiElementGroup: unaryTripleExpr (';' unaryTripleExpr)+ ';'?
pyshexc/parser_impl/shex_oneofshape_parser.py
visitMultiElementGroup
ericprud/grammar-python-antlr
1
python
86e1d6ea2f56e661ab163ac955d7b2a0847916f8995f7e21caf87237d09efd9a
def visitUnaryTripleExpr(self, ctx: ShExDocParser.UnaryTripleExprContext): " unaryTripleExpr: ('$' tripleExprLabel)? (tripleConstraint | bracketedTripleExpr) | include " if ctx.include(): self.expression = self.context.tripleexprlabel_to_iriref(ctx.include().tripleExprLabel()) else: lbl = (self.context.tripleexprlabel_to_iriref(ctx.tripleExprLabel()) if ctx.tripleExprLabel() else None) if ctx.tripleConstraint(): self.expression = TripleConstraint(lbl) self.visit(ctx.tripleConstraint()) elif ctx.bracketedTripleExpr(): self.visit(ctx.bracketedTripleExpr()) self.expression.id = lbl
unaryTripleExpr: ('$' tripleExprLabel)? (tripleConstraint | bracketedTripleExpr) | include
pyshexc/parser_impl/shex_oneofshape_parser.py
visitUnaryTripleExpr
ericprud/grammar-python-antlr
1
python
def visitUnaryTripleExpr(self, ctx: ShExDocParser.UnaryTripleExprContext): " " if ctx.include(): self.expression = self.context.tripleexprlabel_to_iriref(ctx.include().tripleExprLabel()) else: lbl = (self.context.tripleexprlabel_to_iriref(ctx.tripleExprLabel()) if ctx.tripleExprLabel() else None) if ctx.tripleConstraint(): self.expression = TripleConstraint(lbl) self.visit(ctx.tripleConstraint()) elif ctx.bracketedTripleExpr(): self.visit(ctx.bracketedTripleExpr()) self.expression.id = lbl
def visitUnaryTripleExpr(self, ctx: ShExDocParser.UnaryTripleExprContext): " " if ctx.include(): self.expression = self.context.tripleexprlabel_to_iriref(ctx.include().tripleExprLabel()) else: lbl = (self.context.tripleexprlabel_to_iriref(ctx.tripleExprLabel()) if ctx.tripleExprLabel() else None) if ctx.tripleConstraint(): self.expression = TripleConstraint(lbl) self.visit(ctx.tripleConstraint()) elif ctx.bracketedTripleExpr(): self.visit(ctx.bracketedTripleExpr()) self.expression.id = lbl<|docstring|>unaryTripleExpr: ('$' tripleExprLabel)? (tripleConstraint | bracketedTripleExpr) | include<|endoftext|>
0568e77d158dae6f6fda203404f0f708fe177f296de1c11bfe99154440643497
def visitBracketedTripleExpr(self, ctx: ShExDocParser.BracketedTripleExprContext): " bracketedTripleExpr: '(' tripleExpression ')' cardinality? /* onShapeExpr?*/ annotation* semanticAction* " enc_shape = ShexTripleExpressionParser(self.context) enc_shape.visit(ctx.tripleExpression()) self.expression = enc_shape.expression self._card_annotations_and_semacts(ctx)
bracketedTripleExpr: '(' tripleExpression ')' cardinality? /* onShapeExpr?*/ annotation* semanticAction*
pyshexc/parser_impl/shex_oneofshape_parser.py
visitBracketedTripleExpr
ericprud/grammar-python-antlr
1
python
def visitBracketedTripleExpr(self, ctx: ShExDocParser.BracketedTripleExprContext): " " enc_shape = ShexTripleExpressionParser(self.context) enc_shape.visit(ctx.tripleExpression()) self.expression = enc_shape.expression self._card_annotations_and_semacts(ctx)
def visitBracketedTripleExpr(self, ctx: ShExDocParser.BracketedTripleExprContext): " " enc_shape = ShexTripleExpressionParser(self.context) enc_shape.visit(ctx.tripleExpression()) self.expression = enc_shape.expression self._card_annotations_and_semacts(ctx)<|docstring|>bracketedTripleExpr: '(' tripleExpression ')' cardinality? /* onShapeExpr?*/ annotation* semanticAction*<|endoftext|>
33efb8345cab8d4ee7ddffafd2979915ad25114616c0276202be00673f31532c
def visitTripleConstraint(self, ctx: ShExDocParser.TripleConstraintContext): ' tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticAction ' if ctx.senseFlags(): self.visit(ctx.senseFlags()) self.visit(ctx.predicate()) self.visit(ctx.inlineShapeExpression()) self._card_annotations_and_semacts(ctx)
tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticAction
pyshexc/parser_impl/shex_oneofshape_parser.py
visitTripleConstraint
ericprud/grammar-python-antlr
1
python
def visitTripleConstraint(self, ctx: ShExDocParser.TripleConstraintContext): ' ' if ctx.senseFlags(): self.visit(ctx.senseFlags()) self.visit(ctx.predicate()) self.visit(ctx.inlineShapeExpression()) self._card_annotations_and_semacts(ctx)
def visitTripleConstraint(self, ctx: ShExDocParser.TripleConstraintContext): ' ' if ctx.senseFlags(): self.visit(ctx.senseFlags()) self.visit(ctx.predicate()) self.visit(ctx.inlineShapeExpression()) self._card_annotations_and_semacts(ctx)<|docstring|>tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticAction<|endoftext|>
65f5ee8e5defcc1f6cb66529a670a2e88391adb64281a666dd53945971456f2b
def visitStarCardinality(self, ctx: ShExDocParser.StarCardinalityContext): " '*' " self.expression.min = 0 self.expression.max = (- 1)
'*'
pyshexc/parser_impl/shex_oneofshape_parser.py
visitStarCardinality
ericprud/grammar-python-antlr
1
python
def visitStarCardinality(self, ctx: ShExDocParser.StarCardinalityContext): " " self.expression.min = 0 self.expression.max = (- 1)
def visitStarCardinality(self, ctx: ShExDocParser.StarCardinalityContext): " " self.expression.min = 0 self.expression.max = (- 1)<|docstring|>'*'<|endoftext|>
2637ba541301de7631bb4f05545eed08589bd86972fbed8dd9d0548b4e38467d
def visitPlusCardinality(self, ctx: ShExDocParser.PlusCardinalityContext): " '+' " self.expression.min = 1 self.expression.max = (- 1)
'+'
pyshexc/parser_impl/shex_oneofshape_parser.py
visitPlusCardinality
ericprud/grammar-python-antlr
1
python
def visitPlusCardinality(self, ctx: ShExDocParser.PlusCardinalityContext): " " self.expression.min = 1 self.expression.max = (- 1)
def visitPlusCardinality(self, ctx: ShExDocParser.PlusCardinalityContext): " " self.expression.min = 1 self.expression.max = (- 1)<|docstring|>'+'<|endoftext|>
b6a9af22f40692e5cece05d2a8f1d22b933934e891784564bd159bfa59db82bd
def visitOptionalCardinality(self, ctx: ShExDocParser.OptionalCardinalityContext): " '?' " self.expression.min = 0 self.expression.max = 1
'?'
pyshexc/parser_impl/shex_oneofshape_parser.py
visitOptionalCardinality
ericprud/grammar-python-antlr
1
python
def visitOptionalCardinality(self, ctx: ShExDocParser.OptionalCardinalityContext): " " self.expression.min = 0 self.expression.max = 1
def visitOptionalCardinality(self, ctx: ShExDocParser.OptionalCardinalityContext): " " self.expression.min = 0 self.expression.max = 1<|docstring|>'?'<|endoftext|>
0699f9a9a50d639939746785b9cffb3df25896fec65145e6eba16884c294e4ea
def visitExactRange(self, ctx: ShExDocParser.ExactRangeContext): " repeatRange: '{' INTEGER '}' #exactRange " self.expression.min = int(ctx.INTEGER().getText()) self.expression.max = self.expression.min
repeatRange: '{' INTEGER '}' #exactRange
pyshexc/parser_impl/shex_oneofshape_parser.py
visitExactRange
ericprud/grammar-python-antlr
1
python
def visitExactRange(self, ctx: ShExDocParser.ExactRangeContext): " " self.expression.min = int(ctx.INTEGER().getText()) self.expression.max = self.expression.min
def visitExactRange(self, ctx: ShExDocParser.ExactRangeContext): " " self.expression.min = int(ctx.INTEGER().getText()) self.expression.max = self.expression.min<|docstring|>repeatRange: '{' INTEGER '}' #exactRange<|endoftext|>
d92f4f308fcfd0eab37e043dec1271f315f9fd3d2a281e72f91b4ce0b9ac834f
def visitMinMaxRange(self, ctx: ShExDocParser.MinMaxRangeContext): " repeatRange: '{' INTEGER ',' (INTEGER | UNBOUNDED)? '}' " self.expression.min = int(ctx.INTEGER(0).getText()) self.expression.max = (int(ctx.INTEGER(1).getText()) if (len(ctx.INTEGER()) > 1) else (- 1))
repeatRange: '{' INTEGER ',' (INTEGER | UNBOUNDED)? '}'
pyshexc/parser_impl/shex_oneofshape_parser.py
visitMinMaxRange
ericprud/grammar-python-antlr
1
python
def visitMinMaxRange(self, ctx: ShExDocParser.MinMaxRangeContext): " " self.expression.min = int(ctx.INTEGER(0).getText()) self.expression.max = (int(ctx.INTEGER(1).getText()) if (len(ctx.INTEGER()) > 1) else (- 1))
def visitMinMaxRange(self, ctx: ShExDocParser.MinMaxRangeContext): " " self.expression.min = int(ctx.INTEGER(0).getText()) self.expression.max = (int(ctx.INTEGER(1).getText()) if (len(ctx.INTEGER()) > 1) else (- 1))<|docstring|>repeatRange: '{' INTEGER ',' (INTEGER | UNBOUNDED)? '}'<|endoftext|>
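Taken together, the five cardinality visitors above map ShEx repetition syntax onto (min, max) pairs on the current triple expression, with -1 standing for unbounded; a summary of the mapping they implement:

    # (min, max) produced by each cardinality visitor; -1 means unbounded.
    CARDINALITY_EXAMPLES = {
        '*':     (0, -1),  # visitStarCardinality
        '+':     (1, -1),  # visitPlusCardinality
        '?':     (0, 1),   # visitOptionalCardinality
        '{3}':   (3, 3),   # visitExactRange
        '{2,5}': (2, 5),   # visitMinMaxRange, both bounds given
        '{2,}':  (2, -1),  # visitMinMaxRange, upper bound omitted
    }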
12f7118967862c78f1ab68e4047a011a50f0eb52c1d4c8de976807cbaa183837
def visitSenseFlags(self, ctx: ShExDocParser.SenseFlagsContext): " senseFlags: '^' " self.expression.inverse = True
senseFlags: '^'
pyshexc/parser_impl/shex_oneofshape_parser.py
visitSenseFlags
ericprud/grammar-python-antlr
1
python
def visitSenseFlags(self, ctx: ShExDocParser.SenseFlagsContext): " " self.expression.inverse = True
def visitSenseFlags(self, ctx: ShExDocParser.SenseFlagsContext): " " self.expression.inverse = True<|docstring|>senseFlags: '^'<|endoftext|>
1b9e16c0c3c658a507674a17df4472dfca3dd0f69710d70423eb1a7dc6a24d72
def visitPredicate(self, ctx: ShExDocParser.PredicateContext): ' predicate: iri | rdfType ' self.expression.predicate = self.context.predicate_to_IRI(ctx)
predicate: iri | rdfType
pyshexc/parser_impl/shex_oneofshape_parser.py
visitPredicate
ericprud/grammar-python-antlr
1
python
def visitPredicate(self, ctx: ShExDocParser.PredicateContext): ' ' self.expression.predicate = self.context.predicate_to_IRI(ctx)
def visitPredicate(self, ctx: ShExDocParser.PredicateContext): ' ' self.expression.predicate = self.context.predicate_to_IRI(ctx)<|docstring|>predicate: iri | rdfType<|endoftext|>
8a05d7c8e02347eb3ef8caf56d4c28fb589c33303a7f8a253cbce9a73021faf1
def visitInlineShapeExpression(self, ctx: ShExDocParser.InlineShapeExpressionContext): ' inlineShapeExpression: inlineShapeOr ' expr_parser = ShexShapeExpressionParser(self.context) expr_parser.visitChildren(ctx) self.expression.valueExpr = expr_parser.expr
inlineShapeExpression: inlineShapeOr
pyshexc/parser_impl/shex_oneofshape_parser.py
visitInlineShapeExpression
ericprud/grammar-python-antlr
1
python
def visitInlineShapeExpression(self, ctx: ShExDocParser.InlineShapeExpressionContext): ' ' expr_parser = ShexShapeExpressionParser(self.context) expr_parser.visitChildren(ctx) self.expression.valueExpr = expr_parser.expr
def visitInlineShapeExpression(self, ctx: ShExDocParser.InlineShapeExpressionContext): ' ' expr_parser = ShexShapeExpressionParser(self.context) expr_parser.visitChildren(ctx) self.expression.valueExpr = expr_parser.expr<|docstring|>inlineShapeExpression: inlineShapeOr<|endoftext|>
c5620ae6c321f70693a6a01c012896cc0e569ffcfeff2805c311eeacd7acaed9
def plugin_cli_arguments(ctlr, parser, plugin_config): '\n set up cli arguments for the plugin\n ' plugin_class = ctlr.get_plugin_class(plugin_config.get('type')) config = copy.deepcopy(plugin_config) confu.schema.apply_defaults(plugin_class.ConfigSchema(), config) confu_cli_args = plugin_class.confu_cli_args_cls()(parser, plugin_class.ConfigSchema().config, config.get('config')) if hasattr(plugin_class, 'add_arguments'): plugin_class.add_arguments(parser, config.get('config'), confu_cli_args) if (not confu_cli_args.routes): confu_cli_args.add(parser)
set up cli arguments for the plugin
src/ctl/__init__.py
plugin_cli_arguments
grizz/ctl
0
python
def plugin_cli_arguments(ctlr, parser, plugin_config): '\n \n ' plugin_class = ctlr.get_plugin_class(plugin_config.get('type')) config = copy.deepcopy(plugin_config) confu.schema.apply_defaults(plugin_class.ConfigSchema(), config) confu_cli_args = plugin_class.confu_cli_args_cls()(parser, plugin_class.ConfigSchema().config, config.get('config')) if hasattr(plugin_class, 'add_arguments'): plugin_class.add_arguments(parser, config.get('config'), confu_cli_args) if (not confu_cli_args.routes): confu_cli_args.add(parser)
def plugin_cli_arguments(ctlr, parser, plugin_config): '\n \n ' plugin_class = ctlr.get_plugin_class(plugin_config.get('type')) config = copy.deepcopy(plugin_config) confu.schema.apply_defaults(plugin_class.ConfigSchema(), config) confu_cli_args = plugin_class.confu_cli_args_cls()(parser, plugin_class.ConfigSchema().config, config.get('config')) if hasattr(plugin_class, 'add_arguments'): plugin_class.add_arguments(parser, config.get('config'), confu_cli_args) if (not confu_cli_args.routes): confu_cli_args.add(parser)<|docstring|>set up cli arguments for the plugin<|endoftext|>
e99ac50dac46f99bb7ff98759cfeb8657c742b3b7ae516e817894138cdc6da7c
def read_config(schema, config_dir, config_name='config', ctx=None): '\n read a config file from config_dir\n ' conf_path = os.path.expanduser(config_dir) if (not os.path.exists(conf_path)): raise OSError(f'config dir not found at {conf_path}') for (codec, filename) in munge.find_datafile(config_name, conf_path): if tmpl: engine = tmpl.get_engine('jinja2')(search_path=os.path.dirname(filename)) engine.engine.undefined = IgnoreUndefined engine.engine.filters['escape_regex'] = filter_escape_regex data = codec().loads(engine._render(src=os.path.basename(filename), env=ctx.tmpl['env'])) ctx.tmpl.update(engine=engine) else: with open(filename) as fobj: data = codec().load(fobj) meta = dict(config_dir=config_dir, config_file=filename) return confu.config.Config(schema, data, meta) raise OSError(f'config dir not found at {conf_path}')
read a config file from config_dir
src/ctl/__init__.py
read_config
grizz/ctl
0
python
def read_config(schema, config_dir, config_name='config', ctx=None): '\n \n ' conf_path = os.path.expanduser(config_dir) if (not os.path.exists(conf_path)): raise OSError(f'config dir not found at {conf_path}') for (codec, filename) in munge.find_datafile(config_name, conf_path): if tmpl: engine = tmpl.get_engine('jinja2')(search_path=os.path.dirname(filename)) engine.engine.undefined = IgnoreUndefined engine.engine.filters['escape_regex'] = filter_escape_regex data = codec().loads(engine._render(src=os.path.basename(filename), env=ctx.tmpl['env'])) ctx.tmpl.update(engine=engine) else: with open(filename) as fobj: data = codec().load(fobj) meta = dict(config_dir=config_dir, config_file=filename) return confu.config.Config(schema, data, meta) raise OSError(f'config dir not found at {conf_path}')
def read_config(schema, config_dir, config_name='config', ctx=None): '\n \n ' conf_path = os.path.expanduser(config_dir) if (not os.path.exists(conf_path)): raise OSError(f'config dir not found at {conf_path}') for (codec, filename) in munge.find_datafile(config_name, conf_path): if tmpl: engine = tmpl.get_engine('jinja2')(search_path=os.path.dirname(filename)) engine.engine.undefined = IgnoreUndefined engine.engine.filters['escape_regex'] = filter_escape_regex data = codec().loads(engine._render(src=os.path.basename(filename), env=ctx.tmpl['env'])) ctx.tmpl.update(engine=engine) else: with open(filename) as fobj: data = codec().load(fobj) meta = dict(config_dir=config_dir, config_file=filename) return confu.config.Config(schema, data, meta) raise OSError(f'config dir not found at {conf_path}')<|docstring|>read a config file from config_dir<|endoftext|>
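A hedged usage sketch for read_config; the schema class and ctx object are assumptions standing in for whatever the caller supplies:

    # Hypothetical call: load ~/.myapp/config.{yaml,json,...} against a
    # confu schema; raises OSError if the directory does not exist.
    config = read_config(MyAppSchema(), '~/.myapp', config_name='config', ctx=ctx)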
1983c600647ea94c5e39871a6841869cd977847554059e337f8d551f6ce3fbf4
def argv_to_grainy_namespace(operation, args=[]): '\n create grainy permissioning namespace from argv\n ' namespace = ['ctl'] if operation: namespace.append(operation) for arg in args: if (arg[0] == '-'): continue namespace.append(arg) return grainy.core.Namespace(namespace)
create grainy permissioning namespace from argv
src/ctl/__init__.py
argv_to_grainy_namespace
grizz/ctl
0
python
def argv_to_grainy_namespace(operation, args=[]): '\n \n ' namespace = ['ctl'] if operation: namespace.append(operation) for arg in args: if (arg[0] == '-'): continue namespace.append(arg) return grainy.core.Namespace(namespace)
def argv_to_grainy_namespace(operation, args=[]): '\n \n ' namespace = ['ctl'] if operation: namespace.append(operation) for arg in args: if (arg[0] == '-'): continue namespace.append(arg) return grainy.core.Namespace(namespace)<|docstring|>create grainy permissioning namespace from argv<|endoftext|>
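For illustration, a hypothetical call to argv_to_grainy_namespace; note how flag arguments are skipped:

    ns = argv_to_grainy_namespace('plugin_copy', ['src', '-v', 'dst'])
    # builds from ['ctl', 'plugin_copy', 'src', 'dst']; with grainy's usual
    # dotted rendering this is the namespace ctl.plugin_copy.src.dst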
080ce971e2189dc913fbfe8369b93aa9bbca25ca98a5f3f2346083c03c5955bb
def update_options(self, kwargs): "\n updates config based on passed options\n\n too complicated for now, we can just make working_dir an option and simplify this cluster:\n if config_dir is passed, it will be used to load config from\n if home is passed, it will update config::home\n if home is passed and config_dir isn't set, will try to load config from there\n config_dir and config_file cannot both be passed\n if config_file is passed, home or config::home must be set\n " opt = self.__class__.pop_options(kwargs) if (opt.get('debug', None) is not None): self.debug = opt['debug'] if (opt.get('verbose', None) is not None): self.verbose = opt['verbose'] if (opt.get('quiet', None) is not None): self.quiet = opt['quiet'] if opt.get('home', None): if ('config_dir' in kwargs): raise ValueError('config_dir and home are mutually exclusive') self._new_home(opt['home']) elif kwargs.get('config_dir'): self._new_home(kwargs['config_dir']) elif (not self.home): self.find_home() self.init()
updates config based on passed options too complicated for now, we can just make working_dir an option and simplify this cluster: if config_dir is passed, it will be used to load config from if home is passed, it will update config::home if home is passed and config_dir isn't set, will try to load config from there config_dir and config_file cannot both be passed if config_file is passed, home or config::home must be set
src/ctl/__init__.py
update_options
grizz/ctl
0
python
def update_options(self, kwargs): "\n updates config based on passed options\n\n too complicated for now, we can just make working_dir an option and simplify this cluster:\n if config_dir is passed, it will be used to load config from\n if home is passed, it will update config::home\n if home is passed and config_dir isn't set, will try to load config from there\n config_dir and config_file cannot both be passed\n if config_file is passed, home or config::home must be set\n " opt = self.__class__.pop_options(kwargs) if (opt.get('debug', None) is not None): self.debug = opt['debug'] if (opt.get('verbose', None) is not None): self.verbose = opt['verbose'] if (opt.get('quiet', None) is not None): self.quiet = opt['quiet'] if opt.get('home', None): if ('config_dir' in kwargs): raise ValueError('config_dir and home are mutually exclusive') self._new_home(opt['home']) elif kwargs.get('config_dir'): self._new_home(kwargs['config_dir']) elif (not self.home): self.find_home() self.init()
def update_options(self, kwargs): "\n updates config based on passed options\n\n too complicated for now, we can just make working_dir an option and simplify this cluster:\n if config_dir is passed, it will be used to load config from\n if home is passed, it will update config::home\n if home is passed and config_dir isn't set, will try to load config from there\n config_dir and config_file cannot both be passed\n if config_file is passed, home or config::home must be set\n " opt = self.__class__.pop_options(kwargs) if (opt.get('debug', None) is not None): self.debug = opt['debug'] if (opt.get('verbose', None) is not None): self.verbose = opt['verbose'] if (opt.get('quiet', None) is not None): self.quiet = opt['quiet'] if opt.get('home', None): if ('config_dir' in kwargs): raise ValueError('config_dir and home are mutually exclusive') self._new_home(opt['home']) elif kwargs.get('config_dir'): self._new_home(kwargs['config_dir']) elif (not self.home): self.find_home() self.init()<|docstring|>updates config based on passed options too complicated for now, we can just make working_dir an option and simplify this cluster: if config_dir is passed, it will be used to load config from if home is passed, it will update config::home if home is passed and config_dir isn't set, will try to load config from there config_dir and config_file cannot both be passed if config_file is passed, home or config::home must be set<|endoftext|>
93612fd22cdbd1ae60a5f4362e92442aed7ec1b8d323b4173576596149c7c8e6
def init(self): '\n call after updating options\n '
call after updating options
src/ctl/__init__.py
init
grizz/ctl
0
python
def init(self): '\n \n '
def init(self): '\n \n '<|docstring|>call after updating options<|endoftext|>
ac9af169ff1d36148baf2419ec08a046ab62e7d4f37fee3a579051ceccaf9e28
def init_plugin_manager(self): "\n Initialize the plugin manager and\n set its appropriate search paths\n " plugin_path = self.ctx.config.get_nested('ctl', 'plugin_path') if plugin_path: plugin.searchpath = plugin_path else: plugin.searchpath = [os.path.join(self.ctx.home, 'plugins')]
Initialize the plugin manager and set its appropriate search paths
src/ctl/__init__.py
init_plugin_manager
grizz/ctl
0
python
def init_plugin_manager(self): "\n Initialize the plugin manager and\n set its appropriate search paths\n " plugin_path = self.ctx.config.get_nested('ctl', 'plugin_path') if plugin_path: plugin.searchpath = plugin_path else: plugin.searchpath = [os.path.join(self.ctx.home, 'plugins')]
def init_plugin_manager(self): "\n Initialize the plugin manager and\n set its appropriate search paths\n " plugin_path = self.ctx.config.get_nested('ctl', 'plugin_path') if plugin_path: plugin.searchpath = plugin_path else: plugin.searchpath = [os.path.join(self.ctx.home, 'plugins')]<|docstring|>Initialize the plugin manager and set its appropriate search paths<|endoftext|>
24af8853af5e84550a682ddc35e8d852b21aac1b3de19febe881e465666de903
def init_logging(self): '\n Apply python logging config and create `log` and `usage_log`\n properties\n ' set_pylogger_config(self.ctx.config.get_nested('ctl', 'log')) self.log = Log('ctl') self.usage_log = Log('usage')
Apply python logging config and create `log` and `usage_log` properties
src/ctl/__init__.py
init_logging
grizz/ctl
0
python
def init_logging(self): '\n Apply python logging config and create `log` and `usage_log`\n properties\n ' set_pylogger_config(self.ctx.config.get_nested('ctl', 'log')) self.log = Log('ctl') self.usage_log = Log('usage')
def init_logging(self): '\n Apply python logging config and create `log` and `usage_log`\n properties\n ' set_pylogger_config(self.ctx.config.get_nested('ctl', 'log')) self.log = Log('ctl') self.usage_log = Log('usage')<|docstring|>Apply python logging config and create `log` and `usage_log` properties<|endoftext|>
62531e6f333d4156ceac041dc0762dda9578075200122f20a74a1ebab156a38e
def init_permissions(self): '\n Initialize permissions for ctl usage\n ' self.permissions = PermissionSet({row.get('namespace'): int_flags(row.get('permission')) for row in self.ctx.config.get_nested('ctl', 'permissions')})
Initialize permissions for ctl usage
src/ctl/__init__.py
init_permissions
grizz/ctl
0
python
def init_permissions(self): '\n \n ' self.permissions = PermissionSet({row.get('namespace'): int_flags(row.get('permission')) for row in self.ctx.config.get_nested('ctl', 'permissions')})
def init_permissions(self): '\n \n ' self.permissions = PermissionSet({row.get('namespace'): int_flags(row.get('permission')) for row in self.ctx.config.get_nested('ctl', 'permissions')})<|docstring|>Initialize permissions for ctl usage<|endoftext|>
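The rows init_permissions iterates come from the ctl.permissions config section; a hypothetical example of that shape:

    # Hypothetical `ctl.permissions` rows: int_flags() turns the string
    # flags into the integer bitmask grainy's PermissionSet expects.
    permissions_rows = [
        {'namespace': 'ctl', 'permission': 'crud'},
        {'namespace': 'ctl.plugin_copy', 'permission': 'r'},
    ]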
f117051aae0df98a07a0fa98343364e5cffb6b080df342c1047dfd8c97ef0c66
def init_plugins(self): '\n Instantiate plugins\n ' plugin.import_external() plugins_config = self.ctx.config.get_nested('ctl', 'plugins') if plugins_config: plugin.instantiate(plugins_config, self)
Instantiate plugins
src/ctl/__init__.py
init_plugins
grizz/ctl
0
python
def init_plugins(self): '\n \n ' plugin.import_external() plugins_config = self.ctx.config.get_nested('ctl', 'plugins') if plugins_config: plugin.instantiate(plugins_config, self)
def init_plugins(self): '\n \n ' plugin.import_external() plugins_config = self.ctx.config.get_nested('ctl', 'plugins') if plugins_config: plugin.instantiate(plugins_config, self)<|docstring|>Instantiate plugins<|endoftext|>
acd69c6437f458191a6400fb820dc19ad8140412bf77a26ef4770c803c497545
def expose_plugin_vars(self): '\n Checks whether configured plugins have\n the `expose_vars` classmethod.\n\n If they do, those vars will be exposed to the context\n template environment\n\n This can be done without having to instantiate the plugins\n ' if ('plugin' not in self.ctx.tmpl['env']): self.ctx.tmpl['env']['plugin'] = {} for plugin_config in self.config.get_nested('ctl', 'plugins'): plugin_class = self.get_plugin_class(plugin_config['type']) name = plugin_config.get('name') if hasattr(plugin_class, 'expose_vars'): env = self.ctx.tmpl['env']['plugin'].get(name, {}) errors = plugin_class.expose_vars(env, plugin_config.get('config', {})) for (filepath, error) in list(errors.items()): self.log.debug(f'expose_vars: {filepath}: {error}') self.ctx.tmpl['env']['plugin'][name] = env
Checks whether configured plugins have the `expose_vars` classmethod. If they do, those vars will be exposed to the context template environment This can be done without having to instantiate the plugins
src/ctl/__init__.py
expose_plugin_vars
grizz/ctl
0
python
def expose_plugin_vars(self): '\n Checks whether configured plugins have\n the `expose_vars` classmethod.\n\n If they do, those vars will be exposed to the context\n template environment\n\n This can be done without having to instantiate the plugins\n ' if ('plugin' not in self.ctx.tmpl['env']): self.ctx.tmpl['env']['plugin'] = {} for plugin_config in self.config.get_nested('ctl', 'plugins'): plugin_class = self.get_plugin_class(plugin_config['type']) name = plugin_config.get('name') if hasattr(plugin_class, 'expose_vars'): env = self.ctx.tmpl['env']['plugin'].get(name, {}) errors = plugin_class.expose_vars(env, plugin_config.get('config', {})) for (filepath, error) in list(errors.items()): self.log.debug(f'expose_vars: {filepath}: {error}') self.ctx.tmpl['env']['plugin'][name] = env
def expose_plugin_vars(self): '\n Checks whether configured plugins have\n the `expose_vars` classmethod.\n\n If they do, those vars will be exposed to the context\n template environment\n\n This can be done without having to instantiate the plugins\n ' if ('plugin' not in self.ctx.tmpl['env']): self.ctx.tmpl['env']['plugin'] = {} for plugin_config in self.config.get_nested('ctl', 'plugins'): plugin_class = self.get_plugin_class(plugin_config['type']) name = plugin_config.get('name') if hasattr(plugin_class, 'expose_vars'): env = self.ctx.tmpl['env']['plugin'].get(name, {}) errors = plugin_class.expose_vars(env, plugin_config.get('config', {})) for (filepath, error) in list(errors.items()): self.log.debug(f'expose_vars: {filepath}: {error}') self.ctx.tmpl['env']['plugin'][name] = env<|docstring|>Checks whether configured plugins have the `expose_vars` classmethod. If they do, those vars will be exposed to the context template environment This can be done without having to instantiate the plugins<|endoftext|>
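A sketch of the classmethod contract expose_plugin_vars probes for, with the signature inferred from the call site above; the plugin name and variable are made up:

    class ExamplePlugin:
        @classmethod
        def expose_vars(cls, env, plugin_config):
            # populate template vars; stored under ctx.tmpl['env']['plugin'][name]
            env['greeting'] = 'hello'
            # return a filepath -> error map; empty when nothing failed
            return {}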
07c0706fa9f05f2d4a67ff97f4fbc589da2bc897da54e53b36076f1082b9cb12
def get_plugin_class(self, name): '\n get plugin class\n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_plugin_class(name)
get plugin class
src/ctl/__init__.py
get_plugin_class
grizz/ctl
0
python
def get_plugin_class(self, name): '\n \n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_plugin_class(name)
def get_plugin_class(self, name): '\n \n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_plugin_class(name)<|docstring|>get plugin class<|endoftext|>
ad197c66360122bd380edf349cbccaffa7008769665b0a1a2f2942e236893c37
def get_plugin(self, name): '\n get configured plugin by name\n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_instance(name, self)
get configured plugin by name
src/ctl/__init__.py
get_plugin
grizz/ctl
0
python
def get_plugin(self, name): '\n \n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_instance(name, self)
def get_plugin(self, name): '\n \n ' self.check_permissions(argv_to_grainy_namespace(name), 'r') return plugin.get_instance(name, self)<|docstring|>get configured plugin by name<|endoftext|>
f72ef9cfce8dc0312f54f8f0f0dc6e267e780f97e0198c3fa1f597edd6a55209
def __setup_user_group(self): '\n Creates PXF user with the required groups and bash as default shell\n ' import params User(params.pxf_user, groups=[params.hdfs_superuser_group, params.user_group, params.tomcat_group], shell='/bin/bash')
Creates PXF user with the required groups and bash as default shell
ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
__setup_user_group
MacgradyHuang/ApacheAmbari
25
python
def __setup_user_group(self): '\n \n ' import params User(params.pxf_user, groups=[params.hdfs_superuser_group, params.user_group, params.tomcat_group], shell='/bin/bash')
def __setup_user_group(self): '\n \n ' import params User(params.pxf_user, groups=[params.hdfs_superuser_group, params.user_group, params.tomcat_group], shell='/bin/bash')<|docstring|>Creates PXF user with the required groups and bash as default shell<|endoftext|>
c519dfbf3aae21eb40c14bd311ae77d914f05dcb3a19cf468a7f02da8e1cedb4
def __generate_config_files(self): '\n Generates pxf-env.sh file from jinja template and sets the classpath for HDP\n ' import params import shutil hdp_stack = 'HDP' File('{0}/pxf-env.sh'.format(params.pxf_conf_dir), content=Template('pxf-env.j2')) if (params.stack_name == hdp_stack): shutil.copy2('{0}/pxf-privatehdp.classpath'.format(params.pxf_conf_dir), '{0}/pxf-private.classpath'.format(params.pxf_conf_dir)) File('{0}/pxf-public.classpath'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-public-classpath']['content'].lstrip()) File('{0}/pxf-profiles.xml'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-profiles']['content'].lstrip()) XmlConfig('pxf-site.xml', conf_dir=params.pxf_conf_dir, configurations=params.config['configurations']['pxf-site'], configuration_attributes=params.config['configurationAttributes']['pxf-site'])
Generates pxf-env.sh file from jinja template and sets the classpath for HDP
ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
__generate_config_files
MacgradyHuang/ApacheAmbari
25
python
def __generate_config_files(self): '\n \n ' import params import shutil hdp_stack = 'HDP' File('{0}/pxf-env.sh'.format(params.pxf_conf_dir), content=Template('pxf-env.j2')) if (params.stack_name == hdp_stack): shutil.copy2('{0}/pxf-privatehdp.classpath'.format(params.pxf_conf_dir), '{0}/pxf-private.classpath'.format(params.pxf_conf_dir)) File('{0}/pxf-public.classpath'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-public-classpath']['content'].lstrip()) File('{0}/pxf-profiles.xml'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-profiles']['content'].lstrip()) XmlConfig('pxf-site.xml', conf_dir=params.pxf_conf_dir, configurations=params.config['configurations']['pxf-site'], configuration_attributes=params.config['configurationAttributes']['pxf-site'])
def __generate_config_files(self): '\n \n ' import params import shutil hdp_stack = 'HDP' File('{0}/pxf-env.sh'.format(params.pxf_conf_dir), content=Template('pxf-env.j2')) if (params.stack_name == hdp_stack): shutil.copy2('{0}/pxf-privatehdp.classpath'.format(params.pxf_conf_dir), '{0}/pxf-private.classpath'.format(params.pxf_conf_dir)) File('{0}/pxf-public.classpath'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-public-classpath']['content'].lstrip()) File('{0}/pxf-profiles.xml'.format(params.pxf_conf_dir), content=params.config['configurations']['pxf-profiles']['content'].lstrip()) XmlConfig('pxf-site.xml', conf_dir=params.pxf_conf_dir, configurations=params.config['configurations']['pxf-site'], configuration_attributes=params.config['configurationAttributes']['pxf-site'])<|docstring|>Generates pxf-env.sh file from jinja template and sets the classpath for HDP<|endoftext|>
8e41489b8e26e98210b8ed1be1ca4e712d8159cce1df06bf8681f2b1abd256f0
def __grant_permissions(self): '\n Grants permission to pxf:pxf for PXF instance directory\n ' import params Directory(params.pxf_instance_dir, owner=params.pxf_user, group=params.pxf_group, create_parents=True)
Grants permission to pxf:pxf for PXF instance directory
ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
__grant_permissions
MacgradyHuang/ApacheAmbari
25
python
def __grant_permissions(self): '\n \n ' import params Directory(params.pxf_instance_dir, owner=params.pxf_user, group=params.pxf_group, create_parents=True)
def __grant_permissions(self): '\n \n ' import params Directory(params.pxf_instance_dir, owner=params.pxf_user, group=params.pxf_group, create_parents=True)<|docstring|>Grants permission to pxf:pxf for PXF instance directory<|endoftext|>
b2a30d95efc6e7eef5d6c11853e8536e1d1faa1425c9ce8ba26386bce7030ff3
def _sample_decode(self, model, global_step, sess, src_placeholder, batch_size_placeholder, eval_data, summary_writer): 'Pick a sentence and decode.' decode_ids = np.random.randint(low=0, high=(len(eval_data) - 1), size=1) sample_data = [] for decode_id in decode_ids: sample_data.append(eval_data[decode_id]) iterator_feed_dict = {src_placeholder: sample_data, batch_size_placeholder: len(decode_ids)} sess.run(model.iterator.initializer, feed_dict=iterator_feed_dict) (ncm_outputs, infer_summary) = model.decode(sess) for (i, decode_id) in enumerate(decode_ids): log.print_out(' # {}'.format(decode_id)) output = ncm_outputs[i] if ((self.config.beam_width > 0) and self._consider_beam()): output = output[0] translation = ncm_utils.get_translation(output, sent_id=0) delimited_sample = eval_data[decode_id].split('\t') (utterances, topic) = (delimited_sample[:(- 1)], delimited_sample[(- 1)]) (sources, target) = (utterances[:(- 1)], utterances[(- 1)]) log.print_out(' sources:') for (t, src) in enumerate(sources): log.print_out(' @{} {}'.format((t + 1), src)) log.print_out(' topic: {}'.format(topic)) log.print_out(' resp: {}'.format(target)) log.print_out((b' generated: ' + translation)) if (infer_summary is not None): summary_writer.add_summary(infer_summary, global_step)
Pick a sentence and decode.
thred/models/thred/thred_wrapper.py
_sample_decode
panxuemiao111/THRED
102
python
def _sample_decode(self, model, global_step, sess, src_placeholder, batch_size_placeholder, eval_data, summary_writer): decode_ids = np.random.randint(low=0, high=(len(eval_data) - 1), size=1) sample_data = [] for decode_id in decode_ids: sample_data.append(eval_data[decode_id]) iterator_feed_dict = {src_placeholder: sample_data, batch_size_placeholder: len(decode_ids)} sess.run(model.iterator.initializer, feed_dict=iterator_feed_dict) (ncm_outputs, infer_summary) = model.decode(sess) for (i, decode_id) in enumerate(decode_ids): log.print_out(' # {}'.format(decode_id)) output = ncm_outputs[i] if ((self.config.beam_width > 0) and self._consider_beam()): output = output[0] translation = ncm_utils.get_translation(output, sent_id=0) delimited_sample = eval_data[decode_id].split('\t') (utterances, topic) = (delimited_sample[:(- 1)], delimited_sample[(- 1)]) (sources, target) = (utterances[:(- 1)], utterances[(- 1)]) log.print_out(' sources:') for (t, src) in enumerate(sources): log.print_out(' @{} {}'.format((t + 1), src)) log.print_out(' topic: {}'.format(topic)) log.print_out(' resp: {}'.format(target)) log.print_out((b' generated: ' + translation)) if (infer_summary is not None): summary_writer.add_summary(infer_summary, global_step)
def _sample_decode(self, model, global_step, sess, src_placeholder, batch_size_placeholder, eval_data, summary_writer): decode_ids = np.random.randint(low=0, high=(len(eval_data) - 1), size=1) sample_data = [] for decode_id in decode_ids: sample_data.append(eval_data[decode_id]) iterator_feed_dict = {src_placeholder: sample_data, batch_size_placeholder: len(decode_ids)} sess.run(model.iterator.initializer, feed_dict=iterator_feed_dict) (ncm_outputs, infer_summary) = model.decode(sess) for (i, decode_id) in enumerate(decode_ids): log.print_out(' # {}'.format(decode_id)) output = ncm_outputs[i] if ((self.config.beam_width > 0) and self._consider_beam()): output = output[0] translation = ncm_utils.get_translation(output, sent_id=0) delimited_sample = eval_data[decode_id].split('\t') (utterances, topic) = (delimited_sample[:(- 1)], delimited_sample[(- 1)]) (sources, target) = (utterances[:(- 1)], utterances[(- 1)]) log.print_out(' sources:') for (t, src) in enumerate(sources): log.print_out(' @{} {}'.format((t + 1), src)) log.print_out(' topic: {}'.format(topic)) log.print_out(' resp: {}'.format(target)) log.print_out((b' generated: ' + translation)) if (infer_summary is not None): summary_writer.add_summary(infer_summary, global_step)<|docstring|>Pick a sentence and decode.<|endoftext|>
7141812f00b1006981af0574f21c6fba62e9e7a1bb45c657ef959b8b3667c4d7
def getsymbles(jinja_env, extensions=''): 'return a list of :class:`.Symble` objects for a :class:`jinja2.Environment`;\n\n ' symbles = [] for name in jinja_env.list_templates(): if (not name.endswith(extensions)): continue sourcetuple = jinja_env.loader.get_source(jinja_env, name) try: ast = jinja_env.parse(sourcetuple) except: (exc_type, exc, traceback) = sys.exc_info() print('getsymbles: parse ....', file=sys.stderr) error = '{}, {}'.format(exc_type.__name__, exc.message) print(' {}: {}'.format(name, error), file=sys.stderr) continue symbles.append(Symble(ast, name)) return symbles
return a list of :class:`.Symble` objects for a :class:`jinja2.Environment`;
gvjinja.py
getsymbles
VCHui/gvjinja
1
python
def getsymbles(jinja_env, extensions=''): '\n\n ' symbles = [] for name in jinja_env.list_templates(): if (not name.endswith(extensions)): continue sourcetuple = jinja_env.loader.get_source(jinja_env, name) try: ast = jinja_env.parse(sourcetuple) except: (exc_type, exc, traceback) = sys.exc_info() print('getsymbles: parse ....', file=sys.stderr) error = '{}, {}'.format(exc_type.__name__, exc.message) print(' {}: {}'.format(name, error), file=sys.stderr) continue symbles.append(Symble(ast, name)) return symbles
def getsymbles(jinja_env, extensions=''): '\n\n ' symbles = [] for name in jinja_env.list_templates(): if (not name.endswith(extensions)): continue sourcetuple = jinja_env.loader.get_source(jinja_env, name) try: ast = jinja_env.parse(sourcetuple) except: (exc_type, exc, traceback) = sys.exc_info() print('getsymbles: parse ....', file=sys.stderr) error = '{}, {}'.format(exc_type.__name__, exc.message) print(' {}: {}'.format(name, error), file=sys.stderr) continue symbles.append(Symble(ast, name)) return symbles<|docstring|>return a list of :class:`.Symble` objects for a :class:`jinja2.Environment`;<|endoftext|>
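A hedged usage sketch for getsymbles, assuming a Symble keeps the template name it was constructed with:

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader('templates'))
    for symble in getsymbles(env, extensions=('.html',)):
        print(symble.name)  # each successfully parsed template yields one Symble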
7fa06193ce4b6bdba0a43266db977d294a7968c9f6924d0b13689040ad567555
def usage(): '::\n\n Usage: gvjinja.py [-m [module] [env]] [-b]\n\n # print usage;\n $ gvjinja.py\n\n # render a digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env |\\\n tee gvjinja.gv | dot -T png > gvjinja.png\n\n # render a basic digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env -b |\\\n tee gvjinja-basic.gv | dot -T png > gvjinja-basic.png\n\n:\n' pass
:: Usage: gvjinja.py [-m [module] [env]] [-b] # print usage; $ gvjinja.py # render a digraph of gvjinja itself; $ gvjinja.py -m gvjinja gvjinja.env |\ tee gvjinja.gv | dot -T png > gvjinja.png # render a basic digraph of gvjinja itself; $ gvjinja.py -m gvjinja gvjinja.env -b |\ tee gvjinja-basic.gv | dot -T png > gvjinja-basic.png :
gvjinja.py
usage
VCHui/gvjinja
1
python
def usage(): '::\n\n Usage: gvjinja.py [-m [module] [env]] [-b]\n\n # print usage;\n $ gvjinja.py\n\n # render a digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env |\\\n tee gvjinja.gv | dot -T png > gvjinja.png\n\n # render a basic digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env -b |\\\n tee gvjinja-basic.gv | dot -T png > gvjinja-basic.png\n\n:\n' pass
def usage(): '::\n\n Usage: gvjinja.py [-m [module] [env]] [-b]\n\n # print usage;\n $ gvjinja.py\n\n # render a digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env |\\\n tee gvjinja.gv | dot -T png > gvjinja.png\n\n # render a basic digraph of gvjinja itself;\n $ gvjinja.py -m gvjinja gvjinja.env -b |\\\n tee gvjinja-basic.gv | dot -T png > gvjinja-basic.png\n\n:\n' pass<|docstring|>:: Usage: gvjinja.py [-m [module] [env]] [-b] # print usage; $ gvjinja.py # render a digraph of gvjinja itself; $ gvjinja.py -m gvjinja gvjinja.env |\ tee gvjinja.gv | dot -T png > gvjinja.png # render a basic digraph of gvjinja itself; $ gvjinja.py -m gvjinja gvjinja.env -b |\ tee gvjinja-basic.gv | dot -T png > gvjinja-basic.png :<|endoftext|>
ca350db86320940900e4c2cd736ca025af783805d075883fd697e74ea9e15bdd
def getrefs(self, reftype): 'yield a list of references for a reference type.\n\n lifted from :func:`jinja2.meta.find_referenced_templates` to\n perform searching for specific reference types; no report for\n dynamic inheritances or inclusions;\n\n ' for node in self.find_all(reftype): if (not isinstance(node.template, nodes.Const)): if isinstance(node.template, (nodes.Tuple, nodes.List)): for template_name in node.template.items: if isinstance(template_name.value, self.str_types): (yield template_name.value) elif isinstance(node.template.value, self.str_types): (yield node.template.value) elif isinstance(node, nodes.Include): if isinstance(node.template.value, (tuple, list)): for template_name in node.template.value: (yield template_name)
yield a list of references for a reference type. lifted from :func:`jinja2.meta.find_referenced_templates` to perform searching for specific reference types; no report for dynamic inheritances or inclusions;
gvjinja.py
getrefs
VCHui/gvjinja
1
python
def getrefs(self, reftype): 'yield a list of references for a reference type.\n\n lifted from :func:`jinja2.meta.find_referenced_templates` to\n perform searching for specific reference types; no report for\n dynamic inheritances or inclusions;\n\n ' for node in self.find_all(reftype): if (not isinstance(node.template, nodes.Const)): if isinstance(node.template, (nodes.Tuple, nodes.List)): for template_name in node.template.items: if isinstance(template_name.value, self.str_types): (yield template_name.value) elif isinstance(node.template.value, self.str_types): (yield node.template.value) elif isinstance(node, nodes.Include): if isinstance(node.template.value, (tuple, list)): for template_name in node.template.value: (yield template_name)
def getrefs(self, reftype): 'yield a list of references for a reference type.\n\n lifted from :func:`jinja2.meta.find_referenced_templates` to\n perform searching for specific reference types; no report for\n dynamic inheritances or inclusions;\n\n ' for node in self.find_all(reftype): if (not isinstance(node.template, nodes.Const)): if isinstance(node.template, (nodes.Tuple, nodes.List)): for template_name in node.template.items: if isinstance(template_name.value, self.str_types): (yield template_name.value) elif isinstance(node.template.value, self.str_types): (yield node.template.value) elif isinstance(node, nodes.Include): if isinstance(node.template.value, (tuple, list)): for template_name in node.template.value: (yield template_name)<|docstring|>yield a list of references for a reference type. lifted from :func:`jinja2.meta.find_referenced_templates` to perform searching for specific reference types; no report for dynamic inheritances or inclusions;<|endoftext|>
224fd5c7c256d2197797d0a73f9a12a41a5ae323f34a5b80af92addc7c80baab
@classmethod def digraph(my, symbles, extensions=''): '``stdout`` a UML graph file of env in detail.\n\n .. figure:: ../gvjinja.png\n :alt: gvjinja.digraph\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPH').render(symbles=symbles)
``stdout`` a UML graph file of env in detail. .. figure:: ../gvjinja.png :alt: gvjinja.digraph
gvjinja.py
digraph
VCHui/gvjinja
1
python
@classmethod def digraph(my, symbles, extensions=''): '``stdout`` a UML graph file of env in detail.\n\n .. figure:: ../gvjinja.png\n :alt: gvjinja.digraph\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPH').render(symbles=symbles)
@classmethod def digraph(my, symbles, extensions=''): '``stdout`` a UML graph file of env in detail.\n\n .. figure:: ../gvjinja.png\n :alt: gvjinja.digraph\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPH').render(symbles=symbles)<|docstring|>``stdout`` a UML graph file of env in detail. .. figure:: ../gvjinja.png :alt: gvjinja.digraph<|endoftext|>
24daa7b5552594d416b254a5b1446b1ad352be8876d39365f62233365463555d
@classmethod def digraphbasic(my, symbles, extensions=''): '``stdout`` a basic graph file of env, no node and edge details.\n\n .. figure:: ../gvjinja-basic.png\n :alt: gvjinja.digraphbasic\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPHBASIC').render(symbles=symbles)
``stdout`` a basic graph file of env, no node and edge details. .. figure:: ../gvjinja-basic.png :alt: gvjinja.digraphbasic
gvjinja.py
digraphbasic
VCHui/gvjinja
1
python
@classmethod def digraphbasic(my, symbles, extensions=''): '``stdout`` a basic graph file of env, no node and edge details.\n\n .. figure:: ../gvjinja-basic.png\n :alt: gvjinja.digraphbasic\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPHBASIC').render(symbles=symbles)
@classmethod def digraphbasic(my, symbles, extensions=''): '``stdout`` a basic graph file of env, no node and edge details.\n\n .. figure:: ../gvjinja-basic.png\n :alt: gvjinja.digraphbasic\n\n ' if isinstance(symbles, Environment): symbles = getsymbles(symbles, extensions) return my.env.get_template('DIGRAPHBASIC').render(symbles=symbles)<|docstring|>``stdout`` a basic graph file of env, no node and edge details. .. figure:: ../gvjinja-basic.png :alt: gvjinja.digraphbasic<|endoftext|>
100a237deafc7f43c908c7358345ab83ab71ec457c887eccb6162a6408cf4db0
def GTA_antihot(onehot_img, colors: dict, width: int, height: int): '\n This function takes a onehot encoded input and turns it into an image with\n three channels.\n\n Args:\n onehot_img (tensor): onehot encoded image.\n colors (dict): a mapping of the different layers to different colors.\n width (int): width of the given image.\n height (int): height of the given image.\n\n Returns:\n Tensor: An image with 3 channels.\n ' inv_colors = {v: k for (k, v) in colors.items()} mat = onehot_img.numpy() map = torch.empty((3, height, width), dtype=torch.uint8) for i in range(0, len(set(colors.values()))): col = inv_colors.get(i) indices = np.where((mat[i] == 1)) for (r, c) in zip(*indices): map[0][r][c] = col[0] map[1][r][c] = col[1] map[2][r][c] = col[2] return map
This function takes a onehot encoded input and turns it into an image with three channels. Args: onehot_img (tensor): onehot encoded image. colors (dict): a mapping of the different layers to different colors. width (int): width of the given image. height (int): height of the given image. Returns: Tensor: An image with 3 channels.
GTApack/GTA_antihot.py
GTA_antihot
marctimjen/Deep_final
0
python
def GTA_antihot(onehot_img, colors: dict, width: int, height: int): '\n This function takes a onehot encoded input and turns it into an image with\n three channels.\n\n Args:\n onehot_img (tensor): onehot encoded image.\n colors (dict): a mapping of the different layers to different colors.\n width (int): width of the given image.\n height (int): height of the given image.\n\n Returns:\n Tensor: An image with 3 channels.\n ' inv_colors = {v: k for (k, v) in colors.items()} mat = onehot_img.numpy() map = torch.empty((3, height, width), dtype=torch.uint8) for i in range(0, len(set(colors.values()))): col = inv_colors.get(i) indices = np.where((mat[i] == 1)) for (r, c) in zip(*indices): map[0][r][c] = col[0] map[1][r][c] = col[1] map[2][r][c] = col[2] return map
def GTA_antihot(onehot_img, colors: dict, width: int, height: int): '\n This function takes a onehot encoded input and turns it into an image with\n three channels.\n\n Args:\n onehot_img (tensor): onehot encoded image.\n colors (dict): a mapping of the different layers to different colors.\n width (int): width of the given image.\n height (int): height of the given image.\n\n Returns:\n Tensor: An image with 3 channels.\n ' inv_colors = {v: k for (k, v) in colors.items()} mat = onehot_img.numpy() map = torch.empty((3, height, width), dtype=torch.uint8) for i in range(0, len(set(colors.values()))): col = inv_colors.get(i) indices = np.where((mat[i] == 1)) for (r, c) in zip(*indices): map[0][r][c] = col[0] map[1][r][c] = col[1] map[2][r][c] = col[2] return map<|docstring|>This function takes a onehot encoded input and turns it into an image with three channels. Args: onehot_img (tensor): onehot encoded image. colors (dict): a mapping of the different layers to different colors. width (int): width of the given image. height (int): height of the given image. Returns: Tensor: An image with 3 channels.<|endoftext|>
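A small round trip through GTA_antihot; the colors and tensor shapes are made up for illustration (the one-hot input is laid out as (classes, height, width)):

    import torch

    colors = {(255, 0, 0): 0, (0, 0, 255): 1}  # RGB color -> layer index
    onehot = torch.zeros((2, 2, 2))
    onehot[0, 0, :] = 1  # row 0 belongs to class 0 (red)
    onehot[1, 1, :] = 1  # row 1 belongs to class 1 (blue)
    img = GTA_antihot(onehot, colors, width=2, height=2)  # -> shape (3, 2, 2)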
ac0a83ddec0f19c2b1f74277bffb9adab29419b7496b88fbcde64681601c668d
def _inference(self, images, graph_part, cell_list, train_flag): 'Method for recovering the network model provided by graph_part and cell_list.\n Args:\n images: Images returned from Dataset() or inputs().\n graph_part: The topology structure of the network given by an adjacency table\n cell_list: list of Cell() objects giving the hyper parameters for each node\n Returns:\n Logits.' topo_order = self._toposort(graph_part) nodelen = len(graph_part) inputs = [images for _ in range(nodelen)] getinput = [False for _ in range(nodelen)] getinput[0] = True for node in topo_order: layer = self._make_layer(inputs[node], cell_list[node], node, train_flag) for j in graph_part[node]: if getinput[j]: inputs[j] = self._pad(inputs[j], layer) else: inputs[j] = layer getinput[j] = True last_layer = tf.identity(layer, name=('last_layer' + str(self.block_num))) return last_layer
Method for recovering the network model provided by graph_part and cell_list. Args: images: Images returned from Dataset() or inputs(). graph_part: The topology structure of the network given by an adjacency table cell_list: list of Cell() objects giving the hyper parameters for each node Returns: Logits.
1_nas/evaluator_classification_2.py
_inference
nuaa-QK/1_NAS
0
python
def _inference(self, images, graph_part, cell_list, train_flag): 'Method for recovering the network model provided by graph_part and cell_list.\n Args:\n images: Images returned from Dataset() or inputs().\n graph_part: The topology structure of the network given by an adjacency table\n cell_list: list of Cell() objects giving the hyper parameters for each node\n Returns:\n Logits.' topo_order = self._toposort(graph_part) nodelen = len(graph_part) inputs = [images for _ in range(nodelen)] getinput = [False for _ in range(nodelen)] getinput[0] = True for node in topo_order: layer = self._make_layer(inputs[node], cell_list[node], node, train_flag) for j in graph_part[node]: if getinput[j]: inputs[j] = self._pad(inputs[j], layer) else: inputs[j] = layer getinput[j] = True last_layer = tf.identity(layer, name=('last_layer' + str(self.block_num))) return last_layer
def _inference(self, images, graph_part, cell_list, train_flag): 'Method for recovering the network model provided by graph_part and cell_list.\n Args:\n images: Images returned from Dataset() or inputs().\n graph_part: The topology structure of the network given by an adjacency table\n cell_list: list of Cell() objects giving the hyper parameters for each node\n Returns:\n Logits.' topo_order = self._toposort(graph_part) nodelen = len(graph_part) inputs = [images for _ in range(nodelen)] getinput = [False for _ in range(nodelen)] getinput[0] = True for node in topo_order: layer = self._make_layer(inputs[node], cell_list[node], node, train_flag) for j in graph_part[node]: if getinput[j]: inputs[j] = self._pad(inputs[j], layer) else: inputs[j] = layer getinput[j] = True last_layer = tf.identity(layer, name=('last_layer' + str(self.block_num))) return last_layer<|docstring|>Method for recovering the network model provided by graph_part and cell_list. Args: images: Images returned from Dataset() or inputs(). graph_part: The topology structure of the network given by an adjacency table cell_list: list of Cell() objects giving the hyper parameters for each node Returns: Logits.<|endoftext|>
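For clarity, the adjacency-table convention _inference consumes, on a made-up four-node graph:

    # graph_part[i] lists the nodes that consume node i's output, so this
    # is node 0 fanning out to 1 and 2, whose branches merge at node 3.
    graph_part = [[1, 2], [3], [3], []]
    # _toposort visits 0, 1, 2, 3; node 3 receives the branch from node 1
    # first, then _pad() combines it with the branch from node 2 before
    # node 3's own cell is applied.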
f38f9349f555d67749bdbdbb843abe04e9e403e0283c20f77babfbf4a51645ff
def _make_layer(self, inputs, cell, node, train_flag): 'Method for constructing and calculating cell in tensorflow\n Args:\n inputs: the input tensor of this operation\n cell: Class Cell(), hyper parameters for building this layer\n node: int, the index of this operation\n train_flag: boolean, indicating whether this is a training process or not\n Returns:\n layer: tensor.' if (cell.type == 'conv'): layer = self._makeconv(inputs, cell, node, train_flag) elif (cell.type == 'pooling'): layer = self._makepool(inputs, cell) elif (cell.type == 'id'): layer = tf.identity(inputs) elif (cell.type == 'sep_conv'): layer = self._makesep_conv(inputs, cell, node, train_flag) else: assert False, 'Wrong cell type!' return layer
Method for constructing and calculating cell in tensorflow Args: inputs: the input tensor of this operation cell: Class Cell(), hyper parameters for building this layer node: int, the index of this operation train_flag: boolean, indicating whether this is a training process or not Returns: layer: tensor.
1_nas/evaluator_classification_2.py
_make_layer
nuaa-QK/1_NAS
0
python
def _make_layer(self, inputs, cell, node, train_flag): 'Method for constructing and calculating cell in tensorflow\n Args:\n inputs: the input tensor of this operation\n cell: Class Cell(), hyper parameters for building this layer\n node: int, the index of this operation\n train_flag: boolean, indicating whether this is a training process or not\n Returns:\n layer: tensor.' if (cell.type == 'conv'): layer = self._makeconv(inputs, cell, node, train_flag) elif (cell.type == 'pooling'): layer = self._makepool(inputs, cell) elif (cell.type == 'id'): layer = tf.identity(inputs) elif (cell.type == 'sep_conv'): layer = self._makesep_conv(inputs, cell, node, train_flag) else: assert False, 'Wrong cell type!' return layer
def _make_layer(self, inputs, cell, node, train_flag): 'Method for constructing and calculating cell in tensorflow\n Args:\n inputs: the input tensor of this operation\n cell: Class Cell(), hyper parameters for building this layer\n node: int, the index of this operation\n train_flag: boolean, indicating whether this is a training process or not\n Returns:\n layer: tensor.' if (cell.type == 'conv'): layer = self._makeconv(inputs, cell, node, train_flag) elif (cell.type == 'pooling'): layer = self._makepool(inputs, cell) elif (cell.type == 'id'): layer = tf.identity(inputs) elif (cell.type == 'sep_conv'): layer = self._makesep_conv(inputs, cell, node, train_flag) else: assert False, 'Wrong cell type!' return layer<|docstring|>Method for constructing and calculating cell in tensorflow Args: inputs: the input tensor of this operation cell: Class Cell(), hyper parameters for building this layer node: int, the index of this operation train_flag: boolean, indicating whether this is a training process or not Returns: layer: tensor.<|endoftext|>
e67d29a6ec95ad2a9d88d495931d9c764cb6184eb225406eca24e606246e0e08
def _makeconv(self, x, hplist, node, train_flag): 'Generates a convolutional layer according to information in hplist\n Args:\n x: input data.\n hplist: hyperparameters for building this layer\n node: int, the index of this operation\n Returns:\n conv_layer: the output tensor\n ' with tf.variable_scope(((('block' + str(self.block_num)) + 'conv') + str(node))) as scope: inputdim = x.shape[3] kernel = self._get_variable('weights', shape=[hplist.kernel_size, hplist.kernel_size, inputdim, hplist.filter_size]) x = self._activation_layer(hplist.activation, x, scope) x = tf.nn.conv2d(x, kernel, [1, 1, 1, 1], padding='SAME') biases = self._get_variable('biases', hplist.filter_size) x = self._batch_norm(tf.nn.bias_add(x, biases), train_flag) return x
Generates a convolutional layer according to information in hplist Args: x: input data. hplist: hyperparameters for building this layer node: int, the index of this operation Returns: conv_layer: the output tensor
1_nas/evaluator_classification_2.py
_makeconv
nuaa-QK/1_NAS
0
python
def _makeconv(self, x, hplist, node, train_flag): 'Generates a convolutional layer according to information in hplist\n Args:\n x: input data.\n hplist: hyperparameters for building this layer\n node: int, the index of this operation\n Returns:\n conv_layer: the output tensor\n ' with tf.variable_scope(((('block' + str(self.block_num)) + 'conv') + str(node))) as scope: inputdim = x.shape[3] kernel = self._get_variable('weights', shape=[hplist.kernel_size, hplist.kernel_size, inputdim, hplist.filter_size]) x = self._activation_layer(hplist.activation, x, scope) x = tf.nn.conv2d(x, kernel, [1, 1, 1, 1], padding='SAME') biases = self._get_variable('biases', hplist.filter_size) x = self._batch_norm(tf.nn.bias_add(x, biases), train_flag) return x
def _makeconv(self, x, hplist, node, train_flag): 'Generates a convolutional layer according to information in hplist\n Args:\n x: input data.\n hplist: hyperparameters for building this layer\n node: int, the index of this operation\n Returns:\n conv_layer: the output tensor\n ' with tf.variable_scope(((('block' + str(self.block_num)) + 'conv') + str(node))) as scope: inputdim = x.shape[3] kernel = self._get_variable('weights', shape=[hplist.kernel_size, hplist.kernel_size, inputdim, hplist.filter_size]) x = self._activation_layer(hplist.activation, x, scope) x = tf.nn.conv2d(x, kernel, [1, 1, 1, 1], padding='SAME') biases = self._get_variable('biases', hplist.filter_size) x = self._batch_norm(tf.nn.bias_add(x, biases), train_flag) return x<|docstring|>Generates a convolutional layer according to information in hplist Args: x: input data. hplist: hyperparameters for building this layer node: int, the index of this operation Returns: conv_layer: the output tensor<|endoftext|>
5e8f88aaf3f23b30d1845c1381d9716bf0b3ef366f50e23f1840174e19d3579f
def _makepool(self, inputs, hplist): 'Generates a pooling layer according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building this layer\n Returns:\n tensor.\n ' if (hplist.pooling_type == 'avg'): return tf.nn.avg_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'max'): return tf.nn.max_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'global'): return tf.reduce_mean(inputs, [1, 2], keep_dims=True)
Generates a pooling layer according to information in hplist Args: inputs: input data. hplist: hyperparameters for building this layer Returns: tensor.
1_nas/evaluator_classification_2.py
_makepool
nuaa-QK/1_NAS
0
python
def _makepool(self, inputs, hplist): 'Generates a pooling layer according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building this layer\n Returns:\n tensor.\n ' if (hplist.pooling_type == 'avg'): return tf.nn.avg_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'max'): return tf.nn.max_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'global'): return tf.reduce_mean(inputs, [1, 2], keep_dims=True)
def _makepool(self, inputs, hplist): 'Generates a pooling layer according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building this layer\n Returns:\n tensor.\n ' if (hplist.pooling_type == 'avg'): return tf.nn.avg_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'max'): return tf.nn.max_pool(inputs, ksize=[1, hplist.kernel_size, hplist.kernel_size, 1], strides=[1, hplist.kernel_size, hplist.kernel_size, 1], padding='SAME') elif (hplist.pooling_type == 'global'): return tf.reduce_mean(inputs, [1, 2], keep_dims=True)<|docstring|>Generates a pooling layer according to information in hplist Args: inputs: input data. hplist: hyperparameters for building this layer Returns: tensor.<|endoftext|>
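The three branches differ only in the reduction they apply; note the 'global' branch keeps singleton spatial dims. A quick TF 1.x shape sketch (the batch and channel sizes are made up):

import tensorflow as tf  # TF 1.x, as above

x = tf.placeholder(tf.float32, [8, 32, 32, 64])
avg = tf.nn.avg_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')  # -> (8, 16, 16, 64)
mx = tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')   # -> (8, 16, 16, 64)
glb = tf.reduce_mean(x, [1, 2], keep_dims=True)  # global average pool -> (8, 1, 1, 64)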
59fbbbbea8cb8f4afcd2d851f597b1e337df2a639d2b3b1a261644f23ee38658
def _makedense(self, inputs, hplist): 'Generates dense layers according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building layers\n Returns:\n tensor.\n ' inputs = tf.reshape(inputs, [self.batch_size, (- 1)]) for (i, neural_num) in enumerate(hplist[1]): with tf.variable_scope(((('block' + str(self.block_num)) + 'dense') + str(i))) as scope: weights = self._get_variable('weights', shape=[inputs.shape[(- 1)], neural_num]) biases = self._get_variable('biases', [neural_num]) mul = (tf.matmul(inputs, weights) + biases) if (neural_num == self.output_shape[(- 1)]): local3 = self._activation_layer('', mul, scope) else: local3 = self._activation_layer(hplist[2], mul, scope) inputs = local3 return inputs
Generates dense layers according to information in hplist Args: inputs: input data. hplist: hyperparameters for building layers Returns: tensor.
1_nas/evaluator_classification_2.py
_makedense
nuaa-QK/1_NAS
0
python
def _makedense(self, inputs, hplist): 'Generates dense layers according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building layers\n Returns:\n tensor.\n ' inputs = tf.reshape(inputs, [self.batch_size, (- 1)]) for (i, neural_num) in enumerate(hplist[1]): with tf.variable_scope(((('block' + str(self.block_num)) + 'dense') + str(i))) as scope: weights = self._get_variable('weights', shape=[inputs.shape[(- 1)], neural_num]) biases = self._get_variable('biases', [neural_num]) mul = (tf.matmul(inputs, weights) + biases) if (neural_num == self.output_shape[(- 1)]): local3 = self._activation_layer('', mul, scope) else: local3 = self._activation_layer(hplist[2], mul, scope) inputs = local3 return inputs
def _makedense(self, inputs, hplist): 'Generates dense layers according to information in hplist\n Args:\n inputs: input data.\n hplist: hyperparameters for building layers\n Returns:\n tensor.\n ' inputs = tf.reshape(inputs, [self.batch_size, (- 1)]) for (i, neural_num) in enumerate(hplist[1]): with tf.variable_scope(((('block' + str(self.block_num)) + 'dense') + str(i))) as scope: weights = self._get_variable('weights', shape=[inputs.shape[(- 1)], neural_num]) biases = self._get_variable('biases', [neural_num]) mul = (tf.matmul(inputs, weights) + biases) if (neural_num == self.output_shape[(- 1)]): local3 = self._activation_layer('', mul, scope) else: local3 = self._activation_layer(hplist[2], mul, scope) inputs = local3 return inputs<|docstring|>Generates dense layers according to information in hplist Args: inputs: input data. hplist: hyperparameters for building layers Returns: tensor.<|endoftext|>
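Structurally, _makedense is a loop of affine layers where every hidden layer gets the configured activation and the output-sized layer stays linear. A numpy restatement of that control flow (the initialisation and sizes are placeholders, not the project's scheme):

import numpy as np

def dense_stack(x, layer_sizes, output_size, rng=np.random.RandomState(0)):
    # x: (batch, features), already flattened as in _makedense.
    for n in layer_sizes:
        w = rng.randn(x.shape[-1], n) * 0.01   # placeholder initialisation
        x = x @ w + np.zeros(n)
        if n != output_size:                   # output layer stays linear,
            x = np.maximum(x, 0.0)             # hidden layers get an activation
    return x

print(dense_stack(np.ones((4, 128)), [64, 10], output_size=10).shape)  # (4, 10)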
010346d444f3f7456b180044206f13e4416171577103a0e02094cca783afeea5
def evaluate(self, network, pre_block=[], is_bestNN=False, update_pre_weight=False): "Method for evaluating the given network.\n \n :param network: NetworkItem()\n :param pre_block: The pre-block structure; every block has two parts, 'graph_part' and 'cell_list'.\n :param is_bestNN: Symbol for indicating whether the evaluating network is the best network of this round.\n :param update_pre_weight: Symbol for indicating whether to update previous blocks' weight.\n :return: accuracy, float.\n " assert (self.train_num >= self.batch_size) tf.reset_default_graph() self.block_num = len(pre_block) self.log = (((('-' * 20) + str(network.id)) + ('-' * 20)) + '\n') for block in pre_block: self.log = (((self.log + str(block.graph)) + str(block.cell_list)) + '\n') self.log = (((self.log + str(network.graph)) + str(network.cell_list)) + '\n') with tf.Session() as sess: (data_x, data_y, block_input, train_flag) = self._get_input(sess, pre_block, update_pre_weight) (graph_full, cell_list) = self._recode(network.graph, network.cell_list, NAS_CONFIG['nas_main']['repeat_num']) graph_full = (graph_full + [[]]) cell_list = (cell_list + [Cell('pooling', 'max', 2)]) logits = self._inference(block_input, graph_full, cell_list, train_flag) logits = tf.nn.dropout(logits, keep_prob=1.0) logits = self._makedense(logits, ('', [self.output_shape[(- 1)]], '')) (precision, log) = self._eval(sess, logits, data_x, data_y, train_flag) self.log += log saver = tf.train.Saver(tf.global_variables()) if is_bestNN: if (not os.path.exists(os.path.join(self.model_path))): os.makedirs(os.path.join(self.model_path)) saver.save(sess, os.path.join(self.model_path, ('model' + str(network.id)))) (NAS_LOG << ('eva', self.log)) return precision
Method for evaluating the given network. :param network: NetworkItem() :param pre_block: The pre-block structure; every block has two parts, 'graph_part' and 'cell_list'. :param is_bestNN: Symbol for indicating whether the evaluating network is the best network of this round. :param update_pre_weight: Symbol for indicating whether to update previous blocks' weight. :return: accuracy, float.
1_nas/evaluator_classification_2.py
evaluate
nuaa-QK/1_NAS
0
python
def evaluate(self, network, pre_block=[], is_bestNN=False, update_pre_weight=False): "Method for evaluating the given network.\n \n :param network: NetworkItem()\n :param pre_block: The pre-block structure; every block has two parts, 'graph_part' and 'cell_list'.\n :param is_bestNN: Symbol for indicating whether the evaluating network is the best network of this round.\n :param update_pre_weight: Symbol for indicating whether to update previous blocks' weight.\n :return: accuracy, float.\n " assert (self.train_num >= self.batch_size) tf.reset_default_graph() self.block_num = len(pre_block) self.log = (((('-' * 20) + str(network.id)) + ('-' * 20)) + '\n') for block in pre_block: self.log = (((self.log + str(block.graph)) + str(block.cell_list)) + '\n') self.log = (((self.log + str(network.graph)) + str(network.cell_list)) + '\n') with tf.Session() as sess: (data_x, data_y, block_input, train_flag) = self._get_input(sess, pre_block, update_pre_weight) (graph_full, cell_list) = self._recode(network.graph, network.cell_list, NAS_CONFIG['nas_main']['repeat_num']) graph_full = (graph_full + [[]]) cell_list = (cell_list + [Cell('pooling', 'max', 2)]) logits = self._inference(block_input, graph_full, cell_list, train_flag) logits = tf.nn.dropout(logits, keep_prob=1.0) logits = self._makedense(logits, ('', [self.output_shape[(- 1)]], '')) (precision, log) = self._eval(sess, logits, data_x, data_y, train_flag) self.log += log saver = tf.train.Saver(tf.global_variables()) if is_bestNN: if (not os.path.exists(os.path.join(self.model_path))): os.makedirs(os.path.join(self.model_path)) saver.save(sess, os.path.join(self.model_path, ('model' + str(network.id)))) (NAS_LOG << ('eva', self.log)) return precision
def evaluate(self, network, pre_block=[], is_bestNN=False, update_pre_weight=False): "Method for evaluating the given network.\n \n :param network: NetworkItem()\n :param pre_block: The pre-block structure; every block has two parts, 'graph_part' and 'cell_list'.\n :param is_bestNN: Symbol for indicating whether the evaluating network is the best network of this round.\n :param update_pre_weight: Symbol for indicating whether to update previous blocks' weight.\n :return: accuracy, float.\n " assert (self.train_num >= self.batch_size) tf.reset_default_graph() self.block_num = len(pre_block) self.log = (((('-' * 20) + str(network.id)) + ('-' * 20)) + '\n') for block in pre_block: self.log = (((self.log + str(block.graph)) + str(block.cell_list)) + '\n') self.log = (((self.log + str(network.graph)) + str(network.cell_list)) + '\n') with tf.Session() as sess: (data_x, data_y, block_input, train_flag) = self._get_input(sess, pre_block, update_pre_weight) (graph_full, cell_list) = self._recode(network.graph, network.cell_list, NAS_CONFIG['nas_main']['repeat_num']) graph_full = (graph_full + [[]]) cell_list = (cell_list + [Cell('pooling', 'max', 2)]) logits = self._inference(block_input, graph_full, cell_list, train_flag) logits = tf.nn.dropout(logits, keep_prob=1.0) logits = self._makedense(logits, ('', [self.output_shape[(- 1)]], '')) (precision, log) = self._eval(sess, logits, data_x, data_y, train_flag) self.log += log saver = tf.train.Saver(tf.global_variables()) if is_bestNN: if (not os.path.exists(os.path.join(self.model_path))): os.makedirs(os.path.join(self.model_path)) saver.save(sess, os.path.join(self.model_path, ('model' + str(network.id)))) (NAS_LOG << ('eva', self.log)) return precision<|docstring|>Method for evaluating the given network. :param network: NetworkItem() :param pre_block: The pre-block structure; every block has two parts, 'graph_part' and 'cell_list'. :param is_bestNN: Symbol for indicating whether the evaluating network is the best network of this round. :param update_pre_weight: Symbol for indicating whether to update previous blocks' weight. :return: accuracy, float.<|endoftext|>
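A hedged sketch of the call pattern, inferred purely from how evaluate() reads its arguments; the evaluator instantiation, the NetworkItem constructor, and the Cell argument order are all assumptions, since those classes are defined elsewhere in the project:

# All names below are assumptions based on how evaluate() uses them.
eva = Evaluator()                                  # evaluator class from this module
net = NetworkItem(id=0, graph=[[1], []],
                  cell_list=[Cell('conv', 48, 3, 'relu'),
                             Cell('pooling', 'max', 2)])
accuracy = eva.evaluate(net, pre_block=[], is_bestNN=True)
print(accuracy)  # float returned by _eval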
534a5ce3c0256d03219ef29e03c9fe18cb860a5bfb8731b0d7576781faeb32e2
def _get_input(self, sess, pre_block, update_pre_weight=False): 'Get input for _inference' if (len(pre_block) > 0): assert os.path.exists(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver = tf.train.import_meta_graph(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver.restore(sess, os.path.join(self.model_path, ('model' + str(pre_block[(- 1)].id)))) graph = tf.get_default_graph() data_x = graph.get_tensor_by_name('input:0') data_y = graph.get_tensor_by_name('label:0') train_flag = graph.get_tensor_by_name('train_flag:0') block_input = graph.get_tensor_by_name((('last_layer' + str((self.block_num - 1))) + ':0')) if (not update_pre_weight): block_input = tf.stop_gradient(block_input, name='stop_gradient') else: data_x = tf.placeholder(tf.float32, self.input_shape, name='input') data_y = tf.placeholder(tf.int32, self.output_shape, name='label') train_flag = tf.placeholder(tf.bool, name='train_flag') block_input = tf.identity(data_x) return (data_x, data_y, block_input, train_flag)
Get input for _inference
1_nas/evaluator_classification_2.py
_get_input
nuaa-QK/1_NAS
0
python
def _get_input(self, sess, pre_block, update_pre_weight=False): if (len(pre_block) > 0): assert os.path.exists(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver = tf.train.import_meta_graph(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver.restore(sess, os.path.join(self.model_path, ('model' + str(pre_block[(- 1)].id)))) graph = tf.get_default_graph() data_x = graph.get_tensor_by_name('input:0') data_y = graph.get_tensor_by_name('label:0') train_flag = graph.get_tensor_by_name('train_flag:0') block_input = graph.get_tensor_by_name((('last_layer' + str((self.block_num - 1))) + ':0')) if (not update_pre_weight): block_input = tf.stop_gradient(block_input, name='stop_gradient') else: data_x = tf.placeholder(tf.float32, self.input_shape, name='input') data_y = tf.placeholder(tf.int32, self.output_shape, name='label') train_flag = tf.placeholder(tf.bool, name='train_flag') block_input = tf.identity(data_x) return (data_x, data_y, block_input, train_flag)
def _get_input(self, sess, pre_block, update_pre_weight=False): if (len(pre_block) > 0): assert os.path.exists(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver = tf.train.import_meta_graph(os.path.join(self.model_path, (('model' + str(pre_block[(- 1)].id)) + '.meta'))) new_saver.restore(sess, os.path.join(self.model_path, ('model' + str(pre_block[(- 1)].id)))) graph = tf.get_default_graph() data_x = graph.get_tensor_by_name('input:0') data_y = graph.get_tensor_by_name('label:0') train_flag = graph.get_tensor_by_name('train_flag:0') block_input = graph.get_tensor_by_name((('last_layer' + str((self.block_num - 1))) + ':0')) if (not update_pre_weight): block_input = tf.stop_gradient(block_input, name='stop_gradient') else: data_x = tf.placeholder(tf.float32, self.input_shape, name='input') data_y = tf.placeholder(tf.int32, self.output_shape, name='label') train_flag = tf.placeholder(tf.bool, name='train_flag') block_input = tf.identity(data_x) return (data_x, data_y, block_input, train_flag)<|docstring|>Get input for _inference<|endoftext|>
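The update_pre_weight switch is implemented entirely with tf.stop_gradient: the restored block still produces values, but gradients cannot flow back into its weights. A minimal TF 1.x illustration:

import tensorflow as tf  # TF 1.x

w = tf.Variable(2.0)
frozen = tf.stop_gradient(w * 3.0)   # plays the role of the frozen pre-block output
loss = (frozen + w) ** 2
grad = tf.gradients(loss, w)[0]      # gradient flows only through the "+ w" path

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(grad))  # 16.0 = 2 * (6 + 2); without stop_gradient it would be 64.0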
838298eb9f3870108e978cf23437e274eb581c085f1e24bbfb9df8e749aa5255
def _eval(self, sess, logits, data_x, data_y, train_flag, retrain=False): '\n The actual training process, including the definition of the loss and the training optimizer\n Args:\n sess: tensorflow session\n logits: output tensor of the model, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n data_x: input image\n data_y: input label, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n precision: float, the optimization target, could be the accuracy or the combination of both time and accuracy, etc\n log: string, log to be written and saved\n ' global_step = tf.Variable(0, trainable=False, name=('global_step' + str(self.block_num))) accuracy = self._cal_accuracy(logits, data_y) loss = self._loss(data_y, logits) train_op = self._train_op(global_step, loss) sess.run(tf.global_variables_initializer()) if retrain: self.train_data = np.concatenate((np.array(self.train_data), np.array(self.valid_data)), axis=0).tolist() self.train_label = np.concatenate((np.array(self.train_label), np.array(self.valid_label)), axis=0).tolist() max_steps = (len(list(self.train_label)) // self.batch_size) test_data = copy.deepcopy(self.test_data) test_label = copy.deepcopy(self.test_label) num_iter = (len(test_label) // self.batch_size) else: max_steps = (self.train_num // self.batch_size) test_data = copy.deepcopy(self.valid_data) test_label = copy.deepcopy(self.valid_label) num_iter = (len(self.valid_label) // self.batch_size) log = '' cost_time = 0 precision = np.zeros([self.epoch]) for ep in range(self.epoch): print('epoch', ep, ':') start_time = time.time() for step in range(max_steps): batch_x = self.train_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = self.train_label[(step * self.batch_size):((step + 1) * self.batch_size)] batch_x = DataSet().process(batch_x) (_, loss_value, acc) = sess.run([train_op, loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: True}) if np.isnan(loss_value): return ((- 1), log) sys.stdout.write(('\r>> train %d/%d loss %.4f acc %.4f' % (step, max_steps, loss_value, acc))) sys.stdout.write('\n') for step in range(num_iter): batch_x = test_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = test_label[(step * self.batch_size):((step + 1) * self.batch_size)] (l, acc_) = sess.run([loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: False}) precision[ep] += (acc_ / num_iter) sys.stdout.write(('\r>> valid %d/%d loss %.4f acc %.4f' % (step, num_iter, l, acc_))) sys.stdout.write('\n') if ((ep > 5) and (not retrain)): if ((((2 * precision[ep]) - precision[(ep - 5)]) - precision[(ep - 1)]) < (0.001 / DataSet().NUM_CLASSES)): precision = precision[:ep] log += ('early stop at %d epoch\n' % ep) break cost_time += (float((time.time() - start_time)) / self.epoch) log += ('epoch %d: precision = %.3f, cost time %.3f\n' % (ep, precision[ep], float((time.time() - start_time)))) print(('precision = %.3f, cost time %.3f' % (precision[ep], float((time.time() - start_time))))) return (precision[(- 1)], log)
The actual training process, including the definition of the loss and the training optimizer Args: sess: tensorflow session logits: output tensor of the model, 2-D tensor of shape [self.batch_size, self.NUM_CLASS] data_x: input image data_y: input label, 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: precision: float, the optimization target, could be the accuracy or the combination of both time and accuracy, etc log: string, log to be written and saved
1_nas/evaluator_classification_2.py
_eval
nuaa-QK/1_NAS
0
python
def _eval(self, sess, logits, data_x, data_y, train_flag, retrain=False): '\n The actual training process, including the definition of the loss and the training optimizer\n Args:\n sess: tensorflow session\n logits: output tensor of the model, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n data_x: input image\n data_y: input label, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n precision: float, the optimization target, could be the accuracy or the combination of both time and accuracy, etc\n log: string, log to be written and saved\n ' global_step = tf.Variable(0, trainable=False, name=('global_step' + str(self.block_num))) accuracy = self._cal_accuracy(logits, data_y) loss = self._loss(data_y, logits) train_op = self._train_op(global_step, loss) sess.run(tf.global_variables_initializer()) if retrain: self.train_data = np.concatenate((np.array(self.train_data), np.array(self.valid_data)), axis=0).tolist() self.train_label = np.concatenate((np.array(self.train_label), np.array(self.valid_label)), axis=0).tolist() max_steps = (len(list(self.train_label)) // self.batch_size) test_data = copy.deepcopy(self.test_data) test_label = copy.deepcopy(self.test_label) num_iter = (len(test_label) // self.batch_size) else: max_steps = (self.train_num // self.batch_size) test_data = copy.deepcopy(self.valid_data) test_label = copy.deepcopy(self.valid_label) num_iter = (len(self.valid_label) // self.batch_size) log = '' cost_time = 0 precision = np.zeros([self.epoch]) for ep in range(self.epoch): print('epoch', ep, ':') start_time = time.time() for step in range(max_steps): batch_x = self.train_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = self.train_label[(step * self.batch_size):((step + 1) * self.batch_size)] batch_x = DataSet().process(batch_x) (_, loss_value, acc) = sess.run([train_op, loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: True}) if np.isnan(loss_value): return ((- 1), log) sys.stdout.write(('\r>> train %d/%d loss %.4f acc %.4f' % (step, max_steps, loss_value, acc))) sys.stdout.write('\n') for step in range(num_iter): batch_x = test_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = test_label[(step * self.batch_size):((step + 1) * self.batch_size)] (l, acc_) = sess.run([loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: False}) precision[ep] += (acc_ / num_iter) sys.stdout.write(('\r>> valid %d/%d loss %.4f acc %.4f' % (step, num_iter, l, acc_))) sys.stdout.write('\n') if ((ep > 5) and (not retrain)): if ((((2 * precision[ep]) - precision[(ep - 5)]) - precision[(ep - 1)]) < (0.001 / DataSet().NUM_CLASSES)): precision = precision[:ep] log += ('early stop at %d epoch\n' % ep) break cost_time += (float((time.time() - start_time)) / self.epoch) log += ('epoch %d: precision = %.3f, cost time %.3f\n' % (ep, precision[ep], float((time.time() - start_time)))) print(('precision = %.3f, cost time %.3f' % (precision[ep], float((time.time() - start_time))))) return (precision[(- 1)], log)
def _eval(self, sess, logits, data_x, data_y, train_flag, retrain=False): '\n The actual training process, including the definition of the loss and the training optimizer\n Args:\n sess: tensorflow session\n logits: output tensor of the model, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n data_x: input image\n data_y: input label, 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n precision: float, the optimization target, could be the accuracy or the combination of both time and accuracy, etc\n log: string, log to be written and saved\n ' global_step = tf.Variable(0, trainable=False, name=('global_step' + str(self.block_num))) accuracy = self._cal_accuracy(logits, data_y) loss = self._loss(data_y, logits) train_op = self._train_op(global_step, loss) sess.run(tf.global_variables_initializer()) if retrain: self.train_data = np.concatenate((np.array(self.train_data), np.array(self.valid_data)), axis=0).tolist() self.train_label = np.concatenate((np.array(self.train_label), np.array(self.valid_label)), axis=0).tolist() max_steps = (len(list(self.train_label)) // self.batch_size) test_data = copy.deepcopy(self.test_data) test_label = copy.deepcopy(self.test_label) num_iter = (len(test_label) // self.batch_size) else: max_steps = (self.train_num // self.batch_size) test_data = copy.deepcopy(self.valid_data) test_label = copy.deepcopy(self.valid_label) num_iter = (len(self.valid_label) // self.batch_size) log = '' cost_time = 0 precision = np.zeros([self.epoch]) for ep in range(self.epoch): print('epoch', ep, ':') start_time = time.time() for step in range(max_steps): batch_x = self.train_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = self.train_label[(step * self.batch_size):((step + 1) * self.batch_size)] batch_x = DataSet().process(batch_x) (_, loss_value, acc) = sess.run([train_op, loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: True}) if np.isnan(loss_value): return ((- 1), log) sys.stdout.write(('\r>> train %d/%d loss %.4f acc %.4f' % (step, max_steps, loss_value, acc))) sys.stdout.write('\n') for step in range(num_iter): batch_x = test_data[(step * self.batch_size):((step + 1) * self.batch_size)] batch_y = test_label[(step * self.batch_size):((step + 1) * self.batch_size)] (l, acc_) = sess.run([loss, accuracy], feed_dict={data_x: batch_x, data_y: batch_y, train_flag: False}) precision[ep] += (acc_ / num_iter) sys.stdout.write(('\r>> valid %d/%d loss %.4f acc %.4f' % (step, num_iter, l, acc_))) sys.stdout.write('\n') if ((ep > 5) and (not retrain)): if ((((2 * precision[ep]) - precision[(ep - 5)]) - precision[(ep - 1)]) < (0.001 / DataSet().NUM_CLASSES)): precision = precision[:ep] log += ('early stop at %d epoch\n' % ep) break cost_time += (float((time.time() - start_time)) / self.epoch) log += ('epoch %d: precision = %.3f, cost time %.3f\n' % (ep, precision[ep], float((time.time() - start_time)))) print(('precision = %.3f, cost time %.3f' % (precision[ep], float((time.time() - start_time))))) return (precision[(- 1)], log)<|docstring|>The actual training process, including the definition of the loss and the training optimizer Args: sess: tensorflow session logits: output tensor of the model, 2-D tensor of shape [self.batch_size, self.NUM_CLASS] data_x: input image data_y: input label, 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: precision: float, the optimization target, could be the accuracy or the combination of both time and accuracy, etc log: string, log to be written and saved<|endoftext|>
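The early-stop condition above is easier to read as a sum of two gains: (2*p[ep] - p[ep-5]) - p[ep-1] equals (p[ep] - p[ep-5]) + (p[ep] - p[ep-1]), i.e. training stops once the 5-epoch and 1-epoch validation improvements together drop below 0.001 / NUM_CLASSES. A plain-Python restatement (the class count is an assumed example value):

def should_stop(precision, ep, num_classes=10):
    # Same test as in _eval, rewritten as a sum of improvements.
    if ep <= 5:
        return False
    gain = (precision[ep] - precision[ep - 5]) + (precision[ep] - precision[ep - 1])
    return gain < 0.001 / num_classes

print(should_stop([0.60, 0.66, 0.66, 0.66, 0.66, 0.66, 0.66], ep=6))  # True: plateaued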
53f52ed4813834b8e95cca47ef945753d5a54b98ff94593b394f96d35dc2ffa3
def _cal_accuracy(self, logits, labels): '\n calculate the target of this task\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Target tensor of type float.\n ' correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) return accuracy
calculate the target of this task Args: logits: Logits from softmax. labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: Target tensor of type float.
1_nas/evaluator_classification_2.py
_cal_accuracy
nuaa-QK/1_NAS
0
python
def _cal_accuracy(self, logits, labels): '\n calculate the target of this task\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Target tensor of type float.\n ' correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) return accuracy
def _cal_accuracy(self, logits, labels): '\n calculate the target of this task\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Target tensor of type float.\n ' correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) return accuracy<|docstring|>calculate the target of this task Args: logits: Logits from softmax. labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: Target tensor of type float.<|endoftext|>
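The accuracy op is just mean(argmax(logits) == argmax(labels)); a numpy equivalent for sanity checking:

import numpy as np

def accuracy(logits, labels):
    # argmax over the class axis, elementwise compare, mean of the 0/1 matches,
    # exactly what _cal_accuracy builds with tf ops.
    return np.mean(np.argmax(logits, 1) == np.argmax(labels, 1))

logits = np.array([[0.1, 0.9], [0.8, 0.2]])
labels = np.array([[0.0, 1.0], [0.0, 1.0]])
print(accuracy(logits, labels))  # 0.5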
afa2035648b7091f9f28681efb089cfef32f419a48fca9365201066cb3fd8d92
def _loss(self, labels, logits): '\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Loss tensor of type float.\n ' cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)) l2 = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()]) loss = (cross_entropy + (l2 * self.weight_decay)) return loss
Args: logits: Logits from softmax. labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: Loss tensor of type float.
1_nas/evaluator_classification_2.py
_loss
nuaa-QK/1_NAS
0
python
def _loss(self, labels, logits): '\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Loss tensor of type float.\n ' cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)) l2 = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()]) loss = (cross_entropy + (l2 * self.weight_decay)) return loss
def _loss(self, labels, logits): '\n Args:\n logits: Logits from softmax.\n labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS]\n Returns:\n Loss tensor of type float.\n ' cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)) l2 = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()]) loss = (cross_entropy + (l2 * self.weight_decay)) return loss<|docstring|>Args: logits: Logits from softmax. labels: Labels from distorted_inputs or inputs(). 2-D tensor of shape [self.batch_size, self.NUM_CLASS] Returns: Loss tensor of type float.<|endoftext|>
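The objective is batch-mean softmax cross-entropy plus a weight-decay term over every trainable variable; note that tf.nn.l2_loss(w) computes sum(w**2) / 2, not the norm itself. A numpy restatement:

import numpy as np

def regularized_loss(labels, logits, weights, weight_decay=1e-4):
    # Numerically stable log-softmax, then the batch-mean cross-entropy ...
    z = logits - logits.max(axis=1, keepdims=True)
    log_softmax = z - np.log(np.exp(z).sum(axis=1, keepdims=True))
    ce = -np.mean((labels * log_softmax).sum(axis=1))
    # ... plus weight decay; matches tf.nn.l2_loss's sum(w**2) / 2 convention.
    l2 = sum((w ** 2).sum() / 2.0 for w in weights)
    return ce + weight_decay * l2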
d870126f11fd4ace144833669bee0c0bae6d008a2504be32fcfc1893977ad464
def make_app(conf: Union[(Config, CKANConfig)]) -> CKANApp: '\n Initialise the Flask app and wrap it in dispatcher middleware.\n ' load_environment(conf) flask_app = make_flask_stack(conf) global _internal_test_request_context _internal_test_request_context = flask_app._wsgi_app.test_request_context() return flask_app
Initialise the Flask app and wrap it in dispatcher middleware.
ckan/config/middleware/__init__.py
make_app
robin-NEC/ckan
1
python
def make_app(conf: Union[(Config, CKANConfig)]) -> CKANApp: load_environment(conf) flask_app = make_flask_stack(conf) global _internal_test_request_context _internal_test_request_context = flask_app._wsgi_app.test_request_context() return flask_app
def make_app(conf: Union[(Config, CKANConfig)]) -> CKANApp: load_environment(conf) flask_app = make_flask_stack(conf) global _internal_test_request_context _internal_test_request_context = flask_app._wsgi_app.test_request_context() return flask_app<|docstring|>Initialise the Flask app and wrap it in dispatcher middleware.<|endoftext|>
ba33984b341b5bca5e30b8dbdd8915265c91a44b9295b0f1743c2e22e03bc241
def __init__(self, i2c_addr=56, i2c_dev=None): 'Initialise sensor.\n\n :param i2c_addr: i2c address of sensor\n :param i2c_dev: SMBus-compatible instance\n\n ' self._i2c_addr = i2c_addr self._i2c_dev = i2c_dev self._is_setup = False self._bh1745 = Device(I2C_ADDRESSES, i2c_dev=self._i2c_dev, bit_width=8, registers=(Register('SYSTEM_CONTROL', 64, fields=(BitField('sw_reset', 128), BitField('int_reset', 64), BitField('part_id', 63, read_only=True))), Register('MODE_CONTROL1', 65, fields=(BitField('measurement_time_ms', 7, adapter=LookupAdapter({160: 0, 320: 1, 640: 2, 1280: 3, 2560: 4, 5120: 5})),)), Register('MODE_CONTROL2', 66, fields=(BitField('valid', 128, read_only=True), BitField('rgbc_en', 16), BitField('adc_gain_x', 3, adapter=LookupAdapter({1: 0, 2: 1, 16: 2})))), Register('MODE_CONTROL3', 68, fields=(BitField('on', 255, adapter=LookupAdapter({True: 2, False: 0})),)), Register('COLOUR_DATA', 80, fields=(BitField('red', 18446462598732840960, adapter=U16ByteSwapAdapter()), BitField('green', 281470681743360, adapter=U16ByteSwapAdapter()), BitField('blue', 4294901760, adapter=U16ByteSwapAdapter()), BitField('clear', 65535, adapter=U16ByteSwapAdapter())), bit_width=64, read_only=True), Register('DINT_DATA', 88, fields=(BitField('data', 65535, adapter=U16ByteSwapAdapter()),), bit_width=16), Register('INTERRUPT', 96, fields=(BitField('status', 128, read_only=True), BitField('latch', 16, adapter=LookupAdapter({0: 1, 1: 0})), BitField('source', 12, read_only=True, adapter=LookupAdapter({'red': 0, 'green': 1, 'blue': 2, 'clear': 3})), BitField('enable', 1))), Register('PERSISTENCE', 97, fields=(BitField('mode', 3, adapter=LookupAdapter({'toggle': 0, 'update': 1, 'update_on_4': 2, 'update_on_8': 3})),)), Register('THRESHOLD', 98, fields=(BitField('high', 4294901760, adapter=U16ByteSwapAdapter()), BitField('low', 65535, adapter=U16ByteSwapAdapter())), bit_width=32), Register('MANUFACTURER', 146, fields=(BitField('id', 255),), read_only=True, volatile=False))) self._bh1745.select_address(self._i2c_addr) for register in self._bh1745.registers: register = self._bh1745.registers[register] for field in register.fields: field = register.fields[field] if isinstance(field.adapter, LookupAdapter): for key in field.adapter.lookup_table: name = 'BH1745_{register}_{field}_{key}'.format(register=register.name, field=field.name, key=key).upper() globals()[name] = key '\n Approximate compensation for the spectral response performance curves\n ' self._channel_compensation = (2.2, 1.0, 1.8, 10.0) self._enable_channel_compensation = True
Initialise sensor. :param i2c_addr: i2c address of sensor :param i2c_dev: SMBus-compatible instance
library/bh1745/__init__.py
__init__
pimoroni/bh1745-python
8
python
def __init__(self, i2c_addr=56, i2c_dev=None): 'Initialise sensor.\n\n :param i2c_addr: i2c address of sensor\n :param i2c_dev: SMBus-compatible instance\n\n ' self._i2c_addr = i2c_addr self._i2c_dev = i2c_dev self._is_setup = False self._bh1745 = Device(I2C_ADDRESSES, i2c_dev=self._i2c_dev, bit_width=8, registers=(Register('SYSTEM_CONTROL', 64, fields=(BitField('sw_reset', 128), BitField('int_reset', 64), BitField('part_id', 63, read_only=True))), Register('MODE_CONTROL1', 65, fields=(BitField('measurement_time_ms', 7, adapter=LookupAdapter({160: 0, 320: 1, 640: 2, 1280: 3, 2560: 4, 5120: 5})),)), Register('MODE_CONTROL2', 66, fields=(BitField('valid', 128, read_only=True), BitField('rgbc_en', 16), BitField('adc_gain_x', 3, adapter=LookupAdapter({1: 0, 2: 1, 16: 2})))), Register('MODE_CONTROL3', 68, fields=(BitField('on', 255, adapter=LookupAdapter({True: 2, False: 0})),)), Register('COLOUR_DATA', 80, fields=(BitField('red', 18446462598732840960, adapter=U16ByteSwapAdapter()), BitField('green', 281470681743360, adapter=U16ByteSwapAdapter()), BitField('blue', 4294901760, adapter=U16ByteSwapAdapter()), BitField('clear', 65535, adapter=U16ByteSwapAdapter())), bit_width=64, read_only=True), Register('DINT_DATA', 88, fields=(BitField('data', 65535, adapter=U16ByteSwapAdapter()),), bit_width=16), Register('INTERRUPT', 96, fields=(BitField('status', 128, read_only=True), BitField('latch', 16, adapter=LookupAdapter({0: 1, 1: 0})), BitField('source', 12, read_only=True, adapter=LookupAdapter({'red': 0, 'green': 1, 'blue': 2, 'clear': 3})), BitField('enable', 1))), Register('PERSISTENCE', 97, fields=(BitField('mode', 3, adapter=LookupAdapter({'toggle': 0, 'update': 1, 'update_on_4': 2, 'update_on_8': 3})),)), Register('THRESHOLD', 98, fields=(BitField('high', 4294901760, adapter=U16ByteSwapAdapter()), BitField('low', 65535, adapter=U16ByteSwapAdapter())), bit_width=32), Register('MANUFACTURER', 146, fields=(BitField('id', 255),), read_only=True, volatile=False))) self._bh1745.select_address(self._i2c_addr) for register in self._bh1745.registers: register = self._bh1745.registers[register] for field in register.fields: field = register.fields[field] if isinstance(field.adapter, LookupAdapter): for key in field.adapter.lookup_table: name = 'BH1745_{register}_{field}_{key}'.format(register=register.name, field=field.name, key=key).upper() globals()[name] = key '\n Approximate compensation for the spectral response performance curves\n ' self._channel_compensation = (2.2, 1.0, 1.8, 10.0) self._enable_channel_compensation = True
def __init__(self, i2c_addr=56, i2c_dev=None): 'Initialise sensor.\n\n :param i2c_addr: i2c address of sensor\n :param i2c_dev: SMBus-compatible instance\n\n ' self._i2c_addr = i2c_addr self._i2c_dev = i2c_dev self._is_setup = False self._bh1745 = Device(I2C_ADDRESSES, i2c_dev=self._i2c_dev, bit_width=8, registers=(Register('SYSTEM_CONTROL', 64, fields=(BitField('sw_reset', 128), BitField('int_reset', 64), BitField('part_id', 63, read_only=True))), Register('MODE_CONTROL1', 65, fields=(BitField('measurement_time_ms', 7, adapter=LookupAdapter({160: 0, 320: 1, 640: 2, 1280: 3, 2560: 4, 5120: 5})),)), Register('MODE_CONTROL2', 66, fields=(BitField('valid', 128, read_only=True), BitField('rgbc_en', 16), BitField('adc_gain_x', 3, adapter=LookupAdapter({1: 0, 2: 1, 16: 2})))), Register('MODE_CONTROL3', 68, fields=(BitField('on', 255, adapter=LookupAdapter({True: 2, False: 0})),)), Register('COLOUR_DATA', 80, fields=(BitField('red', 18446462598732840960, adapter=U16ByteSwapAdapter()), BitField('green', 281470681743360, adapter=U16ByteSwapAdapter()), BitField('blue', 4294901760, adapter=U16ByteSwapAdapter()), BitField('clear', 65535, adapter=U16ByteSwapAdapter())), bit_width=64, read_only=True), Register('DINT_DATA', 88, fields=(BitField('data', 65535, adapter=U16ByteSwapAdapter()),), bit_width=16), Register('INTERRUPT', 96, fields=(BitField('status', 128, read_only=True), BitField('latch', 16, adapter=LookupAdapter({0: 1, 1: 0})), BitField('source', 12, read_only=True, adapter=LookupAdapter({'red': 0, 'green': 1, 'blue': 2, 'clear': 3})), BitField('enable', 1))), Register('PERSISTENCE', 97, fields=(BitField('mode', 3, adapter=LookupAdapter({'toggle': 0, 'update': 1, 'update_on_4': 2, 'update_on_8': 3})),)), Register('THRESHOLD', 98, fields=(BitField('high', 4294901760, adapter=U16ByteSwapAdapter()), BitField('low', 65535, adapter=U16ByteSwapAdapter())), bit_width=32), Register('MANUFACTURER', 146, fields=(BitField('id', 255),), read_only=True, volatile=False))) self._bh1745.select_address(self._i2c_addr) for register in self._bh1745.registers: register = self._bh1745.registers[register] for field in register.fields: field = register.fields[field] if isinstance(field.adapter, LookupAdapter): for key in field.adapter.lookup_table: name = 'BH1745_{register}_{field}_{key}'.format(register=register.name, field=field.name, key=key).upper() globals()[name] = key '\n Approximate compensation for the spectral response performance curves\n ' self._channel_compensation = (2.2, 1.0, 1.8, 10.0) self._enable_channel_compensation = True<|docstring|>Initialise sensor. :param i2c_addr: i2c address of sensor :param i2c_dev: SMBus-compatible instance<|endoftext|>
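The compensation tuple is a per-channel (r, g, b, c) multiplier meant to flatten the sensor's spectral response; applying it is a plain element-wise scale. A sketch of that arithmetic with made-up raw readings (where the driver applies it internally is not shown in this excerpt):

compensation = (2.2, 1.0, 1.8, 10.0)   # (r, g, b, c) multipliers from the constructor
raw = (120, 210, 96, 35)               # hypothetical raw ADC channel counts
r, g, b, c = (int(v * k) for v, k in zip(raw, compensation))
print(r, g, b, c)  # 264 210 172 350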
2cc53da0274d8c697939b5d3bbc50ac54aa5d86a97289ae2863f060f9247abb6
def ready(self): 'Return true if setup has been successful.' return self._is_setup
Return true if setup has been successful.
library/bh1745/__init__.py
ready
pimoroni/bh1745-python
8
python
def ready(self): return self._is_setup
def ready(self): return self._is_setup<|docstring|>Return true if setup has been successful.<|endoftext|>
76a6e7e9f9d13f913dad54b1ddb3db67663ea4db1c300a29843286640d5f7aca
def setup(self, i2c_addr=None, timeout=BH1745_RESET_TIMEOUT_SEC): 'Set up the bh1745 sensor.\n\n :param i2c_addr: Optional i2c_addr to switch to\n\n ' if self._is_setup: return True if (timeout <= 0): raise ValueError('Device timeout period must be greater than 0') if (i2c_addr is not None): self._bh1745.select_address(i2c_addr) try: self._bh1745.get('SYSTEM_CONTROL') except IOError: raise RuntimeError('BH1745 not found: IO error attempting to query device!') if ((self._bh1745.get('SYSTEM_CONTROL').part_id != 11) or (self._bh1745.get('MANUFACTURER').id != 224)): raise RuntimeError('BH1745 not found: Manufacturer or Part ID mismatch!') self._is_setup = True self._bh1745.set('SYSTEM_CONTROL', sw_reset=1) t_start = time.time() pending_reset = True while ((time.time() - t_start) < timeout): if (not self._bh1745.get('SYSTEM_CONTROL').sw_reset): pending_reset = False break time.sleep(0.01) if pending_reset: raise BH1745TimeoutError('Timeout waiting for BH1745 to reset.') self._bh1745.set('SYSTEM_CONTROL', int_reset=0) self._bh1745.set('MODE_CONTROL1', measurement_time_ms=320) self._bh1745.set('MODE_CONTROL2', adc_gain_x=1, rgbc_en=1) self._bh1745.set('MODE_CONTROL3', on=1) self._bh1745.set('THRESHOLD', low=65535, high=0) self._bh1745.set('INTERRUPT', latch=1) time.sleep(0.32)
Set up the bh1745 sensor. :param i2c_addr: Optional i2c_addr to switch to
library/bh1745/__init__.py
setup
pimoroni/bh1745-python
8
python
def setup(self, i2c_addr=None, timeout=BH1745_RESET_TIMEOUT_SEC): 'Set up the bh1745 sensor.\n\n :param i2c_addr: Optional i2c_addr to switch to\n\n ' if self._is_setup: return True if (timeout <= 0): raise ValueError('Device timeout period must be greater than 0') if (i2c_addr is not None): self._bh1745.select_address(i2c_addr) try: self._bh1745.get('SYSTEM_CONTROL') except IOError: raise RuntimeError('BH1745 not found: IO error attempting to query device!') if ((self._bh1745.get('SYSTEM_CONTROL').part_id != 11) or (self._bh1745.get('MANUFACTURER').id != 224)): raise RuntimeError('BH1745 not found: Manufacturer or Part ID mismatch!') self._is_setup = True self._bh1745.set('SYSTEM_CONTROL', sw_reset=1) t_start = time.time() pending_reset = True while ((time.time() - t_start) < timeout): if (not self._bh1745.get('SYSTEM_CONTROL').sw_reset): pending_reset = False break time.sleep(0.01) if pending_reset: raise BH1745TimeoutError('Timeout waiting for BH1745 to reset.') self._bh1745.set('SYSTEM_CONTROL', int_reset=0) self._bh1745.set('MODE_CONTROL1', measurement_time_ms=320) self._bh1745.set('MODE_CONTROL2', adc_gain_x=1, rgbc_en=1) self._bh1745.set('MODE_CONTROL3', on=1) self._bh1745.set('THRESHOLD', low=65535, high=0) self._bh1745.set('INTERRUPT', latch=1) time.sleep(0.32)
def setup(self, i2c_addr=None, timeout=BH1745_RESET_TIMEOUT_SEC): 'Set up the bh1745 sensor.\n\n :param i2c_addr: Optional i2c_addr to switch to\n\n ' if self._is_setup: return True if (timeout <= 0): raise ValueError('Device timeout period must be greater than 0') if (i2c_addr is not None): self._bh1745.select_address(i2c_addr) try: self._bh1745.get('SYSTEM_CONTROL') except IOError: raise RuntimeError('BH1745 not found: IO error attempting to query device!') if ((self._bh1745.get('SYSTEM_CONTROL').part_id != 11) or (self._bh1745.get('MANUFACTURER').id != 224)): raise RuntimeError('BH1745 not found: Manufacturer or Part ID mismatch!') self._is_setup = True self._bh1745.set('SYSTEM_CONTROL', sw_reset=1) t_start = time.time() pending_reset = True while ((time.time() - t_start) < timeout): if (not self._bh1745.get('SYSTEM_CONTROL').sw_reset): pending_reset = False break time.sleep(0.01) if pending_reset: raise BH1745TimeoutError('Timeout waiting for BH1745 to reset.') self._bh1745.set('SYSTEM_CONTROL', int_reset=0) self._bh1745.set('MODE_CONTROL1', measurement_time_ms=320) self._bh1745.set('MODE_CONTROL2', adc_gain_x=1, rgbc_en=1) self._bh1745.set('MODE_CONTROL3', on=1) self._bh1745.set('THRESHOLD', low=65535, high=0) self._bh1745.set('INTERRUPT', latch=1) time.sleep(0.32)<|docstring|>Set up the bh1745 sensor. :param i2c_addr: Optional i2c_addr to switch to<|endoftext|>
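Typical driver usage built from the methods shown here; the colour-reading call at the end follows the library's published examples but is not part of this excerpt, so treat it as an assumption:

from bh1745 import BH1745

bh1745 = BH1745()             # default i2c address 0x38 (56)
bh1745.setup()                # probes part/manufacturer IDs, resets, enables RGBC
bh1745.set_leds(1)            # illumination for reflective colour readings
r, g, b = bh1745.get_rgb_scaled()   # reader method assumed from the library docs
print(r, g, b)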
1f65cc26b87b30c241a0f3e40eae40b499146d53da067521a6de8048568a849d
def set_measurement_time_ms(self, time_ms): 'Set the measurement time in milliseconds.\n\n :param time_ms: The time in milliseconds: 160, 320, 640, 1280, 2560, 5120\n\n ' self.setup() self._bh1745.set('MODE_CONTROL1', measurement_time_ms=time_ms)
Set the measurement time in milliseconds. :param time_ms: The time in milliseconds: 160, 320, 640, 1280, 2560, 5120
library/bh1745/__init__.py
set_measurement_time_ms
pimoroni/bh1745-python
8
python
def set_measurement_time_ms(self, time_ms): 'Set the measurement time in milliseconds.\n\n :param time_ms: The time in milliseconds: 160, 320, 640, 1280, 2560, 5120\n\n ' self.setup() self._bh1745.set('MODE_CONTROL1', measurement_time_ms=time_ms)
def set_measurement_time_ms(self, time_ms): 'Set the measurement time in milliseconds.\n\n :param time_ms: The time in milliseconds: 160, 320, 640, 1280, 2560, 5120\n\n ' self.setup() self._bh1745.set('MODE_CONTROL1', measurement_time_ms=time_ms)<|docstring|>Set the measurement time in milliseconds. :param time_ms: The time in milliseconds: 160, 320, 640, 1280, 2560, 5120<|endoftext|>
4add40c46f3d349f2f5f77df3d4efd5889547b9dd4eed06241c4c3b0043f491b
def set_adc_gain_x(self, gain_x): 'Set the ADC gain multiplier.\n\n :param gain_x: Must be either 1, 2 or 16\n\n ' self.setup() self._bh1745.set('MODE_CONTROL2', adc_gain_x=gain_x)
Set the ADC gain multiplier. :param gain_x: Must be either 1, 2 or 16
library/bh1745/__init__.py
set_adc_gain_x
pimoroni/bh1745-python
8
python
def set_adc_gain_x(self, gain_x): 'Set the ADC gain multiplier.\n\n :param gain_x: Must be either 1, 2 or 16\n\n ' self.setup() self._bh1745.set('MODE_CONTROL2', adc_gain_x=gain_x)
def set_adc_gain_x(self, gain_x): 'Set the ADC gain multiplier.\n\n :param gain_x: Must be either 1, 2 or 16\n\n ' self.setup() self._bh1745.set('MODE_CONTROL2', adc_gain_x=gain_x)<|docstring|>Set the ADC gain multiplier. :param gain_x: Must be either 1, 2 or 16<|endoftext|>
f114c92b65b4584e7c334afa24b96f6c7e4cfbb012fe751543760fef35ffaff1
def set_leds(self, state): 'Toggle the onboard LEDs.\n\n :param state: Either 1 for on, or 0 for off\n\n ' self.setup() self._bh1745.set('INTERRUPT', enable=(1 if state else 0))
Toggle the onboard LEDs. :param state: Either 1 for on, or 0 for off
library/bh1745/__init__.py
set_leds
pimoroni/bh1745-python
8
python
def set_leds(self, state): 'Toggle the onboard LEDs.\n\n :param state: Either 1 for on, or 0 for off\n\n ' self.setup() self._bh1745.set('INTERRUPT', enable=(1 if state else 0))
def set_leds(self, state): 'Toggle the onboard LEDs.\n\n :param state: Either 1 for on, or 0 for off\n\n ' self.setup() self._bh1745.set('INTERRUPT', enable=(1 if state else 0))<|docstring|>Toggle the onboard LEDs. :param state: Either 1 for on, or 0 for off<|endoftext|>
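Since set_leds drives a single enable bit, a common pattern is to light the LEDs only around a measurement window; a sketch (the 0.32 s sleep matches the 320 ms measurement time configured in setup):

import time
from bh1745 import BH1745

sensor = BH1745()
sensor.setup()
sensor.set_leds(1)    # LEDs on for the reading
time.sleep(0.32)      # allow one full 320 ms measurement cycle
# ... read the colour channels here ...
sensor.set_leds(0)    # LEDs off again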