body_hash (stringlengths 64-64) | body (stringlengths 23-109k) | docstring (stringlengths 1-57k) | path (stringlengths 4-198) | name (stringlengths 1-115) | repository_name (stringlengths 7-111) | repository_stars (float64 0-191k) | lang (stringclasses 1 value) | body_without_docstring (stringlengths 14-108k) | unified (stringlengths 45-133k)
---|---|---|---|---|---|---|---|---|---
5ddb0f45824bd88d6d472dc13e6648a01938384a5f4eb15806a7ce33b2da380f | def get_network_info_for_port(session, port_id):
'Get Port network informations from DB\n\n Get network informations (MAC, IP and gateway addresses and\n subnet mask) from database associated to a port\n '
LOG.debug(('get_network_info_for_port() called for port %s' % port_id))
with session.begin(subtransactions=True):
try:
net_info = session.query(models_v2.Port.mac_address, models_v2.IPAllocation.ip_address, models_v2.Subnet.cidr, models_v2.Subnet.gateway_ip).join(models_v2.IPAllocation).join(models_v2.Subnet, (models_v2.IPAllocation.subnet_id == models_v2.Subnet.id)).filter((models_v2.Subnet.ip_version == 4)).filter((models_v2.Port.id == port_id)).one()
return net_info
except exc.NoResultFound:
raise NoNetworkInfoForPort(port_id) | Get Port network informations from DB
Get network informations (MAC, IP and gateway addresses and
subnet mask) from database associated to a port | networking_bagpipe/driver/mech_bagpipe.py | get_network_info_for_port | mail2nsrajesh/networking-bagpipe | 0 | python | def get_network_info_for_port(session, port_id):
'Get Port network informations from DB\n\n Get network informations (MAC, IP and gateway addresses and\n subnet mask) from database associated to a port\n '
LOG.debug(('get_network_info_for_port() called for port %s' % port_id))
with session.begin(subtransactions=True):
try:
net_info = session.query(models_v2.Port.mac_address, models_v2.IPAllocation.ip_address, models_v2.Subnet.cidr, models_v2.Subnet.gateway_ip).join(models_v2.IPAllocation).join(models_v2.Subnet, (models_v2.IPAllocation.subnet_id == models_v2.Subnet.id)).filter((models_v2.Subnet.ip_version == 4)).filter((models_v2.Port.id == port_id)).one()
return net_info
except exc.NoResultFound:
raise NoNetworkInfoForPort(port_id) | def get_network_info_for_port(session, port_id):
'Get Port network informations from DB\n\n Get network informations (MAC, IP and gateway addresses and\n subnet mask) from database associated to a port\n '
LOG.debug(('get_network_info_for_port() called for port %s' % port_id))
with session.begin(subtransactions=True):
try:
net_info = session.query(models_v2.Port.mac_address, models_v2.IPAllocation.ip_address, models_v2.Subnet.cidr, models_v2.Subnet.gateway_ip).join(models_v2.IPAllocation).join(models_v2.Subnet, (models_v2.IPAllocation.subnet_id == models_v2.Subnet.id)).filter((models_v2.Subnet.ip_version == 4)).filter((models_v2.Port.id == port_id)).one()
return net_info
except exc.NoResultFound:
raise NoNetworkInfoForPort(port_id)<|docstring|>Get Port network informations from DB
Get network informations (MAC, IP and gateway addresses and
subnet mask) from database associated to a port<|endoftext|> |
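The get_network_info_for_port row above builds one joined SQLAlchemy query over Port, IPAllocation and Subnet and relies on `.one()` raising `NoResultFound` when no IPv4 allocation exists for the port. A minimal, self-contained sketch of the same join/filter/one() pattern follows; the model classes and the in-memory SQLite engine are simplified stand-ins assumed for illustration, not the real Neutron `models_v2` definitions.

```python
# Sketch only: simplified stand-in models, SQLAlchemy 1.4+ style.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class Port(Base):
    __tablename__ = "ports"
    id = Column(String, primary_key=True)
    mac_address = Column(String)

class Subnet(Base):
    __tablename__ = "subnets"
    id = Column(String, primary_key=True)
    cidr = Column(String)
    gateway_ip = Column(String)
    ip_version = Column(Integer)

class IPAllocation(Base):
    __tablename__ = "ipallocations"
    ip_address = Column(String, primary_key=True)
    port_id = Column(String, ForeignKey("ports.id"))
    subnet_id = Column(String, ForeignKey("subnets.id"))

def get_network_info(session, port_id):
    # Same shape as the query above: select columns from three joined tables,
    # keep only IPv4 subnets, and let .one() fail loudly when nothing matches.
    try:
        return (
            session.query(Port.mac_address, IPAllocation.ip_address,
                          Subnet.cidr, Subnet.gateway_ip)
            .join(IPAllocation, IPAllocation.port_id == Port.id)
            .join(Subnet, IPAllocation.subnet_id == Subnet.id)
            .filter(Subnet.ip_version == 4)
            .filter(Port.id == port_id)
            .one()
        )
    except NoResultFound:
        return None

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as s:
    s.add_all([
        Port(id="p1", mac_address="00:00:de:ad:be:ef"),
        Subnet(id="s1", cidr="10.0.0.0/24", gateway_ip="10.0.0.1", ip_version=4),
        IPAllocation(ip_address="10.0.0.2", port_id="p1", subnet_id="s1"),
    ])
    s.commit()
    print(get_network_info(s, "p1"))
    # -> ('00:00:de:ad:be:ef', '10.0.0.2', '10.0.0.0/24', '10.0.0.1')
```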
720637ba67c681fbf8c4d7f100f92f27f8bd4b2ea754431a7fab7794f590bde9 | def _get_network_info_for_port(self, port_id):
'Get MAC, IP and Gw IP addresses informations for a specific port'
session = db_api.get_reader_session()
(mac_address, ip_address, cidr, gateway_ip) = get_network_info_for_port(session, port_id)
return {'mac_address': mac_address, 'ip_address': (ip_address + cidr[cidr.index('/'):]), 'gateway_ip': gateway_ip} | Get MAC, IP and Gw IP addresses informations for a specific port | networking_bagpipe/driver/mech_bagpipe.py | _get_network_info_for_port | mail2nsrajesh/networking-bagpipe | 0 | python | def _get_network_info_for_port(self, port_id):
session = db_api.get_reader_session()
(mac_address, ip_address, cidr, gateway_ip) = get_network_info_for_port(session, port_id)
return {'mac_address': mac_address, 'ip_address': (ip_address + cidr[cidr.index('/'):]), 'gateway_ip': gateway_ip} | def _get_network_info_for_port(self, port_id):
session = db_api.get_reader_session()
(mac_address, ip_address, cidr, gateway_ip) = get_network_info_for_port(session, port_id)
return {'mac_address': mac_address, 'ip_address': (ip_address + cidr[cidr.index('/'):]), 'gateway_ip': gateway_ip}<|docstring|>Get MAC, IP and Gw IP addresses informations for a specific port<|endoftext|> |
85ca685672915e86a5525f323b5666724acbe62b77b1952e601f78150781728c | def _retrieve_bagpipe_net_info_for_port(self, port_id, segment):
"Retrieve BaGPipe network informations for a specific port\n\n {\n 'network_id': <UUID>,\n 'mac_address': '00:00:de:ad:be:ef',\n 'ip_address': '10.0.0.2',\n 'gateway_ip': '10.0.0.1',\n 'evpn' : {\n 'import_rt': ['12345:1', '12345:2', '12345:3'],\n 'export_rt': ['12345:1', '12345:2', '12345:4']\n }\n }\n "
bagpipe_network_info = {}
bagpipe_rt = self._get_route_target(segment)
if bagpipe_rt:
bagpipe_network_info.update({'evpn': {'import_rt': bagpipe_rt, 'export_rt': bagpipe_rt}})
else:
LOG.debug('No E-VPN RT info for port %s', port_id)
LOG.debug('Getting port %s network details', port_id)
bagpipe_network_info.update(self._get_network_info_for_port(port_id))
return bagpipe_network_info | Retrieve BaGPipe network informations for a specific port
{
'network_id': <UUID>,
'mac_address': '00:00:de:ad:be:ef',
'ip_address': '10.0.0.2',
'gateway_ip': '10.0.0.1',
'evpn' : {
'import_rt': ['12345:1', '12345:2', '12345:3'],
'export_rt': ['12345:1', '12345:2', '12345:4']
}
} | networking_bagpipe/driver/mech_bagpipe.py | _retrieve_bagpipe_net_info_for_port | mail2nsrajesh/networking-bagpipe | 0 | python | def _retrieve_bagpipe_net_info_for_port(self, port_id, segment):
"Retrieve BaGPipe network informations for a specific port\n\n {\n 'network_id': <UUID>,\n 'mac_address': '00:00:de:ad:be:ef',\n 'ip_address': '10.0.0.2',\n 'gateway_ip': '10.0.0.1',\n 'evpn' : {\n 'import_rt': ['12345:1', '12345:2', '12345:3'],\n 'export_rt': ['12345:1', '12345:2', '12345:4']\n }\n }\n "
bagpipe_network_info = {}
bagpipe_rt = self._get_route_target(segment)
if bagpipe_rt:
bagpipe_network_info.update({'evpn': {'import_rt': bagpipe_rt, 'export_rt': bagpipe_rt}})
else:
LOG.debug('No E-VPN RT info for port %s', port_id)
LOG.debug('Getting port %s network details', port_id)
bagpipe_network_info.update(self._get_network_info_for_port(port_id))
return bagpipe_network_info | def _retrieve_bagpipe_net_info_for_port(self, port_id, segment):
"Retrieve BaGPipe network informations for a specific port\n\n {\n 'network_id': <UUID>,\n 'mac_address': '00:00:de:ad:be:ef',\n 'ip_address': '10.0.0.2',\n 'gateway_ip': '10.0.0.1',\n 'evpn' : {\n 'import_rt': ['12345:1', '12345:2', '12345:3'],\n 'export_rt': ['12345:1', '12345:2', '12345:4']\n }\n }\n "
bagpipe_network_info = {}
bagpipe_rt = self._get_route_target(segment)
if bagpipe_rt:
bagpipe_network_info.update({'evpn': {'import_rt': bagpipe_rt, 'export_rt': bagpipe_rt}})
else:
LOG.debug('No E-VPN RT info for port %s', port_id)
LOG.debug('Getting port %s network details', port_id)
bagpipe_network_info.update(self._get_network_info_for_port(port_id))
return bagpipe_network_info<|docstring|>Retrieve BaGPipe network informations for a specific port
{
'network_id': <UUID>,
'mac_address': '00:00:de:ad:be:ef',
'ip_address': '10.0.0.2',
'gateway_ip': '10.0.0.1',
'evpn' : {
'import_rt': ['12345:1', '12345:2', '12345:3'],
'export_rt': ['12345:1', '12345:2', '12345:4']
}
}<|endoftext|> |
26c617b34e270d45b0c31c44958dd4bda6f1b0fa03f06aa9ef9e05b2d0a8845d | def _read_in_thread(address, pty, blocking):
'Read data from the pty in a thread.\n '
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
while 1:
data = pty.read(4096, blocking=blocking)
if ((not data) and (not pty.isalive())):
while ((not data) and (not pty.iseof())):
data += pty.read(4096, blocking=blocking)
if (not data):
try:
client.send(b'')
except socket.error:
pass
break
try:
client.send(data)
except socket.error:
break
client.close() | Read data from the pty in a thread. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | _read_in_thread | Semaine52/AuFilDuBoamp_Docs | 2 | python | def _read_in_thread(address, pty, blocking):
'\n '
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
while 1:
data = pty.read(4096, blocking=blocking)
if ((not data) and (not pty.isalive())):
while ((not data) and (not pty.iseof())):
data += pty.read(4096, blocking=blocking)
if (not data):
try:
client.send(b'')
except socket.error:
pass
break
try:
client.send(data)
except socket.error:
break
client.close() | def _read_in_thread(address, pty, blocking):
'\n '
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
while 1:
data = pty.read(4096, blocking=blocking)
if ((not data) and (not pty.isalive())):
while ((not data) and (not pty.iseof())):
data += pty.read(4096, blocking=blocking)
if (not data):
try:
client.send(b'')
except socket.error:
pass
break
try:
client.send(data)
except socket.error:
break
client.close()<|docstring|>Read data from the pty in a thread.<|endoftext|> |
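The `_read_in_thread` row pumps everything a pty produces into a local socket from a background thread, sending an empty payload and closing once the child is gone. The same pattern, reduced to an `os.pipe()` source so it runs without winpty; the names here are illustrative only.

```python
import os
import socket
import threading

def pump(read_fd, sock):
    # Forward every chunk from a blocking reader to a socket; a zero-length
    # read means EOF, so close the socket and stop.
    while True:
        data = os.read(read_fd, 4096)
        if not data:
            break
        sock.sendall(data)
    sock.close()

server, client = socket.socketpair()
r, w = os.pipe()
t = threading.Thread(target=pump, args=(r, client), daemon=True)
t.start()

os.write(w, b"hello from the child\n")
os.close(w)                  # EOF on the pipe ends the pump thread
print(server.recv(4096))     # b'hello from the child\n'
t.join()
```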
c1bb71cd9a1a0158bfd03922b217ad8cc9ae916b1f34738050cdbb27dd94050f | @classmethod
def spawn(cls, argv, cwd=None, env=None, dimensions=(24, 80), encoding='utf-8', backend=None):
'Start the given command in a child process in a pseudo terminal.\n\n This does all the setting up the pty, and returns an instance of\n PtyProcess.\n\n Dimensions of the psuedoterminal used for the subprocess can be\n specified as a tuple (rows, cols), or the default (24, 80) will be\n used.\n '
if isinstance(argv, str):
argv = shlex.split(argv, posix=False)
if (not isinstance(argv, (list, tuple))):
raise TypeError(('Expected a list or tuple for argv, got %r' % argv))
argv = argv[:]
command = argv[0]
env = (env or os.environ)
path = env.get('PATH', os.defpath)
command_with_path = which(command, path=path)
if (command_with_path is None):
raise FileNotFoundError(('The command was not found or was not ' + ('executable: %s.' % command)))
command = command_with_path
argv[0] = command
cmdline = (' ' + subprocess.list2cmdline(argv[1:]))
cwd = (cwd or os.getcwd())
backend = os.environ.get('PYWINPTY_BACKEND', None)
backend = (int(backend) if (backend is not None) else backend)
proc = PTY(dimensions[1], dimensions[0], encoding=encoding, backend=backend)
envStrs = []
for (key, value) in env.items():
envStrs.append(('%s=%s' % (key, value)))
env = ('\x00'.join(envStrs) + '\x00')
command = bytes(command, encoding)
cwd = bytes(cwd, encoding)
cmdline = bytes(cmdline, encoding)
env = bytes(env, encoding)
if (len(argv) == 1):
proc.spawn(command, cwd=cwd, env=env)
else:
proc.spawn(command, cwd=cwd, env=env, cmdline=cmdline)
inst = cls(proc, encoding)
inst._winsize = dimensions
inst.argv = argv
if (env is not None):
inst.env = env
if (cwd is not None):
inst.launch_dir = cwd
return inst | Start the given command in a child process in a pseudo terminal.
This does all the setting up the pty, and returns an instance of
PtyProcess.
Dimensions of the psuedoterminal used for the subprocess can be
specified as a tuple (rows, cols), or the default (24, 80) will be
used. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | spawn | Semaine52/AuFilDuBoamp_Docs | 2 | python | @classmethod
def spawn(cls, argv, cwd=None, env=None, dimensions=(24, 80), encoding='utf-8', backend=None):
'Start the given command in a child process in a pseudo terminal.\n\n This does all the setting up the pty, and returns an instance of\n PtyProcess.\n\n Dimensions of the psuedoterminal used for the subprocess can be\n specified as a tuple (rows, cols), or the default (24, 80) will be\n used.\n '
if isinstance(argv, str):
argv = shlex.split(argv, posix=False)
if (not isinstance(argv, (list, tuple))):
raise TypeError(('Expected a list or tuple for argv, got %r' % argv))
argv = argv[:]
command = argv[0]
env = (env or os.environ)
path = env.get('PATH', os.defpath)
command_with_path = which(command, path=path)
if (command_with_path is None):
raise FileNotFoundError(('The command was not found or was not ' + ('executable: %s.' % command)))
command = command_with_path
argv[0] = command
cmdline = (' ' + subprocess.list2cmdline(argv[1:]))
cwd = (cwd or os.getcwd())
backend = os.environ.get('PYWINPTY_BACKEND', None)
backend = (int(backend) if (backend is not None) else backend)
proc = PTY(dimensions[1], dimensions[0], encoding=encoding, backend=backend)
envStrs = []
for (key, value) in env.items():
envStrs.append(('%s=%s' % (key, value)))
env = ('\x00'.join(envStrs) + '\x00')
command = bytes(command, encoding)
cwd = bytes(cwd, encoding)
cmdline = bytes(cmdline, encoding)
env = bytes(env, encoding)
if (len(argv) == 1):
proc.spawn(command, cwd=cwd, env=env)
else:
proc.spawn(command, cwd=cwd, env=env, cmdline=cmdline)
inst = cls(proc, encoding)
inst._winsize = dimensions
inst.argv = argv
if (env is not None):
inst.env = env
if (cwd is not None):
inst.launch_dir = cwd
return inst | @classmethod
def spawn(cls, argv, cwd=None, env=None, dimensions=(24, 80), encoding='utf-8', backend=None):
'Start the given command in a child process in a pseudo terminal.\n\n This does all the setting up the pty, and returns an instance of\n PtyProcess.\n\n Dimensions of the psuedoterminal used for the subprocess can be\n specified as a tuple (rows, cols), or the default (24, 80) will be\n used.\n '
if isinstance(argv, str):
argv = shlex.split(argv, posix=False)
if (not isinstance(argv, (list, tuple))):
raise TypeError(('Expected a list or tuple for argv, got %r' % argv))
argv = argv[:]
command = argv[0]
env = (env or os.environ)
path = env.get('PATH', os.defpath)
command_with_path = which(command, path=path)
if (command_with_path is None):
raise FileNotFoundError(('The command was not found or was not ' + ('executable: %s.' % command)))
command = command_with_path
argv[0] = command
cmdline = (' ' + subprocess.list2cmdline(argv[1:]))
cwd = (cwd or os.getcwd())
backend = os.environ.get('PYWINPTY_BACKEND', None)
backend = (int(backend) if (backend is not None) else backend)
proc = PTY(dimensions[1], dimensions[0], encoding=encoding, backend=backend)
envStrs = []
for (key, value) in env.items():
envStrs.append(('%s=%s' % (key, value)))
env = ('\x00'.join(envStrs) + '\x00')
command = bytes(command, encoding)
cwd = bytes(cwd, encoding)
cmdline = bytes(cmdline, encoding)
env = bytes(env, encoding)
if (len(argv) == 1):
proc.spawn(command, cwd=cwd, env=env)
else:
proc.spawn(command, cwd=cwd, env=env, cmdline=cmdline)
inst = cls(proc, encoding)
inst._winsize = dimensions
inst.argv = argv
if (env is not None):
inst.env = env
if (cwd is not None):
inst.launch_dir = cwd
return inst<|docstring|>Start the given command in a child process in a pseudo terminal.
This does all the setting up the pty, and returns an instance of
PtyProcess.
Dimensions of the psuedoterminal used for the subprocess can be
specified as a tuple (rows, cols), or the default (24, 80) will be
used.<|endoftext|> |
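Two details of `spawn()` above are easy to miss: the command is resolved against PATH before use, and the environment is flattened into a single NUL-separated block of `KEY=value` entries. A small sketch of both, using `shutil.which` as a stand-in for the pexpect-style `which()` helper referenced in the code.

```python
import shutil

def build_env_block(env):
    # "KEY=value" entries, each terminated by a NUL character, matching the
    # block format built by spawn() before handing it to the pty backend.
    return "\0".join(f"{k}={v}" for k, v in env.items()) + "\0"

cmd = shutil.which("python") or shutil.which("python3")
print(cmd)                                           # resolved executable path (or None)
print(repr(build_env_block({"FOO": "bar", "BAZ": "qux"})))
# 'FOO=bar\x00BAZ=qux\x00'
```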
f123b7ee0ce4cd714bf33485b2c93a0c5e7676315a611014eb86c253994545a5 | @property
def exitstatus(self):
'The exit status of the process.\n '
return self.pty.get_exitstatus() | The exit status of the process. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | exitstatus | Semaine52/AuFilDuBoamp_Docs | 2 | python | @property
def exitstatus(self):
'\n '
return self.pty.get_exitstatus() | @property
def exitstatus(self):
'\n '
return self.pty.get_exitstatus()<|docstring|>The exit status of the process.<|endoftext|> |
c805ea2e0cdb9691cfa1049a684f7bcfeeb931fc30468ed0a7c823c90ebaa6ef | def fileno(self):
'This returns the file descriptor of the pty for the child.\n '
return self.fd | This returns the file descriptor of the pty for the child. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | fileno | Semaine52/AuFilDuBoamp_Docs | 2 | python | def fileno(self):
'\n '
return self.fd | def fileno(self):
'\n '
return self.fd<|docstring|>This returns the file descriptor of the pty for the child.<|endoftext|> |
32979d72b344544685b3087ffad7e2709a69ac8879db3cb1c23ffed4df40947b | def close(self, force=False):
'This closes the connection with the child application. Note that\n calling close() more than once is valid. This emulates standard Python\n behavior with files. Set force to True if you want to make sure that\n the child is terminated (SIGKILL is sent if the child ignores\n SIGINT).'
if (not self.closed):
del self.pty
self.pty = None
self.fileobj.close()
self._server.close()
time.sleep(self.delayafterclose)
if self.isalive():
if (not self.terminate(force)):
raise IOError('Could not terminate the child.')
self.fd = (- 1)
self.closed = True | This closes the connection with the child application. Note that
calling close() more than once is valid. This emulates standard Python
behavior with files. Set force to True if you want to make sure that
the child is terminated (SIGKILL is sent if the child ignores
SIGINT). | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | close | Semaine52/AuFilDuBoamp_Docs | 2 | python | def close(self, force=False):
'This closes the connection with the child application. Note that\n calling close() more than once is valid. This emulates standard Python\n behavior with files. Set force to True if you want to make sure that\n the child is terminated (SIGKILL is sent if the child ignores\n SIGINT).'
if (not self.closed):
del self.pty
self.pty = None
self.fileobj.close()
self._server.close()
time.sleep(self.delayafterclose)
if self.isalive():
if (not self.terminate(force)):
raise IOError('Could not terminate the child.')
self.fd = (- 1)
self.closed = True | def close(self, force=False):
'This closes the connection with the child application. Note that\n calling close() more than once is valid. This emulates standard Python\n behavior with files. Set force to True if you want to make sure that\n the child is terminated (SIGKILL is sent if the child ignores\n SIGINT).'
if (not self.closed):
del self.pty
self.pty = None
self.fileobj.close()
self._server.close()
time.sleep(self.delayafterclose)
if self.isalive():
if (not self.terminate(force)):
raise IOError('Could not terminate the child.')
self.fd = (- 1)
self.closed = True<|docstring|>This closes the connection with the child application. Note that
calling close() more than once is valid. This emulates standard Python
behavior with files. Set force to True if you want to make sure that
the child is terminated (SIGKILL is sent if the child ignores
SIGINT).<|endoftext|> |
abc37f20e57510976254e3de6dafe7c7088c0cdc98b01354b614a2dd65946317 | def __del__(self):
'This makes sure that no system resources are left open. Python only\n garbage collects Python objects. OS file descriptors are not Python\n objects, so they must be handled explicitly. If the child file\n descriptor was opened outside of this class (passed to the constructor)\n then this does not close it.\n '
try:
self.close()
except Exception:
pass | This makes sure that no system resources are left open. Python only
garbage collects Python objects. OS file descriptors are not Python
objects, so they must be handled explicitly. If the child file
descriptor was opened outside of this class (passed to the constructor)
then this does not close it. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | __del__ | Semaine52/AuFilDuBoamp_Docs | 2 | python | def __del__(self):
'This makes sure that no system resources are left open. Python only\n garbage collects Python objects. OS file descriptors are not Python\n objects, so they must be handled explicitly. If the child file\n descriptor was opened outside of this class (passed to the constructor)\n then this does not close it.\n '
try:
self.close()
except Exception:
pass | def __del__(self):
'This makes sure that no system resources are left open. Python only\n garbage collects Python objects. OS file descriptors are not Python\n objects, so they must be handled explicitly. If the child file\n descriptor was opened outside of this class (passed to the constructor)\n then this does not close it.\n '
try:
self.close()
except Exception:
pass<|docstring|>This makes sure that no system resources are left open. Python only
garbage collects Python objects. OS file descriptors are not Python
objects, so they must be handled explicitly. If the child file
descriptor was opened outside of this class (passed to the constructor)
then this does not close it.<|endoftext|> |
8bed5469ba443a8cfe855af61945a60bad3388899da9a44bf22edd745bd4a4bc | def flush(self):
'This does nothing. It is here to support the interface for a\n File-like object. '
pass | This does nothing. It is here to support the interface for a
File-like object. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | flush | Semaine52/AuFilDuBoamp_Docs | 2 | python | def flush(self):
'This does nothing. It is here to support the interface for a\n File-like object. '
pass | def flush(self):
'This does nothing. It is here to support the interface for a\n File-like object. '
pass<|docstring|>This does nothing. It is here to support the interface for a
File-like object.<|endoftext|> |
c5bd2375ccf02c39f9edfc3b85385a5b856b463aab4ec248d0f6e59097c3ce5e | def isatty(self):
'This returns True if the file descriptor is open and connected to a\n tty(-like) device, else False.'
return self.isalive() | This returns True if the file descriptor is open and connected to a
tty(-like) device, else False. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | isatty | Semaine52/AuFilDuBoamp_Docs | 2 | python | def isatty(self):
'This returns True if the file descriptor is open and connected to a\n tty(-like) device, else False.'
return self.isalive() | def isatty(self):
'This returns True if the file descriptor is open and connected to a\n tty(-like) device, else False.'
return self.isalive()<|docstring|>This returns True if the file descriptor is open and connected to a
tty(-like) device, else False.<|endoftext|> |
c74662f65c53122d5a30d4eb8f811b5aff052f47aa2f232c568b940d301e88f8 | def read(self, size=1024):
'Read and return at most ``size`` characters from the pty.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
data = self.fileobj.recv(size)
if (not data):
self.flag_eof = True
raise EOFError('Pty is closed')
return self.decoder.decode(data, final=False) | Read and return at most ``size`` characters from the pty.
Can block if there is nothing to read. Raises :exc:`EOFError` if the
terminal was closed. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | read | Semaine52/AuFilDuBoamp_Docs | 2 | python | def read(self, size=1024):
'Read and return at most ``size`` characters from the pty.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
data = self.fileobj.recv(size)
if (not data):
self.flag_eof = True
raise EOFError('Pty is closed')
return self.decoder.decode(data, final=False) | def read(self, size=1024):
'Read and return at most ``size`` characters from the pty.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
data = self.fileobj.recv(size)
if (not data):
self.flag_eof = True
raise EOFError('Pty is closed')
return self.decoder.decode(data, final=False)<|docstring|>Read and return at most ``size`` characters from the pty.
Can block if there is nothing to read. Raises :exc:`EOFError` if the
terminal was closed.<|endoftext|> |
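`read()` above decodes socket bytes through an incremental decoder (`self.decoder.decode(data, final=False)`), so a multi-byte UTF-8 character split across two chunks is reassembled instead of being mangled. A standalone illustration of that behaviour:

```python
import codecs

decoder = codecs.getincrementaldecoder("utf-8")(errors="strict")
raw = "héllo".encode("utf-8")            # b'h\xc3\xa9llo'
chunks = [raw[:2], raw[2:]]              # the 2-byte 'é' is split in half

out = ""
for chunk in chunks:
    out += decoder.decode(chunk, final=False)   # buffers the partial byte
print(out)   # héllo
```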
db848ecd2037d06792c457144421548ff9530505e4dcf5abfedaa6699f8bb3b4 | def readline(self):
'Read one line from the pseudoterminal as bytes.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
buf = []
while 1:
try:
ch = self.read(1)
except EOFError:
return ''.join(buf)
buf.append(ch)
if (ch == '\n'):
return ''.join(buf) | Read one line from the pseudoterminal as bytes.
Can block if there is nothing to read. Raises :exc:`EOFError` if the
terminal was closed. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | readline | Semaine52/AuFilDuBoamp_Docs | 2 | python | def readline(self):
'Read one line from the pseudoterminal as bytes.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
buf = []
while 1:
try:
ch = self.read(1)
except EOFError:
return ''.join(buf)
buf.append(ch)
if (ch == '\n'):
return ''.join(buf)
'Read one line from the pseudoterminal as bytes.\n\n Can block if there is nothing to read. Raises :exc:`EOFError` if the\n terminal was closed.\n '
buf = []
while 1:
try:
ch = self.read(1)
except EOFError:
return ''.join(buf)
buf.append(ch)
if (ch == '\n'):
return ''.join(buf)
Can block if there is nothing to read. Raises :exc:`EOFError` if the
terminal was closed.<|endoftext|> |
93da51787d4477b99c59d8f40d0b2817f5aa8b938d9d0672eb86adda33e7e7bb | def write(self, s):
'Write the string ``s`` to the pseudoterminal.\n\n Returns the number of bytes written.\n '
if (not self.isalive()):
raise EOFError('Pty is closed')
nbytes = self.pty.write(bytes(s, self.encoding))
return nbytes | Write the string ``s`` to the pseudoterminal.
Returns the number of bytes written. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | write | Semaine52/AuFilDuBoamp_Docs | 2 | python | def write(self, s):
'Write the string ``s`` to the pseudoterminal.\n\n Returns the number of bytes written.\n '
if (not self.isalive()):
raise EOFError('Pty is closed')
nbytes = self.pty.write(bytes(s, self.encoding))
return nbytes | def write(self, s):
'Write the string ``s`` to the pseudoterminal.\n\n Returns the number of bytes written.\n '
if (not self.isalive()):
raise EOFError('Pty is closed')
nbytes = self.pty.write(bytes(s, self.encoding))
return nbytes<|docstring|>Write the string ``s`` to the pseudoterminal.
Returns the number of bytes written.<|endoftext|> |
2bb7b33a98980919a2f0d686a0a652c4b42f5d90e1ed4d94f68e2a422c6554f3 | def terminate(self, force=False):
'This forces a child process to terminate.'
if (not self.isalive()):
return True
self.kill(signal.SIGINT)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
if force:
self.kill(signal.SIGKILL)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
else:
return False | This forces a child process to terminate. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | terminate | Semaine52/AuFilDuBoamp_Docs | 2 | python | def terminate(self, force=False):
if (not self.isalive()):
return True
self.kill(signal.SIGINT)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
if force:
self.kill(signal.SIGKILL)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
else:
return False | def terminate(self, force=False):
if (not self.isalive()):
return True
self.kill(signal.SIGINT)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
if force:
self.kill(signal.SIGKILL)
time.sleep(self.delayafterterminate)
if (not self.isalive()):
return True
else:
return False<|docstring|>This forces a child process to terminate.<|endoftext|> |
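`terminate()` escalates: a polite SIGINT first, a pause, and only a hard kill when `force` is set. The same escalation expressed with the standard `subprocess` module; this is a POSIX-only sketch, since handling signals on Windows is exactly what the winpty layer exists to hide.

```python
import signal
import subprocess
import sys
import time

def terminate(proc, force=False, delay=0.5):
    if proc.poll() is not None:          # already exited
        return True
    proc.send_signal(signal.SIGINT)      # polite interrupt first
    time.sleep(delay)
    if proc.poll() is not None:
        return True
    if force:
        proc.kill()                      # SIGKILL on POSIX
        time.sleep(delay)
        return proc.poll() is not None
    return False

p = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(30)"])
print(terminate(p, force=True))          # True
```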
04115149ad1558fd19b15e5acb6b5ae4d13c4f9527014fc8c6f111aeb84a53b9 | def wait(self):
'This waits until the child exits. This is a blocking call. This will\n not read any data from the child.\n '
while self.isalive():
time.sleep(0.1)
return self.exitstatus | This waits until the child exits. This is a blocking call. This will
not read any data from the child. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | wait | Semaine52/AuFilDuBoamp_Docs | 2 | python | def wait(self):
'This waits until the child exits. This is a blocking call. This will\n not read any data from the child.\n '
while self.isalive():
time.sleep(0.1)
return self.exitstatus | def wait(self):
'This waits until the child exits. This is a blocking call. This will\n not read any data from the child.\n '
while self.isalive():
time.sleep(0.1)
return self.exitstatus<|docstring|>This waits until the child exits. This is a blocking call. This will
not read any data from the child.<|endoftext|> |
004d5ec077afb132e0cf7d5cb39f2045acedfe42ca6eee5946fe34412bc8f3c9 | def isalive(self):
'This tests if the child process is running or not. This is\n non-blocking. If the child was terminated then this will read the\n exitstatus or signalstatus of the child. This returns True if the child\n process appears to be running or False if not.\n '
return (self.pty and self.pty.isalive()) | This tests if the child process is running or not. This is
non-blocking. If the child was terminated then this will read the
exitstatus or signalstatus of the child. This returns True if the child
process appears to be running or False if not. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | isalive | Semaine52/AuFilDuBoamp_Docs | 2 | python | def isalive(self):
'This tests if the child process is running or not. This is\n non-blocking. If the child was terminated then this will read the\n exitstatus or signalstatus of the child. This returns True if the child\n process appears to be running or False if not.\n '
return (self.pty and self.pty.isalive()) | def isalive(self):
'This tests if the child process is running or not. This is\n non-blocking. If the child was terminated then this will read the\n exitstatus or signalstatus of the child. This returns True if the child\n process appears to be running or False if not.\n '
return (self.pty and self.pty.isalive())<|docstring|>This tests if the child process is running or not. This is
non-blocking. If the child was terminated then this will read the
exitstatus or signalstatus of the child. This returns True if the child
process appears to be running or False if not.<|endoftext|> |
4041de4156b259f6cf59b97142649421e31b7a67ff022f5a98ca0582a78f5879 | def kill(self, sig=None):
'Kill the process with the given signal.\n '
os.kill(self.pid, sig) | Kill the process with the given signal. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | kill | Semaine52/AuFilDuBoamp_Docs | 2 | python | def kill(self, sig=None):
'\n '
os.kill(self.pid, sig) | def kill(self, sig=None):
'\n '
os.kill(self.pid, sig)<|docstring|>Kill the process with the given signal.<|endoftext|> |
142c94d69e8513c25656dc39efc21bb22d3f2456d05637b0961aaffd64398ae6 | def sendcontrol(self, char):
"Helper method that wraps send() with mnemonic access for sending control\n character to the child (such as Ctrl-C or Ctrl-D). For example, to send\n Ctrl-G (ASCII 7, bell, '\x07')::\n child.sendcontrol('g')\n See also, sendintr() and sendeof().\n "
char = char.lower()
a = ord(char)
if (97 <= a <= 122):
a = ((a - ord('a')) + 1)
byte = bytes([a])
return (self.pty.write(byte), byte)
d = {'@': 0, '`': 0, '[': 27, '{': 27, '\\': 28, '|': 28, ']': 29, '}': 29, '^': 30, '~': 30, '_': 31, '?': 127}
if (char not in d):
return (0, b'')
byte = bytes([d[char]])
return (self.pty.write(byte), byte) | Helper method that wraps send() with mnemonic access for sending control
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
Ctrl-G (ASCII 7, bell, '')::
child.sendcontrol('g')
See also, sendintr() and sendeof(). | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | sendcontrol | Semaine52/AuFilDuBoamp_Docs | 2 | python | def sendcontrol(self, char):
"Helper method that wraps send() with mnemonic access for sending control\n character to the child (such as Ctrl-C or Ctrl-D). For example, to send\n Ctrl-G (ASCII 7, bell, '\x07')::\n child.sendcontrol('g')\n See also, sendintr() and sendeof().\n "
char = char.lower()
a = ord(char)
if (97 <= a <= 122):
a = ((a - ord('a')) + 1)
byte = bytes([a])
return (self.pty.write(byte), byte)
d = {'@': 0, '`': 0, '[': 27, '{': 27, '\\': 28, '|': 28, ']': 29, '}': 29, '^': 30, '~': 30, '_': 31, '?': 127}
if (char not in d):
return (0, b'')
byte = bytes([d[char]])
return (self.pty.write(byte), byte) | def sendcontrol(self, char):
"Helper method that wraps send() with mnemonic access for sending control\n character to the child (such as Ctrl-C or Ctrl-D). For example, to send\n Ctrl-G (ASCII 7, bell, '\x07')::\n child.sendcontrol('g')\n See also, sendintr() and sendeof().\n "
char = char.lower()
a = ord(char)
if (97 <= a <= 122):
a = ((a - ord('a')) + 1)
byte = bytes([a])
return (self.pty.write(byte), byte)
d = {'@': 0, '`': 0, '[': 27, '{': 27, '\\': 28, '|': 28, ']': 29, '}': 29, '^': 30, '~': 30, '_': 31, '?': 127}
if (char not in d):
return (0, b'')
byte = bytes([d[char]])
return (self.pty.write(byte), byte)<|docstring|>Helper method that wraps send() with mnemonic access for sending control
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
Ctrl-G (ASCII 7, bell, '')::
child.sendcontrol('g')
See also, sendintr() and sendeof().<|endoftext|> |
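The arithmetic in `sendcontrol()` is the classic control-character mapping: for a letter x, Ctrl-x is `ord(x) - ord('a') + 1`, so Ctrl-C is byte 0x03 and Ctrl-D is 0x04, while a handful of punctuation characters map to fixed codes. A small worked example of that mapping, using only a subset of the table above:

```python
def control_byte(char):
    char = char.lower()
    a = ord(char)
    if 97 <= a <= 122:                       # 'a'..'z'
        return bytes([a - ord("a") + 1])
    special = {"@": 0, "[": 27, "\\": 28, "]": 29, "^": 30, "_": 31, "?": 127}
    return bytes([special[char]]) if char in special else b""

print(control_byte("c"))   # b'\x03' -> the SIGINT character sent by sendintr()
print(control_byte("d"))   # b'\x04' -> the EOF character sent by sendeof()
print(control_byte("["))   # b'\x1b' -> ESC
```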
6671872c9556574b854222caa82dfb883e2f788b53f9233de69eb913eaefdfa4 | def sendeof(self):
'This sends an EOF to the child. This sends a character which causes\n the pending parent output buffer to be sent to the waiting child\n program without waiting for end-of-line. If it is the first character\n of the line, the read() in the user program returns 0, which signifies\n end-of-file. This means to work as expected a sendeof() has to be\n called at the beginning of a line. This method does not send a newline.\n It is the responsibility of the caller to ensure the eof is sent at the\n beginning of a line.'
(self.pty.write(b'\x04'), '\x04') | This sends an EOF to the child. This sends a character which causes
the pending parent output buffer to be sent to the waiting child
program without waiting for end-of-line. If it is the first character
of the line, the read() in the user program returns 0, which signifies
end-of-file. This means to work as expected a sendeof() has to be
called at the beginning of a line. This method does not send a newline.
It is the responsibility of the caller to ensure the eof is sent at the
beginning of a line. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | sendeof | Semaine52/AuFilDuBoamp_Docs | 2 | python | def sendeof(self):
'This sends an EOF to the child. This sends a character which causes\n the pending parent output buffer to be sent to the waiting child\n program without waiting for end-of-line. If it is the first character\n of the line, the read() in the user program returns 0, which signifies\n end-of-file. This means to work as expected a sendeof() has to be\n called at the beginning of a line. This method does not send a newline.\n It is the responsibility of the caller to ensure the eof is sent at the\n beginning of a line.'
(self.pty.write(b'\x04'), '\x04') | def sendeof(self):
'This sends an EOF to the child. This sends a character which causes\n the pending parent output buffer to be sent to the waiting child\n program without waiting for end-of-line. If it is the first character\n of the line, the read() in the user program returns 0, which signifies\n end-of-file. This means to work as expected a sendeof() has to be\n called at the beginning of a line. This method does not send a newline.\n It is the responsibility of the caller to ensure the eof is sent at the\n beginning of a line.'
(self.pty.write(b'\x04'), '\x04')<|docstring|>This sends an EOF to the child. This sends a character which causes
the pending parent output buffer to be sent to the waiting child
program without waiting for end-of-line. If it is the first character
of the line, the read() in the user program returns 0, which signifies
end-of-file. This means to work as expected a sendeof() has to be
called at the beginning of a line. This method does not send a newline.
It is the responsibility of the caller to ensure the eof is sent at the
beginning of a line.<|endoftext|> |
41d0473309d4ea7546c361d694c8a8a5165e9caf534665f552caa797ae0d7f91 | def sendintr(self):
'This sends a SIGINT to the child. It does not require\n the SIGINT to be the first character on a line. '
(self.pty.write(b'\x03'), '\x03') | This sends a SIGINT to the child. It does not require
the SIGINT to be the first character on a line. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | sendintr | Semaine52/AuFilDuBoamp_Docs | 2 | python | def sendintr(self):
'This sends a SIGINT to the child. It does not require\n the SIGINT to be the first character on a line. '
(self.pty.write(b'\x03'), '\x03') | def sendintr(self):
'This sends a SIGINT to the child. It does not require\n the SIGINT to be the first character on a line. '
(self.pty.write(b'\x03'), '\x03')<|docstring|>This sends a SIGINT to the child. It does not require
the SIGINT to be the first character on a line.<|endoftext|> |
abda035d6d4d7637596030e6eea6cbce12e1a677a40c21775a63d6ab803e454c | def eof(self):
'This returns True if the EOF exception was ever raised.\n '
return self.flag_eof | This returns True if the EOF exception was ever raised. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | eof | Semaine52/AuFilDuBoamp_Docs | 2 | python | def eof(self):
'\n '
return self.flag_eof | def eof(self):
'\n '
return self.flag_eof<|docstring|>This returns True if the EOF exception was ever raised.<|endoftext|> |
78779423ea1ad3056d778e90895d2f3f195f51a8cd29f50a3dcff0602e8418ef | def getwinsize(self):
'Return the window size of the pseudoterminal as a tuple (rows, cols).\n '
return self._winsize | Return the window size of the pseudoterminal as a tuple (rows, cols). | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | getwinsize | Semaine52/AuFilDuBoamp_Docs | 2 | python | def getwinsize(self):
'\n '
return self._winsize | def getwinsize(self):
'\n '
return self._winsize<|docstring|>Return the window size of the pseudoterminal as a tuple (rows, cols).<|endoftext|> |
323684842cf0a2458f2ee99224ac7bc5c8ad8f7a0c860f5b05c789524a6eb760 | def setwinsize(self, rows, cols):
'Set the terminal window size of the child tty.\n '
self._winsize = (rows, cols)
self.pty.set_size(cols, rows) | Set the terminal window size of the child tty. | afdb_docs_env/Lib/site-packages/winpty/ptyprocess.py | setwinsize | Semaine52/AuFilDuBoamp_Docs | 2 | python | def setwinsize(self, rows, cols):
'\n '
self._winsize = (rows, cols)
self.pty.set_size(cols, rows) | def setwinsize(self, rows, cols):
'\n '
self._winsize = (rows, cols)
self.pty.set_size(cols, rows)<|docstring|>Set the terminal window size of the child tty.<|endoftext|> |
e9a03ad1cd540520099cf565281421944544d869ece5acea9bdee28d489ae213 | def __init__(self, bytes_per_second=None, io_rate=None, time_periods=None):
'Constructor for the BandwidthLimitOverride class'
self.bytes_per_second = bytes_per_second
self.io_rate = io_rate
self.time_periods = time_periods | Constructor for the BandwidthLimitOverride class | cohesity_management_sdk/models/bandwidth_limit_override.py | __init__ | nick6655/management-sdk-python | 18 | python | def __init__(self, bytes_per_second=None, io_rate=None, time_periods=None):
self.bytes_per_second = bytes_per_second
self.io_rate = io_rate
self.time_periods = time_periods | def __init__(self, bytes_per_second=None, io_rate=None, time_periods=None):
self.bytes_per_second = bytes_per_second
self.io_rate = io_rate
self.time_periods = time_periods<|docstring|>Constructor for the BandwidthLimitOverride class<|endoftext|> |
6f20d3dc0faf6880351d94fb44e2483e5a1af32befedf7ede48256af252225e5 | @classmethod
def from_dictionary(cls, dictionary):
"Creates an instance of this model from a dictionary\n\n Args:\n dictionary (dictionary): A dictionary representation of the object as\n obtained from the deserialization of the server's response. The keys\n MUST match property names in the API description.\n\n Returns:\n object: An instance of this structure class.\n\n "
if (dictionary is None):
return None
bytes_per_second = dictionary.get('bytesPerSecond')
io_rate = dictionary.get('ioRate')
time_periods = (cohesity_management_sdk.models.time_of_a_week.TimeOfAWeek.from_dictionary(dictionary.get('timePeriods')) if dictionary.get('timePeriods') else None)
return cls(bytes_per_second, io_rate, time_periods) | Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class. | cohesity_management_sdk/models/bandwidth_limit_override.py | from_dictionary | nick6655/management-sdk-python | 18 | python | @classmethod
def from_dictionary(cls, dictionary):
"Creates an instance of this model from a dictionary\n\n Args:\n dictionary (dictionary): A dictionary representation of the object as\n obtained from the deserialization of the server's response. The keys\n MUST match property names in the API description.\n\n Returns:\n object: An instance of this structure class.\n\n "
if (dictionary is None):
return None
bytes_per_second = dictionary.get('bytesPerSecond')
io_rate = dictionary.get('ioRate')
time_periods = (cohesity_management_sdk.models.time_of_a_week.TimeOfAWeek.from_dictionary(dictionary.get('timePeriods')) if dictionary.get('timePeriods') else None)
return cls(bytes_per_second, io_rate, time_periods) | @classmethod
def from_dictionary(cls, dictionary):
"Creates an instance of this model from a dictionary\n\n Args:\n dictionary (dictionary): A dictionary representation of the object as\n obtained from the deserialization of the server's response. The keys\n MUST match property names in the API description.\n\n Returns:\n object: An instance of this structure class.\n\n "
if (dictionary is None):
return None
bytes_per_second = dictionary.get('bytesPerSecond')
io_rate = dictionary.get('ioRate')
time_periods = (cohesity_management_sdk.models.time_of_a_week.TimeOfAWeek.from_dictionary(dictionary.get('timePeriods')) if dictionary.get('timePeriods') else None)
return cls(bytes_per_second, io_rate, time_periods)<|docstring|>Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.<|endoftext|> |
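The `from_dictionary()` classmethod above follows a common generated-SDK pattern: read camelCase keys from the deserialized JSON response, delegate nested objects to their own `from_dictionary()`, and feed everything to the constructor. A stripped-down version of the pattern; the `TimeOfAWeek` fields used here are illustrative placeholders, not the real model's schema.

```python
class TimeOfAWeek:
    def __init__(self, day=None, hour=None):
        self.day = day
        self.hour = hour

    @classmethod
    def from_dictionary(cls, d):
        if d is None:
            return None
        return cls(d.get("day"), d.get("hour"))

class BandwidthLimitOverride:
    def __init__(self, bytes_per_second=None, io_rate=None, time_periods=None):
        self.bytes_per_second = bytes_per_second
        self.io_rate = io_rate
        self.time_periods = time_periods

    @classmethod
    def from_dictionary(cls, d):
        if d is None:
            return None
        return cls(
            d.get("bytesPerSecond"),                            # camelCase wire key
            d.get("ioRate"),
            TimeOfAWeek.from_dictionary(d.get("timePeriods")),  # nested model
        )

o = BandwidthLimitOverride.from_dictionary(
    {"bytesPerSecond": 1048576, "ioRate": 500, "timePeriods": {"day": 1, "hour": 2}}
)
print(o.bytes_per_second, o.time_periods.day)   # 1048576 1
```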
102f93ab15f2e1a50995ccf80c9275b0f4d0780320e6a772a521be06d7bf3811 | async def test_flow_user_init(hass, mqtt_mock):
'Test the initialization of the form in the first step of the config flow.'
result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
expected = {'data_schema': config_flow.TOPIC_SCHEMA, 'description_placeholders': None, 'errors': {}, 'flow_id': mock.ANY, 'handler': 'ferroamp', 'step_id': 'user', 'type': 'form', 'last_step': None}
assert (expected == result) | Test the initialization of the form in the first step of the config flow. | tests/test_config_flow.py | test_flow_user_init | TurboJonte/ha-ferroamp | 22 | python | async def test_flow_user_init(hass, mqtt_mock):
result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
expected = {'data_schema': config_flow.TOPIC_SCHEMA, 'description_placeholders': None, 'errors': {}, 'flow_id': mock.ANY, 'handler': 'ferroamp', 'step_id': 'user', 'type': 'form', 'last_step': None}
assert (expected == result) | async def test_flow_user_init(hass, mqtt_mock):
result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
expected = {'data_schema': config_flow.TOPIC_SCHEMA, 'description_placeholders': None, 'errors': {}, 'flow_id': mock.ANY, 'handler': 'ferroamp', 'step_id': 'user', 'type': 'form', 'last_step': None}
assert (expected == result)<|docstring|>Test the initialization of the form in the first step of the config flow.<|endoftext|> |
f2bda85ef6b44c1e21dba1190273daebc614e7cdf2cc7fc7a127fedab5d45ebb | async def test_flow_user_step_no_input(hass, mqtt_mock):
'Test appropriate error when no input is provided.'
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={CONF_NAME: '', CONF_PREFIX: ''}))
assert ({'base': 'name'} == result['errors']) | Test appropriate error when no input is provided. | tests/test_config_flow.py | test_flow_user_step_no_input | TurboJonte/ha-ferroamp | 22 | python | async def test_flow_user_step_no_input(hass, mqtt_mock):
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={CONF_NAME: '', CONF_PREFIX: ''}))
assert ({'base': 'name'} == result['errors']) | async def test_flow_user_step_no_input(hass, mqtt_mock):
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={CONF_NAME: '', CONF_PREFIX: ''}))
assert ({'base': 'name'} == result['errors'])<|docstring|>Test appropriate error when no input is provided.<|endoftext|> |
477826f6a1612fa031bc5de346fa1b7337d8f42b07ee0cd711b96f3487bf6d92 | async def test_flow_user_creates_config_entry(hass, mqtt_mock):
'Test the config entry is successfully created.'
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={}))
expected = {'version': 1, 'type': 'create_entry', 'flow_id': mock.ANY, 'handler': 'ferroamp', 'options': {}, 'title': 'Ferroamp', 'data': {'name': 'Ferroamp', 'prefix': 'extapi'}, 'description': None, 'description_placeholders': None, 'result': mock.ANY}
assert (expected == result) | Test the config entry is successfully created. | tests/test_config_flow.py | test_flow_user_creates_config_entry | TurboJonte/ha-ferroamp | 22 | python | async def test_flow_user_creates_config_entry(hass, mqtt_mock):
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={}))
expected = {'version': 1, 'type': 'create_entry', 'flow_id': mock.ANY, 'handler': 'ferroamp', 'options': {}, 'title': 'Ferroamp', 'data': {'name': 'Ferroamp', 'prefix': 'extapi'}, 'description': None, 'description_placeholders': None, 'result': mock.ANY}
assert (expected == result) | async def test_flow_user_creates_config_entry(hass, mqtt_mock):
_result = (await hass.config_entries.flow.async_init(config_flow.DOMAIN, context={'source': 'user'}))
result = (await hass.config_entries.flow.async_configure(_result['flow_id'], user_input={}))
expected = {'version': 1, 'type': 'create_entry', 'flow_id': mock.ANY, 'handler': 'ferroamp', 'options': {}, 'title': 'Ferroamp', 'data': {'name': 'Ferroamp', 'prefix': 'extapi'}, 'description': None, 'description_placeholders': None, 'result': mock.ANY}
assert (expected == result)<|docstring|>Test the config entry is successfully created.<|endoftext|> |
43b65c0ed01748391fcf93c5ef1ffe3064c8a51569480dfff2fb1a62bb4882eb | async def test_options_flow(hass, mqtt_mock):
'Test config flow options.'
config_entry = MockConfigEntry(domain=config_flow.DOMAIN, unique_id='ferroamp', data={CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
config_entry.add_to_hass(hass)
assert (await hass.config_entries.async_setup(config_entry.entry_id))
(await hass.async_block_till_done())
result = (await hass.config_entries.options.async_init(config_entry.entry_id))
assert ('form' == result['type'])
assert ('init' == result['step_id'])
assert ({} == result['errors'])
result = (await hass.config_entries.options.async_configure(result['flow_id'], user_input={CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5}))
assert (result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY)
assert (result['data'] == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5})
assert (config_entry.data == {CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
assert (config_entry.options == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5}) | Test config flow options. | tests/test_config_flow.py | test_options_flow | TurboJonte/ha-ferroamp | 22 | python | async def test_options_flow(hass, mqtt_mock):
config_entry = MockConfigEntry(domain=config_flow.DOMAIN, unique_id='ferroamp', data={CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
config_entry.add_to_hass(hass)
assert (await hass.config_entries.async_setup(config_entry.entry_id))
(await hass.async_block_till_done())
result = (await hass.config_entries.options.async_init(config_entry.entry_id))
assert ('form' == result['type'])
assert ('init' == result['step_id'])
assert ({} == result['errors'])
result = (await hass.config_entries.options.async_configure(result['flow_id'], user_input={CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5}))
assert (result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY)
assert (result['data'] == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5})
assert (config_entry.data == {CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
assert (config_entry.options == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5}) | async def test_options_flow(hass, mqtt_mock):
config_entry = MockConfigEntry(domain=config_flow.DOMAIN, unique_id='ferroamp', data={CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
config_entry.add_to_hass(hass)
assert (await hass.config_entries.async_setup(config_entry.entry_id))
(await hass.async_block_till_done())
result = (await hass.config_entries.options.async_init(config_entry.entry_id))
assert ('form' == result['type'])
assert ('init' == result['step_id'])
assert ({} == result['errors'])
result = (await hass.config_entries.options.async_configure(result['flow_id'], user_input={CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5}))
assert (result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY)
assert (result['data'] == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5})
assert (config_entry.data == {CONF_NAME: 'Ferroamp', CONF_PREFIX: 'extapi'})
assert (config_entry.options == {CONF_INTERVAL: 20, CONF_PRECISION_BATTERY: 1, CONF_PRECISION_CURRENT: 2, CONF_PRECISION_ENERGY: 3, CONF_PRECISION_FREQUENCY: 6, CONF_PRECISION_TEMPERATURE: 4, CONF_PRECISION_VOLTAGE: 5})<|docstring|>Test config flow options.<|endoftext|> |
35ed61f6cfbb17b4ac06d7bb07f14ee6893fa90438b3f406483f21b4b487ea74 | def select_data_adapter(x, y):
'Selects a data adapter than can handle a given x and y.'
adapter_cls = [cls for cls in ALL_ADAPTER_CLS if cls.can_handle(x, y)]
if (not adapter_cls):
raise ValueError('Failed to find data adapter that can handle input: {}, {}'.format(_type_name(x), _type_name(y)))
elif (len(adapter_cls) > 1):
raise RuntimeError('Data adapters should be mutually exclusive for handling inputs. Found multiple adapters {} to handle input: {}, {}'.format(adapter_cls, _type_name(x), _type_name(y)))
return adapter_cls[0] | Selects a data adapter than can handle a given x and y. | tensorflow/python/keras/engine/data_adapter.py | select_data_adapter | bbbboom/tensorflow | 50 | python | def select_data_adapter(x, y):
adapter_cls = [cls for cls in ALL_ADAPTER_CLS if cls.can_handle(x, y)]
if (not adapter_cls):
raise ValueError('Failed to find data adapter that can handle input: {}, {}'.format(_type_name(x), _type_name(y)))
elif (len(adapter_cls) > 1):
raise RuntimeError('Data adapters should be mutually exclusive for handling inputs. Found multiple adapters {} to handle input: {}, {}'.format(adapter_cls, _type_name(x), _type_name(y)))
return adapter_cls[0] | def select_data_adapter(x, y):
adapter_cls = [cls for cls in ALL_ADAPTER_CLS if cls.can_handle(x, y)]
if (not adapter_cls):
raise ValueError('Failed to find data adapter that can handle input: {}, {}'.format(_type_name(x), _type_name(y)))
elif (len(adapter_cls) > 1):
raise RuntimeError('Data adapters should be mutually exclusive for handling inputs. Found multiple adapters {} to handle input: {}, {}'.format(adapter_cls, _type_name(x), _type_name(y)))
return adapter_cls[0]<|docstring|>Selects a data adapter than can handle a given x and y.<|endoftext|> |
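`select_data_adapter()` is a small registry dispatch: every adapter class answers `can_handle(x, y)`, and exactly one is expected to claim a given input, otherwise selection fails loudly. A toy version of the same mechanism:

```python
import numpy as np

class ArrayAdapter:
    @staticmethod
    def can_handle(x, y=None):
        return isinstance(x, np.ndarray)

class GeneratorAdapter:
    @staticmethod
    def can_handle(x, y=None):
        return hasattr(x, "__next__")

ALL_ADAPTERS = [ArrayAdapter, GeneratorAdapter]

def select(x, y=None):
    matches = [cls for cls in ALL_ADAPTERS if cls.can_handle(x, y)]
    if not matches:
        raise ValueError(f"no adapter can handle {type(x)}")
    if len(matches) > 1:
        raise RuntimeError(f"adapters {matches} are not mutually exclusive")
    return matches[0]

print(select(np.zeros((2, 3))))   # ArrayAdapter
print(select(iter(range(3))))     # GeneratorAdapter
```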
7f74033531516f66e1006bafc912efcee4327b39f345ce8f863a181822e6dec5 | def _type_name(x):
'Generates a description of the type of an object.'
if isinstance(x, dict):
key_types = set((_type_name(key) for key in x.keys()))
val_types = set((_type_name(key) for key in x.values()))
return '({} containing {} keys and {} values)'.format(type(x), key_types, val_types)
if isinstance(x, (list, tuple)):
types = set((_type_name(val) for val in x))
return '({} containing values of types {})'.format(type(x), types)
return str(type(x)) | Generates a description of the type of an object. | tensorflow/python/keras/engine/data_adapter.py | _type_name | bbbboom/tensorflow | 50 | python | def _type_name(x):
if isinstance(x, dict):
key_types = set((_type_name(key) for key in x.keys()))
val_types = set((_type_name(key) for key in x.values()))
return '({} containing {} keys and {} values)'.format(type(x), key_types, val_types)
if isinstance(x, (list, tuple)):
types = set((_type_name(val) for val in x))
return '({} containing values of types {})'.format(type(x), types)
return str(type(x)) | def _type_name(x):
if isinstance(x, dict):
key_types = set((_type_name(key) for key in x.keys()))
val_types = set((_type_name(key) for key in x.values()))
return '({} containing {} keys and {} values)'.format(type(x), key_types, val_types)
if isinstance(x, (list, tuple)):
types = set((_type_name(val) for val in x))
return '({} containing values of types {})'.format(type(x), types)
return str(type(x))<|docstring|>Generates a description of the type of an object.<|endoftext|> |
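`_type_name()` only matters for error messages, but its output shape is easier to see with a concrete input. Reproducing the same logic standalone:

```python
def type_name(x):
    if isinstance(x, dict):
        key_types = set(type_name(k) for k in x.keys())
        val_types = set(type_name(v) for v in x.values())
        return "({} containing {} keys and {} values)".format(type(x), key_types, val_types)
    if isinstance(x, (list, tuple)):
        types = set(type_name(v) for v in x)
        return "({} containing values of types {})".format(type(x), types)
    return str(type(x))

print(type_name({"a": 1.0}))
# (<class 'dict'> containing {"<class 'str'>"} keys and {"<class 'float'>"} values)
print(type_name([1, 2, 3]))
# (<class 'list'> containing values of types {"<class 'int'>"})
```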
896e9a9132dfa278573f00f4ee430813bfe128b18a6604fcc0f3e5a8a28eb9f2 | def _process_tensorlike(inputs):
'Process tensor-like inputs.\n\n This function:\n\n (1) Converts `Numpy` arrays to `Tensor`s.\n (2) Converts `Scipy` sparse matrices to `SparseTensor`s.\n (2) Converts `list`s to `tuple`s (for `tf.data` support).\n\n Args:\n inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.\n\n Returns:\n Structure of `Tensor`s or tensor-like.\n '
def _convert_numpy_and_scipy(x):
if isinstance(x, np.ndarray):
dtype = None
if issubclass(x.dtype.type, np.floating):
dtype = backend.floatx()
return ops.convert_to_tensor(x, dtype=dtype)
elif (scipy_sparse and scipy_sparse.issparse(x)):
return _scipy_sparse_to_sparse_tensor(x)
return x
inputs = nest.map_structure(_convert_numpy_and_scipy, inputs)
return nest._list_to_tuple(inputs) | Process tensor-like inputs.
This function:
(1) Converts `Numpy` arrays to `Tensor`s.
(2) Converts `Scipy` sparse matrices to `SparseTensor`s.
(2) Converts `list`s to `tuple`s (for `tf.data` support).
Args:
inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.
Returns:
Structure of `Tensor`s or tensor-like. | tensorflow/python/keras/engine/data_adapter.py | _process_tensorlike | bbbboom/tensorflow | 50 | python | def _process_tensorlike(inputs):
'Process tensor-like inputs.\n\n This function:\n\n (1) Converts `Numpy` arrays to `Tensor`s.\n (2) Converts `Scipy` sparse matrices to `SparseTensor`s.\n (2) Converts `list`s to `tuple`s (for `tf.data` support).\n\n Args:\n inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.\n\n Returns:\n Structure of `Tensor`s or tensor-like.\n '
def _convert_numpy_and_scipy(x):
if isinstance(x, np.ndarray):
dtype = None
if issubclass(x.dtype.type, np.floating):
dtype = backend.floatx()
return ops.convert_to_tensor(x, dtype=dtype)
elif (scipy_sparse and scipy_sparse.issparse(x)):
return _scipy_sparse_to_sparse_tensor(x)
return x
inputs = nest.map_structure(_convert_numpy_and_scipy, inputs)
return nest._list_to_tuple(inputs) | def _process_tensorlike(inputs):
'Process tensor-like inputs.\n\n This function:\n\n (1) Converts `Numpy` arrays to `Tensor`s.\n (2) Converts `Scipy` sparse matrices to `SparseTensor`s.\n (2) Converts `list`s to `tuple`s (for `tf.data` support).\n\n Args:\n inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.\n\n Returns:\n Structure of `Tensor`s or tensor-like.\n '
def _convert_numpy_and_scipy(x):
if isinstance(x, np.ndarray):
dtype = None
if issubclass(x.dtype.type, np.floating):
dtype = backend.floatx()
return ops.convert_to_tensor(x, dtype=dtype)
elif (scipy_sparse and scipy_sparse.issparse(x)):
return _scipy_sparse_to_sparse_tensor(x)
return x
inputs = nest.map_structure(_convert_numpy_and_scipy, inputs)
return nest._list_to_tuple(inputs)<|docstring|>Process tensor-like inputs.
This function:
(1) Converts `Numpy` arrays to `Tensor`s.
(2) Converts `Scipy` sparse matrices to `SparseTensor`s.
(2) Converts `list`s to `tuple`s (for `tf.data` support).
Args:
inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.
Returns:
Structure of `Tensor`s or tensor-like.<|endoftext|> |
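A small usage sketch of the conversion above (private API; import path assumed). Floating NumPy data comes back as floatx Tensors and the outer list becomes a tuple:

import numpy as np
from tensorflow.python.keras.engine import data_adapter

x = [np.arange(4, dtype=np.float64), {'ids': np.arange(4)}]
out = data_adapter._process_tensorlike(x)
# out is a tuple: (float32 Tensor built from the float64 array,
#                  {'ids': integer Tensor})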
187cc5a6e44e7d91e664efcb9f0235b6072d1e4d519278afc915ff49c3bce858 | def broadcast_sample_weight_modes(target_structure, sample_weight_modes):
'Match sample_weight_modes structure with output structure.'
if ((target_structure is None) or (not nest.flatten(target_structure))):
return sample_weight_modes
if isinstance(sample_weight_modes, str):
if isinstance(target_structure, dict):
return {key: sample_weight_modes for key in target_structure.keys()}
return [sample_weight_modes for _ in target_structure]
if sample_weight_modes:
try:
nest.assert_same_structure(training_utils.list_to_tuple(target_structure), training_utils.list_to_tuple(sample_weight_modes))
except (ValueError, TypeError):
target_str = str(nest.map_structure((lambda _: '...'), target_structure))
mode_str = str(nest.map_structure((lambda _: '...'), sample_weight_modes))
try:
sample_weight_modes = nest.pack_sequence_as(target_structure, nest.flatten(sample_weight_modes))
logging.warning('sample_weight modes were coerced from\n {}\n to \n {}'.format(target_str, mode_str))
except (ValueError, TypeError):
raise ValueError('Unable to match target structure and sample_weight_modes structure:\n {}\n to \n {}'.format(target_str, mode_str))
return sample_weight_modes | Match sample_weight_modes structure with output structure. | tensorflow/python/keras/engine/data_adapter.py | broadcast_sample_weight_modes | bbbboom/tensorflow | 50 | python | def broadcast_sample_weight_modes(target_structure, sample_weight_modes):
if ((target_structure is None) or (not nest.flatten(target_structure))):
return sample_weight_modes
if isinstance(sample_weight_modes, str):
if isinstance(target_structure, dict):
return {key: sample_weight_modes for key in target_structure.keys()}
return [sample_weight_modes for _ in target_structure]
if sample_weight_modes:
try:
nest.assert_same_structure(training_utils.list_to_tuple(target_structure), training_utils.list_to_tuple(sample_weight_modes))
except (ValueError, TypeError):
target_str = str(nest.map_structure((lambda _: '...'), target_structure))
mode_str = str(nest.map_structure((lambda _: '...'), sample_weight_modes))
try:
sample_weight_modes = nest.pack_sequence_as(target_structure, nest.flatten(sample_weight_modes))
logging.warning('sample_weight modes were coerced from\n {}\n to \n {}'.format(target_str, mode_str))
except (ValueError, TypeError):
raise ValueError('Unable to match target structure and sample_weight_modes structure:\n {}\n to \n {}'.format(target_str, mode_str))
return sample_weight_modes | def broadcast_sample_weight_modes(target_structure, sample_weight_modes):
if ((target_structure is None) or (not nest.flatten(target_structure))):
return sample_weight_modes
if isinstance(sample_weight_modes, str):
if isinstance(target_structure, dict):
return {key: sample_weight_modes for key in target_structure.keys()}
return [sample_weight_modes for _ in target_structure]
if sample_weight_modes:
try:
nest.assert_same_structure(training_utils.list_to_tuple(target_structure), training_utils.list_to_tuple(sample_weight_modes))
except (ValueError, TypeError):
target_str = str(nest.map_structure((lambda _: '...'), target_structure))
mode_str = str(nest.map_structure((lambda _: '...'), sample_weight_modes))
try:
sample_weight_modes = nest.pack_sequence_as(target_structure, nest.flatten(sample_weight_modes))
logging.warning('sample_weight modes were coerced from\n {}\n to \n {}'.format(target_str, mode_str))
except (ValueError, TypeError):
raise ValueError('Unable to match target structure and sample_weight_modes structure:\n {}\n to \n {}'.format(target_str, mode_str))
return sample_weight_modes<|docstring|>Match sample_weight_modes structure with output structure.<|endoftext|> |
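For illustration, a single mode string is broadcast to match the target structure; a sketch assuming the import below and an arbitrary stand-in output structure:

from tensorflow.python.keras.engine import data_adapter

targets = {'main': [0, 1], 'aux': [1, 0]}        # stand-in output structure
data_adapter.broadcast_sample_weight_modes(targets, 'temporal')
# -> {'main': 'temporal', 'aux': 'temporal'}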
ddecea543bc5d40ad1cd5ecdd21a68184ded28f24f96535763ae263e63584644 | def _make_class_weight_map_fn(class_weight):
'Applies class weighting to a `Dataset`.\n\n The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where\n `y` must be a single `Tensor`.\n\n Arguments:\n class_weight: A map where the keys are integer class ids and values are\n the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`\n\n Returns:\n A function that can be used with `tf.data.Dataset.map` to apply class\n weighting.\n '
class_ids = list(sorted(class_weight.keys()))
expected_class_ids = list(range(len(class_ids)))
if (class_ids != expected_class_ids):
error_msg = 'Expected `class_weight` to be a dict with keys from 0 to one less than the number of classes, found {}'.format(class_weight)
raise ValueError(error_msg)
class_weight_tensor = ops.convert_to_tensor_v2([int(class_weight[c]) for c in class_ids], dtype='int64')
def _class_weights_map_fn(*data):
'Convert `class_weight` to `sample_weight`.'
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw)
return _class_weights_map_fn | Applies class weighting to a `Dataset`.
The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where
`y` must be a single `Tensor`.
Arguments:
class_weight: A map where the keys are integer class ids and values are
the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`
Returns:
A function that can be used with `tf.data.Dataset.map` to apply class
weighting. | tensorflow/python/keras/engine/data_adapter.py | _make_class_weight_map_fn | bbbboom/tensorflow | 50 | python | def _make_class_weight_map_fn(class_weight):
'Applies class weighting to a `Dataset`.\n\n The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where\n `y` must be a single `Tensor`.\n\n Arguments:\n class_weight: A map where the keys are integer class ids and values are\n the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`\n\n Returns:\n A function that can be used with `tf.data.Dataset.map` to apply class\n weighting.\n '
class_ids = list(sorted(class_weight.keys()))
expected_class_ids = list(range(len(class_ids)))
if (class_ids != expected_class_ids):
error_msg = 'Expected `class_weight` to be a dict with keys from 0 to one less than the number of classes, found {}'.format(class_weight)
raise ValueError(error_msg)
class_weight_tensor = ops.convert_to_tensor_v2([int(class_weight[c]) for c in class_ids], dtype='int64')
def _class_weights_map_fn(*data):
'Convert `class_weight` to `sample_weight`.'
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw)
return _class_weights_map_fn | def _make_class_weight_map_fn(class_weight):
'Applies class weighting to a `Dataset`.\n\n The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where\n `y` must be a single `Tensor`.\n\n Arguments:\n class_weight: A map where the keys are integer class ids and values are\n the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`\n\n Returns:\n A function that can be used with `tf.data.Dataset.map` to apply class\n weighting.\n '
class_ids = list(sorted(class_weight.keys()))
expected_class_ids = list(range(len(class_ids)))
if (class_ids != expected_class_ids):
error_msg = 'Expected `class_weight` to be a dict with keys from 0 to one less than the number of classes, found {}'.format(class_weight)
raise ValueError(error_msg)
class_weight_tensor = ops.convert_to_tensor_v2([int(class_weight[c]) for c in class_ids], dtype='int64')
def _class_weights_map_fn(*data):
'Convert `class_weight` to `sample_weight`.'
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw)
return _class_weights_map_fn<|docstring|>Applies class weighting to a `Dataset`.
The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where
`y` must be a single `Tensor`.
Arguments:
class_weight: A map where the keys are integer class ids and values are
the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`
Returns:
A function that can be used with `tf.data.Dataset.map` to apply class
weighting.<|endoftext|> |
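A usage sketch with a batched tf.data pipeline (names and weights are arbitrary). Note that this revision builds the lookup tensor with dtype int64 via int(class_weight[c]), so fractional weights such as the 0.2 in the docstring example would be truncated here:

import tensorflow as tf
from tensorflow.python.keras.engine import data_adapter

ds = tf.data.Dataset.from_tensor_slices(
    (tf.zeros([4, 8]), tf.constant([0, 1, 1, 0]))).batch(2)
map_fn = data_adapter._make_class_weight_map_fn({0: 1, 1: 3})
weighted = ds.map(map_fn)
# Each element is now (x, y, sample_weight), with weight 1 for class 0
# and 3 for class 1.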
487ccd9690b847d1da9f69517675f986371e43d676af8e9e39e406c7b93c6dfe | def expand_1d(data):
'Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s.'
def _expand_single_1d_tensor(t):
if (isinstance(t, ops.Tensor) and isinstance(t.shape, tensor_shape.TensorShape) and (t.shape.rank == 1)):
return array_ops.expand_dims_v2(t, axis=(- 1))
return t
return nest.map_structure(_expand_single_1d_tensor, data) | Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s. | tensorflow/python/keras/engine/data_adapter.py | expand_1d | bbbboom/tensorflow | 50 | python | def expand_1d(data):
def _expand_single_1d_tensor(t):
if (isinstance(t, ops.Tensor) and isinstance(t.shape, tensor_shape.TensorShape) and (t.shape.rank == 1)):
return array_ops.expand_dims_v2(t, axis=(- 1))
return t
return nest.map_structure(_expand_single_1d_tensor, data) | def expand_1d(data):
def _expand_single_1d_tensor(t):
if (isinstance(t, ops.Tensor) and isinstance(t.shape, tensor_shape.TensorShape) and (t.shape.rank == 1)):
return array_ops.expand_dims_v2(t, axis=(- 1))
return t
return nest.map_structure(_expand_single_1d_tensor, data)<|docstring|>Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s.<|endoftext|> |
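For example (a sketch; the import path is the private location of this module), a rank-1 targets tensor gains a trailing axis while higher-rank inputs pass through unchanged:

import tensorflow as tf
from tensorflow.python.keras.engine import data_adapter

x = tf.zeros([3, 4])
y = tf.constant([1.0, 0.0, 1.0])                 # shape (3,)
x2, y2 = data_adapter.expand_1d((x, y))
# x2.shape == (3, 4) (unchanged), y2.shape == (3, 1)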
c5e44386278ab5bb858d60bbe36f2b398cb9b921f872672a0e80d213797d0261 | def train_validation_split(arrays, validation_split, shuffle=True):
'Split arrays into random train and validation subsets.\n\n Arguments:\n arrays: Tensors to split. Allowed inputs are arbitrarily nested structures\n of Tensors and NumPy arrays.\n validation_split: Float between 0 and 1. The proportion of the dataset to\n include in the validation split. The rest of the dataset will be included\n in the training split.\n shuffle: Bool. Whether to shuffle the data before performing a split. If\n `False`, the last `validation_split` fraction of that training data will\n become the validation split.\n\n Returns:\n `(train_arrays, validation_arrays)`\n '
def _can_split(t):
tensor_types = (ops.Tensor, np.ndarray)
if pd:
tensor_types = (ops.Tensor, np.ndarray, pd.Series, pd.DataFrame)
return (isinstance(t, tensor_types) or (t is None))
flat_arrays = nest.flatten(arrays)
if (not all((_can_split(t) for t in flat_arrays))):
raise ValueError('`validation_split` is only supported for Tensors or NumPy arrays, found: {}'.format(arrays))
if all(((t is None) for t in flat_arrays)):
return (arrays, arrays)
first_non_none = None
for t in flat_arrays:
if (t is not None):
first_non_none = t
break
batch_dim = int(first_non_none.shape[0])
indices = ops.convert_to_tensor_v2(range(batch_dim))
if shuffle:
indices = random_ops.random_shuffle(indices)
split_at = int(math.floor((batch_dim * (1.0 - validation_split))))
train_indices = indices[:split_at]
val_indices = indices[split_at:]
def _split(t, indices):
if (t is None):
return t
t = ops.convert_to_tensor_v2(t)
return array_ops.gather_v2(t, indices)
train_arrays = nest.map_structure(functools.partial(_split, indices=train_indices), arrays)
val_arrays = nest.map_structure(functools.partial(_split, indices=val_indices), arrays)
return (train_arrays, val_arrays) | Split arrays into random train and validation subsets.
Arguments:
arrays: Tensors to split. Allowed inputs are arbitrarily nested structures
of Tensors and NumPy arrays.
validation_split: Float between 0 and 1. The proportion of the dataset to
include in the validation split. The rest of the dataset will be included
in the training split.
shuffle: Bool. Whether to shuffle the data before performing a split. If
`False`, the last `validation_split` fraction of the training data will
become the validation split.
Returns:
`(train_arrays, validation_arrays)` | tensorflow/python/keras/engine/data_adapter.py | train_validation_split | bbbboom/tensorflow | 50 | python | def train_validation_split(arrays, validation_split, shuffle=True):
'Split arrays into random train and validation subsets.\n\n Arguments:\n arrays: Tensors to split. Allowed inputs are arbitrarily nested structures\n of Tensors and NumPy arrays.\n validation_split: Float between 0 and 1. The proportion of the dataset to\n include in the validation split. The rest of the dataset will be included\n in the training split.\n shuffle: Bool. Whether to shuffle the data before performing a split. If\n `False`, the last `validation_split` fraction of that training data will\n become the validation split.\n\n Returns:\n `(train_arrays, validation_arrays)`\n '
def _can_split(t):
tensor_types = (ops.Tensor, np.ndarray)
if pd:
tensor_types = (ops.Tensor, np.ndarray, pd.Series, pd.DataFrame)
return (isinstance(t, tensor_types) or (t is None))
flat_arrays = nest.flatten(arrays)
if (not all((_can_split(t) for t in flat_arrays))):
raise ValueError('`validation_split` is only supported for Tensors or NumPy arrays, found: {}'.format(arrays))
if all(((t is None) for t in flat_arrays)):
return (arrays, arrays)
first_non_none = None
for t in flat_arrays:
if (t is not None):
first_non_none = t
break
batch_dim = int(first_non_none.shape[0])
indices = ops.convert_to_tensor_v2(range(batch_dim))
if shuffle:
indices = random_ops.random_shuffle(indices)
split_at = int(math.floor((batch_dim * (1.0 - validation_split))))
train_indices = indices[:split_at]
val_indices = indices[split_at:]
def _split(t, indices):
if (t is None):
return t
t = ops.convert_to_tensor_v2(t)
return array_ops.gather_v2(t, indices)
train_arrays = nest.map_structure(functools.partial(_split, indices=train_indices), arrays)
val_arrays = nest.map_structure(functools.partial(_split, indices=val_indices), arrays)
return (train_arrays, val_arrays) | def train_validation_split(arrays, validation_split, shuffle=True):
'Split arrays into random train and validation subsets.\n\n Arguments:\n arrays: Tensors to split. Allowed inputs are arbitrarily nested structures\n of Tensors and NumPy arrays.\n validation_split: Float between 0 and 1. The proportion of the dataset to\n include in the validation split. The rest of the dataset will be included\n in the training split.\n shuffle: Bool. Whether to shuffle the data before performing a split. If\n `False`, the last `validation_split` fraction of that training data will\n become the validation split.\n\n Returns:\n `(train_arrays, validation_arrays)`\n '
def _can_split(t):
tensor_types = (ops.Tensor, np.ndarray)
if pd:
tensor_types = (ops.Tensor, np.ndarray, pd.Series, pd.DataFrame)
return (isinstance(t, tensor_types) or (t is None))
flat_arrays = nest.flatten(arrays)
if (not all((_can_split(t) for t in flat_arrays))):
raise ValueError('`validation_split` is only supported for Tensors or NumPy arrays, found: {}'.format(arrays))
if all(((t is None) for t in flat_arrays)):
return (arrays, arrays)
first_non_none = None
for t in flat_arrays:
if (t is not None):
first_non_none = t
break
batch_dim = int(first_non_none.shape[0])
indices = ops.convert_to_tensor_v2(range(batch_dim))
if shuffle:
indices = random_ops.random_shuffle(indices)
split_at = int(math.floor((batch_dim * (1.0 - validation_split))))
train_indices = indices[:split_at]
val_indices = indices[split_at:]
def _split(t, indices):
if (t is None):
return t
t = ops.convert_to_tensor_v2(t)
return array_ops.gather_v2(t, indices)
train_arrays = nest.map_structure(functools.partial(_split, indices=train_indices), arrays)
val_arrays = nest.map_structure(functools.partial(_split, indices=val_indices), arrays)
return (train_arrays, val_arrays)<|docstring|>Split arrays into random train and validation subsets.
Arguments:
arrays: Tensors to split. Allowed inputs are arbitrarily nested structures
of Tensors and NumPy arrays.
validation_split: Float between 0 and 1. The proportion of the dataset to
include in the validation split. The rest of the dataset will be included
in the training split.
shuffle: Bool. Whether to shuffle the data before performing a split. If
`False`, the last `validation_split` fraction of that training data will
become the validation split.
Returns:
`(train_arrays, validation_arrays)`<|endoftext|> |
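A usage sketch (shapes and the 80/20 split are illustrative); with shuffle left at its default the selected rows are random, and the outputs come back as Tensors:

import numpy as np
from tensorflow.python.keras.engine import data_adapter

x = np.random.rand(50, 2).astype('float32')
y = np.arange(50)
(x_train, y_train), (x_val, y_val) = data_adapter.train_validation_split(
    (x, y), validation_split=0.2)
# floor(50 * 0.8) = 40 training rows, 10 validation rows, gathered as Tensors.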
28ef3cd0cd92426fb3f8b30a9716f8778117332ea8d40dcb5c39f76582b9dfa0 | def unpack_x_y_sample_weight(data):
'Unpacks user-provided data tuple.'
if (not isinstance(data, tuple)):
return (data, None, None)
elif (len(data) == 1):
return (data[0], None, None)
elif (len(data) == 2):
return (data[0], data[1], None)
elif (len(data) == 3):
return (data[0], data[1], data[2])
raise ValueError('Data not understood.') | Unpacks user-provided data tuple. | tensorflow/python/keras/engine/data_adapter.py | unpack_x_y_sample_weight | bbbboom/tensorflow | 50 | python | def unpack_x_y_sample_weight(data):
if (not isinstance(data, tuple)):
return (data, None, None)
elif (len(data) == 1):
return (data[0], None, None)
elif (len(data) == 2):
return (data[0], data[1], None)
elif (len(data) == 3):
return (data[0], data[1], data[2])
raise ValueError('Data not understood.') | def unpack_x_y_sample_weight(data):
if (not isinstance(data, tuple)):
return (data, None, None)
elif (len(data) == 1):
return (data[0], None, None)
elif (len(data) == 2):
return (data[0], data[1], None)
elif (len(data) == 3):
return (data[0], data[1], data[2])
raise ValueError('Data not understood.')<|docstring|>Unpacks user-provided data tuple.<|endoftext|> |
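A small sketch of the unpacking convention (values are arbitrary):

from tensorflow.python.keras.engine import data_adapter

x, y, sw = data_adapter.unpack_x_y_sample_weight(([1, 2], [0, 1]))
# x == [1, 2], y == [0, 1], sw is None
x, y, sw = data_adapter.unpack_x_y_sample_weight([1, 2])
# A non-tuple is treated as features only: x == [1, 2], y and sw are None.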
991e80c30da5dcdd82a0bbccbe08e3352d30c14dbb30a051e1a065d73cba1a77 | def pack_x_y_sample_weight(x, y=None, sample_weight=None):
'Packs user-provided data into a tuple.'
if (y is None):
return (x,)
elif (sample_weight is None):
return (x, y)
else:
return (x, y, sample_weight) | Packs user-provided data into a tuple. | tensorflow/python/keras/engine/data_adapter.py | pack_x_y_sample_weight | bbbboom/tensorflow | 50 | python | def pack_x_y_sample_weight(x, y=None, sample_weight=None):
if (y is None):
return (x,)
elif (sample_weight is None):
return (x, y)
else:
return (x, y, sample_weight) | def pack_x_y_sample_weight(x, y=None, sample_weight=None):
if (y is None):
return (x,)
elif (sample_weight is None):
return (x, y)
else:
return (x, y, sample_weight)<|docstring|>Packs user-provided data into a tuple.<|endoftext|> |
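The inverse convention, sketched with arbitrary values; trailing Nones are dropped, so the result round-trips through unpack_x_y_sample_weight:

from tensorflow.python.keras.engine import data_adapter

data_adapter.pack_x_y_sample_weight([1, 2])               # ([1, 2],)
data_adapter.pack_x_y_sample_weight([1, 2], [0, 1])       # ([1, 2], [0, 1])
data_adapter.pack_x_y_sample_weight([1, 2], [0, 1], [1.0, 0.5])
# ([1, 2], [0, 1], [1.0, 0.5])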
ae50269358cb1dd63d534dde18d3d21c2461a500907348f29771fe3d815a86e8 | def single_batch_iterator(strategy, x, y=None, sample_weight=None, class_weight=None):
'Creates a single-batch dataset.'
(x, y, sample_weight) = _process_tensorlike((x, y, sample_weight))
if (y is None):
data = (x,)
elif (sample_weight is None):
data = (x, y)
else:
data = (x, y, sample_weight)
dataset = dataset_ops.DatasetV2.from_tensors(data)
if class_weight:
dataset = dataset.map(_make_class_weight_map_fn(class_weight))
dataset = strategy.experimental_distribute_dataset(dataset)
return iter(dataset) | Creates a single-batch dataset. | tensorflow/python/keras/engine/data_adapter.py | single_batch_iterator | bbbboom/tensorflow | 50 | python | def single_batch_iterator(strategy, x, y=None, sample_weight=None, class_weight=None):
(x, y, sample_weight) = _process_tensorlike((x, y, sample_weight))
if (y is None):
data = (x,)
elif (sample_weight is None):
data = (x, y)
else:
data = (x, y, sample_weight)
dataset = dataset_ops.DatasetV2.from_tensors(data)
if class_weight:
dataset = dataset.map(_make_class_weight_map_fn(class_weight))
dataset = strategy.experimental_distribute_dataset(dataset)
return iter(dataset) | def single_batch_iterator(strategy, x, y=None, sample_weight=None, class_weight=None):
(x, y, sample_weight) = _process_tensorlike((x, y, sample_weight))
if (y is None):
data = (x,)
elif (sample_weight is None):
data = (x, y)
else:
data = (x, y, sample_weight)
dataset = dataset_ops.DatasetV2.from_tensors(data)
if class_weight:
dataset = dataset.map(_make_class_weight_map_fn(class_weight))
dataset = strategy.experimental_distribute_dataset(dataset)
return iter(dataset)<|docstring|>Creates a single-batch dataset.<|endoftext|> |
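A sketch under the default (no-op) distribution strategy; the returned iterator yields exactly one batch holding all of the data:

import tensorflow as tf
from tensorflow.python.keras.engine import data_adapter

strategy = tf.distribute.get_strategy()          # default strategy
it = data_adapter.single_batch_iterator(strategy, tf.zeros([8, 4]), tf.ones([8, 1]))
x, y = next(it)                                  # one batch with all 8 examples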
17a60c6c4e9ba6de0e4e9c721f722e58c131d29a8146c3fe14fb11bc5582553a | def _scipy_sparse_to_sparse_tensor(t):
'Converts a SciPy sparse matrix to a SparseTensor.'
sparse_coo = t.tocoo()
(row, col) = (sparse_coo.row, sparse_coo.col)
(data, shape) = (sparse_coo.data, sparse_coo.shape)
if issubclass(data.dtype.type, np.floating):
data = data.astype(backend.floatx())
indices = np.concatenate((np.expand_dims(row, axis=1), np.expand_dims(col, axis=1)), axis=1)
return sparse_tensor.SparseTensor(indices, data, shape) | Converts a SciPy sparse matrix to a SparseTensor. | tensorflow/python/keras/engine/data_adapter.py | _scipy_sparse_to_sparse_tensor | bbbboom/tensorflow | 50 | python | def _scipy_sparse_to_sparse_tensor(t):
sparse_coo = t.tocoo()
(row, col) = (sparse_coo.row, sparse_coo.col)
(data, shape) = (sparse_coo.data, sparse_coo.shape)
if issubclass(data.dtype.type, np.floating):
data = data.astype(backend.floatx())
indices = np.concatenate((np.expand_dims(row, axis=1), np.expand_dims(col, axis=1)), axis=1)
return sparse_tensor.SparseTensor(indices, data, shape) | def _scipy_sparse_to_sparse_tensor(t):
sparse_coo = t.tocoo()
(row, col) = (sparse_coo.row, sparse_coo.col)
(data, shape) = (sparse_coo.data, sparse_coo.shape)
if issubclass(data.dtype.type, np.floating):
data = data.astype(backend.floatx())
indices = np.concatenate((np.expand_dims(row, axis=1), np.expand_dims(col, axis=1)), axis=1)
return sparse_tensor.SparseTensor(indices, data, shape)<|docstring|>Converts a SciPy sparse matrix to a SparseTensor.<|endoftext|> |
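A sketch with a small SciPy COO matrix (values illustrative); floating data is recast to the Keras floatx dtype:

import numpy as np
from scipy import sparse
from tensorflow.python.keras.engine import data_adapter

m = sparse.coo_matrix(
    (np.array([1.0, 2.0]), (np.array([0, 1]), np.array([2, 0]))), shape=(2, 3))
st = data_adapter._scipy_sparse_to_sparse_tensor(m)
# SparseTensor with indices [[0, 2], [1, 0]], float32 values, dense_shape (2, 3)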
a50463ad58456992e352a5ed96d44f7aeb46502cc61fa4cc1cfeb2b8996e1e98 | @staticmethod
def can_handle(x, y=None):
'Whether the current DataAdapter could handle the input x and y.\n\n Structure wise, x and y can be single object, or list of objects if there\n multiple input/output, or dictionary of objects when the intput/output are\n named.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n\n Returns:\n boolean\n '
raise NotImplementedError | Whether the current DataAdapter could handle the input x and y.
Structure-wise, x and y can be a single object, a list of objects if there
are multiple inputs/outputs, or a dictionary of objects when the inputs/outputs
are named.
Args:
x: input features.
y: target labels. Note that y could be None in the case of prediction.
Returns:
boolean | tensorflow/python/keras/engine/data_adapter.py | can_handle | bbbboom/tensorflow | 50 | python | @staticmethod
def can_handle(x, y=None):
'Whether the current DataAdapter could handle the input x and y.\n\n Structure wise, x and y can be single object, or list of objects if there\n multiple input/output, or dictionary of objects when the intput/output are\n named.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n\n Returns:\n boolean\n '
raise NotImplementedError | @staticmethod
def can_handle(x, y=None):
'Whether the current DataAdapter could handle the input x and y.\n\n Structure wise, x and y can be single object, or list of objects if there\n multiple input/output, or dictionary of objects when the intput/output are\n named.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n\n Returns:\n boolean\n '
raise NotImplementedError<|docstring|>Whether the current DataAdapter could handle the input x and y.
Structure wise, x and y can be single object, or list of objects if there
multiple input/output, or dictionary of objects when the intput/output are
named.
Args:
x: input features.
y: target labels. Note that y could be None in the case of prediction.
Returns:
boolean<|endoftext|> |
ed8f8b395f99719f520b9dd83b8f51b3a563e42c92d85a309f24522582abefc3 | @abc.abstractmethod
def __init__(self, x, y=None, **kwargs):
"Create a DataAdapter based on data inputs.\n\n The caller must make sure to call `can_handle()` first before invoking this\n method. Provide unsupported data type will result into unexpected behavior.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n **kwargs: Other keyword arguments for DataAdapter during the construction\n of the tf.dataset.Dataset. For example:\n - Numpy data might have `sample_weights` which will be used for\n weighting the loss function during training.\n - Numpy data might need to have `batch_size` parameter when constructing\n the dataset and iterator.\n - Certain input might need to be distribution strategy aware. When\n `distribution_strategy` is passed, the created dataset need to respect\n the strategy.\n DataAdapter might choose to ignore any keyword argument if it doesn't\n use it, or raise exception if any required argument is not provide.\n "
if (not self.can_handle(x, y)):
raise ValueError('{} Cannot handle input {}, {}'.format(self.__class__, x, y)) | Create a DataAdapter based on data inputs.
The caller must make sure to call `can_handle()` first before invoking this
method. Providing an unsupported data type will result in unexpected behavior.
Args:
x: input features.
y: target labels. Note that y could be None in the case of prediction.
**kwargs: Other keyword arguments for DataAdapter during the construction
of the tf.data.Dataset. For example:
- Numpy data might have `sample_weights` which will be used for
weighting the loss function during training.
- Numpy data might need to have `batch_size` parameter when constructing
the dataset and iterator.
- Certain input might need to be distribution strategy aware. When
`distribution_strategy` is passed, the created dataset needs to respect
the strategy.
DataAdapter might choose to ignore any keyword argument if it doesn't
use it, or raise an exception if any required argument is not provided. | tensorflow/python/keras/engine/data_adapter.py | __init__ | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def __init__(self, x, y=None, **kwargs):
"Create a DataAdapter based on data inputs.\n\n The caller must make sure to call `can_handle()` first before invoking this\n method. Provide unsupported data type will result into unexpected behavior.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n **kwargs: Other keyword arguments for DataAdapter during the construction\n of the tf.dataset.Dataset. For example:\n - Numpy data might have `sample_weights` which will be used for\n weighting the loss function during training.\n - Numpy data might need to have `batch_size` parameter when constructing\n the dataset and iterator.\n - Certain input might need to be distribution strategy aware. When\n `distribution_strategy` is passed, the created dataset need to respect\n the strategy.\n DataAdapter might choose to ignore any keyword argument if it doesn't\n use it, or raise exception if any required argument is not provide.\n "
if (not self.can_handle(x, y)):
raise ValueError('{} Cannot handle input {}, {}'.format(self.__class__, x, y)) | @abc.abstractmethod
def __init__(self, x, y=None, **kwargs):
"Create a DataAdapter based on data inputs.\n\n The caller must make sure to call `can_handle()` first before invoking this\n method. Provide unsupported data type will result into unexpected behavior.\n\n Args:\n x: input features.\n y: target labels. Note that y could be None in the case of prediction.\n **kwargs: Other keyword arguments for DataAdapter during the construction\n of the tf.dataset.Dataset. For example:\n - Numpy data might have `sample_weights` which will be used for\n weighting the loss function during training.\n - Numpy data might need to have `batch_size` parameter when constructing\n the dataset and iterator.\n - Certain input might need to be distribution strategy aware. When\n `distribution_strategy` is passed, the created dataset need to respect\n the strategy.\n DataAdapter might choose to ignore any keyword argument if it doesn't\n use it, or raise exception if any required argument is not provide.\n "
if (not self.can_handle(x, y)):
raise ValueError('{} Cannot handle input {}, {}'.format(self.__class__, x, y))<|docstring|>Create a DataAdapter based on data inputs.
The caller must make sure to call `can_handle()` first before invoking this
method. Provide unsupported data type will result into unexpected behavior.
Args:
x: input features.
y: target labels. Note that y could be None in the case of prediction.
**kwargs: Other keyword arguments for DataAdapter during the construction
of the tf.dataset.Dataset. For example:
- Numpy data might have `sample_weights` which will be used for
weighting the loss function during training.
- Numpy data might need to have `batch_size` parameter when constructing
the dataset and iterator.
- Certain input might need to be distribution strategy aware. When
`distribution_strategy` is passed, the created dataset need to respect
the strategy.
DataAdapter might choose to ignore any keyword argument if it doesn't
use it, or raise exception if any required argument is not provide.<|endoftext|> |
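To make the contract above concrete, a toy subclass that covers the abstract methods documented in this and the following entries (purely illustrative; the class name and its behavior are assumptions, and the real adapters live later in this same module):

import numpy as np
import tensorflow as tf
from tensorflow.python.keras.engine import data_adapter

class SmallNumpyAdapter(data_adapter.DataAdapter):
  """Toy adapter for one in-memory NumPy (x, y) pair."""

  @staticmethod
  def can_handle(x, y=None):
    return isinstance(x, np.ndarray)

  def __init__(self, x, y=None, batch_size=32, **kwargs):
    super(SmallNumpyAdapter, self).__init__(x, y, **kwargs)
    self._batch_size = batch_size
    self._size = int(np.ceil(len(x) / batch_size))
    self._partial = len(x) % batch_size
    self._dataset = tf.data.Dataset.from_tensor_slices((x, y)).batch(batch_size)

  def get_dataset(self):
    return self._dataset

  def get_size(self):
    return self._size

  def batch_size(self):
    return self._batch_size

  def has_partial_batch(self):
    return self._partial > 0

  def partial_batch_size(self):
    return self._partial if self._partial else None

  def should_recreate_iterator(self):
    return True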
61c87c02e8283b522d89b8afefdb35a131d67a7c6a160d8d5c568f3e0204c1a7 | @abc.abstractmethod
def get_dataset(self):
'Get a dataset instance for the current DataAdapter.\n\n Note that the dataset returned does not repeat for epoch, so caller might\n need to create new iterator for the same dataset at the beginning of the\n epoch. This behavior might change in future.\n\n Returns:\n An tf.dataset.Dataset. Caller might use the dataset in different\n context, eg iter(dataset) in eager to get the value directly, or in graph\n mode, provide the iterator tensor to Keras model function.\n '
raise NotImplementedError | Get a dataset instance for the current DataAdapter.
Note that the dataset returned does not repeat across epochs, so the caller
might need to create a new iterator for the same dataset at the beginning of
each epoch. This behavior might change in the future.
Returns:
A tf.data.Dataset. The caller might use the dataset in different
contexts, e.g. iter(dataset) in eager mode to get the values directly, or in
graph mode by providing the iterator tensor to the Keras model function. | tensorflow/python/keras/engine/data_adapter.py | get_dataset | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def get_dataset(self):
'Get a dataset instance for the current DataAdapter.\n\n Note that the dataset returned does not repeat for epoch, so caller might\n need to create new iterator for the same dataset at the beginning of the\n epoch. This behavior might change in future.\n\n Returns:\n An tf.dataset.Dataset. Caller might use the dataset in different\n context, eg iter(dataset) in eager to get the value directly, or in graph\n mode, provide the iterator tensor to Keras model function.\n '
raise NotImplementedError | @abc.abstractmethod
def get_dataset(self):
'Get a dataset instance for the current DataAdapter.\n\n Note that the dataset returned does not repeat for epoch, so caller might\n need to create new iterator for the same dataset at the beginning of the\n epoch. This behavior might change in future.\n\n Returns:\n An tf.dataset.Dataset. Caller might use the dataset in different\n context, eg iter(dataset) in eager to get the value directly, or in graph\n mode, provide the iterator tensor to Keras model function.\n '
raise NotImplementedError<|docstring|>Get a dataset instance for the current DataAdapter.
Note that the dataset returned does not repeat for epoch, so caller might
need to create new iterator for the same dataset at the beginning of the
epoch. This behavior might change in future.
Returns:
An tf.dataset.Dataset. Caller might use the dataset in different
context, eg iter(dataset) in eager to get the value directly, or in graph
mode, provide the iterator tensor to Keras model function.<|endoftext|> |
f2b115204d9519a764d529c10913e5b91776cb7c06a3df6d46641e7089149daf | @abc.abstractmethod
def get_size(self):
'Return the size (number of batches) for the dataset created.\n\n For certain type of the data input, the number of batches is known, eg for\n Numpy data, the size is same as (number_of_element / batch_size). Whereas\n for dataset or python generator, the size is unknown since it may or may not\n have a end state.\n\n Returns:\n int, the number of batches for the dataset, or None if it is unknown. The\n caller could use this to control the loop of training, show progress bar,\n or handle unexpected StopIteration error.\n '
raise NotImplementedError | Return the size (number of batches) for the dataset created.
For certain types of data input, the number of batches is known, e.g. for
Numpy data the size is the same as (number_of_element / batch_size). Whereas
for a dataset or Python generator the size is unknown, since it may or may not
have an end state.
Returns:
int, the number of batches for the dataset, or None if it is unknown. The
caller could use this to control the training loop, show a progress bar,
or handle an unexpected StopIteration error. | tensorflow/python/keras/engine/data_adapter.py | get_size | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def get_size(self):
'Return the size (number of batches) for the dataset created.\n\n For certain type of the data input, the number of batches is known, eg for\n Numpy data, the size is same as (number_of_element / batch_size). Whereas\n for dataset or python generator, the size is unknown since it may or may not\n have a end state.\n\n Returns:\n int, the number of batches for the dataset, or None if it is unknown. The\n caller could use this to control the loop of training, show progress bar,\n or handle unexpected StopIteration error.\n '
raise NotImplementedError | @abc.abstractmethod
def get_size(self):
'Return the size (number of batches) for the dataset created.\n\n For certain type of the data input, the number of batches is known, eg for\n Numpy data, the size is same as (number_of_element / batch_size). Whereas\n for dataset or python generator, the size is unknown since it may or may not\n have a end state.\n\n Returns:\n int, the number of batches for the dataset, or None if it is unknown. The\n caller could use this to control the loop of training, show progress bar,\n or handle unexpected StopIteration error.\n '
raise NotImplementedError<|docstring|>Return the size (number of batches) for the dataset created.
For certain type of the data input, the number of batches is known, eg for
Numpy data, the size is same as (number_of_element / batch_size). Whereas
for dataset or python generator, the size is unknown since it may or may not
have a end state.
Returns:
int, the number of batches for the dataset, or None if it is unknown. The
caller could use this to control the loop of training, show progress bar,
or handle unexpected StopIteration error.<|endoftext|> |
32a1a6ecf0743e98ca71bf3f61e8a132adffccfea395cf449ee3b64cb03c73d7 | @abc.abstractmethod
def batch_size(self):
'Return the batch size of the dataset created.\n\n For certain type of the data input, the batch size is known, and even\n required, like numpy array. Where as for dataset, the batch is unknown\n unless we take a peek.\n\n Returns:\n int, the batch size of the dataset, or None if it is unknown.\n '
raise NotImplementedError | Return the batch size of the dataset created.
For certain types of data input, the batch size is known, and even
required, e.g. for a NumPy array. Whereas for a dataset, the batch size is
unknown unless we take a peek.
Returns:
int, the batch size of the dataset, or None if it is unknown. | tensorflow/python/keras/engine/data_adapter.py | batch_size | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def batch_size(self):
'Return the batch size of the dataset created.\n\n For certain type of the data input, the batch size is known, and even\n required, like numpy array. Where as for dataset, the batch is unknown\n unless we take a peek.\n\n Returns:\n int, the batch size of the dataset, or None if it is unknown.\n '
raise NotImplementedError | @abc.abstractmethod
def batch_size(self):
'Return the batch size of the dataset created.\n\n For certain type of the data input, the batch size is known, and even\n required, like numpy array. Where as for dataset, the batch is unknown\n unless we take a peek.\n\n Returns:\n int, the batch size of the dataset, or None if it is unknown.\n '
raise NotImplementedError<|docstring|>Return the batch size of the dataset created.
For certain type of the data input, the batch size is known, and even
required, like numpy array. Where as for dataset, the batch is unknown
unless we take a peek.
Returns:
int, the batch size of the dataset, or None if it is unknown.<|endoftext|> |
c6ed40b1e95e6a87c0a2a149e258e2066cd0106d9769932b17ac10e16ad65f0e | def representative_batch_size(self):
'Return a representative size for batches in the dataset.\n\n This is not guaranteed to be the batch size for all batches in the\n dataset. It just needs to be a rough approximation for batch sizes in\n the dataset.\n\n Returns:\n int, a representative size for batches found in the dataset,\n or None if it is unknown.\n '
return self.batch_size() | Return a representative size for batches in the dataset.
This is not guaranteed to be the batch size for all batches in the
dataset. It just needs to be a rough approximation for batch sizes in
the dataset.
Returns:
int, a representative size for batches found in the dataset,
or None if it is unknown. | tensorflow/python/keras/engine/data_adapter.py | representative_batch_size | bbbboom/tensorflow | 50 | python | def representative_batch_size(self):
'Return a representative size for batches in the dataset.\n\n This is not guaranteed to be the batch size for all batches in the\n dataset. It just needs to be a rough approximation for batch sizes in\n the dataset.\n\n Returns:\n int, a representative size for batches found in the dataset,\n or None if it is unknown.\n '
return self.batch_size() | def representative_batch_size(self):
'Return a representative size for batches in the dataset.\n\n This is not guaranteed to be the batch size for all batches in the\n dataset. It just needs to be a rough approximation for batch sizes in\n the dataset.\n\n Returns:\n int, a representative size for batches found in the dataset,\n or None if it is unknown.\n '
return self.batch_size()<|docstring|>Return a representative size for batches in the dataset.
This is not guaranteed to be the batch size for all batches in the
dataset. It just needs to be a rough approximation for batch sizes in
the dataset.
Returns:
int, a representative size for batches found in the dataset,
or None if it is unknown.<|endoftext|> |
236231450e58cb8f0a4659675cbee57680d68d4fe15e2b89f9f93999c96c65c2 | @abc.abstractmethod
def has_partial_batch(self):
'Whether the dataset has partial batch at the end.'
raise NotImplementedError | Whether the dataset has a partial batch at the end. | tensorflow/python/keras/engine/data_adapter.py | has_partial_batch | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def has_partial_batch(self):
raise NotImplementedError | @abc.abstractmethod
def has_partial_batch(self):
raise NotImplementedError<|docstring|>Whether the dataset has partial batch at the end.<|endoftext|> |
64dc2edeb404382f0c174498c83713f6ae9152585766a86c3c53cc0d960020ab | @abc.abstractmethod
def partial_batch_size(self):
'The size of the final partial batch for dataset.\n\n Will return None if has_partial_batch is False or batch_size is None.\n '
raise NotImplementedError | The size of the final partial batch for dataset.
Will return None if has_partial_batch is False or batch_size is None. | tensorflow/python/keras/engine/data_adapter.py | partial_batch_size | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def partial_batch_size(self):
'The size of the final partial batch for dataset.\n\n Will return None if has_partial_batch is False or batch_size is None.\n '
raise NotImplementedError | @abc.abstractmethod
def partial_batch_size(self):
'The size of the final partial batch for dataset.\n\n Will return None if has_partial_batch is False or batch_size is None.\n '
raise NotImplementedError<|docstring|>The size of the final partial batch for dataset.
Will return None if has_partial_batch is False or batch_size is None.<|endoftext|> |
b1052a9345568be1d0b2bfd3c5c7c428269030053f0dca0c39bbe44cdd991f8d | @abc.abstractmethod
def should_recreate_iterator(self):
'Returns whether a new iterator should be created every epoch.'
raise NotImplementedError | Returns whether a new iterator should be created every epoch. | tensorflow/python/keras/engine/data_adapter.py | should_recreate_iterator | bbbboom/tensorflow | 50 | python | @abc.abstractmethod
def should_recreate_iterator(self):
raise NotImplementedError | @abc.abstractmethod
def should_recreate_iterator(self):
raise NotImplementedError<|docstring|>Returns whether a new iterator should be created every epoch.<|endoftext|> |
242fff6c408dcb9126d78935e090b657059216dc70c13b41492ac2bad45f90e4 | def get_samples(self):
'Returns number of samples in the data, or `None`.'
if ((not self.get_size()) or (not self.batch_size())):
return None
total_sample = (self.get_size() * self.batch_size())
if self.has_partial_batch():
total_sample -= (self.batch_size() - self.partial_batch_size())
return total_sample | Returns number of samples in the data, or `None`. | tensorflow/python/keras/engine/data_adapter.py | get_samples | bbbboom/tensorflow | 50 | python | def get_samples(self):
if ((not self.get_size()) or (not self.batch_size())):
return None
total_sample = (self.get_size() * self.batch_size())
if self.has_partial_batch():
total_sample -= (self.batch_size() - self.partial_batch_size())
return total_sample | def get_samples(self):
if ((not self.get_size()) or (not self.batch_size())):
return None
total_sample = (self.get_size() * self.batch_size())
if self.has_partial_batch():
total_sample -= (self.batch_size() - self.partial_batch_size())
return total_sample<|docstring|>Returns number of samples in the data, or `None`.<|endoftext|> |
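A worked example of the arithmetic above (numbers are arbitrary):

size, batch, partial = 7, 32, 5              # 7 batches, last one holds 5 samples
samples = size * batch - (batch - partial)   # 224 - 27 = 197, as get_samples() returns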
0f7ff45551a4cf6e3fe38e9a56ef01cac81fb716e76086f11b7e8fb5aa32c88b | def on_epoch_end(self):
'A hook called after each epoch.'
pass | A hook called after each epoch. | tensorflow/python/keras/engine/data_adapter.py | on_epoch_end | bbbboom/tensorflow | 50 | python | def on_epoch_end(self):
pass | def on_epoch_end(self):
pass<|docstring|>A hook called after each epoch.<|endoftext|> |
bb8ca5de57d1d78ce84cfdc7081a34de2a26497ff98c24e64863343794efdbe9 | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
dataset = dataset_ops.DatasetV2.zip((indices_dataset, dataset_ops.DatasetV2.from_tensors(inputs).repeat()))
def grab_batch(i, data):
return nest.map_structure((lambda d: array_ops.gather(d, i, axis=0)), data)
dataset = dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
if self._shuffle:
options.experimental_external_state_policy = distribute_options.ExternalStatePolicy.IGNORE
dataset = dataset.with_options(options)
return dataset | Slice inputs into a Dataset of batches.
Given a Dataset of batch indices and the unsliced inputs,
this step slices the inputs in a parallelized fashion
and produces a dataset of input batches.
Args:
indices_dataset: A Dataset of batched indices
inputs: A python data structure that contains the inputs, targets,
and possibly sample weights.
Returns:
A Dataset of input batches matching the batch indices. | tensorflow/python/keras/engine/data_adapter.py | slice_inputs | bbbboom/tensorflow | 50 | python | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
dataset = dataset_ops.DatasetV2.zip((indices_dataset, dataset_ops.DatasetV2.from_tensors(inputs).repeat()))
def grab_batch(i, data):
return nest.map_structure((lambda d: array_ops.gather(d, i, axis=0)), data)
dataset = dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
if self._shuffle:
options.experimental_external_state_policy = distribute_options.ExternalStatePolicy.IGNORE
dataset = dataset.with_options(options)
return dataset | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
dataset = dataset_ops.DatasetV2.zip((indices_dataset, dataset_ops.DatasetV2.from_tensors(inputs).repeat()))
def grab_batch(i, data):
return nest.map_structure((lambda d: array_ops.gather(d, i, axis=0)), data)
dataset = dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
if self._shuffle:
options.experimental_external_state_policy = distribute_options.ExternalStatePolicy.IGNORE
dataset = dataset.with_options(options)
return dataset<|docstring|>Slice inputs into a Dataset of batches.
Given a Dataset of batch indices and the unsliced inputs,
this step slices the inputs in a parallelized fashion
and produces a dataset of input batches.
Args:
indices_dataset: A Dataset of batched indices
inputs: A python data structure that contains the inputs, targets,
and possibly sample weights.
Returns:
A Dataset of input batches matching the batch indices.<|endoftext|> |
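The same gather-based slicing idea, sketched standalone with plain tf.data (toy values, outside the adapter class):

import tensorflow as tf

x = tf.range(10)
indices = tf.data.Dataset.from_tensor_slices([[3, 1, 4], [1, 5, 9]])
batches = tf.data.Dataset.zip((indices, tf.data.Dataset.from_tensors(x).repeat()))
batches = batches.map(lambda i, data: tf.gather(data, i, axis=0))
# yields tensors [3 1 4] and [1 5 9]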
1ba3fe9b6de78ad5afa692954ce5f98859beea0bde139407cc05c6a6911b5217 | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
flat_inputs = nest.flatten(inputs)
def dynamic_shape_like(t):
shape = list(t.shape)
shape[0] = None
return tuple(shape)
flat_dtypes = [inp.dtype for inp in flat_inputs]
contiguous = True
if (self._shuffle and (self._shuffle != 'batch')):
contiguous = False
def grab_batch(indices):
'Grab a batch of data from the inputs.'
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out)
dataset = indices_dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
return dataset | Slice inputs into a Dataset of batches.
Given a Dataset of batch indices and the unsliced inputs,
this step slices the inputs in a parallelized fashion
and produces a dataset of input batches.
Args:
indices_dataset: A Dataset of batched indices
inputs: A python data structure that contains the inputs, targets,
and possibly sample weights.
Returns:
A Dataset of input batches matching the batch indices. | tensorflow/python/keras/engine/data_adapter.py | slice_inputs | bbbboom/tensorflow | 50 | python | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
flat_inputs = nest.flatten(inputs)
def dynamic_shape_like(t):
shape = list(t.shape)
shape[0] = None
return tuple(shape)
flat_dtypes = [inp.dtype for inp in flat_inputs]
contiguous = True
if (self._shuffle and (self._shuffle != 'batch')):
contiguous = False
def grab_batch(indices):
'Grab a batch of data from the inputs.'
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out)
dataset = indices_dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
return dataset | def slice_inputs(self, indices_dataset, inputs):
'Slice inputs into a Dataset of batches.\n\n Given a Dataset of batch indices and the unsliced inputs,\n this step slices the inputs in a parallelized fashion\n and produces a dataset of input batches.\n\n Args:\n indices_dataset: A Dataset of batched indices\n inputs: A python data structure that contains the inputs, targets,\n and possibly sample weights.\n\n Returns:\n A Dataset of input batches matching the batch indices.\n '
flat_inputs = nest.flatten(inputs)
def dynamic_shape_like(t):
shape = list(t.shape)
shape[0] = None
return tuple(shape)
flat_dtypes = [inp.dtype for inp in flat_inputs]
contiguous = True
if (self._shuffle and (self._shuffle != 'batch')):
contiguous = False
def grab_batch(indices):
'Grab a batch of data from the inputs.'
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out)
dataset = indices_dataset.map(grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
return dataset<|docstring|>Slice inputs into a Dataset of batches.
Given a Dataset of batch indices and the unsliced inputs,
this step slices the inputs in a parallelized fashion
and produces a dataset of input batches.
Args:
indices_dataset: A Dataset of batched indices
inputs: A python data structure that contains the inputs, targets,
and possibly sample weights.
Returns:
A Dataset of input batches matching the batch indices.<|endoftext|> |
eb74d778e2f99a8da208895c75e19899ced3405a9e72dca28f122695b213a63a | def _validate_args(self, y, sample_weights, steps):
'Validates `__init__` arguments.'
if (not is_none_or_empty(y)):
raise ValueError('`y` argument is not supported when using dataset as input.')
if (not is_none_or_empty(sample_weights)):
raise ValueError('`sample_weight` argument is not supported when using dataset as input.')
size = cardinality.cardinality(self._dataset).numpy()
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When providing an infinite dataset, you must specify the number of steps to run.') | Validates `__init__` arguments. | tensorflow/python/keras/engine/data_adapter.py | _validate_args | bbbboom/tensorflow | 50 | python | def _validate_args(self, y, sample_weights, steps):
if (not is_none_or_empty(y)):
raise ValueError('`y` argument is not supported when using dataset as input.')
if (not is_none_or_empty(sample_weights)):
raise ValueError('`sample_weight` argument is not supported when using dataset as input.')
size = cardinality.cardinality(self._dataset).numpy()
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When providing an infinite dataset, you must specify the number of steps to run.') | def _validate_args(self, y, sample_weights, steps):
if (not is_none_or_empty(y)):
raise ValueError('`y` argument is not supported when using dataset as input.')
if (not is_none_or_empty(sample_weights)):
raise ValueError('`sample_weight` argument is not supported when using dataset as input.')
size = cardinality.cardinality(self._dataset).numpy()
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When providing an infinite dataset, you must specify the number of steps to run.')<|docstring|>Validates `__init__` arguments.<|endoftext|> |
127620fcc70c89c4574a1665cc6910c67db50b72ddf915e0bdc496147c920646 | def _standardize_batch(self, data):
'Standardizes a batch output by a generator.'
(x, y, sample_weight) = unpack_x_y_sample_weight(data)
data = pack_x_y_sample_weight(x, y, sample_weight)
data = nest._list_to_tuple(data)
def _convert_dtype(t):
if (isinstance(t, np.ndarray) and issubclass(t.dtype.type, np.floating)):
return np.array(t, dtype=backend.floatx())
return t
data = nest.map_structure(_convert_dtype, data)
return data | Standardizes a batch output by a generator. | tensorflow/python/keras/engine/data_adapter.py | _standardize_batch | bbbboom/tensorflow | 50 | python | def _standardize_batch(self, data):
(x, y, sample_weight) = unpack_x_y_sample_weight(data)
data = pack_x_y_sample_weight(x, y, sample_weight)
data = nest._list_to_tuple(data)
def _convert_dtype(t):
if (isinstance(t, np.ndarray) and issubclass(t.dtype.type, np.floating)):
return np.array(t, dtype=backend.floatx())
return t
data = nest.map_structure(_convert_dtype, data)
return data | def _standardize_batch(self, data):
(x, y, sample_weight) = unpack_x_y_sample_weight(data)
data = pack_x_y_sample_weight(x, y, sample_weight)
data = nest._list_to_tuple(data)
def _convert_dtype(t):
if (isinstance(t, np.ndarray) and issubclass(t.dtype.type, np.floating)):
return np.array(t, dtype=backend.floatx())
return t
data = nest.map_structure(_convert_dtype, data)
return data<|docstring|>Standardizes a batch output by a generator.<|endoftext|> |
eaa063c3b39a2ade49435d058d650ed4fe96d020bf1fd176fe3f35465dcdce2d | def _handle_multiprocessing(self, x, workers, use_multiprocessing, max_queue_size):
'Create a callable, possibly including an Enqueuer.'
if ((workers > 1) or ((workers > 0) and use_multiprocessing)):
if use_multiprocessing:
logging.warning(UserWarning('Using a generator with `use_multiprocessing=True` and multiple workers may duplicate your data. Please consider using the `tf.data.Dataset`.'))
def generator_fn():
enqueuer = data_utils.GeneratorEnqueuer(x, use_multiprocessing=use_multiprocessing)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
return enqueuer.get()
else:
generator_fn = (lambda : x)
return generator_fn | Create a callable, possibly including an Enqueuer. | tensorflow/python/keras/engine/data_adapter.py | _handle_multiprocessing | bbbboom/tensorflow | 50 | python | def _handle_multiprocessing(self, x, workers, use_multiprocessing, max_queue_size):
if ((workers > 1) or ((workers > 0) and use_multiprocessing)):
if use_multiprocessing:
logging.warning(UserWarning('Using a generator with `use_multiprocessing=True` and multiple workers may duplicate your data. Please consider using the `tf.data.Dataset`.'))
def generator_fn():
enqueuer = data_utils.GeneratorEnqueuer(x, use_multiprocessing=use_multiprocessing)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
return enqueuer.get()
else:
generator_fn = (lambda : x)
return generator_fn | def _handle_multiprocessing(self, x, workers, use_multiprocessing, max_queue_size):
if ((workers > 1) or ((workers > 0) and use_multiprocessing)):
if use_multiprocessing:
logging.warning(UserWarning('Using a generator with `use_multiprocessing=True` and multiple workers may duplicate your data. Please consider using the `tf.data.Dataset`.'))
def generator_fn():
enqueuer = data_utils.GeneratorEnqueuer(x, use_multiprocessing=use_multiprocessing)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
return enqueuer.get()
else:
generator_fn = (lambda : x)
return generator_fn<|docstring|>Create a callable, possibly including an Enqueuer.<|endoftext|> |
dc79a81c9486d18d657ead276d64527963c0b78feadede6e09cb0a113ab67081 | def enumerate_epochs(self):
'Yields `(epoch, tf.data.Iterator)`.'
data_iterator = iter(self._dataset)
for epoch in range(self._initial_epoch, self._epochs):
if self._insufficient_data:
break
if self._adapter.should_recreate_iterator():
if ds_context.has_strategy():
data_iterator._initializer
else:
data_iterator = iter(self._dataset)
(yield (epoch, data_iterator))
self._adapter.on_epoch_end() | Yields `(epoch, tf.data.Iterator)`. | tensorflow/python/keras/engine/data_adapter.py | enumerate_epochs | bbbboom/tensorflow | 50 | python | def enumerate_epochs(self):
data_iterator = iter(self._dataset)
for epoch in range(self._initial_epoch, self._epochs):
if self._insufficient_data:
break
if self._adapter.should_recreate_iterator():
if ds_context.has_strategy():
data_iterator._initializer
else:
data_iterator = iter(self._dataset)
(yield (epoch, data_iterator))
self._adapter.on_epoch_end() | def enumerate_epochs(self):
data_iterator = iter(self._dataset)
for epoch in range(self._initial_epoch, self._epochs):
if self._insufficient_data:
break
if self._adapter.should_recreate_iterator():
if ds_context.has_strategy():
data_iterator._initializer
else:
data_iterator = iter(self._dataset)
(yield (epoch, data_iterator))
self._adapter.on_epoch_end()<|docstring|>Yields `(epoch, tf.data.Iterator)`.<|endoftext|> |
9c81f62c9710868fa4ac4745501cf94b55ad324fa3067bf83709364be478858a | @contextlib.contextmanager
def catch_stop_iteration(self):
'Catches errors when an iterator runs out of data.'
try:
(yield)
context.async_wait()
except (StopIteration, errors.OutOfRangeError):
if ((self._adapter.get_size() is None) and (self._inferred_steps is None) and (self._current_step > 0)):
self._inferred_steps = self._current_step
else:
self._insufficient_data = True
total_epochs = (self._epochs - self._initial_epoch)
logging.warning('Your input ran out of data; interrupting training. Make sure that your dataset or generator can generate at least `steps_per_epoch * epochs` batches (in this case, {} batches). You may need to use the repeat() function when building your dataset.'.format((total_epochs * self._inferred_steps))) | Catches errors when an iterator runs out of data. | tensorflow/python/keras/engine/data_adapter.py | catch_stop_iteration | bbbboom/tensorflow | 50 | python | @contextlib.contextmanager
def catch_stop_iteration(self):
try:
(yield)
context.async_wait()
except (StopIteration, errors.OutOfRangeError):
if ((self._adapter.get_size() is None) and (self._inferred_steps is None) and (self._current_step > 0)):
self._inferred_steps = self._current_step
else:
self._insufficient_data = True
total_epochs = (self._epochs - self._initial_epoch)
logging.warning('Your input ran out of data; interrupting training. Make sure that your dataset or generator can generate at least `steps_per_epoch * epochs` batches (in this case, {} batches). You may need to use the repeat() function when building your dataset.'.format((total_epochs * self._inferred_steps))) | @contextlib.contextmanager
def catch_stop_iteration(self):
try:
(yield)
context.async_wait()
except (StopIteration, errors.OutOfRangeError):
if ((self._adapter.get_size() is None) and (self._inferred_steps is None) and (self._current_step > 0)):
self._inferred_steps = self._current_step
else:
self._insufficient_data = True
total_epochs = (self._epochs - self._initial_epoch)
logging.warning('Your input ran out of data; interrupting training. Make sure that your dataset or generator can generate at least `steps_per_epoch * epochs` batches (in this case, {} batches). You may need to use the repeat() function when building your dataset.'.format((total_epochs * self._inferred_steps)))<|docstring|>Catches errors when an iterator runs out of data.<|endoftext|> |
f65fdaf8008be2cd9fdf7d541f22bcb445add07160575fefc7acc1ec274666e6 | def steps(self):
'Yields steps for the current epoch.'
self._current_step = 0
while ((self._inferred_steps is None) or (self._current_step < self._inferred_steps)):
if self._insufficient_data:
break
(yield self._current_step)
self._current_step += 1 | Yields steps for the current epoch. | tensorflow/python/keras/engine/data_adapter.py | steps | bbbboom/tensorflow | 50 | python | def steps(self):
self._current_step = 0
while ((self._inferred_steps is None) or (self._current_step < self._inferred_steps)):
if self._insufficient_data:
break
(yield self._current_step)
self._current_step += 1 | def steps(self):
self._current_step = 0
while ((self._inferred_steps is None) or (self._current_step < self._inferred_steps)):
if self._insufficient_data:
break
(yield self._current_step)
self._current_step += 1<|docstring|>Yields steps for the current epoch.<|endoftext|> |
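The three generator methods recorded above (enumerate_epochs, catch_stop_iteration, steps) are meant to compose into a single training loop. A minimal sketch of that composition, assuming a DataHandler built from the usual (x, y, batch_size, epochs) arguments; the constructor signature and train_step are assumptions, not part of the records above:
data_handler = DataHandler(x, y, batch_size=32, epochs=3)   # assumed constructor arguments
for epoch, iterator in data_handler.enumerate_epochs():
    with data_handler.catch_stop_iteration():                # ends the epoch cleanly if data runs out
        for step in data_handler.steps():
            batch = next(iterator)
            train_step(batch)                                 # placeholder for the per-batch work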
dd5221edfc17a38ed3491e9de1dcde08fb297947def39ad329f1ddceb613abb9 | @property
def inferred_steps(self):
'The inferred steps per epoch of the created `Dataset`.\n\n This will be `None` in the case where:\n\n (1) A `Dataset` of unknown cardinality was passed to the `DataHandler`, and\n (2) `steps_per_epoch` was not provided, and\n (3) The first epoch of iteration has not yet completed.\n\n Returns:\n The inferred steps per epoch of the created `Dataset`.\n '
return self._inferred_steps | The inferred steps per epoch of the created `Dataset`.
This will be `None` in the case where:
(1) A `Dataset` of unknown cardinality was passed to the `DataHandler`, and
(2) `steps_per_epoch` was not provided, and
(3) The first epoch of iteration has not yet completed.
Returns:
The inferred steps per epoch of the created `Dataset`. | tensorflow/python/keras/engine/data_adapter.py | inferred_steps | bbbboom/tensorflow | 50 | python | @property
def inferred_steps(self):
'The inferred steps per epoch of the created `Dataset`.\n\n This will be `None` in the case where:\n\n (1) A `Dataset` of unknown cardinality was passed to the `DataHandler`, and\n (2) `steps_per_epoch` was not provided, and\n (3) The first epoch of iteration has not yet completed.\n\n Returns:\n The inferred steps per epoch of the created `Dataset`.\n '
return self._inferred_steps | @property
def inferred_steps(self):
'The inferred steps per epoch of the created `Dataset`.\n\n This will be `None` in the case where:\n\n (1) A `Dataset` of unknown cardinality was passed to the `DataHandler`, and\n (2) `steps_per_epoch` was not provided, and\n (3) The first epoch of iteration has not yet completed.\n\n Returns:\n The inferred steps per epoch of the created `Dataset`.\n '
return self._inferred_steps<|docstring|>The inferred steps per epoch of the created `Dataset`.
This will be `None` in the case where:
(1) A `Dataset` of unknown cardinality was passed to the `DataHandler`, and
(2) `steps_per_epoch` was not provided, and
(3) The first epoch of iteration has not yet completed.
Returns:
The inferred steps per epoch of the created `Dataset`.<|endoftext|> |
f734b240e1993050c85d433110775dde5f873895008e573efc8417f09117d112 | def _infer_steps(self, steps, dataset):
'Infers steps_per_epoch needed to loop through a dataset.'
if (steps is not None):
return steps
adapter_steps = self._adapter.get_size()
if (adapter_steps is not None):
return adapter_steps
if (ds_context.get_strategy().extended._in_multi_worker_mode() and (dataset.options().experimental_distribute.auto_shard_policy != distribute_options.AutoShardPolicy.OFF)):
raise ValueError('When dataset is sharded across workers, please specify a reasonable `steps_per_epoch` such that all workers will train the same number of steps and each step can get data from dataset without EOF. This is required for allreduce to succeed. We will handle the last partial batch in the future.')
size = cardinality.cardinality(dataset)
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When passing an infinitely repeating dataset, you must specify how many steps to draw.')
if (size >= 0):
return size.numpy().item()
return None | Infers steps_per_epoch needed to loop through a dataset. | tensorflow/python/keras/engine/data_adapter.py | _infer_steps | bbbboom/tensorflow | 50 | python | def _infer_steps(self, steps, dataset):
if (steps is not None):
return steps
adapter_steps = self._adapter.get_size()
if (adapter_steps is not None):
return adapter_steps
if (ds_context.get_strategy().extended._in_multi_worker_mode() and (dataset.options().experimental_distribute.auto_shard_policy != distribute_options.AutoShardPolicy.OFF)):
raise ValueError('When dataset is sharded across workers, please specify a reasonable `steps_per_epoch` such that all workers will train the same number of steps and each step can get data from dataset without EOF. This is required for allreduce to succeed. We will handle the last partial batch in the future.')
size = cardinality.cardinality(dataset)
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When passing an infinitely repeating dataset, you must specify how many steps to draw.')
if (size >= 0):
return size.numpy().item()
return None | def _infer_steps(self, steps, dataset):
if (steps is not None):
return steps
adapter_steps = self._adapter.get_size()
if (adapter_steps is not None):
return adapter_steps
if (ds_context.get_strategy().extended._in_multi_worker_mode() and (dataset.options().experimental_distribute.auto_shard_policy != distribute_options.AutoShardPolicy.OFF)):
raise ValueError('When dataset is sharded across workers, please specify a reasonable `steps_per_epoch` such that all workers will train the same number of steps and each step can get data from dataset without EOF. This is required for allreduce to succeed. We will handle the last partial batch in the future.')
size = cardinality.cardinality(dataset)
if ((size == cardinality.INFINITE) and (steps is None)):
raise ValueError('When passing an infinitely repeating dataset, you must specify how many steps to draw.')
if (size >= 0):
return size.numpy().item()
return None<|docstring|>Infers steps_per_epoch needed to loop through a dataset.<|endoftext|> |
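A quick illustration of the cardinality check used above, as a standalone sketch (the toy dataset is made up): a batched finite dataset reports its length directly, while a repeated one reports the infinite sentinel and forces the caller to pass steps.
import tensorflow as tf
ds = tf.data.Dataset.range(10).batch(4)
print(tf.data.experimental.cardinality(ds).numpy())            # 3 batches
print(tf.data.experimental.cardinality(ds.repeat()).numpy())   # -1, the INFINITE_CARDINALITY sentinel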
b63ae4239b54337579347a553e899c05a5c35893b7a0e43b26dd0f031757bca7 | def _class_weights_map_fn(*data):
'Convert `class_weight` to `sample_weight`.'
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw) | Convert `class_weight` to `sample_weight`. | tensorflow/python/keras/engine/data_adapter.py | _class_weights_map_fn | bbbboom/tensorflow | 50 | python | def _class_weights_map_fn(*data):
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw) | def _class_weights_map_fn(*data):
(x, y, sw) = unpack_x_y_sample_weight(data)
if nest.is_sequence(y):
raise ValueError('`class_weight` is only supported for Models with a single output.')
if (y.shape.rank > 2):
raise ValueError('`class_weight` not supported for 3+ dimensional targets.')
y_classes = smart_cond.smart_cond(((y.shape.rank == 2) and (backend.shape(y)[1] > 1)), (lambda : backend.argmax(y, axis=1)), (lambda : math_ops.cast(backend.reshape(y, ((- 1),)), dtypes.int64)))
cw = array_ops.gather_v2(class_weight_tensor, y_classes)
if (sw is not None):
cw = math_ops.cast(cw, sw.dtype)
(sw, cw) = expand_1d((sw, cw))
sw = (sw * cw)
else:
sw = cw
return (x, y, sw)<|docstring|>Convert `class_weight` to `sample_weight`.<|endoftext|> |
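The mapping above boils down to a per-example lookup of the class weight by class index. A plain-NumPy sketch of the same idea (the labels and weights are made up):
import numpy as np
class_weight = {0: 1.0, 1: 5.0}                                # e.g. up-weight the rare class
weight_table = np.array([class_weight[c] for c in sorted(class_weight)])
y = np.array([0, 1, 1, 0])                                     # integer class labels
sample_weight = weight_table[y]                                # -> [1., 5., 5., 1.]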
36845b4c1b9a1a87f99f772348b2c3730ace2c2a6b93e89cbcbd5b3551d56835 | def slice_batch_indices(indices):
'Convert a Tensor of indices into a dataset of batched indices.\n\n This step can be accomplished in several ways. The most natural is to\n slice the Tensor in a Dataset map. (With a condition on the upper index to\n handle the partial batch.) However it turns out that coercing the Tensor\n into a shape which is divisible by the batch size (and handling the last\n partial batch separately) allows for a much more favorable memory access\n pattern and improved performance.\n\n Args:\n indices: Tensor which determines the data order for an entire epoch.\n\n Returns:\n A Dataset of batched indices.\n '
num_in_full_batch = (num_full_batches * batch_size)
first_k_indices = array_ops.slice(indices, [0], [num_in_full_batch])
first_k_indices = array_ops.reshape(first_k_indices, [num_full_batches, batch_size])
flat_dataset = dataset_ops.DatasetV2.from_tensor_slices(first_k_indices)
if self._partial_batch_size:
index_remainder = dataset_ops.DatasetV2.from_tensors(array_ops.slice(indices, [num_in_full_batch], [self._partial_batch_size]))
flat_dataset = flat_dataset.concatenate(index_remainder)
if (shuffle == 'batch'):
flat_dataset = flat_dataset.shuffle(1024).repeat(epochs)
return flat_dataset | Convert a Tensor of indices into a dataset of batched indices.
This step can be accomplished in several ways. The most natural is to
slice the Tensor in a Dataset map. (With a condition on the upper index to
handle the partial batch.) However it turns out that coercing the Tensor
into a shape which is divisible by the batch size (and handling the last
partial batch separately) allows for a much more favorable memory access
pattern and improved performance.
Args:
indices: Tensor which determines the data order for an entire epoch.
Returns:
A Dataset of batched indices. | tensorflow/python/keras/engine/data_adapter.py | slice_batch_indices | bbbboom/tensorflow | 50 | python | def slice_batch_indices(indices):
'Convert a Tensor of indices into a dataset of batched indices.\n\n This step can be accomplished in several ways. The most natural is to\n slice the Tensor in a Dataset map. (With a condition on the upper index to\n handle the partial batch.) However it turns out that coercing the Tensor\n into a shape which is divisible by the batch size (and handling the last\n partial batch separately) allows for a much more favorable memory access\n pattern and improved performance.\n\n Args:\n indices: Tensor which determines the data order for an entire epoch.\n\n Returns:\n A Dataset of batched indices.\n '
num_in_full_batch = (num_full_batches * batch_size)
first_k_indices = array_ops.slice(indices, [0], [num_in_full_batch])
first_k_indices = array_ops.reshape(first_k_indices, [num_full_batches, batch_size])
flat_dataset = dataset_ops.DatasetV2.from_tensor_slices(first_k_indices)
if self._partial_batch_size:
index_remainder = dataset_ops.DatasetV2.from_tensors(array_ops.slice(indices, [num_in_full_batch], [self._partial_batch_size]))
flat_dataset = flat_dataset.concatenate(index_remainder)
if (shuffle == 'batch'):
flat_dataset = flat_dataset.shuffle(1024).repeat(epochs)
return flat_dataset | def slice_batch_indices(indices):
'Convert a Tensor of indices into a dataset of batched indices.\n\n This step can be accomplished in several ways. The most natural is to\n slice the Tensor in a Dataset map. (With a condition on the upper index to\n handle the partial batch.) However it turns out that coercing the Tensor\n into a shape which is divisible by the batch size (and handling the last\n partial batch separately) allows for a much more favorable memory access\n pattern and improved performance.\n\n Args:\n indices: Tensor which determines the data order for an entire epoch.\n\n Returns:\n A Dataset of batched indices.\n '
num_in_full_batch = (num_full_batches * batch_size)
first_k_indices = array_ops.slice(indices, [0], [num_in_full_batch])
first_k_indices = array_ops.reshape(first_k_indices, [num_full_batches, batch_size])
flat_dataset = dataset_ops.DatasetV2.from_tensor_slices(first_k_indices)
if self._partial_batch_size:
index_remainder = dataset_ops.DatasetV2.from_tensors(array_ops.slice(indices, [num_in_full_batch], [self._partial_batch_size]))
flat_dataset = flat_dataset.concatenate(index_remainder)
if (shuffle == 'batch'):
flat_dataset = flat_dataset.shuffle(1024).repeat(epochs)
return flat_dataset<|docstring|>Convert a Tensor of indices into a dataset of batched indices.
This step can be accomplished in several ways. The most natural is to
slice the Tensor in a Dataset map. (With a condition on the upper index to
handle the partial batch.) However it turns out that coercing the Tensor
into a shape which is divisible by the batch size (and handling the last
partial batch separately) allows for a much more favorable memory access
pattern and improved performance.
Args:
indices: Tensor which determines the data order for an entire epoch.
Returns:
A Dataset of batched indices.<|endoftext|> |
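The reshape trick described in the docstring — pack the first num_full_batches * batch_size indices into a [num_full_batches, batch_size] tensor, then append the remainder as one partial batch — can be sketched outside Keras like this (the sizes are made up):
import tensorflow as tf
num_samples, batch_size = 10, 4
indices = tf.random.shuffle(tf.range(num_samples))
num_full = num_samples // batch_size
full = tf.reshape(indices[:num_full * batch_size], [num_full, batch_size])
ds = tf.data.Dataset.from_tensor_slices(full)                                        # two full batches of 4
ds = ds.concatenate(tf.data.Dataset.from_tensors(indices[num_full * batch_size:]))   # partial batch of 2
for batch in ds:
    print(batch.numpy())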
b892d5f72256cd8a2ecf777bc1117701f751cef2deb0621c1acfd91633d8eb6d | def _is_array_like(v):
'Return True if v is a Tensor, array, or is array-like.'
return (hasattr(v, '__getitem__') and hasattr(v, 'shape') and hasattr(v, 'dtype') and hasattr(v, '__len__')) | Return True if v is a Tensor, array, or is array-like. | tensorflow/python/keras/engine/data_adapter.py | _is_array_like | bbbboom/tensorflow | 50 | python | def _is_array_like(v):
return (hasattr(v, '__getitem__') and hasattr(v, 'shape') and hasattr(v, 'dtype') and hasattr(v, '__len__')) | def _is_array_like(v):
return (hasattr(v, '__getitem__') and hasattr(v, 'shape') and hasattr(v, 'dtype') and hasattr(v, '__len__'))<|docstring|>Return True if v is a Tensor, array, or is array-like.<|endoftext|> |
44b44ce1a5e55a23a56d5df3f6eee6da06fe018309522e84dd624570ede2c1f9 | def grab_batch(indices):
'Grab a batch of data from the inputs.'
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out) | Grab a batch of data from the inputs. | tensorflow/python/keras/engine/data_adapter.py | grab_batch | bbbboom/tensorflow | 50 | python | def grab_batch(indices):
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out) | def grab_batch(indices):
def py_method(ind):
def slice_array(data):
return training_utils.slice_arrays(data, ind.numpy(), contiguous=contiguous)
return [slice_array(inp) for inp in flat_inputs]
flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
for (v, original_inp) in zip(flat_out, flat_inputs):
v.set_shape(dynamic_shape_like(original_inp))
return nest.pack_sequence_as(inputs, flat_out)<|docstring|>Grab a batch of data from the inputs.<|endoftext|> |
fd821dadec8f0edf4b82e3a2f05532b8d6f11bb62d0d1986028d0a1713e52c1a | def test_find_https_url():
'\n unit test multiple strings for urls in bulk - rather than separate test functions for each\n one way to rapidly iterate on your code, nicely encapsulates similar cases\n\n requires editing REGEX in excel_read_cell_info.find_url to make this test pass\n '
result = {}
input01 = 'Coed Boarding/Day School Grades 6-12; Enrollment 350 http://www.prioryca.org'
input02 = 'https://windwardschool.org'
assert (find_url(input01, result) == 'http://www.prioryca.org')
assert (find_url(input02, result) == 'https://windwardschool.org') | unit test multiple strings for urls in bulk - rather than separate test functions for each
one way to rapidly iterate on your code, nicely encapsulates similar cases
requires editing REGEX in excel_read_cell_info.find_url to make this test pass | tests/test_03_cais_find_https_url.py | test_find_https_url | surfaceowl/pythontalk_tdd_for_data | 0 | python | def test_find_https_url():
'\n unit test multiple strings for urls in bulk - rather than separate test functions for each\n one way to rapidly iterate on your code, nicely encapsulates similar cases\n\n requires editing REGEX in excel_read_cell_info.find_url to make this test pass\n '
result = {}
input01 = 'Coed Boarding/Day School Grades 6-12; Enrollment 350 http://www.prioryca.org'
input02 = 'https://windwardschool.org'
assert (find_url(input01, result) == 'http://www.prioryca.org')
assert (find_url(input02, result) == 'https://windwardschool.org') | def test_find_https_url():
'\n unit test multiple strings for urls in bulk - rather than separate test functions for each\n one way to rapidly iterate on your code, nicely encapsulates similar cases\n\n requires editing REGEX in excel_read_cell_info.find_url to make this test pass\n '
result = {}
input01 = 'Coed Boarding/Day School Grades 6-12; Enrollment 350 http://www.prioryca.org'
input02 = 'https://windwardschool.org'
assert (find_url(input01, result) == 'http://www.prioryca.org')
assert (find_url(input02, result) == 'https://windwardschool.org')<|docstring|>unit test multiple strings for urls in bulk - rather than separate test functions for each
one way to rapidly iterate on your code, nicely encapsulates similar cases
requires editing REGEX in excel_read_cell_info.find_url to make this test pass<|endoftext|> |
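One regex that would satisfy both assertions is a plain https?:// matcher. A hypothetical sketch of find_url (the real implementation lives in excel_read_cell_info, and the use of the result dict here is an assumption):
import re

def find_url(text, result):
    match = re.search(r'https?://\S+', text)
    if match:
        url = match.group(0)
        result[url] = True        # assumption: record the hit in the caller-supplied dict
        return url
    return None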
484dfb4e9a07b0c952dab29ba4c4039dcca7b3ed235acfa55fb57e5899b299b4 | def get_notes(file):
'\n get a string of all the notes/rests and their durations for a given file\n '
notes = []
midi = converter.parse(file)
notes_to_parse = None
parts = instrument.partitionByInstrument(midi)
if parts:
notes_to_parse = parts.parts[0].recurse()
else:
notes_to_parse = midi.flat.notes
for element in notes_to_parse:
if isinstance(element, note.Note):
string = ((((' ' + str(element.pitch.name)) + str(element.pitch.octave)) + ' ') + str(element.duration.quarterLength))
notes.append(string)
if (getattr(element, 'isRest', None) and element.isRest):
string = (((' ' + 'Rest') + ' ') + str(element.duration.quarterLength))
notes.append(string)
elif isinstance(element, chord.Chord):
notes.append('.'.join((str(n) for n in element.normalOrder)))
return ''.join(chain.from_iterable(notes)) | get a string of all the notes/rests and their durations for a given file | src/midi.py | get_notes | eraoul/hamr2018 | 0 | python | def get_notes(file):
'\n \n '
notes = []
midi = converter.parse(file)
notes_to_parse = None
parts = instrument.partitionByInstrument(midi)
if parts:
notes_to_parse = parts.parts[0].recurse()
else:
notes_to_parse = midi.flat.notes
for element in notes_to_parse:
if isinstance(element, note.Note):
string = ((((' ' + str(element.pitch.name)) + str(element.pitch.octave)) + ' ') + str(element.duration.quarterLength))
notes.append(string)
if (getattr(element, 'isRest', None) and element.isRest):
string = (((' ' + 'Rest') + ' ') + str(element.duration.quarterLength))
notes.append(string)
elif isinstance(element, chord.Chord):
notes.append('.'.join((str(n) for n in element.normalOrder)))
return ''.join(chain.from_iterable(notes)) | def get_notes(file):
'\n \n '
notes = []
midi = converter.parse(file)
notes_to_parse = None
parts = instrument.partitionByInstrument(midi)
if parts:
notes_to_parse = parts.parts[0].recurse()
else:
notes_to_parse = midi.flat.notes
for element in notes_to_parse:
if isinstance(element, note.Note):
string = ((((' ' + str(element.pitch.name)) + str(element.pitch.octave)) + ' ') + str(element.duration.quarterLength))
notes.append(string)
if (getattr(element, 'isRest', None) and element.isRest):
string = (((' ' + 'Rest') + ' ') + str(element.duration.quarterLength))
notes.append(string)
elif isinstance(element, chord.Chord):
notes.append('.'.join((str(n) for n in element.normalOrder)))
return ''.join(chain.from_iterable(notes))<|docstring|>get a string of all the notes/rests and their durations for a given file<|endoftext|>
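Assuming get_notes from src/midi.py is importable and a MIDI file is at hand (the import path and file path below are illustrative), the returned string interleaves pitch-octave or Rest tokens with quarter-length durations:
from src.midi import get_notes        # assumed import path
tokens = get_notes('examples/melody.mid')
print(tokens[:60])                    # e.g. " C4 1.0 E4 0.5 Rest 0.5 G4 2.0"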
83974639b7e9b8bfa53ec62af11cd6727e4f99fcf1c840f6f871592c9bb3e65b | @factory.post_generation
def channels(self, create, extracted, **kwargs):
' Create associated channels'
if (not create):
return
if extracted:
for channel in extracted:
self.channels.add(channel) | Create associated channels | moira_lists/factories.py | channels | mitodl/open-discussions | 12 | python | @factory.post_generation
def channels(self, create, extracted, **kwargs):
' '
if (not create):
return
if extracted:
for channel in extracted:
self.channels.add(channel) | @factory.post_generation
def channels(self, create, extracted, **kwargs):
' '
if (not create):
return
if extracted:
for channel in extracted:
self.channels.add(channel)<|docstring|>Create associated channels<|endoftext|> |
507bc5628be3fcf165c641a920ec5b3dc77fb0e7d768382ba9470d97883387a3 | @factory.post_generation
def users(self, create, extracted, **kwargs):
' Create associated users'
if (not create):
return
if extracted:
for user in extracted:
self.users.add(user) | Create associated users | moira_lists/factories.py | users | mitodl/open-discussions | 12 | python | @factory.post_generation
def users(self, create, extracted, **kwargs):
' '
if (not create):
return
if extracted:
for user in extracted:
self.users.add(user) | @factory.post_generation
def users(self, create, extracted, **kwargs):
' '
if (not create):
return
if extracted:
for user in extracted:
self.users.add(user)<|docstring|>Create associated users<|endoftext|> |
f28d3655873ccc4679ef7a5b7bfde668f917832e05ec892a071af631bd4ddfdd | def inIPython():
' Checks if tellurium is used in IPython.\n\n Returns true if tellurium is being used in\n an IPython environment, false otherwise.\n :return: boolean\n '
global __in_ipython
return __in_ipython | Checks if tellurium is used in IPython.
Returns true if tellurium is being used in
an IPython environment, false otherwise.
:return: boolean | tellurium/bombBeetle.py | inIPython | madfain/BombBeetle | 1 | python | def inIPython():
' Checks if tellurium is used in IPython.\n\n Returns true if tellurium is being used in\n an IPython environment, false otherwise.\n :return: boolean\n '
global __in_ipython
return __in_ipython | def inIPython():
' Checks if tellurium is used in IPython.\n\n Returns true if tellurium is being used in\n an IPython environment, false otherwise.\n :return: boolean\n '
global __in_ipython
return __in_ipython<|docstring|>Checks if tellurium is used in IPython.
Returns true if tellurium is being used in
an IPython environment, false otherwise.
:return: boolean<|endoftext|> |
cfae52eb7b2b929c82ff675849d93f3275d68bb7fbc3456ea6ba5126028dbfa7 | def getDefaultPlottingEngine():
" Get the default plotting engine.\n Options are 'matplotlib' or 'plotly'.\n :return:\n "
global __default_plotting_engine
return __default_plotting_engine | Get the default plotting engine.
Options are 'matplotlib' or 'plotly'.
:return: | tellurium/bombBeetle.py | getDefaultPlottingEngine | madfain/BombBeetle | 1 | python | def getDefaultPlottingEngine():
" Get the default plotting engine.\n Options are 'matplotlib' or 'plotly'.\n :return:\n "
global __default_plotting_engine
return __default_plotting_engine | def getDefaultPlottingEngine():
" Get the default plotting engine.\n Options are 'matplotlib' or 'plotly'.\n :return:\n "
global __default_plotting_engine
return __default_plotting_engine<|docstring|>Get the default plotting engine.
Options are 'matplotlib' or 'plotly'.
:return:<|endoftext|> |
cf273ce8bbb264e6b14e929e54cf8c309775a03b3cc14939c810cea7fad93876 | def setDefaultPlottingEngine(engine):
" Set the default plotting engine. Overrides current value.\n\n :param engine: A string describing which plotting engine to use. Valid values are 'matplotlib' and 'plotly'.\n "
if (engine not in [PLOTTING_ENGINE_PLOTLY, PLOTTING_ENGINE_MATPLOTLIB, PLOTTING_ENGINE_NULL]):
raise ValueError('Plotting engine is not supported: {}'.format(engine))
global __default_plotting_engine
__default_plotting_engine = engine | Set the default plotting engine. Overrides current value.
:param engine: A string describing which plotting engine to use. Valid values are 'matplotlib' and 'plotly'. | tellurium/bombBeetle.py | setDefaultPlottingEngine | madfain/BombBeetle | 1 | python | def setDefaultPlottingEngine(engine):
" Set the default plotting engine. Overrides current value.\n\n :param engine: A string describing which plotting engine to use. Valid values are 'matplotlib' and 'plotly'.\n "
if (engine not in [PLOTTING_ENGINE_PLOTLY, PLOTTING_ENGINE_MATPLOTLIB, PLOTTING_ENGINE_NULL]):
raise ValueError('Plotting engine is not supported: {}'.format(engine))
global __default_plotting_engine
__default_plotting_engine = engine | def setDefaultPlottingEngine(engine):
" Set the default plotting engine. Overrides current value.\n\n :param engine: A string describing which plotting engine to use. Valid values are 'matplotlib' and 'plotly'.\n "
if (engine not in [PLOTTING_ENGINE_PLOTLY, PLOTTING_ENGINE_MATPLOTLIB, PLOTTING_ENGINE_NULL]):
raise ValueError('Plotting engine is not supported: {}'.format(engine))
global __default_plotting_engine
__default_plotting_engine = engine<|docstring|>Set the default plotting engine. Overrides current value.
:param engine: A string describing which plotting engine to use. Valid values are 'matplotlib' and 'plotly'.<|endoftext|> |
d476f70256c21598e383ffb1a22569b995c81c77817a292f13a29bac8377d7df | def setSavePlotsToPDF(value):
' Sets whether plots should be saved to PDF. '
global __save_plots_to_pdf
__save_plots_to_pdf = value | Sets whether plots should be saved to PDF. | tellurium/bombBeetle.py | setSavePlotsToPDF | madfain/BombBeetle | 1 | python | def setSavePlotsToPDF(value):
' '
global __save_plots_to_pdf
__save_plots_to_pdf = value | def setSavePlotsToPDF(value):
' '
global __save_plots_to_pdf
__save_plots_to_pdf = value<|docstring|>Sets whether plots should be saved to PDF.<|endoftext|> |
dd489123dc9a9c6d2efac1108209275d20e9fe45a99b9c89989e9541415cb75c | def getVersionInfo():
' Returns version information for tellurium included packages.\n\n :returns: list of tuples (package, version)\n '
versions = [('tellurium', getTelluriumVersion()), ('roadrunner', roadrunner.__version__), ('antimony', antimony.__version__)]
if libsbml:
versions.append(('libsbml', libsbml.getLibSBMLDottedVersion()))
if libsedml:
versions.append(('libsedml', libsedml.getLibSEDMLDottedVersion()))
if phrasedml:
versions.append(('phrasedml', phrasedml.__version__))
if sbol:
versions.append(('pySBOL', sbol.__version__))
return versions | Returns version information for tellurium included packages.
:returns: list of tuples (package, version) | tellurium/bombBeetle.py | getVersionInfo | madfain/BombBeetle | 1 | python | def getVersionInfo():
' Returns version information for tellurium included packages.\n\n :returns: list of tuples (package, version)\n '
versions = [('tellurium', getTelluriumVersion()), ('roadrunner', roadrunner.__version__), ('antimony', antimony.__version__)]
if libsbml:
versions.append(('libsbml', libsbml.getLibSBMLDottedVersion()))
if libsedml:
versions.append(('libsedml', libsedml.getLibSEDMLDottedVersion()))
if phrasedml:
versions.append(('phrasedml', phrasedml.__version__))
if sbol:
versions.append(('pySBOL', sbol.__version__))
return versions | def getVersionInfo():
' Returns version information for tellurium included packages.\n\n :returns: list of tuples (package, version)\n '
versions = [('tellurium', getTelluriumVersion()), ('roadrunner', roadrunner.__version__), ('antimony', antimony.__version__)]
if libsbml:
versions.append(('libsbml', libsbml.getLibSBMLDottedVersion()))
if libsedml:
versions.append(('libsedml', libsedml.getLibSEDMLDottedVersion()))
if phrasedml:
versions.append(('phrasedml', phrasedml.__version__))
if sbol:
versions.append(('pySBOL', sbol.__version__))
return versions<|docstring|>Returns version information for tellurium included packages.
:returns: list of tuples (package, version)<|endoftext|> |
7c734294aa76bb0470b29f0d63df322a8901bbd01d1fc494aa48b0ba981ef1e2 | def printVersionInfo():
' Prints version information for tellurium included packages.\n\n see also: :func:`getVersionInfo`\n '
versions = getVersionInfo()
for v in versions:
print(v[0], ':', v[1]) | Prints version information for tellurium included packages.
see also: :func:`getVersionInfo` | tellurium/bombBeetle.py | printVersionInfo | madfain/BombBeetle | 1 | python | def printVersionInfo():
' Prints version information for tellurium included packages.\n\n see also: :func:`getVersionInfo`\n '
versions = getVersionInfo()
for v in versions:
print(v[0], ':', v[1]) | def printVersionInfo():
' Prints version information for tellurium included packages.\n\n see also: :func:`getVersionInfo`\n '
versions = getVersionInfo()
for v in versions:
print(v[0], ':', v[1])<|docstring|>Prints version information for tellurium included packages.
see also: :func:`getVersionInfo`<|endoftext|> |
60ffe1f3e99fec3f163c4e846c32a4dcec48e8889e84176c4effe5f2be1dfe4a | def getTelluriumVersion():
' Version number of tellurium.\n\n :returns: version\n :rtype: str\n '
try:
with open(os.path.join(os.path.dirname(__file__), '..', 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
except:
with open(os.path.join(os.path.dirname(__file__), 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
return version | Version number of tellurium.
:returns: version
:rtype: str | tellurium/bombBeetle.py | getTelluriumVersion | madfain/BombBeetle | 1 | python | def getTelluriumVersion():
' Version number of tellurium.\n\n :returns: version\n :rtype: str\n '
try:
with open(os.path.join(os.path.dirname(__file__), '..', 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
except:
with open(os.path.join(os.path.dirname(__file__), 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
return version | def getTelluriumVersion():
' Version number of tellurium.\n\n :returns: version\n :rtype: str\n '
try:
with open(os.path.join(os.path.dirname(__file__), '..', 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
except:
with open(os.path.join(os.path.dirname(__file__), 'VERSION.txt'), 'r') as f:
version = f.read().rstrip()
return version<|docstring|>Version number of tellurium.
:returns: version
:rtype: str<|endoftext|> |
a7a7191a5864caa871dd9af421cd2e1be2d5361a9e11bdacf2caae0c70bc189d | def noticesOff():
'Switch off the generation of notices to the user.\n Call this to stop roadrunner from printing warning messages to the console.\n\n See also :func:`noticesOn`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_WARNING) | Switch off the generation of notices to the user.
Call this to stop roadrunner from printing warning messages to the console.
See also :func:`noticesOn` | tellurium/bombBeetle.py | noticesOff | madfain/BombBeetle | 1 | python | def noticesOff():
'Switch off the generation of notices to the user.\n Call this to stop roadrunner from printing warning messages to the console.\n\n See also :func:`noticesOn`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_WARNING) | def noticesOff():
'Switch off the generation of notices to the user.\n Call this to stop roadrunner from printing warning messages to the console.\n\n See also :func:`noticesOn`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_WARNING)<|docstring|>Switch off the generation of notices to the user.
Call this to stop roadrunner from printing warning messages to the console.
See also :func:`noticesOn`<|endoftext|> |
c294ffdafc7a8e28e9eed95c222db2e046fbd961e10686dbe8136f17478aa6eb | def noticesOn():
' Switch on notice generation to the user.\n\n See also :func:`noticesOff`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_NOTICE) | Switch on notice generation to the user.
See also :func:`noticesOff` | tellurium/bombBeetle.py | noticesOn | madfain/BombBeetle | 1 | python | def noticesOn():
' Switch on notice generation to the user.\n\n See also :func:`noticesOff`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_NOTICE) | def noticesOn():
' Switch on notice generation to the user.\n\n See also :func:`noticesOff`\n '
roadrunner.Logger.setLevel(roadrunner.Logger.LOG_NOTICE)<|docstring|>Switch on notice generation to the user.
See also :func:`noticesOff`<|endoftext|> |
9c20d600b5a24de4b0a399af15a5d1d5a40c65fe34f6c4057a28db9783869702 | def model(model_name):
'Retrieve a model which has already been loaded.\n\n :param model_name: the name of the model\n :type model_name: str\n '
if (not (model_name in __te_models)):
raise KeyError('No such model has been loaded: {}'.format(model_name))
return __te_models[model_name] | Retrieve a model which has already been loaded.
:param model_name: the name of the model
:type model_name: str | tellurium/bombBeetle.py | model | madfain/BombBeetle | 1 | python | def model(model_name):
'Retrieve a model which has already been loaded.\n\n :param model_name: the name of the model\n :type model_name: str\n '
if (not (model_name in __te_models)):
raise KeyError('No such model has been loaded: {}'.format(model_name))
return __te_models[model_name] | def model(model_name):
'Retrieve a model which has already been loaded.\n\n :param model_name: the name of the model\n :type model_name: str\n '
if (not (model_name in __te_models)):
raise KeyError('No such model has been loaded: {}'.format(model_name))
return __te_models[model_name]<|docstring|>Retrieve a model which has already been loaded.
:param model_name: the name of the model
:type model_name: str<|endoftext|> |
12d3ab1aa3c4ea82c7cb66189bd4b6aadbd59373530ceefe4c03c92de49517a9 | def _checkAntimonyReturnCode(code):
' Helper for checking the antimony response code.\n Raises Exception if error in antimony.\n\n :param code: antimony response\n :type code: int\n '
if (code < 0):
raise Exception('Antimony: {}'.format(antimony.getLastError())) | Helper for checking the antimony response code.
Raises Exception if error in antimony.
:param code: antimony response
:type code: int | tellurium/bombBeetle.py | _checkAntimonyReturnCode | madfain/BombBeetle | 1 | python | def _checkAntimonyReturnCode(code):
' Helper for checking the antimony response code.\n Raises Exception if error in antimony.\n\n :param code: antimony response\n :type code: int\n '
if (code < 0):
raise Exception('Antimony: {}'.format(antimony.getLastError())) | def _checkAntimonyReturnCode(code):
' Helper for checking the antimony response code.\n Raises Exception if error in antimony.\n\n :param code: antimony response\n :type code: int\n '
if (code < 0):
raise Exception('Antimony: {}'.format(antimony.getLastError()))<|docstring|>Helper for checking the antimony response code.
Raises Exception if error in antimony.
:param code: antimony response
:type code: int<|endoftext|> |
a0ab5f6fe86da990705629ce2c6e5704199a927ea3c5643d3853eca959254afb | def colorCycle(color, polyNumber):
' Adjusts contents of self.color as needed for plotting methods.'
if (len(color) < polyNumber):
for i in range((polyNumber - len(color))):
color.append(color[i])
else:
for i in range((len(color) - polyNumber)):
del color[(- (i + 1))]
return color | Adjusts contents of self.color as needed for plotting methods. | tellurium/bombBeetle.py | colorCycle | madfain/BombBeetle | 1 | python | def colorCycle(color, polyNumber):
' '
if (len(color) < polyNumber):
for i in range((polyNumber - len(color))):
color.append(color[i])
else:
for i in range((len(color) - polyNumber)):
del color[(- (i + 1))]
return color | def colorCycle(color, polyNumber):
' '
if (len(color) < polyNumber):
for i in range((polyNumber - len(color))):
color.append(color[i])
else:
for i in range((len(color) - polyNumber)):
del color[(- (i + 1))]
return color<|docstring|>Adjusts contents of self.color as needed for plotting methods.<|endoftext|> |
bdfbcaea937e81cfcdab5908f95409687c8c19d447db8ac67d2f80a263b4c207 | def loada(ant):
"Load model from Antimony string.\n\n See also: :func:`loadAntimonyModel`\n ::\n\n r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
return loadAntimonyModel(ant) | Load model from Antimony string.
See also: :func:`loadAntimonyModel`
::
r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')
:param ant: Antimony model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner | tellurium/bombBeetle.py | loada | madfain/BombBeetle | 1 | python | def loada(ant):
"Load model from Antimony string.\n\n See also: :func:`loadAntimonyModel`\n ::\n\n r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
return loadAntimonyModel(ant) | def loada(ant):
"Load model from Antimony string.\n\n See also: :func:`loadAntimonyModel`\n ::\n\n r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
return loadAntimonyModel(ant)<|docstring|>Load model from Antimony string.
See also: :func:`loadAntimonyModel`
::
r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')
:param ant: Antimony model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner<|endoftext|> |
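A short end-to-end use of loada, following the docstring's own toy model and assuming the package is importable as tellurium, as in the upstream project (the simulation settings are arbitrary):
import tellurium as te
r = te.loada('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')
result = r.simulate(0, 50, 100)     # start time, end time, number of points
r.plot(result)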
5353bb22f8d352802e1ea727bba687379536f0634a2f038e348b7f680b717107 | def loadAntimonyModel(ant):
"Load Antimony model with tellurium.\n\n See also: :func:`loada`\n ::\n\n r = te.loadAntimonyModel('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
sbml = antimonyToSBML(ant)
return roadrunner.RoadRunner(sbml) | Load Antimony model with tellurium.
See also: :func:`loada`
::
r = te.loadAntimonyModel('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')
:param ant: Antimony model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner | tellurium/bombBeetle.py | loadAntimonyModel | madfain/BombBeetle | 1 | python | def loadAntimonyModel(ant):
"Load Antimony model with tellurium.\n\n See also: :func:`loada`\n ::\n\n r = te.loadAntimonyModel('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
sbml = antimonyToSBML(ant)
return roadrunner.RoadRunner(sbml) | def loadAntimonyModel(ant):
"Load Antimony model with tellurium.\n\n See also: :func:`loada`\n ::\n\n r = te.loadAntimonyModel('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')\n\n :param ant: Antimony model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n "
sbml = antimonyToSBML(ant)
return roadrunner.RoadRunner(sbml)<|docstring|>Load Antimony model with tellurium.
See also: :func:`loada`
::
r = te.loadAntimonyModel('S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0')
:param ant: Antimony model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner<|endoftext|> |
ea47dc20cf1d14013198d288bb599b3d46169cd30084f0693c1a5fc2ca5e68d1 | def loads(ant):
'Load SBML model with tellurium\n\n See also: :func:`loadSBMLModel`\n\n :param ant: SBML model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return loadSBMLModel(ant) | Load SBML model with tellurium
See also: :func:`loadSBMLModel`
:param ant: SBML model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner | tellurium/bombBeetle.py | loads | madfain/BombBeetle | 1 | python | def loads(ant):
'Load SBML model with tellurium\n\n See also: :func:`loadSBMLModel`\n\n :param ant: SBML model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return loadSBMLModel(ant) | def loads(ant):
'Load SBML model with tellurium\n\n See also: :func:`loadSBMLModel`\n\n :param ant: SBML model\n :type ant: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return loadSBMLModel(ant)<|docstring|>Load SBML model with tellurium
See also: :func:`loadSBMLModel`
:param ant: SBML model
:type ant: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner<|endoftext|> |
9b6d21e35e20f145abb3662951042ee53f5fe39e400b187895a430981ad97819 | def loadSBMLModel(sbml):
' Load SBML model from a string or file.\n\n :param sbml: SBML model\n :type sbml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return roadrunner.RoadRunner(sbml) | Load SBML model from a string or file.
:param sbml: SBML model
:type sbml: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner | tellurium/bombBeetle.py | loadSBMLModel | madfain/BombBeetle | 1 | python | def loadSBMLModel(sbml):
' Load SBML model from a string or file.\n\n :param sbml: SBML model\n :type sbml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return roadrunner.RoadRunner(sbml) | def loadSBMLModel(sbml):
' Load SBML model from a string or file.\n\n :param sbml: SBML model\n :type sbml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
return roadrunner.RoadRunner(sbml)<|docstring|>Load SBML model from a string or file.
:param sbml: SBML model
:type sbml: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner<|endoftext|> |
8ab20fa5d3e4d6af6d08b7ef185dfea1d962df3d4f87f30b9743d2b9cb6bc731 | def loadCellMLModel(cellml):
' Load CellML model with tellurium.\n\n :param cellml: CellML model\n :type cellml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
sbml = cellmlToSBML(cellml)
return roadrunner.RoadRunner(sbml) | Load CellML model with tellurium.
:param cellml: CellML model
:type cellml: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner | tellurium/bombBeetle.py | loadCellMLModel | madfain/BombBeetle | 1 | python | def loadCellMLModel(cellml):
' Load CellML model with tellurium.\n\n :param cellml: CellML model\n :type cellml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
sbml = cellmlToSBML(cellml)
return roadrunner.RoadRunner(sbml) | def loadCellMLModel(cellml):
' Load CellML model with tellurium.\n\n :param cellml: CellML model\n :type cellml: str | file\n :returns: RoadRunner instance with model loaded\n :rtype: roadrunner.ExtendedRoadRunner\n '
sbml = cellmlToSBML(cellml)
return roadrunner.RoadRunner(sbml)<|docstring|>Load CellML model with tellurium.
:param cellml: CellML model
:type cellml: str | file
:returns: RoadRunner instance with model loaded
:rtype: roadrunner.ExtendedRoadRunner<|endoftext|> |
5ed3f9801ede77f53ccac0f21b23e8c75469ab2c0500837a3b5772f97f6129f0 | def antimonyToSBML(ant):
' Convert Antimony to SBML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: SBML\n :rtype: str\n '
try:
isfile = os.path.isfile(ant)
except ValueError:
isfile = False
if isfile:
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getSBMLString(mid) | Convert Antimony to SBML string.
:param ant: Antimony string or file
:type ant: str | file
:return: SBML
:rtype: str | tellurium/bombBeetle.py | antimonyToSBML | madfain/BombBeetle | 1 | python | def antimonyToSBML(ant):
' Convert Antimony to SBML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: SBML\n :rtype: str\n '
try:
isfile = os.path.isfile(ant)
except ValueError:
isfile = False
if isfile:
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getSBMLString(mid) | def antimonyToSBML(ant):
' Convert Antimony to SBML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: SBML\n :rtype: str\n '
try:
isfile = os.path.isfile(ant)
except ValueError:
isfile = False
if isfile:
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getSBMLString(mid)<|docstring|>Convert Antimony to SBML string.
:param ant: Antimony string or file
:type ant: str | file
:return: SBML
:rtype: str<|endoftext|> |
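The converters pair up naturally; a round trip sketch using only functions defined in this module (the model string is the same toy example as above):
ant = 'S1 -> S2; k1*S1; k1=0.1; S1=10.0; S2 = 0.0'
sbml = antimonyToSBML(ant)          # Antimony -> SBML
ant_back = sbmlToAntimony(sbml)     # SBML -> Antimony
cellml = antimonyToCellML(ant)      # Antimony -> CellML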
861bd062573088b81d0c9ef4f92ae3ab9ac4b090023c26d1377e3692f08516ed | def antimonyToCellML(ant):
' Convert Antimony to CellML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(ant):
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getCellMLString(mid) | Convert Antimony to CellML string.
:param ant: Antimony string or file
:type ant: str | file
:return: CellML
:rtype: str | tellurium/bombBeetle.py | antimonyToCellML | madfain/BombBeetle | 1 | python | def antimonyToCellML(ant):
' Convert Antimony to CellML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(ant):
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getCellMLString(mid) | def antimonyToCellML(ant):
' Convert Antimony to CellML string.\n\n :param ant: Antimony string or file\n :type ant: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(ant):
code = antimony.loadAntimonyFile(ant)
else:
code = antimony.loadAntimonyString(ant)
_checkAntimonyReturnCode(code)
mid = antimony.getMainModuleName()
return antimony.getCellMLString(mid)<|docstring|>Convert Antimony to CellML string.
:param ant: Antimony string or file
:type ant: str | file
:return: CellML
:rtype: str<|endoftext|> |
366b86905f12bc4a1413ad73207a4837f5c89878a1cd05b11372d4d8e8a191cd | def sbmlToAntimony(sbml):
' Convert SBML to antimony string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: Antimony\n :rtype: str\n '
isfile = False
try:
isfile = os.path.isfile(sbml)
except:
pass
if isfile:
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(str(sbml))
_checkAntimonyReturnCode(code)
return antimony.getAntimonyString(None) | Convert SBML to antimony string.
:param sbml: SBML string or file
:type sbml: str | file
:return: Antimony
:rtype: str | tellurium/bombBeetle.py | sbmlToAntimony | madfain/BombBeetle | 1 | python | def sbmlToAntimony(sbml):
' Convert SBML to antimony string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: Antimony\n :rtype: str\n '
isfile = False
try:
isfile = os.path.isfile(sbml)
except:
pass
if isfile:
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(str(sbml))
_checkAntimonyReturnCode(code)
return antimony.getAntimonyString(None) | def sbmlToAntimony(sbml):
' Convert SBML to antimony string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: Antimony\n :rtype: str\n '
isfile = False
try:
isfile = os.path.isfile(sbml)
except:
pass
if isfile:
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(str(sbml))
_checkAntimonyReturnCode(code)
return antimony.getAntimonyString(None)<|docstring|>Convert SBML to antimony string.
:param sbml: SBML string or file
:type sbml: str | file
:return: Antimony
:rtype: str<|endoftext|> |
9f8c067331fc410c0a7e9b63d5c89e23cd49bc812d571a045d2dd26942f320ea | def sbmlToCellML(sbml):
' Convert SBML to CellML string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(sbml):
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(sbml)
_checkAntimonyReturnCode(code)
return antimony.getCellMLString(None) | Convert SBML to CellML string.
:param sbml: SBML string or file
:type sbml: str | file
:return: CellML
:rtype: str | tellurium/bombBeetle.py | sbmlToCellML | madfain/BombBeetle | 1 | python | def sbmlToCellML(sbml):
' Convert SBML to CellML string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(sbml):
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(sbml)
_checkAntimonyReturnCode(code)
return antimony.getCellMLString(None) | def sbmlToCellML(sbml):
' Convert SBML to CellML string.\n\n :param sbml: SBML string or file\n :type sbml: str | file\n :return: CellML\n :rtype: str\n '
if os.path.isfile(sbml):
code = antimony.loadSBMLFile(sbml)
else:
code = antimony.loadSBMLString(sbml)
_checkAntimonyReturnCode(code)
return antimony.getCellMLString(None)<|docstring|>Convert SBML to CellML string.
:param sbml: SBML string or file
:type sbml: str | file
:return: CellML
:rtype: str<|endoftext|> |
body_hash: 6c5ba80a8fbeda60e49fd7dc8d1ba5fc3da51064b757a2cdd750ddb4e3b6b2d2
path: tellurium/bombBeetle.py | name: cellmlToAntimony | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def cellmlToAntimony(cellml):
        """ Convert CellML to antimony string.

        :param cellml: CellML string or file
        :type cellml: str | file
        :return: antimony
        :rtype: str
        """
        if os.path.isfile(cellml):
            code = antimony.loadCellMLFile(cellml)
        else:
            code = antimony.loadCellMLString(cellml)
        _checkAntimonyReturnCode(code)
        return antimony.getAntimonyString(None)
body_hash: c829dea3be013b5d8006aaa519970660a866b3697c85e8a6350ea025d7dddd07
path: tellurium/bombBeetle.py | name: cellmlToSBML | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def cellmlToSBML(cellml):
        """ Convert CellML to SBML string.

        :param cellml: CellML string or file
        :type cellml: str | file
        :return: SBML
        :rtype: str
        """
        if os.path.isfile(cellml):
            code = antimony.loadCellMLFile(cellml)
        else:
            code = antimony.loadCellMLString(cellml)
        _checkAntimonyReturnCode(code)
        return antimony.getSBMLString(None)
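Taken together with sbmlToAntimony above, these converters allow round-tripping between SBML, CellML, and Antimony; per the bodies, each call funnels through _checkAntimonyReturnCode, so failed loads surface as errors rather than silent empty strings. A hedged sketch under the same import assumption, with 'mymodel.xml' as a placeholder SBML file:

    import tellurium as te  # assumption: upstream-style top-level import

    cellml_str = te.sbmlToCellML('mymodel.xml')    # SBML file -> CellML string
    sbml_back = te.cellmlToSBML(cellml_str)        # CellML string -> SBML string
    ant_str = te.cellmlToAntimony(cellml_str)      # CellML string -> Antimony string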
body_hash: 48f1d8f0a256c4940938e3d14247bb75d1cfea374567198e3240fb52fa311989
path: tellurium/bombBeetle.py | name: exportInlineOmex | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def exportInlineOmex(inline_omex, export_location):
        """ Export an inline OMEX string to a COMBINE archive.

        :param inline_omex: String containing inline OMEX describing models and simulations.
        :param export_location: Filepath of Combine archive to create.
        """
        from .teconverters import saveInlineOMEX
        saveInlineOMEX(inline_omex, export_location)
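exportInlineOmex takes a single string that combines Antimony model definitions with phraSED-ML simulation descriptions. The sketch below is illustrative only: the model, the phraSED-ML block, and the 'simple.omex' output path are invented for the example, and the concatenation convention follows the pattern used in upstream tellurium documentation:

    import tellurium as te  # assumption: upstream-style top-level import

    antimony_str = '''
    model simple()
        S1 -> S2; k1*S1
        S1 = 10; S2 = 0; k1 = 0.1
    end
    '''

    phrasedml_str = '''
    model1 = model "simple"
    sim1 = simulate uniform(0, 50, 100)
    task1 = run sim1 on model1
    plot task1.time vs task1.S1, task1.S2
    '''

    # Inline OMEX is simply the two blocks joined into one string.
    inline_omex = '\n'.join([antimony_str, phrasedml_str])
    te.exportInlineOmex(inline_omex, 'simple.omex')  # writes a COMBINE archive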
body_hash: dda9f52042cdf1696f519575bb388c47aa807fd7130960ae65093e8e4361c5ca
path: tellurium/bombBeetle.py | name: executeInlineOmex | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def executeInlineOmex(inline_omex, comp=False):
        """ Execute inline phrasedml and antimony.

        :param inline_omex: String containing inline phrasedml and antimony.
        """
        in_omex = teconverters.inlineOmex.fromString(inline_omex, comp=comp)
        in_omex.executeOmex()
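The same inline string can be executed directly instead of (or before) being archived; continuing the sketch above under the same assumptions:

    te.executeInlineOmex(inline_omex)             # runs the phraSED-ML tasks against the Antimony model
    te.executeInlineOmex(inline_omex, comp=True)  # comp is passed through to the inline-OMEX parser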
body_hash: 3ead6176487d787f545641d505f084f899d7b32fef4fc14e60f4bfd987107e31
path: tellurium/bombBeetle.py | name: executeInlineOmexFromFile | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def executeInlineOmexFromFile(filepath):
        """ Execute inline OMEX with simulations described in phrasedml and models described in antimony.

        :param filepath: Path to file containing inline phrasedml and antimony.
        """
        with open(filepath) as f:
            executeInlineOmex(f.read())
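For content kept on disk, the wrapper above simply reads the file and delegates to executeInlineOmex; 'experiment.txt' is a placeholder for a file holding inline Antimony plus phraSED-ML:

    te.executeInlineOmexFromFile('experiment.txt')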
body_hash: d6eaf252ce9e71f1ed68be90b7d29e980a0d37d9560d0ec16abf3cf8c779de6a
path: tellurium/bombBeetle.py | name: convertCombineArchive | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def convertCombineArchive(location):
        """ Read a COMBINE archive and convert its contents to an inline Omex.

        :param location: Filesystem path to the archive.
        """
        from .teconverters import inlineOmexImporter
        return inlineOmexImporter.fromFile(location).toInlineOmex()
body_hash: fac0d888b7ff919cb15091e9236cd560f01466863703437f84603841d82bad8a
path: tellurium/bombBeetle.py | name: convertAndExecuteCombineArchive | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def convertAndExecuteCombineArchive(location):
        """ Read and execute a COMBINE archive.

        :param location: Filesystem path to the archive.
        """
        from .teconverters import inlineOmexImporter
        inlineomex = inlineOmexImporter.fromFile(location).toInlineOmex()
        executeInlineOmex(inlineomex)
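Going the other direction, an existing COMBINE archive can be pulled back into editable inline form, or converted and run in one step. A sketch under the same import assumption, with 'experiment.omex' as a placeholder archive path:

    import tellurium as te  # assumption: upstream-style top-level import

    inline_omex = te.convertCombineArchive('experiment.omex')  # archive -> inline OMEX string
    print(inline_omex)                                         # inspect or edit before re-running

    te.convertAndExecuteCombineArchive('experiment.omex')      # convert and execute in one call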
body_hash: 7de5f14daae51639eac7ab0974438e30e6917dafd8f05ecb6938e1c0bfb3381d
path: tellurium/bombBeetle.py | name: extractFileFromCombineArchive | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def extractFileFromCombineArchive(archive_path, entry_location):
        """ Extract a single file from a COMBINE archive and return it as a string. """
        warnings.warn('Use libcombine instead.', DeprecationWarning)
        import tecombine
        archive = tecombine.CombineArchive()
        if not archive.initializeFromArchive(archive_path):
            raise RuntimeError('Failed to initialize archive')
        try:
            entry = archive.getEntryByLocation(entry_location)
        except:
            raise RuntimeError('Could not find entry {}'.format(entry_location))
        return archive.extractEntryToString(entry_location)
body_hash: 325161e81cb33a42a6cddc175e6b32a9d8eb7cdf7500dbd14ea7eb9014e37a7f
path: tellurium/bombBeetle.py | name: addFileToCombineArchive | repository_name: madfain/BombBeetle | repository_stars: 1 | lang: python
body:
    def addFileToCombineArchive(archive_path, file_name, entry_location, file_format, master, out_archive_path):
        """ Add a file to an existing COMBINE archive on disk and save the result as a new archive.

        :param archive_path: The path to the archive.
        :param file_name: The name of the file to add.
        :param entry_location: The location to store the entry in the archive.
        :param file_format: The format of the file. Can use tecombine.KnownFormats.lookupFormat for common formats.
        :param master: Whether the file should be marked master.
        :param out_archive_path: The path to the output archive.
        """
        addFilesToCombineArchive(archive_path, [file_name], [entry_location], [file_format], [master], out_archive_path)
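The two archive helpers above round out the workflow: one pulls a single entry out of an archive as a string (and is flagged as deprecated in favour of libcombine), the other adds a file and writes a new archive. The sketch uses placeholder paths and entry locations, and passes a plain 'text/csv' format string as an assumption — the docstring points to tecombine.KnownFormats.lookupFormat for proper format identifiers:

    import tellurium as te  # assumption: upstream-style top-level import

    # Pull one entry out as a string ('experiment.sedml' is a placeholder entry location).
    sedml_str = te.extractFileFromCombineArchive('experiment.omex', 'experiment.sedml')

    # Add a data file and save the result as a new archive.
    te.addFileToCombineArchive(
        'experiment.omex',            # existing archive
        'data.csv',                   # file on disk to add
        'data/data.csv',              # entry location inside the archive
        'text/csv',                   # format (assumption; see KnownFormats.lookupFormat)
        False,                        # not the master file
        'experiment_with_data.omex')  # output archive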