import cx_Oracle
import datetime

SDATE = '01.01.2014'
FDATE = '01.01.2020'

# Extra columns/joins kept out of the query for now:
#   p.PRESZAB,
#   GDR_RATE.RFLUID,
#   p.NRES
#   join GDR_RATE on GDR_RATE.IDWELL = p.IDWELL and GDR_RATE.DTBGN = p.DTBGN and GDR_RATE.NRES = p.NRES)

pbu_query_raw = f"""
select
    WELLNAME,
    DTBGN,
    DPDEVICE,
    (TVDSS - (MD - DPDEVICE) * cos(INKL / 57.2958)) as TVDDEVICE
from (
    select
        p.IDWELL as IDWELL,
        BASP_REGISTRYWELL.WELLNAME as WELLNAME,
        p.DTBGN as DTBGN,
        GDR_TEST.DPDEVICE as DPDEVICE,
        itb.MD as MD,
        itb.TVDSS as TVDSS,
        itb.INKL as INKL,
        itb.AZIM as AZIM,
        row_number() over (partition by p.IDWELL, p.DTBGN
                           order by abs(itb.MD - GDR_TEST.DPDEVICE) asc) as RN
    from GDR_MSRPRESS p
    join GDR_TEST on GDR_TEST.IDWELL = p.IDWELL
                 and GDR_TEST.DTBGN = p.DTBGN
                 and GDR_TEST.NRES = p.NRES
    join BASP_REGISTRYWELL on BASP_REGISTRYWELL.IDWELL = p.IDWELL
    join (select
              RSRC_REGISTRYINKL.IDWELL as IDWELL,
              i.DPTINKL as MD,
              i.AGLINKL as INKL,
              i.AZMINKL as AZIM,
              i.AOINKL as TVDSS
          from RSRC_INKL i
          join RSRC_REGISTRYINKL on i.IDINKL = RSRC_REGISTRYINKL.IDINKL
          order by RSRC_REGISTRYINKL.IDWELL, i.DPTINKL) itb
        on itb.IDWELL = p.IDWELL and itb.MD > GDR_TEST.DPDEVICE
    where p.DTBGN > TO_DATE('{SDATE}', 'DD.MM.YYYY')
    order by p.DTBGN, p.IDWELL
)
where RN = 1
order by IDWELL, DTBGN
"""  # PBU (pressure build-up) survey: gauge TVD taken from the nearest inclinometry station


def get_data_from_database_cns(connection, query_string, delimiter=';'):
    """Execute query_string and print the result set as delimited text."""
    with connection.cursor() as cur:
        cur.execute(query_string)
        for column in cur.description:  # print table headers
            print(column[0], end=delimiter)
        print()
        for result in cur:
            for w in result:
                if w is None:
                    print("", end=delimiter)
                elif isinstance(w, datetime.datetime):
                    print(f"{w:%d.%m.%Y %H:%M:%S}", end=delimiter)
                else:
                    print(f"{w}", end=delimiter)
            print()


def connect_database():
    host_name = '10.201.194.37'
    port_number = 1521
    service_name = 'WQ2'
    user = 'WQ2_RO'
    password = user  # read-only account; password matches the user name
    dsn_tns = cx_Oracle.makedsn(host_name, port_number, service_name)
    return cx_Oracle.connect(user, password, dsn_tns)


def connect_and_query():
    connection = connect_database()  # print(connection.version)
    get_data_from_database_cns(connection, pbu_query_raw, ' ')
    connection.close()


connect_and_query()
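
# The query above interpolates SDATE into the SQL text with an f-string.
# A minimal sketch of the same date filter written with a cx_Oracle bind
# variable instead; fetch_pbu_sample and its reduced query are illustrative
# additions, not part of the original script.
def fetch_pbu_sample(connection, sdate=SDATE, limit=10):
    sql = """select DTBGN from GDR_MSRPRESS
             where DTBGN > TO_DATE(:sdate, 'DD.MM.YYYY')"""
    with connection.cursor() as cur:
        cur.execute(sql, sdate=sdate)  # named bind instead of string formatting
        return cur.fetchmany(limit)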
"""
"""
#####################################################################
#This software was developed by the University of Tennessee as part of the
#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
#project funded by the US National Science Foundation.
#See the license text in license.txt
#copyright 2008, University of Tennessee
######################################################################
import numpy as np
import os
from sas.sascalc.dataloader.data_info import Data1D
from sas.sascalc.dataloader.data_info import Detector
has_converter = True
try:
from sas.sascalc.data_util.nxsunit import Converter
except:
has_converter = False
class Reader:
"""
Class to load IGOR reduced .ABS files
"""
## File type
type_name = "IGOR 1D"
## Wildcards
type = ["IGOR 1D files (*.abs)|*.abs"]
## List of allowed extensions
ext = ['.abs', '.ABS']
def read(self, path):
"""
Load data file.
:param path: file path
:return: Data1D object, or None
:raise RuntimeError: when the file can't be opened
:raise ValueError: when the length of the data vectors are inconsistent
"""
if os.path.isfile(path):
basename = os.path.basename(path)
root, extension = os.path.splitext(basename)
if extension.lower() in self.ext:
try:
input_f = open(path,'r')
except:
raise RuntimeError, "abs_reader: cannot open %s" % path
buff = input_f.read()
lines = buff.split('\n')
x = np.zeros(0)
y = np.zeros(0)
dy = np.zeros(0)
dx = np.zeros(0)
output = Data1D(x, y, dy=dy, dx=dx)
detector = Detector()
output.detector.append(detector)
output.filename = basename
is_info = False
is_center = False
is_data_started = False
data_conv_q = None
data_conv_i = None
if has_converter == True and output.x_unit != '1/A':
data_conv_q = Converter('1/A')
# Test it
data_conv_q(1.0, output.x_unit)
if has_converter == True and output.y_unit != '1/cm':
data_conv_i = Converter('1/cm')
# Test it
data_conv_i(1.0, output.y_unit)
for line in lines:
# Information line 1
if is_info == True:
is_info = False
line_toks = line.split()
# Wavelength in Angstrom
try:
value = float(line_toks[1])
if has_converter == True and \
output.source.wavelength_unit != 'A':
conv = Converter('A')
output.source.wavelength = conv(value,
units=output.source.wavelength_unit)
else:
output.source.wavelength = value
except:
#goes to ASC reader
msg = "abs_reader: cannot open %s" % path
raise RuntimeError, msg
# Distance in meters
try:
value = float(line_toks[3])
if has_converter == True and \
detector.distance_unit != 'm':
conv = Converter('m')
detector.distance = conv(value,
units=detector.distance_unit)
else:
detector.distance = value
except:
#goes to ASC reader
msg = "abs_reader: cannot open %s" % path
raise RuntimeError, msg
# Transmission
try:
output.sample.transmission = float(line_toks[4])
except:
# Transmission is not a mandatory entry
pass
# Thickness in mm
try:
value = float(line_toks[5])
if has_converter == True and \
output.sample.thickness_unit != 'cm':
conv = Converter('cm')
output.sample.thickness = conv(value,
units=output.sample.thickness_unit)
else:
output.sample.thickness = value
except:
# Thickness is not a mandatory entry
pass
#MON CNT LAMBDA DET ANG DET DIST TRANS THICK
# AVE STEP
if line.count("LAMBDA") > 0:
is_info = True
# Find center info line
if is_center == True:
is_center = False
line_toks = line.split()
# Center in bin number
center_x = float(line_toks[0])
center_y = float(line_toks[1])
# Bin size
if has_converter == True and \
detector.pixel_size_unit != 'mm':
conv = Converter('mm')
detector.pixel_size.x = conv(5.0,
units=detector.pixel_size_unit)
detector.pixel_size.y = conv(5.0,
units=detector.pixel_size_unit)
else:
detector.pixel_size.x = 5.0
detector.pixel_size.y = 5.0
# Store beam center in distance units
# Det 640 x 640 mm
if has_converter == True and \
detector.beam_center_unit != 'mm':
conv = Converter('mm')
detector.beam_center.x = conv(center_x * 5.0,
units=detector.beam_center_unit)
detector.beam_center.y = conv(center_y * 5.0,
units=detector.beam_center_unit)
else:
detector.beam_center.x = center_x * 5.0
detector.beam_center.y = center_y * 5.0
# Detector type
try:
detector.name = line_toks[7]
except:
# Detector name is not a mandatory entry
pass
#BCENT(X,Y) A1(mm) A2(mm) A1A2DIST(m) DL/L
# BSTOP(mm) DET_TYP
if line.count("BCENT") > 0:
is_center = True
# Parse the data
if is_data_started == True:
toks = line.split()
try:
_x = float(toks[0])
_y = float(toks[1])
_dy = float(toks[2])
_dx = float(toks[3])
if data_conv_q is not None:
_x = data_conv_q(_x, units=output.x_unit)
_dx = data_conv_i(_dx, units=output.x_unit)
if data_conv_i is not None:
_y = data_conv_i(_y, units=output.y_unit)
_dy = data_conv_i(_dy, units=output.y_unit)
x = np.append(x, _x)
y = np.append(y, _y)
dy = np.append(dy, _dy)
dx = np.append(dx, _dx)
except:
# Could not read this data line. If we are here
# it is because we are in the data section. Just
# skip it.
pass
#The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
# I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
if line.count("The 6 columns") > 0:
is_data_started = True
# Sanity check
if not len(y) == len(dy):
msg = "abs_reader: y and dy have different length"
raise ValueError, msg
# If the data length is zero, consider this as
# though we were not able to read the file.
if len(x) == 0:
raise ValueError, "ascii_reader: could not load file"
output.x = x[x != 0]
output.y = y[x != 0]
output.dy = dy[x != 0]
output.dx = dx[x != 0]
if data_conv_q is not None:
output.xaxis("\\rm{Q}", output.x_unit)
else:
output.xaxis("\\rm{Q}", 'A^{-1}')
if data_conv_i is not None:
output.yaxis("\\rm{Intensity}", output.y_unit)
else:
output.yaxis("\\rm{Intensity}", "cm^{-1}")
# Store loading process information
output.meta_data['loader'] = self.type_name
return output
else:
raise RuntimeError, "%s is not a file" % path
return None
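
# Minimal usage sketch of the Reader class; 'run1.ABS' is an illustrative
# file name, not a file shipped with this module.
if __name__ == "__main__":
    reader = Reader()
    data = reader.read("run1.ABS")
    if data is not None:
        print(data.filename, "loaded with", len(data.x), "points")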
from os import listdir
import re
import numpy as np
from sklearn.metrics import f1_score
from sklearn.naive_bayes import MultinomialNB
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import LeaveOneOut
import matplotlib.pyplot as plt

n_gram_range = (1, 1)
alpha_smoothing = 1e-10
lambdas_best = [1e190, 1]  # (legit, spam) prior weights picked after the sweep below


def parse_doc_line(line):
    """Extract the run of digit tokens (the anonymized words) from a message line."""
    parsed = re.search(r'\d[\d\s]+\d', line)
    return "empty" if parsed is None else parsed[0]


def get_roc_point(clf, x_set, y_set, threshold):
    """Leave-one-part-out CV; return one (FPR, TPR) point for the given threshold."""
    loo = LeaveOneOut()
    vectorizer = CountVectorizer(ngram_range=n_gram_range)
    roc_predictions = np.empty(0)
    answers = np.empty(0)

    i = 1
    for train_index, test_index in loo.split(x_set):
        x_train = [obj for partition in x_set[train_index] for obj in partition]
        x_test = [obj for partition in x_set[test_index] for obj in partition]
        # Fit the vocabulary on train + test together, then split back apart.
        x_vectorized = vectorizer.fit_transform(x_train + x_test).toarray()
        x_train, x_test = x_vectorized[:len(x_train)], x_vectorized[-len(x_test):]
        y_train, y_test = y_set[train_index], y_set[test_index]
        clf.fit(x_train, y_train.flatten())
        answers = np.append(answers, y_test)
        roc_predictions = np.append(roc_predictions,
                                    ['spmsg' if prediction[0] <= threshold else 'legit'
                                     for prediction in clf.predict_proba(x_test)])
        print(f'Finished iteration {i} / 10')
        i += 1

    true_negatives_, true_positives_, false_negatives_, false_positives_ = 0, 0, 0, 0
    for prediction, answer in zip(roc_predictions, answers):
        if prediction == 'spmsg':
            if answer == 'spmsg':
                true_positives_ += 1
            else:
                false_positives_ += 1
        else:
            if answer == 'legit':
                true_negatives_ += 1
            else:
                false_negatives_ += 1
    roc_point_ = (
        1 - (true_negatives_ / (true_negatives_ + false_positives_)),
        true_positives_ / (true_positives_ + false_negatives_))
    return roc_point_


def get_cv_score(clf, x_set, y_set):
    """Leave-one-part-out CV; return macro F1 and the confusion-matrix counts."""
    loo = LeaveOneOut()
    vectorizer = CountVectorizer(ngram_range=n_gram_range)
    predictions = np.empty(0)
    answers = np.empty(0)

    i = 1
    for train_index, test_index in loo.split(x_set):
        x_train = [obj for partition in x_set[train_index] for obj in partition]
        x_test = [obj for partition in x_set[test_index] for obj in partition]
        x_vectorized = vectorizer.fit_transform(x_train + x_test).toarray()
        x_train, x_test = x_vectorized[:len(x_train)], x_vectorized[-len(x_test):]
        y_train, y_test = y_set[train_index], y_set[test_index]
        clf.fit(x_train, y_train.flatten())
        predictions = np.append(predictions, clf.predict(x_test))
        answers = np.append(answers, y_test)
        print(f'Finished iteration {i} / 10')
        i += 1

    true_negatives_, true_positives_, false_negatives_, false_positives_ = 0, 0, 0, 0
    for prediction, answer in zip(predictions, answers):
        if prediction == 'spmsg':
            if answer == 'spmsg':
                true_positives_ += 1
            else:
                false_positives_ += 1
        else:
            if answer == 'legit':
                true_negatives_ += 1
            else:
                false_negatives_ += 1
    f1_result = f1_score(answers, predictions, average='macro')
    return f1_result, true_negatives_, true_positives_, false_negatives_, false_positives_


parts_X = []
parts_Y = []

for part in range(1, 11):
    parts_X.append([])
    parts_Y.append([])
    for file in listdir(f'messages/part{part}'):
        # First and third lines hold the tokenized text; the non-digit
        # prefix of the file name ('spmsg' or 'legit') is the label.
        with open(f'messages/part{part}/{file}', "r") as f:
            one = parse_doc_line(f.readline())
            f.readline()
            two = parse_doc_line(f.readline())
        curr_obj = one + " " + two
        parts_Y[-1].append(re.findall(r'\D+', file)[0])
        parts_X[-1].append(curr_obj)

# np.array over the nested lists assumes every part holds the same number
# of messages, so the result is a 2-D array that supports fancy indexing.
roc_points = []
for thresh in range(0, 11):
    roc_points.append(get_roc_point(
        MultinomialNB(alpha=alpha_smoothing), np.array(parts_X), np.array(parts_Y), thresh / 10))

f1_points = []
true_positives_list = []
false_positives_list = []
true_negatives_list = []
false_negatives_list = []
lambda_ratios = [1, 1e5, 1e10, 1e20, 1e40, 1e80, 1e160, 1e190]
for lambda_ratio in lambda_ratios:
    f1, true_negatives, true_positives, false_negatives, false_positives = get_cv_score(
        MultinomialNB(class_prior=(lambda_ratio, 1), alpha=alpha_smoothing),
        np.array(parts_X), np.array(parts_Y))
    print(f'F1 score: {f1}\n True negatives: {true_negatives}\n True positives: {true_positives}\n'
          f' False negatives: {false_negatives}\n False positives: {false_positives}')
    f1_points.append(f1)
    true_positives_list.append(true_positives)
    false_positives_list.append(false_positives)
    true_negatives_list.append(true_negatives)
    false_negatives_list.append(false_negatives)

fig, plts = plt.subplots(3)
plts[0].margins(0.0)
plts[0].set_ylim(ymin=0)
plts[0].plot([point[0] for point in roc_points], [point[1] for point in roc_points])
plts[0].set_ylabel('Roc Curve')

plts[1].set_xscale('log')
plts[1].plot(lambda_ratios, f1_points, '-b')
plts[1].set_ylabel('F1 score')
plts[1].set_xlim(xmin=1)

plts[2].set_xscale('log')
plts[2].set_yscale('log')
plts[2].plot(lambda_ratios, true_positives_list, '-r', label='True positives')
plts[2].plot(lambda_ratios, false_positives_list, '-g', label='False positives')
plts[2].plot(lambda_ratios, true_negatives_list, '-b', label='True negatives')
plts[2].plot(lambda_ratios, false_negatives_list, '-y', label='False negatives')
plts[2].legend(loc="upper right")
plts[2].set_xlabel('Lambda_legit / Lambda_spam')
plts[2].set_xlim(xmin=1)
plt.show()
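
# The threshold test `prediction[0] <= threshold` in get_roc_point relies on
# scikit-learn ordering predict_proba columns by sorted class labels, so
# column 0 is P('legit'). A toy check of that assumption (the two-feature
# training pair below is illustrative only):
toy_clf = MultinomialNB()
toy_clf.fit([[1, 0], [0, 1]], ['spmsg', 'legit'])
print(toy_clf.classes_)                  # ['legit' 'spmsg']: sorted order
print(toy_clf.predict_proba([[1, 0]]))   # column 0 is therefore P(legit)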
import datetime
import json
import logging

import requests

from lib.crits.exceptions import CRITsOperationalError
from lib.crits.vocabulary.indicators import IndicatorThreatTypes as itt
from lib.crits.vocabulary.indicators import IndicatorAttackTypes as iat

log = logging.getLogger()


class CRITsAPI:

    def __init__(self, api_url='', api_key='', username='', verify=True,
                 proxies={}):
        self.url = api_url
        if self.url.endswith('/'):
            self.url = self.url[:-1]
        self.api_key = api_key
        self.username = username
        self.verify = verify
        self.proxies = proxies

    def get_object(self, obj_id, obj_type):
        type_trans = self._type_translation(obj_type)
        get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)
        params = {
            'username': self.username,
            'api_key': self.api_key,
        }
        r = requests.get(get_url, params=params, proxies=self.proxies,
                         verify=self.verify)
        if r.status_code == 200:
            return json.loads(r.text)
        else:
            print('Status code returned for query {}, '
                  'was: {}'.format(get_url, r.status_code))
        return None

    def add_indicator(self, source='', reference='', method='',
                      campaign=None, confidence=None, bucket_list=[],
                      ticket='', add_domain=True, add_relationship=True,
                      indicator_confidence='unknown',
                      indicator_impact='unknown', type=None,
                      threat_type=itt.UNKNOWN, attack_type=iat.UNKNOWN,
                      value=None, description=''):
        # Time to upload these indicators. Pass the keyword arguments
        # through instead of hardcoding them in the payload.
        data = {
            'api_key': self.api_key,
            'username': self.username,
            'source': source,
            'reference': reference,
            'method': method,
            'campaign': campaign,
            'confidence': confidence,
            'bucket_list': bucket_list,
            'ticket': ticket,
            'add_domain': add_domain,
            'add_relationship': add_relationship,
            'indicator_confidence': indicator_confidence,
            'indicator_impact': indicator_impact,
            'type': type,
            'threat_type': threat_type,
            'attack_type': attack_type,
            'value': value,
            'description': description,
        }
        r = requests.post("{0}/indicators/".format(self.url), data=data,
                          verify=self.verify, proxies=self.proxies)
        if r.status_code == 200:
            log.debug("Indicator uploaded successfully - {}".format(value))
            ind = json.loads(r.text)
            return ind
        return None

    def has_relationship(self, left_id, left_type, right_id, right_type,
                         rel_type='Related To'):
        data = self.get_object(left_id, left_type)
        if not data:
            raise CRITsOperationalError('Crits Object not found with id {} '
                                        'and type {}'.format(left_id, left_type))
        if 'relationships' not in data:
            return False
        for relationship in data['relationships']:
            if relationship['relationship'] != rel_type:
                continue
            if relationship['value'] != right_id:
                continue
            if relationship['type'] != right_type:
                continue
            return True
        return False

    def forge_relationship(self, left_id, left_type, right_id, right_type,
                           rel_type, rel_date='', rel_confidence='high',
                           rel_reason=''):
        if not rel_date:
            rel_date = datetime.datetime.now()
        type_trans = self._type_translation(left_type)
        submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)
        params = {
            'api_key': self.api_key,
            'username': self.username,
        }
        data = {
            'action': 'forge_relationship',
            'right_type': right_type,
            'right_id': right_id,
            'rel_type': rel_type,
            'rel_date': rel_date,
            'rel_confidence': rel_confidence,
            'rel_reason': rel_reason,
        }
        r = requests.patch(submit_url, params=params, data=data,
                           proxies=self.proxies, verify=self.verify)
        if r.status_code == 200:
            log.debug('Relationship built successfully: {0} <-> '
                      '{1}'.format(left_id, right_id))
            return True
        else:
            log.error('Error with status code {0} and message {1} between '
                      'these indicators: {2} <-> '
                      '{3}'.format(r.status_code, r.text, left_id, right_id))
            return False

    def add_campaign_to_object(self, id, type, campaign, confidence, analyst,
                               date, description):
        # TODO: Make sure the object does not already have the campaign.
        # Return if it does; add it if it doesn't.
        # NOTE: this method is unfinished: self.db is never initialized in
        # this class, and the pdb trace below is leftover debugging code.
        obj = getattr(self.db, type)
        result = obj.find({'_id': id, 'campaign.name': campaign})
        if result:
            import pdb
            pdb.set_trace()

    def _type_translation(self, str_type):
        if str_type == 'Indicator':
            return 'indicators'
        if str_type == 'Domain':
            return 'domains'
        if str_type == 'IP':
            return 'ips'
        if str_type == 'Sample':
            return 'samples'
        if str_type == 'Event':
            return 'events'
        if str_type == 'Actor':
            return 'actors'
        if str_type == 'Email':
            return 'emails'
        if str_type == 'Backdoor':
            return 'backdoors'
        raise CRITsOperationalError('Invalid object type specified: '
                                    '{}'.format(str_type))
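
# Minimal usage sketch of the client above; the URL, API key, username,
# and indicator value are placeholders, not a real CRITs deployment.
if __name__ == "__main__":
    client = CRITsAPI(api_url='https://crits.example.com/api/v1',
                      api_key='0123456789abcdef', username='analyst')
    ind = client.add_indicator(source='example-source', type='Domain',
                               value='bad.example.net',
                               description='sketch of an indicator upload')
    if ind:
        print('uploaded indicator:', ind)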
== 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-3": "<mask token>\n\n\nclass CRITsAPI:\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise CRITsOperationalError(\n 'Crits Object not found with id {} and type {}'.format(\n left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = getattr(self.db, type)\n result = 
obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-4": "<mask token>\nlog = logging.getLogger()\n\n\nclass CRITsAPI:\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise CRITsOperationalError(\n 'Crits Object not found with id {} and type {}'.format(\n left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = 
getattr(self.db, type)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-5": "import datetime\nimport json\nimport logging\nimport requests\n\nfrom lib.crits.exceptions import CRITsOperationalError\nfrom lib.crits.vocabulary.indicators import IndicatorThreatTypes as itt\nfrom lib.crits.vocabulary.indicators import IndicatorAttackTypes as iat\n\nlog = logging.getLogger()\n\nclass CRITsAPI():\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {\n 'username' : self.username,\n 'api_key' : self.api_key,\n }\n r = requests.get(get_url, params=params, proxies=self.proxies, verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, '\n 'was: {}'.format(get_url, r.status_code))\n return None\n\n def add_indicator(self, source = '', reference = '', method = '',\n campaign = None, confidence = None, bucket_list = [], ticket = '',\n add_domain = True, add_relationship = True,\n indicator_confidence = 'unknown', indicator_impact = 'unknown',\n type = None, threat_type = itt.UNKNOWN, attack_type = iat.UNKNOWN,\n value = None, description = ''):\n # Time to upload these indicators\n data = {\n 'api_key' : self.api_key,\n 'username' : self.username,\n 'source' : source,\n 'reference' : reference,\n 'method' : '',\n 'campaign' : campaign,\n 'confidence' : confidence,\n 'bucket_list' : bucket_list,\n 'ticket' : ticket,\n 'add_domain' : True,\n 'add_relationship' : True,\n 'indicator_confidence' : indicator_confidence,\n 'indicator_impact' : indicator_impact,\n 'type' : type,\n 'threat_type' : threat_type,\n 'attack_type' : attack_type,\n 'value' : value,\n 'description' : description,\n }\n\n r = requests.post(\"{0}/indicators/\".format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug(\"Indicator uploaded successfully - {}\".format(value))\n ind = json.loads(r.text)\n return ind\n\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise CRITsOperationalError('Crits Object not found with id {} and '\n 'type {}'.format(left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high',\n rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {\n 'Content-Type' : 'application/json',\n }\n\n params = {\n 'api_key' : self.api_key,\n 'username' : self.username,\n }\n\n data = {\n 'action' : 'forge_relationship',\n 'right_type' : right_type,\n 'right_id' : right_id,\n 'rel_type' : rel_type,\n 'rel_date' : rel_date,\n 'rel_confidence' : rel_confidence,\n 'rel_reason' : rel_reason\n }\n\n r = requests.patch(submit_url, params=params, data=data,\n proxies=self.proxies, 
verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> '\n '{1}'.format(left_id, right_id))\n return True\n else:\n log.error('Error with status code {0} and message {1} between '\n 'these indicators: {2} <-> '\n '{3}'.format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence, analyst,\n date, description):\n # TODO: Make sure the object does not already have the campaign\n # Return if it does. Add it if it doesn't\n obj = getattr(self.db, type)\n result = obj.find( { '_id' : id, 'campaign.name' : campaign } )\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n\n raise CRITsOperationalError('Invalid object type specified: '\n '{}'.format(str_type))\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
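A minimal usage sketch for the CRITsAPI class in the record above. The URL, API key, and object ids are placeholders; only the method names and signatures come from the code itself.

api = CRITsAPI(api_url='https://crits.example.org/api/v1', api_key='<api-key>',
               username='analyst', verify=False)
ind = api.get_object('<indicator-id>', 'Indicator')  # hypothetical ObjectId
if ind and not api.has_relationship('<indicator-id>', 'Indicator',
                                    '<domain-id>', 'Domain'):
    api.forge_relationship('<indicator-id>', 'Indicator',
                           '<domain-id>', 'Domain', rel_type='Related To')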
from django.shortcuts import render
from django.http import Http404
from thermometer.models import Therm
def index(request):
therms = Therm.objects.all()
return render(request, 'thermometer/index.html', {
'therms': therms,
})
def fetchsquare(request, id):
try:
therm = Therm.objects.get(id=id)
except Therm.DoesNotExist:
raise Http404('This item does not exist')
return render(request, 'thermometer/fetchsquare.html', {
'therm': therm,
})
|
normal
|
{
"blob_id": "504d4afc4b3e708d43110a2d85676fb745f1aba8",
"index": 9874,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fetchsquare(request, id):\n try:\n therm = Therm.objects.get(id=id)\n except Therm.DoesNotExist:\n raise Http404('This item does not exist')\n return render(request, 'thermometer/fetchsquare.html', {'therm': therm})\n",
"step-3": "<mask token>\n\n\ndef index(request):\n therms = Therm.objects.all()\n return render(request, 'thermometer/index.html', {'therms': therms})\n\n\ndef fetchsquare(request, id):\n try:\n therm = Therm.objects.get(id=id)\n except Therm.DoesNotExist:\n raise Http404('This item does not exist')\n return render(request, 'thermometer/fetchsquare.html', {'therm': therm})\n",
"step-4": "from django.shortcuts import render\nfrom django.http import Http404\nfrom thermometer.models import Therm\n\n\ndef index(request):\n therms = Therm.objects.all()\n return render(request, 'thermometer/index.html', {'therms': therms})\n\n\ndef fetchsquare(request, id):\n try:\n therm = Therm.objects.get(id=id)\n except Therm.DoesNotExist:\n raise Http404('This item does not exist')\n return render(request, 'thermometer/fetchsquare.html', {'therm': therm})\n",
"step-5": "from django.shortcuts import render\nfrom django.http import Http404\n\nfrom thermometer.models import Therm\n\ndef index(request):\n\ttherms = Therm.objects.all()\n\treturn render(request, 'thermometer/index.html', {\n\t\t'therms': therms,\n\t})\n\ndef fetchsquare(request, id):\n\ttry:\n\t\ttherm = Therm.objects.get(id=id)\n\texcept Therm.DoesNotExist:\n\t\traise Http404('This item does not exist')\n\treturn render(request, 'thermometer/fetchsquare.html', {\n\t\t'therm': therm,\n\t})",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
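A plausible urls.py wiring for the two thermometer views in the record above; the route strings and names are assumptions, only the view functions come from the code.

from django.urls import path
from thermometer import views

urlpatterns = [
    path('', views.index, name='index'),
    path('therm/<int:id>/', views.fetchsquare, name='fetchsquare'),
]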
from ..core import promise, rule
_context = {
'@vocab': 'https://schema.org/',
'fairsharing': 'https://fairsharing.org/',
'html': 'fairsharing:bsg-s001284',
}
@promise
def resolve_html(url):
from urllib.request import urlopen
return urlopen(url).read().decode()
@rule({
'@context': _context,
'@type': 'WebSite',
'@id': {},
'url': {},
})
def html_resolver(ld):
return dict(ld, **{
'html': str(resolve_html(ld['url'])),
})
|
normal
|
{
"blob_id": "3272296bca0d6343540597baebef8d882a1267c0",
"index": 3111,
"step-1": "<mask token>\n\n\n@rule({'@context': _context, '@type': 'WebSite', '@id': {}, 'url': {}})\ndef html_resolver(ld):\n return dict(ld, **{'html': str(resolve_html(ld['url']))})\n",
"step-2": "<mask token>\n\n\n@promise\ndef resolve_html(url):\n from urllib.request import urlopen\n return urlopen(url).read().decode()\n\n\n@rule({'@context': _context, '@type': 'WebSite', '@id': {}, 'url': {}})\ndef html_resolver(ld):\n return dict(ld, **{'html': str(resolve_html(ld['url']))})\n",
"step-3": "<mask token>\n_context = {'@vocab': 'https://schema.org/', 'fairsharing':\n 'https://fairsharing.org/', 'html': 'fairsharing:bsg-s001284'}\n\n\n@promise\ndef resolve_html(url):\n from urllib.request import urlopen\n return urlopen(url).read().decode()\n\n\n@rule({'@context': _context, '@type': 'WebSite', '@id': {}, 'url': {}})\ndef html_resolver(ld):\n return dict(ld, **{'html': str(resolve_html(ld['url']))})\n",
"step-4": "from ..core import promise, rule\n_context = {'@vocab': 'https://schema.org/', 'fairsharing':\n 'https://fairsharing.org/', 'html': 'fairsharing:bsg-s001284'}\n\n\n@promise\ndef resolve_html(url):\n from urllib.request import urlopen\n return urlopen(url).read().decode()\n\n\n@rule({'@context': _context, '@type': 'WebSite', '@id': {}, 'url': {}})\ndef html_resolver(ld):\n return dict(ld, **{'html': str(resolve_html(ld['url']))})\n",
"step-5": "from ..core import promise, rule\n\n_context = {\n '@vocab': 'https://schema.org/',\n 'fairsharing': 'https://fairsharing.org/',\n 'html': 'fairsharing:bsg-s001284',\n}\n\n@promise\ndef resolve_html(url):\n from urllib.request import urlopen\n return urlopen(url).read().decode()\n\n@rule({\n '@context': _context,\n '@type': 'WebSite',\n '@id': {},\n 'url': {},\n})\ndef html_resolver(ld):\n return dict(ld, **{\n 'html': str(resolve_html(ld['url'])),\n })\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
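A sketch of the input the html_resolver rule above would match, assuming the @rule decorator leaves the function directly callable on a matching JSON-LD node (the framework's dispatch is not shown in the record); the document itself is invented.

doc = {
    '@context': _context,
    '@type': 'WebSite',
    '@id': 'https://example.org/#site',
    'url': 'https://example.org/',
}
enriched = html_resolver(doc)  # same node plus an 'html' key with the fetched page source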
def h1_wrap(func):
def func_wrapper(param):
return "<h1>"+func(param) + "</h1>"
return func_wrapper
@h1_wrap
def say_hi(name):
return "Hello, " + name.capitalize()
print(say_hi("Stephan"))
|
normal
|
{
"blob_id": "9c9005acb40e4b89ca215345361e21f08f984847",
"index": 5735,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\n<mask token>\n",
"step-3": "def h1_wrap(func):\n\n def func_wrapper(param):\n return '<h1>' + func(param) + '</h1>'\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\n<mask token>\n",
"step-4": "def h1_wrap(func):\n\n def func_wrapper(param):\n return '<h1>' + func(param) + '</h1>'\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\nprint(say_hi('Stephan'))\n",
"step-5": "def h1_wrap(func):\n def func_wrapper(param):\n return \"<h1>\"+func(param) + \"</h1>\"\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return \"Hello, \" + name.capitalize()\n\n\nprint(say_hi(\"Stephan\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
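The @h1_wrap line in the record above is sugar for calling the decorator by hand; this equivalent form makes the wrapping explicit.

wrapped = h1_wrap(lambda name: 'Hi, ' + name)
print(wrapped('stephan'))  # <h1>Hi, stephan</h1>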
# -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2017, 2018 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration for REANA-Workflow-Controller."""
from __future__ import absolute_import, print_function
import os
import shutil
import pytest
from reana_db.models import Base, User
from sqlalchemy_utils import create_database, database_exists, drop_database
from reana_workflow_controller.factory import create_app
@pytest.fixture(scope="module")
def base_app(tmp_shared_volume_path):
"""Flask application fixture."""
config_mapping = {
"SERVER_NAME": "localhost:5000",
"SECRET_KEY": "SECRET_KEY",
"TESTING": True,
"SHARED_VOLUME_PATH": tmp_shared_volume_path,
"SQLALCHEMY_DATABASE_URI": "sqlite:///testdb.db",
"SQLALCHEMY_TRACK_MODIFICATIONS": False,
"ORGANIZATIONS": ["default"],
}
app_ = create_app(config_mapping)
return app_
|
normal
|
{
"blob_id": "502e92d3e5d059d73016702ce0b2591a123810d3",
"index": 6892,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\[email protected](scope='module')\ndef base_app(tmp_shared_volume_path):\n \"\"\"Flask application fixture.\"\"\"\n config_mapping = {'SERVER_NAME': 'localhost:5000', 'SECRET_KEY':\n 'SECRET_KEY', 'TESTING': True, 'SHARED_VOLUME_PATH':\n tmp_shared_volume_path, 'SQLALCHEMY_DATABASE_URI':\n 'sqlite:///testdb.db', 'SQLALCHEMY_TRACK_MODIFICATIONS': False,\n 'ORGANIZATIONS': ['default']}\n app_ = create_app(config_mapping)\n return app_\n",
"step-3": "<mask token>\nfrom __future__ import absolute_import, print_function\nimport os\nimport shutil\nimport pytest\nfrom reana_db.models import Base, User\nfrom sqlalchemy_utils import create_database, database_exists, drop_database\nfrom reana_workflow_controller.factory import create_app\n\n\[email protected](scope='module')\ndef base_app(tmp_shared_volume_path):\n \"\"\"Flask application fixture.\"\"\"\n config_mapping = {'SERVER_NAME': 'localhost:5000', 'SECRET_KEY':\n 'SECRET_KEY', 'TESTING': True, 'SHARED_VOLUME_PATH':\n tmp_shared_volume_path, 'SQLALCHEMY_DATABASE_URI':\n 'sqlite:///testdb.db', 'SQLALCHEMY_TRACK_MODIFICATIONS': False,\n 'ORGANIZATIONS': ['default']}\n app_ = create_app(config_mapping)\n return app_\n",
"step-4": "# -*- coding: utf-8 -*-\n#\n# This file is part of REANA.\n# Copyright (C) 2017, 2018 CERN.\n#\n# REANA is free software; you can redistribute it and/or modify it\n# under the terms of the MIT License; see LICENSE file for more details.\n\n\"\"\"Pytest configuration for REANA-Workflow-Controller.\"\"\"\n\nfrom __future__ import absolute_import, print_function\n\nimport os\nimport shutil\n\nimport pytest\nfrom reana_db.models import Base, User\nfrom sqlalchemy_utils import create_database, database_exists, drop_database\n\nfrom reana_workflow_controller.factory import create_app\n\n\[email protected](scope=\"module\")\ndef base_app(tmp_shared_volume_path):\n \"\"\"Flask application fixture.\"\"\"\n config_mapping = {\n \"SERVER_NAME\": \"localhost:5000\",\n \"SECRET_KEY\": \"SECRET_KEY\",\n \"TESTING\": True,\n \"SHARED_VOLUME_PATH\": tmp_shared_volume_path,\n \"SQLALCHEMY_DATABASE_URI\": \"sqlite:///testdb.db\",\n \"SQLALCHEMY_TRACK_MODIFICATIONS\": False,\n \"ORGANIZATIONS\": [\"default\"],\n }\n app_ = create_app(config_mapping)\n return app_\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
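A hypothetical test consuming the base_app fixture above; it assumes the tmp_shared_volume_path fixture it depends on is defined elsewhere in the project's conftest, so this only sketches the shape of a test.

def test_app_config(base_app):
    # config values come straight from config_mapping in the fixture
    assert base_app.config['TESTING'] is True
    assert base_app.config['SERVER_NAME'] == 'localhost:5000'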
import time
from wxpy import *
bot = Bot(cache_path='wxpy.pkl')
def get(i):
    # read the i-th line of the message file ('晚安.txt' = "goodnight.txt")
    with open('晚安.txt', 'r', encoding='utf-8') as f:
        line = f.readlines()[i]
    return line
def send(i):
    # '微信好友昵称' is a placeholder for the target friend's WeChat nickname
    myfriend = bot.friends().search('微信好友昵称')[0]
    myfriend.send(get(i))
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "a7d11f130e0d5d6c9b4ac7c5d3a804fb9f79b943",
"index": 2284,
"step-1": "<mask token>\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\n<mask token>\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nbot = Bot(cache_path='wxpy.pkl')\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import time\nfrom wxpy import *\nbot = Bot(cache_path='wxpy.pkl')\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": null,
"step-ids": [
2,
4,
5,
6
]
}
|
[
2,
4,
5,
6
] |
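If the intent of the record above is one "goodnight" message per day rather than one every five seconds, a hedged variant of main() could sleep for 24 hours between sends; nothing here beyond time.sleep is wxpy-specific.

def main_daily():
    for i in range(3650):          # roughly ten years of daily messages
        send(i)
        time.sleep(24 * 60 * 60)   # wait one day between messages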
import torch
import numpy as np
# source: https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb
def kernel(X1, X2, l=1.0, sigma_f=1.0):
    '''
    Isotropic squared exponential kernel.
    Computes a covariance matrix from points in X1 and X2.

    Args:
        X1: Array of m points (m x d).
        X2: Array of n points (n x d).

    Returns:
        Covariance matrix (m x n).
    '''
sqdist = np.sum(X1**2, 1).reshape(-1, 1) + np.sum(X2**2, 1) - 2 * np.dot(X1, X2.T)
return sigma_f**2 * np.exp(-0.5 / l**2 * sqdist)
# source: https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb
def posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y=1e-8):
    '''
    Computes the sufficient statistics of the GP posterior predictive distribution
    from m training data X_train and Y_train and n new inputs X_s.

    Args:
        X_s: New input locations (n x d).
        X_train: Training locations (m x d).
        Y_train: Training targets (m x 1).
        l: Kernel length parameter.
        sigma_f: Kernel vertical variation parameter.
        sigma_y: Noise parameter.

    Returns:
        Posterior mean vector (n x d) and covariance matrix (n x n).
    '''
K = kernel(X_train, X_train, l, sigma_f) + sigma_y**2 * np.eye(len(X_train))
K_s = kernel(X_s, X_train, l, sigma_f)
K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y**2 * np.eye(len(X_s))
mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))
cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))
return mu_s, cov_s
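# Illustrative check of the two GP helpers above on toy 1-D data (values invented):
#   X_train = np.arange(-3, 4, 1.0).reshape(-1, 1)   # 7 noiseless observations
#   Y_train = np.sin(X_train)
#   X_s = np.linspace(-3, 3, 50).reshape(-1, 1)      # 50 query locations
#   mu_s, cov_s = posterior_predictive(X_s, X_train, Y_train)
#   # mu_s has shape (50, 1), cov_s has shape (50, 50)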
class CNP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer):
super(CNP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [
torch.nn.Linear(in_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(en_layer-2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim+query_dim, out_dim)
else:
self.decoder = [
torch.nn.Linear(hidden_dim+query_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(dec_layer-2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
def forward(self, context, query, key=None):
query = query.view(query.shape[0], -1)
# encode
h = self.encoder(context)
# aggregate
h = h.mean(dim=0)
h = torch.stack([h]*(query.shape[0]), dim=0)
r = torch.cat([h, query], dim=1)
# predict
out = self.decoder(r)
return out
class ANP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):
super(ANP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [
torch.nn.Linear(in_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(en_layer-2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [
torch.nn.Linear(hidden_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(dec_layer-2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.projector = torch.nn.Linear(query_dim, hidden_dim)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
# encode
h = self.encoder(context)
h.unsqueeze_(1)
# aggregate
q_t = self.projector(query)
k_t = self.projector(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
# predict
pred = self.decoder(h)
return pred
class ANPv2(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):
super(ANPv2, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [
torch.nn.Linear(in_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(en_layer-2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [
torch.nn.Linear(hidden_dim, hidden_dim),
torch.nn.ReLU()
]
for i in range(dec_layer-2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.key_mlp = torch.nn.Sequential(
torch.nn.Linear(query_dim, hidden_dim),
torch.nn.ReLU(),
torch.nn.Linear(hidden_dim, hidden_dim)
)
self.query_mlp = torch.nn.Sequential(
torch.nn.Linear(query_dim, hidden_dim),
torch.nn.ReLU(),
torch.nn.Linear(hidden_dim, hidden_dim)
)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
# encode
h = self.encoder(context)
h.unsqueeze_(1)
# aggregate
q_t = self.query_mlp(query)
k_t = self.key_mlp(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
# predict
pred = self.decoder(h)
return pred
|
normal
|
{
"blob_id": "82c3bde5746d04c126a93851844f775e7ce65f4b",
"index": 9442,
"step-1": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n <mask token>\n <mask token>\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-2": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n <mask token>\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-3": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim + query_dim,\n hidden_dim), torch.nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n 
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-4": "<mask token>\n\n\ndef posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y\n =1e-08):\n \"\"\" Computes the sufficient statistics of the GP posterior predictive distribution from m training data X_train and Y_train and n new inputs X_s. Args: X_s: New input locations (n x d). X_train: Training locations (m x d). Y_train: Training targets (m x 1). l: Kernel length parameter. sigma_f: Kernel vertical variation parameter. sigma_y: Noise parameter. Returns: Posterior mean vector (n x d) and covariance matrix (n x n). \"\"\"\n K = kernel(X_train, X_train, l, sigma_f) + sigma_y ** 2 * np.eye(len(\n X_train))\n K_s = kernel(X_s, X_train, l, sigma_f)\n K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y ** 2 * np.eye(len(X_s))\n mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))\n cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))\n return mu_s, cov_s\n\n\nclass CNP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim + query_dim,\n hidden_dim), torch.nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, 
value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-5": "import torch\nimport numpy as np\n\n\n# source: https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb\ndef kernel(X1, X2, l=1.0, sigma_f=1.0):\n ''' Isotropic squared exponential kernel. Computes a covariance matrix from points in X1 and X2. Args: X1: Array of m points (m x d). X2: Array of n points (n x d). Returns: Covariance matrix (m x n). '''\n sqdist = np.sum(X1**2, 1).reshape(-1, 1) + np.sum(X2**2, 1) - 2 * np.dot(X1, X2.T)\n return sigma_f**2 * np.exp(-0.5 / l**2 * sqdist)\n \n# source: # https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb\ndef posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y=1e-8):\n ''' Computes the sufficient statistics of the GP posterior predictive distribution from m training data X_train and Y_train and n new inputs X_s. Args: X_s: New input locations (n x d). X_train: Training locations (m x d). Y_train: Training targets (m x 1). l: Kernel length parameter. sigma_f: Kernel vertical variation parameter. sigma_y: Noise parameter. Returns: Posterior mean vector (n x d) and covariance matrix (n x n). '''\n K = kernel(X_train, X_train, l, sigma_f) + sigma_y**2 * np.eye(len(X_train))\n K_s = kernel(X_s, X_train, l, sigma_f)\n K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y**2 * np.eye(len(X_s))\n \n mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))\n cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))\n \n return mu_s, cov_s\n\nclass CNP(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim+query_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim+query_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n \n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n # encode\n h = self.encoder(context)\n # aggregate\n h = h.mean(dim=0)\n h = torch.stack([h]*(query.shape[0]), dim=0)\n r = torch.cat([h, query], dim=1)\n # predict\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n 
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)\n\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n # encode\n h = self.encoder(context)\n h.unsqueeze_(1)\n # aggregate\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n # predict\n pred = self.decoder(h)\n return pred\n\nclass ANPv2(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n \n self.key_mlp = torch.nn.Sequential(\n torch.nn.Linear(query_dim, hidden_dim),\n torch.nn.ReLU(),\n torch.nn.Linear(hidden_dim, hidden_dim)\n )\n\n self.query_mlp = torch.nn.Sequential(\n torch.nn.Linear(query_dim, hidden_dim),\n torch.nn.ReLU(),\n torch.nn.Linear(hidden_dim, hidden_dim)\n )\n\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)\n\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n # encode\n h = self.encoder(context)\n h.unsqueeze_(1)\n # aggregate\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n # predict\n pred = self.decoder(h)\n return pred\n",
"step-ids": [
7,
8,
9,
10,
13
]
}
|
[
7,
8,
9,
10,
13
] |
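A minimal shape check for the models in the record above; the dimensions are arbitrary, and nhead must divide hidden_dim for torch.nn.MultiheadAttention.

cnp = CNP(in_dim=3, hidden_dim=64, query_dim=2, out_dim=1, en_layer=3, dec_layer=3)
context = torch.rand(10, 3)            # 10 context points, each flattened to 3 dims
query = torch.rand(5, 2)               # 5 target locations
print(cnp(context, query).shape)       # torch.Size([5, 1])

anp = ANP(in_dim=3, hidden_dim=64, query_dim=2, out_dim=1,
          en_layer=3, dec_layer=3, nhead=4)
key = torch.rand(10, 2)                # keys: the context locations
print(anp(context, key, query).shape)  # torch.Size([5, 1])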
print(" whats your name boi ?")
name = input()
if name == "arrya":
    print("u are a boi")
elif name == "jon":
print("basterd")
elif name == "ned":
print("you are dead man")
elif name == "rob":
print("the king in the north")
else:
print("carry on")
|
normal
|
{
"blob_id": "483a5e95a7bfca2cc6b1e7e81740620468fb5623",
"index": 9646,
"step-1": "<mask token>\n",
"step-2": "print(' whats your name boi ?')\n<mask token>\nif name == 'arrya':\n print('u are a boi')\nelif name == 'jon':\n print('basterd')\nelif name == 'ned':\n print('you are dead man')\nelif name == 'rob':\n print('the king in the north')\nelse:\n print('carry on')\n",
"step-3": "print(' whats your name boi ?')\nname = input()\nif name == 'arrya':\n print('u are a boi')\nelif name == 'jon':\n print('basterd')\nelif name == 'ned':\n print('you are dead man')\nelif name == 'rob':\n print('the king in the north')\nelse:\n print('carry on')\n",
"step-4": "print(\" whats your name boi ?\")\r\nname = input();\r\nif name == \"arrya\":\r\n print(\"u are a boi\");\r\nelif name == \"jon\":\r\n print(\"basterd\")\r\nelif name == \"ned\":\r\n print(\"you are dead man\")\r\nelif name == \"rob\":\r\n print(\"the king in the north\")\r\nelse:\r\n print(\"carry on\")\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
Created on 17.05.2018
@author: markus
'''
import Ship
import Player
import Planet
import random
from FighterShip import FighterShip
turnCounter = 0
def cleanScreen():
for i in range(0,50):
print("")
def spacePirates(player): # space pirates attack; their firepower is within +/-20% of the player's firepower
while True:# loop
cleanScreen()
print("*****F*U*C*K****S*P*A*C*E*P*I*R*A*T*E*S***A*T*T*A*C*K*****")
playerFirepower = player.getTotalFirepower()
piratesFirepower = int(playerFirepower*(1+random.randint(-20,20)/100))
if ((random.randint(0,playerFirepower) > playerFirepower/3) and
(random.randint(0,piratesFirepower) < piratesFirepower/3) or (playerFirepower == 0)):
print("Damm, you got robbed by the pirates!")
print("You lost all your cargo and half your money!")
player.clearTech()
player.clearFood()
player.updateCargoUnits()
player.setCredits(player.getCredits()/2)
else:
print("Lucky you! Your fighters drove them off!")
print("**********************************************************")
input("Hit enter to continue")
break
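# --- Illustrative helper (added for this write-up; not part of the original game). ---
# A standalone Monte Carlo estimate of the robbery chance for a given player
# firepower, mirroring the random condition used in spacePirates above. The
# function name and trial count are our own choices.
def estimate_robbery_chance(playerFirepower, trials=10000):
    robbed = 0
    for _ in range(trials):
        piratesFirepower = int(playerFirepower*(1+random.randint(-20,20)/100))
        if ((random.randint(0,playerFirepower) > playerFirepower/3) and
            (random.randint(0,piratesFirepower) < piratesFirepower/3) or (playerFirepower == 0)):
            robbed += 1
    return robbed/trials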
def shipyardMenu(player, planet):
while True:# loop
cleanScreen()
print("*****W*E*L*C*O*M*E****T*O****T*H*E****S*H*I*P*Y*A*R*D*****")
player.printStats()
print("**********************************************************")
shipList = planet.getShipyard()
print("Available Ships:")
print("**********************************************************")
i = 0
for s in shipList:
print("Nr.:"+str(i)+":"+s.toString())
i += 1
print("**********************************************************")
userInput = input("Enter the number you would like to by or x to leave:")
if (userInput == "x"):
break;
else:
ui = int(userInput)
if (ui <= i):
if(player.getCredits() > shipList[ui].getPrice()): #has enough money
if(type(shipList[ui]) == FighterShip):
player.addFighterShip(shipList[ui])
player.updateFirePower()
else:
player.addCargoShip(shipList[ui])
player.updateCargoUnits()
player.setCredits(player.getCredits() - shipList[ui].getPrice())
player.updateMaintenance()
del shipList[ui]
else:
print("wrong number, try again ....")
def spacePortMenu(player, planet):
global turnCounter
while True:# loop
cleanScreen()
print("****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****")
print("Enter 1 to jump to a agri planet (risk 5%)")
print("Enter 2 to jump to a tech planet (risk 10%)")
print("Enter 3 to jump to a war planet (risk 20%)")
userInput = input("Or enter x to exit:")
risk = 0
if (userInput == "x"):
return planet
elif (userInput == "1"):
risk = 5
elif(userInput == "2"):
risk = 10
else:
risk = 20
if (random.randint(0,100) <= risk):
spacePirates(player)
player.setCredits(player.getCredits() - player.getTotalMaintenance())
turnCounter += 1
return Planet.Planet(int(userInput))
def marketMenu(player, planet):
while True:# loop
cleanScreen()
print("*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******")
player.printStats()
print("**********************************************************")
market = planet.getMarket()
print("Price for Food = ",market["Food"])
print("Price for Tech = ",market["Tech"])
print("**********************************************************")
userInput = input("Enter 1 for Food, 2 for Tech or x for exit:")
str =""
if (userInput == "1"):
str = "Food"
elif(userInput == "2"):
str= "Tech"
else:
break
print("**********************************************************")
max = 0
if(market[str]*player.freeCargoUnits <= player.getCredits()):#enough credit?
max = player.freeCargoUnits
else:
max = int(player.getCredits()/market[str])
print("Price for "+str+" = ",market[str])
secondInput = input("Would you like to buy (enter b) or sell (enter s)?")
if (secondInput == "b"):#buying
print("You can buy a maximum of",max,"units")
nr = input("How much would you like to buy? Or press x to exit")
if (nr == "x"):
pass
else:
nr = int(nr)
if((player.getCredits() > market[str]*nr) and (nr <= max)): #has enough money and space
if (str == "Food"):
player.addFood(nr)
else:
player.addTech(nr)
player.setCredits(player.getCredits() - market[str]*nr)
player.updateCargoUnits()
else:#selling
if (str == "Food"):
print("You can sell a maximum of",player.getFood(),"food units")
nr = input("How much would you like to sell? Or press x to exit")
if (nr == "x"):
pass
else:
nr = int(nr)
if (nr <= player.getFood()):
player.sellFood(nr)
player.setCredits(player.getCredits() + nr*market["Food"])
else:
print("You can sell a maximum of",player.getTech(),"tech units")
nr = input("How much would you like to sell? Or press x to exit")
if (nr == "x"):
pass
else:
nr = int(nr)
if (nr <= player.getTech()):
player.sellTech(nr)
player.setCredits(player.getCredits() + nr*market["Tech"])
def menu(player):
global turnCounter
notFinished = True
planet = Planet.Planet(random.randint(1,3))
while notFinished:#main game loop
cleanScreen()
if (player.getCredits() < 0):
print("Sorry, but you ran out of credits and therefore lost the game in round,",turnCounter,"!")
break
print("**********************************************************")
print("Turn nr.",turnCounter,"in this glorious space trading simulation")
player.printStats()
print("**********************************************************")
print("You are on Planet:",planet.getName())
print("**********************************************************")
print("Enter 1 to go to the shipyard")
print("Enter 2 to go to the market")
print("Enter 3 to go to the spaceport")
print("Enter exit to leave the game")
userinput = input("Your Input:")
if (userinput == "1"):
shipyardMenu(player, planet)
elif (userinput == "2"):
marketMenu(player, planet)
elif (userinput == "3"):
planet = spacePortMenu(player, planet)
else:
notFinished = False
print("***************************************")
print(" Welcome to StarSim")
print("***************************************")
name = input("Please enter your Name:")
player = Player.Player(name)
menu(player)
|
normal
|
{
"blob_id": "97611fef5faafe660c7640e4a5aec8456e52135c",
"index": 9960,
"step-1": "<mask token>\n\n\ndef spacePortMenu(player, planet):\n global turnCounter\n while True:\n cleanScreen()\n print('****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****')\n print('Enter 1 to jump to a agri planet (risk 5%)')\n print('Enter 2 to jump to a tech planet (risk 10%)')\n print('Enter 3 to jump to a war planet (risk 20%)')\n userInput = input('Or enter x to exit:')\n risk = 0\n if userInput == 'x':\n return planet\n elif userInput == '1':\n risk = 5\n elif userInput == '2':\n risk = 10\n else:\n risk = 20\n if random.randint(0, 100) <= risk:\n spacePirates(player)\n player.setCredits(player.getCredits() - player.getTotalMaintenance())\n turnCounter += 1\n return Planet.Planet(int(userInput))\n\n\ndef marketMenu(player, planet):\n while True:\n cleanScreen()\n print('*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******')\n player.printStats()\n print('**********************************************************')\n market = planet.getMarket()\n print('Price for Food = ', market['Food'])\n print('Price for Tech = ', market['Tech'])\n print('**********************************************************')\n userInput = input('Enter 1 for Food, 2 for Tech or x for exit:')\n str = ''\n if userInput == '1':\n str = 'Food'\n elif userInput == '2':\n str = 'Tech'\n else:\n break\n print('**********************************************************')\n max = 0\n if market[str] * player.freeCargoUnits <= player.getCredits():\n max = player.freeCargoUnits\n else:\n max = int(player.getCredits() / market[str])\n print('Price for ' + str + ' = ', market[str])\n secondInput = input(\n 'Would you like to buy (enter b) or sell (enter s)?')\n if secondInput == 'b':\n print('You can buy a maximum of', max, 'units')\n nr = input('How much would you like to buy? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if player.getCredits() > market[str] * nr and nr <= max:\n if str == 'Food':\n player.addFood(nr)\n else:\n player.addTech(nr)\n player.setCredits(player.getCredits() - market[str] * nr)\n player.updateCargoUnits()\n elif str == 'Food':\n print('You can sell a maximum of', player.getFood(), 'food units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getFood():\n player.sellFood(nr)\n player.setCredits(player.getCredits() + nr * market['Food']\n )\n else:\n print('You can sell a maximum of', player.getTech(), 'tech units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getTech():\n player.sellTech(nr)\n player.setCredits(player.getCredits() + nr * market['Tech']\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef cleanScreen():\n for i in range(0, 50):\n print('')\n\n\n<mask token>\n\n\ndef shipyardMenu(player, planet):\n while True:\n cleanScreen()\n print('*****W*E*L*C*O*M*E****T*O****T*H*E****S*H*I*P*Y*A*R*D*****')\n player.printStats()\n print('**********************************************************')\n shipList = planet.getShipyard()\n print('Available Ships:')\n print('**********************************************************')\n i = 0\n for s in shipList:\n print('Nr.:' + str(i) + ':' + s.toString())\n i += 1\n print('**********************************************************')\n userInput = input(\n 'Enter the number you would like to by or x to leave:')\n if userInput == 'x':\n break\n else:\n ui = int(userInput)\n if ui <= i:\n if player.getCredits() > shipList[ui].getPrice():\n if type(shipList[ui]) == FighterShip:\n player.addFighterShip(shipList[ui])\n player.updateFirePower()\n else:\n player.addCargoShip(shipList[ui])\n player.updateCargoUnits()\n player.setCredits(player.getCredits() - shipList[ui].\n getPrice())\n player.updateMaintenance()\n del shipList[ui]\n else:\n print('wrong number, try again ....')\n\n\ndef spacePortMenu(player, planet):\n global turnCounter\n while True:\n cleanScreen()\n print('****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****')\n print('Enter 1 to jump to a agri planet (risk 5%)')\n print('Enter 2 to jump to a tech planet (risk 10%)')\n print('Enter 3 to jump to a war planet (risk 20%)')\n userInput = input('Or enter x to exit:')\n risk = 0\n if userInput == 'x':\n return planet\n elif userInput == '1':\n risk = 5\n elif userInput == '2':\n risk = 10\n else:\n risk = 20\n if random.randint(0, 100) <= risk:\n spacePirates(player)\n player.setCredits(player.getCredits() - player.getTotalMaintenance())\n turnCounter += 1\n return Planet.Planet(int(userInput))\n\n\ndef marketMenu(player, planet):\n while True:\n cleanScreen()\n print('*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******')\n player.printStats()\n print('**********************************************************')\n market = planet.getMarket()\n print('Price for Food = ', market['Food'])\n print('Price for Tech = ', market['Tech'])\n print('**********************************************************')\n userInput = input('Enter 1 for Food, 2 for Tech or x for exit:')\n str = ''\n if userInput == '1':\n str = 'Food'\n elif userInput == '2':\n str = 'Tech'\n else:\n break\n print('**********************************************************')\n max = 0\n if market[str] * player.freeCargoUnits <= player.getCredits():\n max = player.freeCargoUnits\n else:\n max = int(player.getCredits() / market[str])\n print('Price for ' + str + ' = ', market[str])\n secondInput = input(\n 'Would you like to buy (enter b) or sell (enter s)?')\n if secondInput == 'b':\n print('You can buy a maximum of', max, 'units')\n nr = input('How much would you like to buy? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if player.getCredits() > market[str] * nr and nr <= max:\n if str == 'Food':\n player.addFood(nr)\n else:\n player.addTech(nr)\n player.setCredits(player.getCredits() - market[str] * nr)\n player.updateCargoUnits()\n elif str == 'Food':\n print('You can sell a maximum of', player.getFood(), 'food units')\n nr = input('How much would you like to sell? 
Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getFood():\n player.sellFood(nr)\n player.setCredits(player.getCredits() + nr * market['Food']\n )\n else:\n print('You can sell a maximum of', player.getTech(), 'tech units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getTech():\n player.sellTech(nr)\n player.setCredits(player.getCredits() + nr * market['Tech']\n )\n\n\ndef menu(player):\n global turnCounter\n notFinished = True\n planet = Planet.Planet(random.randint(1, 3))\n while notFinished:\n cleanScreen()\n if player.getCredits() < 0:\n print(\n 'Sorry, but you ran out of credits and therefore lost the game in round,'\n , turnCounter, '!')\n break\n print('**********************************************************')\n print('Turn nr.', turnCounter,\n 'in this glorious space trading simulation')\n player.printStats()\n print('**********************************************************')\n print('You are on Planet:', planet.getName())\n print('**********************************************************')\n print('Enter 1 to go to the shipyard')\n print('Enter 2 to go to the market')\n print('Enter 3 to go to the spaceport')\n print('Enter exit to leave the game')\n userinput = input('Your Input:')\n if userinput == '1':\n shipyardMenu(player, planet)\n elif userinput == '2':\n marketMenu(player, planet)\n elif userinput == '3':\n planet = spacePortMenu(player, planet)\n else:\n notFinished = False\n\n\n<mask token>\n",
"step-3": "<mask token>\nturnCounter = 0\n\n\ndef cleanScreen():\n for i in range(0, 50):\n print('')\n\n\ndef spacePirates(player):\n while True:\n cleanScreen()\n print('*****F*U*C*K****S*P*A*C*E*P*I*R*A*T*E*S***A*T*T*A*C*K*****')\n playerFirepower = player.getTotalFirepower()\n piratesFirepower = int(playerFirepower * (1 + random.randint(-20, \n 20) / 100))\n if random.randint(0, playerFirepower\n ) > playerFirepower / 3 and random.randint(0, piratesFirepower\n ) < piratesFirepower / 3 or playerFirepower == 0:\n print('Damm, you got robbed by the pirates!')\n print('You lost all your cargo and half your money!')\n player.clearTech()\n player.clearFood()\n player.updateCargoUnits()\n player.setCredits(player.getCredits() / 2)\n else:\n print('Lucky you! Your fighters drove them off!')\n print('**********************************************************')\n input('Hit enter to continue')\n break\n\n\ndef shipyardMenu(player, planet):\n while True:\n cleanScreen()\n print('*****W*E*L*C*O*M*E****T*O****T*H*E****S*H*I*P*Y*A*R*D*****')\n player.printStats()\n print('**********************************************************')\n shipList = planet.getShipyard()\n print('Available Ships:')\n print('**********************************************************')\n i = 0\n for s in shipList:\n print('Nr.:' + str(i) + ':' + s.toString())\n i += 1\n print('**********************************************************')\n userInput = input(\n 'Enter the number you would like to by or x to leave:')\n if userInput == 'x':\n break\n else:\n ui = int(userInput)\n if ui <= i:\n if player.getCredits() > shipList[ui].getPrice():\n if type(shipList[ui]) == FighterShip:\n player.addFighterShip(shipList[ui])\n player.updateFirePower()\n else:\n player.addCargoShip(shipList[ui])\n player.updateCargoUnits()\n player.setCredits(player.getCredits() - shipList[ui].\n getPrice())\n player.updateMaintenance()\n del shipList[ui]\n else:\n print('wrong number, try again ....')\n\n\ndef spacePortMenu(player, planet):\n global turnCounter\n while True:\n cleanScreen()\n print('****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****')\n print('Enter 1 to jump to a agri planet (risk 5%)')\n print('Enter 2 to jump to a tech planet (risk 10%)')\n print('Enter 3 to jump to a war planet (risk 20%)')\n userInput = input('Or enter x to exit:')\n risk = 0\n if userInput == 'x':\n return planet\n elif userInput == '1':\n risk = 5\n elif userInput == '2':\n risk = 10\n else:\n risk = 20\n if random.randint(0, 100) <= risk:\n spacePirates(player)\n player.setCredits(player.getCredits() - player.getTotalMaintenance())\n turnCounter += 1\n return Planet.Planet(int(userInput))\n\n\ndef marketMenu(player, planet):\n while True:\n cleanScreen()\n print('*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******')\n player.printStats()\n print('**********************************************************')\n market = planet.getMarket()\n print('Price for Food = ', market['Food'])\n print('Price for Tech = ', market['Tech'])\n print('**********************************************************')\n userInput = input('Enter 1 for Food, 2 for Tech or x for exit:')\n str = ''\n if userInput == '1':\n str = 'Food'\n elif userInput == '2':\n str = 'Tech'\n else:\n break\n print('**********************************************************')\n max = 0\n if market[str] * player.freeCargoUnits <= player.getCredits():\n max = player.freeCargoUnits\n else:\n max = int(player.getCredits() / market[str])\n print('Price for ' + str + ' = ', market[str])\n 
secondInput = input(\n 'Would you like to buy (enter b) or sell (enter s)?')\n if secondInput == 'b':\n print('You can buy a maximum of', max, 'units')\n nr = input('How much would you like to buy? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if player.getCredits() > market[str] * nr and nr <= max:\n if str == 'Food':\n player.addFood(nr)\n else:\n player.addTech(nr)\n player.setCredits(player.getCredits() - market[str] * nr)\n player.updateCargoUnits()\n elif str == 'Food':\n print('You can sell a maximum of', player.getFood(), 'food units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getFood():\n player.sellFood(nr)\n player.setCredits(player.getCredits() + nr * market['Food']\n )\n else:\n print('You can sell a maximum of', player.getTech(), 'tech units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getTech():\n player.sellTech(nr)\n player.setCredits(player.getCredits() + nr * market['Tech']\n )\n\n\ndef menu(player):\n global turnCounter\n notFinished = True\n planet = Planet.Planet(random.randint(1, 3))\n while notFinished:\n cleanScreen()\n if player.getCredits() < 0:\n print(\n 'Sorry, but you ran out of credits and therefore lost the game in round,'\n , turnCounter, '!')\n break\n print('**********************************************************')\n print('Turn nr.', turnCounter,\n 'in this glorious space trading simulation')\n player.printStats()\n print('**********************************************************')\n print('You are on Planet:', planet.getName())\n print('**********************************************************')\n print('Enter 1 to go to the shipyard')\n print('Enter 2 to go to the market')\n print('Enter 3 to go to the spaceport')\n print('Enter exit to leave the game')\n userinput = input('Your Input:')\n if userinput == '1':\n shipyardMenu(player, planet)\n elif userinput == '2':\n marketMenu(player, planet)\n elif userinput == '3':\n planet = spacePortMenu(player, planet)\n else:\n notFinished = False\n\n\nprint('***************************************')\nprint(' Welcome to StarSim')\nprint('***************************************')\nname = input('Please enter your Name:')\nplayer = Player.Player(name)\nmenu(player)\n",
"step-4": "<mask token>\nimport Ship\nimport Player\nimport Planet\nimport random\nfrom FighterShip import FighterShip\nturnCounter = 0\n\n\ndef cleanScreen():\n for i in range(0, 50):\n print('')\n\n\ndef spacePirates(player):\n while True:\n cleanScreen()\n print('*****F*U*C*K****S*P*A*C*E*P*I*R*A*T*E*S***A*T*T*A*C*K*****')\n playerFirepower = player.getTotalFirepower()\n piratesFirepower = int(playerFirepower * (1 + random.randint(-20, \n 20) / 100))\n if random.randint(0, playerFirepower\n ) > playerFirepower / 3 and random.randint(0, piratesFirepower\n ) < piratesFirepower / 3 or playerFirepower == 0:\n print('Damm, you got robbed by the pirates!')\n print('You lost all your cargo and half your money!')\n player.clearTech()\n player.clearFood()\n player.updateCargoUnits()\n player.setCredits(player.getCredits() / 2)\n else:\n print('Lucky you! Your fighters drove them off!')\n print('**********************************************************')\n input('Hit enter to continue')\n break\n\n\ndef shipyardMenu(player, planet):\n while True:\n cleanScreen()\n print('*****W*E*L*C*O*M*E****T*O****T*H*E****S*H*I*P*Y*A*R*D*****')\n player.printStats()\n print('**********************************************************')\n shipList = planet.getShipyard()\n print('Available Ships:')\n print('**********************************************************')\n i = 0\n for s in shipList:\n print('Nr.:' + str(i) + ':' + s.toString())\n i += 1\n print('**********************************************************')\n userInput = input(\n 'Enter the number you would like to by or x to leave:')\n if userInput == 'x':\n break\n else:\n ui = int(userInput)\n if ui <= i:\n if player.getCredits() > shipList[ui].getPrice():\n if type(shipList[ui]) == FighterShip:\n player.addFighterShip(shipList[ui])\n player.updateFirePower()\n else:\n player.addCargoShip(shipList[ui])\n player.updateCargoUnits()\n player.setCredits(player.getCredits() - shipList[ui].\n getPrice())\n player.updateMaintenance()\n del shipList[ui]\n else:\n print('wrong number, try again ....')\n\n\ndef spacePortMenu(player, planet):\n global turnCounter\n while True:\n cleanScreen()\n print('****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****')\n print('Enter 1 to jump to a agri planet (risk 5%)')\n print('Enter 2 to jump to a tech planet (risk 10%)')\n print('Enter 3 to jump to a war planet (risk 20%)')\n userInput = input('Or enter x to exit:')\n risk = 0\n if userInput == 'x':\n return planet\n elif userInput == '1':\n risk = 5\n elif userInput == '2':\n risk = 10\n else:\n risk = 20\n if random.randint(0, 100) <= risk:\n spacePirates(player)\n player.setCredits(player.getCredits() - player.getTotalMaintenance())\n turnCounter += 1\n return Planet.Planet(int(userInput))\n\n\ndef marketMenu(player, planet):\n while True:\n cleanScreen()\n print('*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******')\n player.printStats()\n print('**********************************************************')\n market = planet.getMarket()\n print('Price for Food = ', market['Food'])\n print('Price for Tech = ', market['Tech'])\n print('**********************************************************')\n userInput = input('Enter 1 for Food, 2 for Tech or x for exit:')\n str = ''\n if userInput == '1':\n str = 'Food'\n elif userInput == '2':\n str = 'Tech'\n else:\n break\n print('**********************************************************')\n max = 0\n if market[str] * player.freeCargoUnits <= player.getCredits():\n max = player.freeCargoUnits\n else:\n max = 
int(player.getCredits() / market[str])\n print('Price for ' + str + ' = ', market[str])\n secondInput = input(\n 'Would you like to buy (enter b) or sell (enter s)?')\n if secondInput == 'b':\n print('You can buy a maximum of', max, 'units')\n nr = input('How much would you like to buy? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if player.getCredits() > market[str] * nr and nr <= max:\n if str == 'Food':\n player.addFood(nr)\n else:\n player.addTech(nr)\n player.setCredits(player.getCredits() - market[str] * nr)\n player.updateCargoUnits()\n elif str == 'Food':\n print('You can sell a maximum of', player.getFood(), 'food units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getFood():\n player.sellFood(nr)\n player.setCredits(player.getCredits() + nr * market['Food']\n )\n else:\n print('You can sell a maximum of', player.getTech(), 'tech units')\n nr = input('How much would you like to sell? Or press x to exit')\n if nr == 'x':\n pass\n else:\n nr = int(nr)\n if nr <= player.getTech():\n player.sellTech(nr)\n player.setCredits(player.getCredits() + nr * market['Tech']\n )\n\n\ndef menu(player):\n global turnCounter\n notFinished = True\n planet = Planet.Planet(random.randint(1, 3))\n while notFinished:\n cleanScreen()\n if player.getCredits() < 0:\n print(\n 'Sorry, but you ran out of credits and therefore lost the game in round,'\n , turnCounter, '!')\n break\n print('**********************************************************')\n print('Turn nr.', turnCounter,\n 'in this glorious space trading simulation')\n player.printStats()\n print('**********************************************************')\n print('You are on Planet:', planet.getName())\n print('**********************************************************')\n print('Enter 1 to go to the shipyard')\n print('Enter 2 to go to the market')\n print('Enter 3 to go to the spaceport')\n print('Enter exit to leave the game')\n userinput = input('Your Input:')\n if userinput == '1':\n shipyardMenu(player, planet)\n elif userinput == '2':\n marketMenu(player, planet)\n elif userinput == '3':\n planet = spacePortMenu(player, planet)\n else:\n notFinished = False\n\n\nprint('***************************************')\nprint(' Welcome to StarSim')\nprint('***************************************')\nname = input('Please enter your Name:')\nplayer = Player.Player(name)\nmenu(player)\n",
"step-5": "'''\nCreated on 17.05.2018\n\n@author: markus\n'''\nimport Ship\nimport Player\nimport Planet\nimport random\nfrom FighterShip import FighterShip\n\nturnCounter = 0\n\ndef cleanScreen():\n for i in range(0,50):\n print(\"\")\n \ndef spacePirates(player):#space prites attack, their firepower is +/-20% of player firepower\n while True:# loop\n cleanScreen()\n print(\"*****F*U*C*K****S*P*A*C*E*P*I*R*A*T*E*S***A*T*T*A*C*K*****\")\n playerFirepower = player.getTotalFirepower()\n piratesFirepower = int(playerFirepower*(1+random.randint(-20,20)/100))\n if ((random.randint(0,playerFirepower) > playerFirepower/3) and \n (random.randint(0,piratesFirepower) < piratesFirepower/3) or (playerFirepower == 0)):\n print(\"Damm, you got robbed by the pirates!\")\n print(\"You lost all your cargo and half your money!\")\n player.clearTech()\n player.clearFood()\n player.updateCargoUnits()\n player.setCredits(player.getCredits()/2)\n else:\n print(\"Lucky you! Your fighters drove them off!\")\n print(\"**********************************************************\")\n input(\"Hit enter to continue\")\n break\n \n\ndef shipyardMenu(player, planet):\n while True:# loop\n cleanScreen()\n print(\"*****W*E*L*C*O*M*E****T*O****T*H*E****S*H*I*P*Y*A*R*D*****\")\n player.printStats()\n print(\"**********************************************************\")\n shipList = planet.getShipyard()\n print(\"Available Ships:\")\n print(\"**********************************************************\")\n i = 0\n for s in shipList:\n print(\"Nr.:\"+str(i)+\":\"+s.toString())\n i += 1\n print(\"**********************************************************\") \n userInput = input(\"Enter the number you would like to by or x to leave:\") \n if (userInput == \"x\"):\n break;\n else:\n ui = int(userInput)\n if (ui <= i):\n if(player.getCredits() > shipList[ui].getPrice()): #has enough money\n if(type(shipList[ui]) == FighterShip):\n player.addFighterShip(shipList[ui])\n player.updateFirePower()\n else:\n player.addCargoShip(shipList[ui])\n player.updateCargoUnits()\n player.setCredits(player.getCredits() - shipList[ui].getPrice())\n player.updateMaintenance()\n del shipList[ui]\n else:\n print(\"wrong number, try again ....\")\n\ndef spacePortMenu(player, planet):\n global turnCounter\n while True:# loop\n cleanScreen()\n print(\"****W*E*L*C*O*M*E****T*O****T*H*E****S*P*A*C*E*P*O*R*T****\")\n print(\"Enter 1 to jump to a agri planet (risk 5%)\")\n print(\"Enter 2 to jump to a tech planet (risk 10%)\")\n print(\"Enter 3 to jump to a war planet (risk 20%)\")\n userInput = input(\"Or enter x to exit:\")\n risk = 0\n if (userInput == \"x\"):\n return planet\n elif (userInput == \"1\"):\n risk = 5\n elif(userInput == \"2\"):\n risk = 10\n else:\n risk = 20 \n if (random.randint(0,100) <= risk):\n spacePirates(player)\n player.setCredits(player.getCredits() - player.getTotalMaintenance())\n turnCounter += 1 \n return Planet.Planet(int(userInput))\n \ndef marketMenu(player, planet):\n while True:# loop\n cleanScreen()\n print(\"*******W*E*L*C*O*M*E****T*O****T*H*E****M*A*R*K*E*T*******\")\n player.printStats()\n print(\"**********************************************************\")\n market = planet.getMarket()\n print(\"Price for Food = \",market[\"Food\"])\n print(\"Price for Tech = \",market[\"Tech\"])\n print(\"**********************************************************\")\n userInput = input(\"Enter 1 for Food, 2 for Tech or x for exit:\")\n str =\"\"\n if (userInput == \"1\"):\n str = \"Food\"\n elif(userInput == \"2\"):\n str= 
\"Tech\"\n else:\n break\n print(\"**********************************************************\")\n max = 0\n if(market[str]*player.freeCargoUnits <= player.getCredits()):#enough credit?\n max = player.freeCargoUnits\n else:\n max = int(player.getCredits()/market[str])\n print(\"Price for \"+str+\" = \",market[str])\n secondInput = input(\"Would you like to buy (enter b) or sell (enter s)?\")\n if (secondInput == \"b\"):#buying\n print(\"You can buy a maximum of\",max,\"units\")\n nr = input(\"How much would you like to buy? Or press x to exit\")\n if (nr == \"x\"):\n pass\n else:\n nr = int(nr)\n if((player.getCredits() > market[str]*nr) and (nr <= max)): #has enough money and space\n if (str == \"Food\"):\n player.addFood(nr)\n else:\n player.addTech(nr)\n player.setCredits(player.getCredits() - market[str]*nr)\n player.updateCargoUnits()\n else:#selling\n if (str == \"Food\"):\n print(\"You can sell a maximum of\",player.getFood(),\"food units\")\n nr = input(\"How much would you like to sell? Or press x to exit\")\n if (nr == \"x\"):\n pass\n else:\n nr = int(nr)\n if (nr <= player.getFood()):\n player.sellFood(nr)\n player.setCredits(player.getCredits() + nr*market[\"Food\"])\n else:\n print(\"You can sell a maximum of\",player.getTech(),\"tech units\")\n nr = input(\"How much would you like to sell? Or press x to exit\")\n if (nr == \"x\"):\n pass\n else:\n nr = int(nr)\n if (nr <= player.getTech()):\n player.sellTech(nr)\n player.setCredits(player.getCredits() + nr*market[\"Tech\"])\n \n \n \n \ndef menu(player):\n global turnCounter\n notFinished = True\n planet = Planet.Planet(random.randint(1,3))\n while notFinished:#main game loop \n cleanScreen()\n if (player.getCredits() < 0):\n print(\"Sorry, but you ran out of credits and therefore lost the game in round,\",turnCounter,\"!\")\n break\n print(\"**********************************************************\")\n print(\"Turn nr.\",turnCounter,\"in this glorious space trading simulation\")\n player.printStats()\n print(\"**********************************************************\")\n print(\"You are on Planet:\",planet.getName())\n print(\"**********************************************************\")\n print(\"Enter 1 to go to the shipyard\")\n print(\"Enter 2 to go to the market\")\n print(\"Enter 3 to go to the spaceport\")\n print(\"Enter exit to leave the game\")\n userinput = input(\"Your Input:\")\n if (userinput == \"1\"):\n shipyardMenu(player, planet)\n elif (userinput == \"2\"):\n marketMenu(player, planet)\n elif (userinput == \"3\"):\n planet = spacePortMenu(player, planet)\n else: \n notFinished = False\n \n \n \n\nprint(\"***************************************\")\nprint(\" Welcome to StarSim\")\nprint(\"***************************************\")\nname = input(\"Please enter your Name:\")\nplayer = Player.Player(name)\nmenu(player)\n\n\n\n\n\n",
"step-ids": [
2,
5,
8,
9,
10
]
}
|
[
2,
5,
8,
9,
10
] |
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.metrics import precision_score, recall_score, f1_score
from scipy.optimize import fsolve
import numba
from numba import njit,jit
@jit(parallel = True)
def conventional_test(subject_array, typeII_error, typeI_error, repeat = 1,
seq = True):
"""
    A function that returns the test results for a subject array, given the
    probability of type II error, the probability of type I error, the number
    of repetitions, and whether testing is sequential or simultaneous.
    Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        repeat (int): the number of repetitions
        seq (boolean): True stands for sequential testing; a subject's testing ends
            as soon as a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
    Output:
        test_result (Numpy Array): an array containing subjects' ids and test results
        consum (int): the total test consumption
"""
# Sequential Testing
if seq == True:
consum = 0
test_result = np.zeros(subject_array.shape, dtype = int)
random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat))
for i in range(len(subject_array)):
temp = 0
j = 0
subject = subject_array[i,1]
while j < repeat and temp == 0:
random_num = random_table[i, j]
consum += 1
if subject == 1:
temp = 1 if random_num > typeII_error else 0
else:
temp = 1 if random_num < typeI_error else 0
j += 1
test_result[i,0] = subject_array[i,0]
test_result[i,1] = temp
return test_result, consum
    # Simultaneous Testing
else:
test_result = np.zeros(subject_array.shape, dtype = int)
random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat))
for i in range(len(subject_array)):
temp = 0
for j in range(repeat):
temp_random = random_table[i, j]
if subject_array[i, 1] == 1:
temp_1 = 1 if temp_random > typeII_error else 0
elif subject_array[i, 1] == 0:
temp_1 = 1 if temp_random < typeI_error else 0
temp += temp_1
temp = 1 if temp >= repeat/2 else 0
test_result[i,0] = subject_array[i,0]
test_result[i,1] = temp
return test_result, len(subject_array)*repeat
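# --- Illustrative usage sketch (added; not part of the original module). ---
# The `_demo_*` helpers below and all parameter values in them are made-up
# examples, not anything prescribed by the source. This one checks
# conventional_test on a tiny hand-made cohort: two infected and two healthy
# subjects, sequentially retested up to twice.
def _demo_conventional_test():
    subjects = np.array([[0, 1], [1, 0], [2, 0], [3, 1]])  # columns: id, true condition
    res, consum = conventional_test(subjects, typeII_error=0.05,
                                    typeI_error=0.01, repeat=2, seq=True)
    print(res)     # per-subject test outcomes
    print(consum)  # total kits used (at most 4 * 2; early stopping can use fewer)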
@njit(parallel = True)
def parallel_test(subject_array, typeII_error, typeI_error, num):
test_result = np.zeros(subject_array.shape, dtype = int)
random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))
for i in range(len(subject_array)):
subject = subject_array[i, 1]
if subject == 1:
temp = 1 if max(random_table[i,:]) > typeII_error else 0
elif subject == 0:
temp = 1 if min(random_table[i,:]) < typeI_error else 0
test_result[i,0] = subject_array[i,0]
test_result[i,1] = temp
return test_result,len(subject_array)*num,len(subject_array)*num
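# --- Illustrative usage sketch (added; values are made up). ---
# parallel_test runs `num` simultaneous replicates per subject and calls a
# subject positive when any replicate fires; consumption is always n * num.
# This assumes the @njit compilation of parallel_test succeeds in your
# numba environment.
def _demo_parallel_test():
    subjects = np.array([[0, 1], [1, 0], [2, 1]])
    res, total_kits, _ = parallel_test(subjects, 0.05, 0.01, 3)
    print(res, total_kits)  # total_kits == 3 subjects * 3 replicates == 9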
def infection_rate_on_negative_batch(p,batch_size,typeII_error, typeI_error):
"""
Given infection rate, batch size, prob of type II error and prob of type I error, this
function gives the infection rate on the negative batch.
Input:
p (float): the infection rate
batch_size (int): the batch size
typeII_error (float): the prob of type II error
typeI_error (float): the prob of type I error
Output:
(float): the infection rate on the negative batch
"""
q = 1-p
r = typeII_error * (1 - q ** batch_size)/((1 - typeI_error) * q ** batch_size + typeII_error *(1 - q**batch_size))
return p*r/(1-q**batch_size)
def infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):
"""
Given infection rate, batch size, prob of type II error and prob of type I error, this
function gives the infection rate on the positive batch.
Input:
p (float): the infection rate
batch_size (int): the batch size
typeII_error (float): the prob of type II error
typeI_error (float): the prob of type I error
Output:
(float): the infection rate on the positive batch
"""
q = 1-p
r = (1 - typeII_error) * (1 - q ** batch_size)/(typeI_error * q ** batch_size + (1 - typeII_error) * (1 - q **batch_size))
return p*r/(1 - q** batch_size)
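# --- Illustrative numeric check (added; parameters are made up). ---
# With p = 1%, batches of 10 and 5%/1% assay errors, the posterior infection
# rate should drop well below p on a negative batch and concentrate well
# above p on a positive batch, bracketing the prior.
def _demo_batch_posteriors():
    p, n, beta, alpha = 0.01, 10, 0.05, 0.01
    p_neg = infection_rate_on_negative_batch(p, n, beta, alpha)
    p_pos = infection_rate_on_positive_batch(p, n, beta, alpha)
    assert p_neg < p < p_pos
    print(p_neg, p_pos)  # roughly 6e-4 and 9.5e-2 for these inputs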
def one_batch_test_solver(prevalence_rate,typeII_error, typeI_error,n_initial_guess = 2):
"""
    A function that returns (float) the optimal batch size for a one-batch test given the infection rate
Inputs:
prevalence_rate(float): infection rate
typeII_error(float): the prob of type II error
typeI_error(float): the prob of type I error
n_initial_guess(float): the initial guess
Output:
(float): the optimal batch size
"""
    q = 1 - prevalence_rate  # to be consistent with the notation of the document
    func = lambda n : n*q**(n/2) - (-(1-typeII_error - typeI_error)*np.log(q))**(-1/2)
    n_solution = fsolve(func, n_initial_guess)
return float(n_solution)
def one_batch_test_int_solver(prevalence_rate,typeII_error, typeI_error,batch_limit,n_initial_guess = 2):
"""
    A function that returns (int) the optimal batch size for a one-batch test given the infection rate
    Inputs:
        prevalence_rate (float): infection rate
        typeII_error (float): the prob of type II error
        typeI_error (float): the prob of type I error
        n_initial_guess (float): the initial guess
        batch_limit (int): the upper limit of the batch size
Output:
(int): the optimal batch size
"""
sol_float = one_batch_test_solver(prevalence_rate,typeII_error, typeI_error, n_initial_guess)
floor, ceil = np.floor(sol_float), np.ceil(sol_float)
func = lambda batch_size: 1/batch_size + 1 - typeII_error -(1 - typeII_error - typeI_error)*(1-prevalence_rate)**batch_size
if func(floor) < func(ceil):
temp = int(floor)
else:
temp = int(ceil)
if temp <= batch_limit:
return temp
else:
return int(batch_limit)
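# --- Illustrative usage sketch (added; parameters are made up). ---
# For p = 1% with small assay errors, the solver lands near the classical
# error-free rule of thumb of about 1/sqrt(p) = 10 per batch.
def _demo_batch_size_solver():
    n_float = one_batch_test_solver(0.01, 0.05, 0.01)
    n_int = one_batch_test_int_solver(0.01, 0.05, 0.01, batch_limit=32)
    print(n_float, n_int)  # around 10-11, and the per-capita-cost-minimizing integer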
def neg_pos_batch_split(subject_array, batch_size, typeII_error, typeI_error):
"""
    A function that returns the subjects on the negative batch(es), the
    subjects on the positive batch(es), and the test-kit consumption,
    given the probability of type II error and the probability of
    type I error.
    Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
    Output:
        neg_batch (Numpy Array): an array of subjects on the negative batch(es)
        pos_batch (Numpy Array): an array of subjects on the positive batch(es)
        test_consum (int): the number of test-kits consumed
"""
neg_batch = []
pos_batch = []
test_consum = np.ceil(len(subject_array)/batch_size)
random_table = np.random.uniform(0, 1, int(test_consum))
i = 0
for temp_batch in np.array_split(subject_array, test_consum):
if 1 in (temp_batch[:,1]):
if random_table[i] > typeII_error:
pos_batch.append(temp_batch)
else:
neg_batch.append(temp_batch)
else:
if random_table[i] > typeI_error:
neg_batch.append(temp_batch)
else:
pos_batch.append(temp_batch)
i += 1
neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([])
pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([])
return (neg_batch, pos_batch, test_consum)
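# --- Illustrative usage sketch (added; the toy cohort is made up). ---
# Splits 27 subjects (every ninth infected) into batches of 9 and reports
# how many land on negative vs. positive batches and the kits consumed.
def _demo_neg_pos_split():
    subjects = np.array([[i, int(i % 9 == 0)] for i in range(27)])
    neg, pos, kits = neg_pos_batch_split(subjects, batch_size=9,
                                         typeII_error=0.05, typeI_error=0.01)
    print(len(neg), len(pos), kits)  # kits == ceil(27 / 9) == 3 (a float, from np.ceil)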
def helpfunction(subject_array, p, batch_size ,typeII_error, typeI_error, batch_limit):
"""
The helpfunction is a handy function to give the list of subjects on the
    negative batch(es), the list of subjects on the positive batch(es), the
test-kit consumption, the infection rate on the negative batches, the
infection rate on the positive batches, the optimal batch size for
negative batches and the optimal batch size for positive batches.
Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        p (float): infection rate
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        batch_limit (int): batch size upper limit
    Output:
        temp0 (Numpy Array): an array of subjects on the negative batch(es)
        temp1 (Numpy Array): an array of subjects on the positive batch(es)
temp_con (int): the number of test-kit consumptions
p0 (float): the infection rate on the negative batches
p1 (float): the infection rate on the positive batches
n0 (float): the optimal batch size for the negative batches
n1 (float): the optimal batch size for the positive batches
"""
batch_size = min(batch_size, batch_limit)
p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error)
p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error)
n0= one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)
n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)
    if len(subject_array) == 0:  # nothing to split; zero test-kit consumption
        return (np.array([]), np.array([]), 0, p0, p1, n0, n1)
temp0, temp1, temp_con = neg_pos_batch_split(subject_array,batch_size,typeII_error, typeI_error)
return(temp0, temp1, temp_con, p0, p1, n0, n1)
def seq_test(subject_array,stop_rule,p, batch_size, typeII_error, typeI_error, repeat = 1,
prob_threshold = 1, seq = True, batch_limit = 32):
"""
    A function that returns the test results for a subject array, the total
    test-kit consumption, and the number of individual tests, given the subject
    array, the stopping rule, the batch size, the probability of type II error,
    the probability of type I error, the number of repetitions, the probability
    threshold, and whether testing is sequential.
    Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        stop_rule (int): the number of positive batches before entering individual testing
        p (float): infection rate
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        repeat (int): the number of repetitions
        prob_threshold (float): if the infection rate of a batch exceeds prob_threshold,
            the subjects in that batch enter the individual testing phase
        seq (boolean): True stands for sequential testing; the test ends
            when a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
        batch_limit (int): the upper limit of the batch size
    Output:
        result (Numpy Array): an array containing subjects' ids and test results
        consum (int): the total test consumption
        individual_con (int): the test consumption for individual testing
"""
temp_list = []
    neg_list = []
    pos_list = []
consum = 0
temp = {'data': subject_array,
'NB_Num': 0,
'PB_Num': 0,
'p': p,
'batch_size': batch_size}
temp_list.append(temp)
new_list = []
    neg_array = []
    pos_array = []
while len(temp_list) > 0:
for i in temp_list:
temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],
typeII_error, typeI_error,
batch_limit = batch_limit)
temp0 = {'data': temp0,
'NB_Num': i['NB_Num'] + 1,
'PB_Num': i['PB_Num'],
'p': p0,
'batch_size': n0}
temp1 = {'data': temp1,
'NB_Num': i['NB_Num'],
'PB_Num': i['PB_Num'] + 1,
'p': p1,
'batch_size': n1}
if len(temp0['data']) > 0:
if temp0['NB_Num'] >= stop_rule:
neg_list.append(temp0)
else:
new_list.append(temp0)
if len(temp1['data'])>0:
if temp1['PB_Num'] >= stop_rule or temp1['p']>=prob_threshold:
pos_list.append(temp1)
else:
new_list.append(temp1)
consum += temp_con
temp_list = new_list
new_list = []
for j in neg_list:
neg_array.append(j['data'])
neg_array = np.concatenate(neg_array)
for k in pos_list:
pos_array.append(k['data'])
pos_array = np.concatenate(pos_array)
neg_array[:,1] = 0
individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)
pos_array = individual_test
consum += individual_con
result = np.concatenate((pos_array, neg_array))
result = result[result[:,0].argsort()]
result = result.astype('int64')
return (result, consum, individual_con)
def npv_score(y_true, y_pred):
"""
    A function that provides NPV given the prediction and the truth
"""
tn, _, fn, _ = confusion_matrix(y_true = y_true,
y_pred = y_pred).ravel()
return tn/(tn + fn)
def specificity_score(y_true, y_pred):
"""
    A function that provides specificity given the prediction and the truth
"""
tn, fp, _, _ = confusion_matrix(y_true = y_true,
y_pred = y_pred).ravel()
return tn/(tn + fp)
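# --- Illustrative numeric check (added; the labels are made up). ---
# For the y_true/y_pred below the confusion matrix is tn=2, fp=1, fn=1, tp=1,
# so both NPV = tn/(tn+fn) and specificity = tn/(tn+fp) equal 2/3.
def _demo_metrics():
    y_true = np.array([0, 0, 1, 1, 0])
    y_pred = np.array([0, 1, 1, 0, 0])
    print(npv_score(y_true, y_pred), specificity_score(y_true, y_pred))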
@jit(parallel = True)
def data_gen(size, p):
"""
data_gen provides a faster way to generate a random population with
infection rate p.
Input:
size (int): the size of population
p (float): the infection rate
Output:
test_array (array): the first column is for id and the second column
        is the condition, where 1 stands for infected and 0 stands for not infected
"""
random_table = np.random.binomial(size = size, p = p, n = 1)
test_array = np.zeros((size, 2), dtype = int)
for i in range(size):
test_array[i,0] = i
test_array[i,1] = random_table[i]
return test_array
def test_result(data, seq_test, **kwargs):
"""
    a helper function that summarizes evaluation results for a given test method with its **kwargs
Input:
data (array or list of arrays)
seq_test (test_method object): could be seq_test, matrix_test and other test_method objects
Output:
        result (dict or DataFrame): important evaluation metrics for the test method
            (a dict for a single array, a DataFrame for a list of arrays)
"""
if isinstance(data, list) == False:
pred,consum, ind_con = seq_test(data, **kwargs)
result = {'acc': np.mean(pred[:,1] == data[:,1]),
'sens': recall_score(data[:,1], pred[:,1]),
'spec': specificity_score(data[:,1], pred[:,1]),
'PPV': precision_score(data[:, 1], pred[:,1]),
'NPV': npv_score(data[:, 1], pred[:,1]),
'test_consum': consum,
'ind_consum': ind_con,
'batch_consum': consum - ind_con}
return result
else:
length = len(data)
acc = np.zeros(length)
sens = np.zeros(length)
spec = np.zeros(length)
ppv = np.zeros(length)
npv = np.zeros(length)
test_consum = np.zeros(length)
ind_consum = np.zeros(length)
batch_consum = np.zeros(length)
for i in range(length):
pred,consum, ind_con = seq_test(data[i], **kwargs)
acc[i] = np.mean(pred[:,1] == data[i][:,1])
sens[i] = recall_score(data[i][:,1], pred[:,1])
spec[i] = specificity_score(data[i][:,1], pred[:,1])
ppv[i] = precision_score(data[i][:,1], pred[:,1])
npv[i] = npv_score(data[i][:,1], pred[:,1])
test_consum[i] = consum
ind_consum[i] = ind_con
batch_consum[i] = consum-ind_con
result = {'acc': acc,
'sens': sens,
'spec': spec,
'PPV': ppv,
'NPV': npv,
'test_consum': test_consum,
'ind_consum': ind_consum,
'batch_consum': batch_consum}
return pd.DataFrame(result)
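# --- Illustrative end-to-end run (added; all parameter values are made up). ---
# Generates a synthetic cohort with data_gen and summarizes seq_test via
# test_result; the returned dict holds accuracy, sensitivity, specificity,
# PPV, NPV and the kit-consumption breakdown.
def _demo_end_to_end():
    cohort = data_gen(size=1000, p=0.01)
    summary = test_result(cohort, seq_test, stop_rule=2, p=0.01,
                          batch_size=16, typeII_error=0.05, typeI_error=0.01,
                          repeat=1, prob_threshold=1, seq=True, batch_limit=32)
    print(summary)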
def matrix_test(subject_array, side_length, typeII_error, typeI_error, sq_repeat = 1 ,ind_repeat = 1, seq = True):
"""
This function provides the matrix testing results for a given subject array.
Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        side_length (int): the side length of the testing matrix
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        sq_repeat (int): the number of parallel tests for each row/column batch
        ind_repeat (int): the number of potential individual tests for the positive crossings
        seq (boolean): True stands for sequential testing; the test ends
            when a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
Output:
result (Numpy Array): an array contains subjects' id and test results
consum (int): the total test consumption
individual_con (int): the test consumption for individual testings
"""
matrix_test_num = len(subject_array)//(side_length**2)
matrix_test_array = subject_array[0:matrix_test_num*side_length**2, :]
ind_test_array = subject_array[matrix_test_num*side_length**2:, :]
ind_idx = []
for temp_batch in np.array_split(matrix_test_array, matrix_test_num):
temp_batch = temp_batch.reshape(side_length, side_length, 2)
temp_row = []
temp_col = []
random_num_row = np.random.uniform(0, 1, sq_repeat)
random_num_col = np.random.uniform(0, 1, sq_repeat)
for i in range(side_length):
if 1 in (temp_batch[i,:,1]):
if max(random_num_row) > typeII_error:
temp_row.append(temp_batch[i,:,0])
else:
if min(random_num_row) < typeI_error:
temp_row.append(temp_batch[i, :, 0])
if 1 in (temp_batch[:,i,1]):
if max(random_num_col) > typeII_error:
temp_col.append(temp_batch[:,i,0])
else:
if min(random_num_col) < typeI_error:
temp_col.append(temp_batch[:, i, 0])
ind_idx.append(np.intersect1d(temp_row, temp_col))
ind_idx = np.concatenate(ind_idx)
ind_idx = ind_idx.astype('int')
if len(ind_idx) == 0:
neg_array = matrix_test_array
else:
mask = np.zeros(subject_array.shape[0], dtype = bool)
mask[ind_idx] = True
mask[matrix_test_num*side_length**2:] = True
ind_test_array = subject_array[mask,:]
neg_array = subject_array[~mask, :]
neg_array[:, 1] = 0
ind_test, ind_con = conventional_test(ind_test_array,
typeII_error, typeI_error, repeat = ind_repeat, seq = seq)
batch_test_num = matrix_test_num * 2 * side_length * sq_repeat
result = np.concatenate((neg_array, ind_test))
result = result[result[:, 0].argsort()]
return (result, batch_test_num + ind_con, ind_con)
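# --- Illustrative usage sketch (added; parameters are made up). ---
# Note: matrix_test indexes rows by subject id, so ids must coincide with row
# positions, which holds for data_gen output. 400 subjects fill exactly
# sixteen 5x5 matrices.
def _demo_matrix_test():
    cohort = data_gen(size=400, p=0.02)
    res, consum, ind_con = matrix_test(cohort, side_length=5,
                                       typeII_error=0.05, typeI_error=0.01)
    print(consum, ind_con)  # row/column batch tests plus follow-up individual tests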
def parallel_batch_testing(subject_array, batch_size, typeII_error, typeI_error, parallel_num, ind_repeat, seq):
"""
This function provides the parallel batch testing results for a given subject array.
Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        parallel_num (int): the number of parallel tests for the batch testing
        ind_repeat (int): the number of potential individual tests for the positive batches
        seq (boolean): True stands for sequential testing; the test ends
            when a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
Output:
result (Numpy Array): an array contains subjects' id and test results
consum (int): the total test consumption
individual_con (int): the test consumption for individual testings
"""
neg_batch = []
pos_batch = []
batch_consum = np.ceil(len(subject_array)/batch_size)* parallel_num
for temp_batch in np.array_split(subject_array, np.ceil(len(subject_array)/batch_size)):
random_table = np.random.uniform(0, 1, (1, parallel_num))
if 1 in (temp_batch[:, 1]):
if random_table.max() > typeII_error:
pos_batch.append(temp_batch)
else:
neg_batch.append(temp_batch)
else:
if random_table.min() < typeI_error:
pos_batch.append(temp_batch)
else:
neg_batch.append(temp_batch)
neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([])
pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([])
neg_batch[:, 1] = 0
individual_test, individual_con = conventional_test(pos_batch, typeII_error, typeI_error,
repeat = ind_repeat, seq = seq)
result = np.concatenate((individual_test, neg_batch))
result = result[result[:,0].argsort()]
result = result.astype('int64')
return (result, batch_consum+individual_con, individual_con)
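# --- Illustrative usage sketch (added; parameters are made up). ---
# Each batch of 10 is tested twice in parallel; positive batches fall through
# to individual testing.
def _demo_parallel_batch_testing():
    cohort = data_gen(size=300, p=0.02)
    res, consum, ind_con = parallel_batch_testing(cohort, batch_size=10,
                                                  typeII_error=0.05,
                                                  typeI_error=0.01,
                                                  parallel_num=2,
                                                  ind_repeat=1, seq=True)
    print(consum, ind_con)  # batch consumption is ceil(300/10) * 2 == 60, plus ind_con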
def fixed_batch_seq_test(subject_array,stop_rule, p, batch_size, typeII_error, typeI_error, repeat, prob_threshold = 0.3, seq = True):
"""
    This function provides the fixed-batch-size sequential testing results for a given subject array.
Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        stop_rule (int): the number of positive batches before entering the individual testing phase
        p (float): infection rate
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        repeat (int): the number of potential individual tests for positive subjects
        prob_threshold (float): if the infection rate of a batch exceeds prob_threshold,
            the subjects in that batch enter the individual testing phase
        seq (boolean): True stands for sequential testing; the test ends
            when a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
Output:
result (Numpy Array): an array contains subjects' id and test results
consum (int): the total test consumption
individual_con (int): the test consumption for individual testings
"""
temp_list = []
neg_list = []
pos_list = []
consum = 0
temp = {'data': subject_array,
'NB_Num': 0,
'PB_Num': 0,
'p': p,
'batch_size': batch_size}
temp_list.append(temp)
new_list = []
neg_array = []
pos_array = []
while len(temp_list) > 0:
for i in temp_list:
            temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],
                                                                  typeII_error, typeI_error,
                                                                  batch_limit=i['batch_size'])
temp0 = {'data': np.random.permutation(temp0),
'NB_Num': i['NB_Num'] + 1,
'PB_Num': i['PB_Num'],
'p': p0,
'batch_size': batch_size}
temp1 = {'data': np.random.permutation(temp1),
'NB_Num': i['NB_Num'],
'PB_Num': i['PB_Num'] + 1,
'p': p1,
'batch_size': batch_size}
if len(temp0['data']) > 0:
if temp0['NB_Num'] >= stop_rule:
neg_list.append(temp0)
else:
new_list.append(temp0)
if len(temp1['data'])>0:
if temp1['PB_Num'] >= stop_rule or temp1['p']>=prob_threshold:
pos_list.append(temp1)
else:
new_list.append(temp1)
consum += temp_con
temp_list = new_list
new_list = []
for j in neg_list:
neg_array.append(j['data'])
neg_array = np.concatenate(neg_array)
for k in pos_list:
pos_array.append(k['data'])
pos_array = np.concatenate(pos_array)
neg_array[:,1] = 0
individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)
pos_array = individual_test
consum += individual_con
result = np.concatenate((pos_array, neg_array))
result = result[result[:,0].argsort()]
result = result.astype('int64')
return (result, consum, individual_con)
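# --- Illustrative usage sketch (added; parameters are made up). ---
# Same sequential splitting as seq_test, but every round reuses the fixed
# batch size of 10 instead of re-optimizing it.
def _demo_fixed_batch_seq_test():
    cohort = data_gen(size=500, p=0.03)
    res, consum, ind_con = fixed_batch_seq_test(cohort, stop_rule=2, p=0.03,
                                                batch_size=10,
                                                typeII_error=0.05,
                                                typeI_error=0.01, repeat=1)
    print(consum, ind_con)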
def name_fun(n):
"""
    Input: n, the stopping rule (number of same-sign batch results that ends a path)
    Output: all terminal nodes of the testing tree, the negative terminal nodes,
        and the positive terminal nodes, encoded as '+'/'-' strings
"""
output = []
temp = ['']
for i in range(2*n-1):
temp_cur = []
for j in temp:
candidate_pos = j + '+'
candidate_neg = j + '-'
if str.count(candidate_pos, '+') >= n:
output.append(candidate_pos)
else:
temp_cur.append(candidate_pos)
if str.count(candidate_neg, '-') >= n:
output.append(candidate_neg)
else:
temp_cur.append(candidate_neg)
temp = temp_cur
neg_symbol = [x for x in output if str.count(x, '-') == n]
pos_symbol = [x for x in output if str.count(x, '+') == n]
return output, neg_symbol, pos_symbol
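# --- Illustrative check (added). ---
# For stop_rule n = 2 a path terminates once it holds two '+' or two '-'
# results, so name_fun(2) returns
#   ['++', '--', '+-+', '+--', '-++', '-+-']
# with ['--', '+--', '-+-'] as negative and ['++', '+-+', '-++'] as positive
# terminal nodes.
def _demo_name_fun():
    all_nodes, neg_nodes, pos_nodes = name_fun(2)
    print(all_nodes, neg_nodes, pos_nodes)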
def seq_test_with_node(subject_array,stop_rule,p, batch_size, typeII_error, typeI_error, repeat = 1,
prob_threshold = 1, seq = True, batch_limit = 32):
"""
    A function that returns the test results for a subject array, the total
    test-kit consumption, the number of individual tests, the node label
    recording each subject's path through the testing tree, and the cumulative
    batch-test counts per round, given the subject array, the stopping rule,
    the batch size, the probability of type II error, the probability of
    type I error, the number of repetitions, the probability threshold, and
    whether testing is sequential.
    Input:
        subject_array (Numpy Array): an array containing subject ids and subjects'
            conditions (1 stands for infected and 0 stands for not infected)
        stop_rule (int): the number of positive batches before entering individual testing
        p (float): infection rate
        batch_size (int): batch size
        typeII_error (float): probability of type II error
        typeI_error (float): probability of type I error
        repeat (int): the number of repetitions
        prob_threshold (float): if the infection rate of a batch exceeds prob_threshold,
            the subjects in that batch enter the individual testing phase
        seq (boolean): True stands for sequential testing; the test ends
            when a result is positive or the repetitions are used up.
            False stands for simultaneous testing with majority voting.
        batch_limit (int): the upper limit of the batch size
    Output:
        result (Numpy Array): an array containing subjects' ids and test results
        consum (int): the total test consumption
        individual_con (int): the test consumption for individual testing
        node (list): each subject's '+'/'-' path through the testing tree
        batch_num_list (list): the cumulative batch-test consumption after each round
"""
temp_list = []
neg_list = []
pos_list = []
batch_num_list = []
consum = 0
temp = {'data': subject_array,
'NB_Num': 0,
'PB_Num': 0,
'p': p,
'batch_size': batch_size,
'node': ''}
temp_list.append(temp)
new_list = []
neg_array = []
neg_node = []
pos_node = []
pos_array = []
while len(temp_list) > 0:
for i in temp_list:
temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],
typeII_error, typeI_error,
batch_limit = batch_limit)
temp0 = {'data': temp0,
'NB_Num': i['NB_Num'] + 1,
'PB_Num': i['PB_Num'],
'p': p0,
'batch_size': n0,
'node': i['node'] + '-'}
temp1 = {'data': temp1,
'NB_Num': i['NB_Num'],
'PB_Num': i['PB_Num'] + 1,
'p': p1,
'batch_size': n1,
'node': i['node'] + '+'}
if len(temp0['data']) > 0:
if temp0['NB_Num'] >= stop_rule:
neg_list.append(temp0)
else:
new_list.append(temp0)
            if len(temp1['data']) > 0:
                if temp1['PB_Num'] >= stop_rule or temp1['p'] >= prob_threshold:
pos_list.append(temp1)
else:
new_list.append(temp1)
consum += temp_con
        batch_num_list.append(consum)  # cumulative batch consumption after this splitting round
temp_list = new_list
new_list = []
    for j in neg_list:
        neg_array.append(j['data'])
        temp = [[x, j['node']] for x in j['data'][:, 0]]  # pair each subject id with its path
        neg_node.append(temp)
    neg_array = np.concatenate(neg_array) if len(neg_array) > 0 else np.empty((0, 2), dtype=int)
    for k in pos_list:
        pos_array.append(k['data'])
        temp = [[x, k['node']] for x in k['data'][:, 0]]
        pos_node.append(temp)
    pos_array = np.concatenate(pos_array) if len(pos_array) > 0 else np.empty((0, 2), dtype=int)
    neg_array[:, 1] = 0  # subjects cleared by the stopping rule are declared uninfected
    individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)
    pos_array = individual_test
    consum += individual_con
    result = np.concatenate((pos_array, neg_array))
    pos_node.extend(neg_node)
    node = sum(pos_node, [])     # flatten the per-batch lists of [id, path] pairs
    node.sort()                  # lists sort element-wise, i.e. by subject id
    node = [x[1] for x in node]  # keep only the paths, now aligned with sorted ids
    result = result[result[:, 0].argsort()]
    result = result.astype('int64')
    return (result, consum, individual_con, node, batch_num_list)
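# A minimal end-to-end sketch (illustrative, not part of the original module):
# it assumes data_gen() defined earlier in this file and uses arbitrary
# example parameters.
if __name__ == '__main__':
    np.random.seed(0)                  # reproducible illustration
    population = data_gen(1000, 0.01)  # 1,000 subjects, 1% infection rate
    res, total_con, ind_con, paths, batch_trace = seq_test_with_node(
        population, stop_rule=2, p=0.01, batch_size=32,
        typeII_error=0.05, typeI_error=0.01)
    print('total tests:', total_con, '| individual tests:', ind_con)
    print('accuracy:', np.mean(res[:, 1] == population[:, 1]))
    print('first subject path through the tree:', paths[0])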
|
normal
|
{
"blob_id": "e564e0d05c3c0e60f356422722803df510d9dd0b",
"index": 281,
"step-1": "<mask token>\n\n\n@njit(parallel=True)\ndef parallel_test(subject_array, typeII_error, typeI_error, num):\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))\n for i in range(len(subject_array)):\n subject = subject_array[i, 1]\n if subject == 1:\n temp = 1 if max(random_table[i, :]) > typeII_error else 0\n elif subject == 0:\n temp = 1 if min(random_table[i, :]) < typeI_error else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * num, len(subject_array) * num\n\n\ndef infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n \n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the negative batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the negative batch\n\n\n\n \"\"\"\n q = 1 - p\n r = typeII_error * (1 - q ** batch_size) / ((1 - typeI_error) * q **\n batch_size + typeII_error * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the positive batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the positive batch\n \"\"\"\n q = 1 - p\n r = (1 - typeII_error) * (1 - q ** batch_size) / (typeI_error * q **\n batch_size + (1 - typeII_error) * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef one_batch_test_solver(prevalence_rate, typeII_error, typeI_error,\n n_initial_guess=2):\n \"\"\"\n A function gives (float) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess(float): the initial guess \n\n Output:\n (float): the optimal batch size\n\n \"\"\"\n q = 1 - prevalence_rate\n func = lambda n: n * q ** (n / 2) - (-(1 - typeII_error - typeI_error) *\n np.log(q)) ** (-1 / 2)\n n_solution = fsolve(func, n_initial_guess)\n return float(n_solution)\n\n\n<mask token>\n\n\ndef helpfunction(subject_array, p, batch_size, typeII_error, typeI_error,\n batch_limit):\n \"\"\"\n The helpfunction is a handy function to give the list of subjects on the\n negative batch(es), the list of subjects on the postive batch(es), the \n test-kit consumption, the infection rate on the negative batches, the \n infection rate on the positive batches, the optimal batch size for\n negative batches and the optimal batch size for positive batches.\n\n Input: \n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n p (float): Infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n batch_limit (int): batch size upper limit\n\n Output:\n temp0 (Numpy Array): an array of subjects on the negative batch(es)\n temp1 (Numpy 
Array): an array of subjects on the postive batch(es)\n temp_con (int): the number of test-kit consumptions\n p0 (float): the infection rate on the negative batches\n p1 (float): the infection rate on the positive batches\n n0 (float): the optimal batch size for the negative batches\n n1 (float): the optimal batch size for the positive batches\n \"\"\"\n batch_size = min(batch_size, batch_limit)\n p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error,\n typeI_error)\n p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error,\n typeI_error)\n n0 = one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)\n n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)\n if subject_array == np.array([]):\n return np.array([]), np.array([]), p0, p1, n0, n1\n temp0, temp1, temp_con = neg_pos_batch_split(subject_array, batch_size,\n typeII_error, typeI_error)\n return temp0, temp1, temp_con, p0, p1, n0, n1\n\n\ndef seq_test(subject_array, stop_rule, p, batch_size, typeII_error,\n typeI_error, repeat=1, prob_threshold=1, seq=True, batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\n<mask token>\n\n\n@jit(parallel=True)\ndef data_gen(size, p):\n \"\"\"\n data_gen provides a faster way to generate a random population with\n infection rate p.\n Input:\n size (int): the size of population\n p (float): the infection rate\n Output:\n test_array (array): the first column is for id and the second column\n is the condition, where 1 stands for infection and 0 stands for uninfection\n\n \"\"\"\n random_table = np.random.binomial(size=size, p=p, n=1)\n test_array = np.zeros((size, 2), dtype=int)\n for i in range(size):\n test_array[i, 0] = i\n test_array[i, 1] = random_table[i]\n return test_array\n\n\n<mask token>\n\n\ndef fixed_batch_seq_test(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat, prob_threshold=0.3, seq=True):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of positive batches to enter the individual testing phase\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of potential individual testing for the positive crossings\n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error)\n temp0 = {'data': np.random.permutation(temp0), 'NB_Num': i[\n 'NB_Num'] + 1, 'PB_Num': i['PB_Num'], 'p': p0, 'batch_size':\n batch_size}\n temp1 = {'data': np.random.permutation(temp1), 'NB_Num': i[\n 'NB_Num'], 'PB_Num': i['PB_Num'] + 1, 'p': p1, 'batch_size':\n batch_size}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\ndef name_fun(n):\n \"\"\"\n input: stopping rule\n output: finish nodes\n \"\"\"\n output = []\n temp = ['']\n for i in range(2 * n - 1):\n temp_cur = []\n for j in temp:\n candidate_pos = j + '+'\n candidate_neg = j + '-'\n if str.count(candidate_pos, '+') >= n:\n output.append(candidate_pos)\n else:\n temp_cur.append(candidate_pos)\n if str.count(candidate_neg, '-') >= n:\n output.append(candidate_neg)\n else:\n temp_cur.append(candidate_neg)\n temp = temp_cur\n neg_symbol = [x for x in output if str.count(x, '-') == n]\n pos_symbol = [x for x in output if str.count(x, '+') == n]\n return output, neg_symbol, pos_symbol\n\n\ndef seq_test_with_node(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat=1, prob_threshold=1, seq=True,\n batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate 
of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n batch_num_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size, 'node': ''}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n neg_node = []\n pos_node = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0, 'node': i['node'] + '-'}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1, 'node': i['node'] +\n '+'}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n batch_num_list.append(consum)\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n temp = [[x, j['node']] for x in j['data'][:, 0]]\n neg_node.append(temp)\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n temp = [[x, k['node']] for x in k['data'][:, 0]]\n pos_node.append(temp)\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n pos_node.extend(neg_node)\n node = pos_node\n node = sum(node, [])\n node.sort()\n node = [x[1] for x in node]\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con, node, batch_num_list\n",
"step-2": "<mask token>\n\n\n@jit(parallel=True)\ndef conventional_test(subject_array, typeII_error, typeI_error, repeat=1,\n seq=True):\n \"\"\"\n A function gives the test results to a subject array given the probability of\n type II error, the probability of Type I error, and the number of repeatition,\n and setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n test_result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n \"\"\"\n if seq == True:\n consum = 0\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n j = 0\n subject = subject_array[i, 1]\n while j < repeat and temp == 0:\n random_num = random_table[i, j]\n consum += 1\n if subject == 1:\n temp = 1 if random_num > typeII_error else 0\n else:\n temp = 1 if random_num < typeI_error else 0\n j += 1\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, consum\n else:\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n for j in range(repeat):\n temp_random = random_table[i, j]\n if subject_array[i, 1] == 1:\n temp_1 = 1 if temp_random > typeII_error else 0\n elif subject_array[i, 1] == 0:\n temp_1 = 1 if temp_random < typeI_error else 0\n temp += temp_1\n temp = 1 if temp >= repeat / 2 else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * repeat\n\n\n@njit(parallel=True)\ndef parallel_test(subject_array, typeII_error, typeI_error, num):\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))\n for i in range(len(subject_array)):\n subject = subject_array[i, 1]\n if subject == 1:\n temp = 1 if max(random_table[i, :]) > typeII_error else 0\n elif subject == 0:\n temp = 1 if min(random_table[i, :]) < typeI_error else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * num, len(subject_array) * num\n\n\ndef infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n \n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the negative batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the negative batch\n\n\n\n \"\"\"\n q = 1 - p\n r = typeII_error * (1 - q ** batch_size) / ((1 - typeI_error) * q **\n batch_size + typeII_error * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n Given infection rate, batch size, prob of 
type II error and prob of type I error, this\n function gives the infection rate on the positive batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the positive batch\n \"\"\"\n q = 1 - p\n r = (1 - typeII_error) * (1 - q ** batch_size) / (typeI_error * q **\n batch_size + (1 - typeII_error) * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef one_batch_test_solver(prevalence_rate, typeII_error, typeI_error,\n n_initial_guess=2):\n \"\"\"\n A function gives (float) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess(float): the initial guess \n\n Output:\n (float): the optimal batch size\n\n \"\"\"\n q = 1 - prevalence_rate\n func = lambda n: n * q ** (n / 2) - (-(1 - typeII_error - typeI_error) *\n np.log(q)) ** (-1 / 2)\n n_solution = fsolve(func, n_initial_guess)\n return float(n_solution)\n\n\n<mask token>\n\n\ndef neg_pos_batch_split(subject_array, batch_size, typeII_error, typeI_error):\n \"\"\"\n A function gives a list of sujects on the negative batch(es),\n a list of subjects on the postive batch(es) and the test-kit \n consumption given the probability of type II error, the \n probability of Type I error.\n \n Input:\n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n \n\n Output:\n neg_batch (Numpy Array): an array of subjects on the negative batch(es)\n pos_batch (Numpy Array): an array of subjects on the postive batch(es)\n test_consum (int): the number of test-kit consumptions\n \n \"\"\"\n neg_batch = []\n pos_batch = []\n test_consum = np.ceil(len(subject_array) / batch_size)\n random_table = np.random.uniform(0, 1, int(test_consum))\n i = 0\n for temp_batch in np.array_split(subject_array, test_consum):\n if 1 in temp_batch[:, 1]:\n if random_table[i] > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table[i] > typeI_error:\n neg_batch.append(temp_batch)\n else:\n pos_batch.append(temp_batch)\n i += 1\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n return neg_batch, pos_batch, test_consum\n\n\ndef helpfunction(subject_array, p, batch_size, typeII_error, typeI_error,\n batch_limit):\n \"\"\"\n The helpfunction is a handy function to give the list of subjects on the\n negative batch(es), the list of subjects on the postive batch(es), the \n test-kit consumption, the infection rate on the negative batches, the \n infection rate on the positive batches, the optimal batch size for\n negative batches and the optimal batch size for positive batches.\n\n Input: \n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n p (float): Infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n batch_limit (int): batch size upper limit\n\n Output:\n 
temp0 (Numpy Array): an array of subjects on the negative batch(es)\n temp1 (Numpy Array): an array of subjects on the postive batch(es)\n temp_con (int): the number of test-kit consumptions\n p0 (float): the infection rate on the negative batches\n p1 (float): the infection rate on the positive batches\n n0 (float): the optimal batch size for the negative batches\n n1 (float): the optimal batch size for the positive batches\n \"\"\"\n batch_size = min(batch_size, batch_limit)\n p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error,\n typeI_error)\n p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error,\n typeI_error)\n n0 = one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)\n n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)\n if subject_array == np.array([]):\n return np.array([]), np.array([]), p0, p1, n0, n1\n temp0, temp1, temp_con = neg_pos_batch_split(subject_array, batch_size,\n typeII_error, typeI_error)\n return temp0, temp1, temp_con, p0, p1, n0, n1\n\n\ndef seq_test(subject_array, stop_rule, p, batch_size, typeII_error,\n typeI_error, repeat=1, prob_threshold=1, seq=True, batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\n<mask token>\n\n\ndef specificity_score(y_true, y_pred):\n \"\"\"\n A function provides specificty given the prediction and the truth \n \"\"\"\n tn, fp, _, _ = confusion_matrix(y_true=y_true, y_pred=y_pred).ravel()\n return tn / (tn + fp)\n\n\n@jit(parallel=True)\ndef data_gen(size, p):\n \"\"\"\n data_gen provides a faster way to generate a random population with\n infection rate p.\n Input:\n size (int): the size of population\n p (float): the infection rate\n Output:\n test_array (array): the first column is for id and the second column\n is the condition, where 1 stands for infection and 0 stands for uninfection\n\n \"\"\"\n random_table = np.random.binomial(size=size, p=p, n=1)\n test_array = np.zeros((size, 2), dtype=int)\n for i in range(size):\n test_array[i, 0] = i\n test_array[i, 1] = random_table[i]\n return test_array\n\n\n<mask token>\n\n\ndef parallel_batch_testing(subject_array, batch_size, typeII_error,\n typeI_error, parallel_num, ind_repeat, seq):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n parallel_num (int): the number of parallel testing for the batch testing\n ind_repeat (int): the number of potential individual testing for the positive batches\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n neg_batch = []\n pos_batch = []\n batch_consum = np.ceil(len(subject_array) / batch_size) * parallel_num\n for temp_batch in np.array_split(subject_array, np.ceil(len(\n subject_array) / batch_size)):\n random_table = np.random.uniform(0, 1, (1, parallel_num))\n if 1 in temp_batch[:, 1]:\n if random_table.max() > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table.min() < typeI_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n neg_batch[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_batch,\n typeII_error, typeI_error, repeat=ind_repeat, seq=seq)\n result = np.concatenate((individual_test, neg_batch))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, batch_consum + individual_con, individual_con\n\n\ndef fixed_batch_seq_test(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat, prob_threshold=0.3, seq=True):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of positive batches to enter the individual testing phase\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of potential individual testing for the positive crossings\n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error)\n temp0 = {'data': np.random.permutation(temp0), 'NB_Num': i[\n 'NB_Num'] + 1, 'PB_Num': i['PB_Num'], 'p': p0, 'batch_size':\n batch_size}\n temp1 = {'data': np.random.permutation(temp1), 'NB_Num': i[\n 'NB_Num'], 'PB_Num': i['PB_Num'] + 1, 'p': p1, 'batch_size':\n batch_size}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\ndef name_fun(n):\n \"\"\"\n input: stopping rule\n output: finish nodes\n \"\"\"\n output = []\n temp = ['']\n for i in range(2 * n - 1):\n temp_cur = []\n for j in temp:\n candidate_pos = j + '+'\n candidate_neg = j + '-'\n if str.count(candidate_pos, '+') >= n:\n output.append(candidate_pos)\n else:\n temp_cur.append(candidate_pos)\n if str.count(candidate_neg, '-') >= n:\n output.append(candidate_neg)\n else:\n temp_cur.append(candidate_neg)\n temp = temp_cur\n neg_symbol = [x for x in output if str.count(x, '-') == n]\n pos_symbol = [x for x in output if str.count(x, '+') == n]\n return output, neg_symbol, pos_symbol\n\n\ndef seq_test_with_node(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat=1, prob_threshold=1, seq=True,\n batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate 
of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n batch_num_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size, 'node': ''}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n neg_node = []\n pos_node = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0, 'node': i['node'] + '-'}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1, 'node': i['node'] +\n '+'}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n batch_num_list.append(consum)\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n temp = [[x, j['node']] for x in j['data'][:, 0]]\n neg_node.append(temp)\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n temp = [[x, k['node']] for x in k['data'][:, 0]]\n pos_node.append(temp)\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n pos_node.extend(neg_node)\n node = pos_node\n node = sum(node, [])\n node.sort()\n node = [x[1] for x in node]\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con, node, batch_num_list\n",
"step-3": "<mask token>\n\n\n@jit(parallel=True)\ndef conventional_test(subject_array, typeII_error, typeI_error, repeat=1,\n seq=True):\n \"\"\"\n A function gives the test results to a subject array given the probability of\n type II error, the probability of Type I error, and the number of repeatition,\n and setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n test_result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n \"\"\"\n if seq == True:\n consum = 0\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n j = 0\n subject = subject_array[i, 1]\n while j < repeat and temp == 0:\n random_num = random_table[i, j]\n consum += 1\n if subject == 1:\n temp = 1 if random_num > typeII_error else 0\n else:\n temp = 1 if random_num < typeI_error else 0\n j += 1\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, consum\n else:\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n for j in range(repeat):\n temp_random = random_table[i, j]\n if subject_array[i, 1] == 1:\n temp_1 = 1 if temp_random > typeII_error else 0\n elif subject_array[i, 1] == 0:\n temp_1 = 1 if temp_random < typeI_error else 0\n temp += temp_1\n temp = 1 if temp >= repeat / 2 else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * repeat\n\n\n@njit(parallel=True)\ndef parallel_test(subject_array, typeII_error, typeI_error, num):\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))\n for i in range(len(subject_array)):\n subject = subject_array[i, 1]\n if subject == 1:\n temp = 1 if max(random_table[i, :]) > typeII_error else 0\n elif subject == 0:\n temp = 1 if min(random_table[i, :]) < typeI_error else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * num, len(subject_array) * num\n\n\ndef infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n \n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the negative batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the negative batch\n\n\n\n \"\"\"\n q = 1 - p\n r = typeII_error * (1 - q ** batch_size) / ((1 - typeI_error) * q **\n batch_size + typeII_error * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n Given infection rate, batch size, prob of 
type II error and prob of type I error, this\n function gives the infection rate on the positive batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the positive batch\n \"\"\"\n q = 1 - p\n r = (1 - typeII_error) * (1 - q ** batch_size) / (typeI_error * q **\n batch_size + (1 - typeII_error) * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef one_batch_test_solver(prevalence_rate, typeII_error, typeI_error,\n n_initial_guess=2):\n \"\"\"\n A function gives (float) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess(float): the initial guess \n\n Output:\n (float): the optimal batch size\n\n \"\"\"\n q = 1 - prevalence_rate\n func = lambda n: n * q ** (n / 2) - (-(1 - typeII_error - typeI_error) *\n np.log(q)) ** (-1 / 2)\n n_solution = fsolve(func, n_initial_guess)\n return float(n_solution)\n\n\n<mask token>\n\n\ndef neg_pos_batch_split(subject_array, batch_size, typeII_error, typeI_error):\n \"\"\"\n A function gives a list of sujects on the negative batch(es),\n a list of subjects on the postive batch(es) and the test-kit \n consumption given the probability of type II error, the \n probability of Type I error.\n \n Input:\n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n \n\n Output:\n neg_batch (Numpy Array): an array of subjects on the negative batch(es)\n pos_batch (Numpy Array): an array of subjects on the postive batch(es)\n test_consum (int): the number of test-kit consumptions\n \n \"\"\"\n neg_batch = []\n pos_batch = []\n test_consum = np.ceil(len(subject_array) / batch_size)\n random_table = np.random.uniform(0, 1, int(test_consum))\n i = 0\n for temp_batch in np.array_split(subject_array, test_consum):\n if 1 in temp_batch[:, 1]:\n if random_table[i] > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table[i] > typeI_error:\n neg_batch.append(temp_batch)\n else:\n pos_batch.append(temp_batch)\n i += 1\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n return neg_batch, pos_batch, test_consum\n\n\ndef helpfunction(subject_array, p, batch_size, typeII_error, typeI_error,\n batch_limit):\n \"\"\"\n The helpfunction is a handy function to give the list of subjects on the\n negative batch(es), the list of subjects on the postive batch(es), the \n test-kit consumption, the infection rate on the negative batches, the \n infection rate on the positive batches, the optimal batch size for\n negative batches and the optimal batch size for positive batches.\n\n Input: \n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n p (float): Infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n batch_limit (int): batch size upper limit\n\n Output:\n 
temp0 (Numpy Array): an array of subjects on the negative batch(es)\n temp1 (Numpy Array): an array of subjects on the postive batch(es)\n temp_con (int): the number of test-kit consumptions\n p0 (float): the infection rate on the negative batches\n p1 (float): the infection rate on the positive batches\n n0 (float): the optimal batch size for the negative batches\n n1 (float): the optimal batch size for the positive batches\n \"\"\"\n batch_size = min(batch_size, batch_limit)\n p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error,\n typeI_error)\n p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error,\n typeI_error)\n n0 = one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)\n n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)\n if subject_array == np.array([]):\n return np.array([]), np.array([]), p0, p1, n0, n1\n temp0, temp1, temp_con = neg_pos_batch_split(subject_array, batch_size,\n typeII_error, typeI_error)\n return temp0, temp1, temp_con, p0, p1, n0, n1\n\n\ndef seq_test(subject_array, stop_rule, p, batch_size, typeII_error,\n typeI_error, repeat=1, prob_threshold=1, seq=True, batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\n<mask token>\n\n\ndef specificity_score(y_true, y_pred):\n \"\"\"\n A function provides specificty given the prediction and the truth \n \"\"\"\n tn, fp, _, _ = confusion_matrix(y_true=y_true, y_pred=y_pred).ravel()\n return tn / (tn + fp)\n\n\n@jit(parallel=True)\ndef data_gen(size, p):\n \"\"\"\n data_gen provides a faster way to generate a random population with\n infection rate p.\n Input:\n size (int): the size of population\n p (float): the infection rate\n Output:\n test_array (array): the first column is for id and the second column\n is the condition, where 1 stands for infection and 0 stands for uninfection\n\n \"\"\"\n random_table = np.random.binomial(size=size, p=p, n=1)\n test_array = np.zeros((size, 2), dtype=int)\n for i in range(size):\n test_array[i, 0] = i\n test_array[i, 1] = random_table[i]\n return test_array\n\n\ndef test_result(data, seq_test, **kwargs):\n \"\"\"\n a helper function provides convenient results for a given test method with its **kwargs\n\n Input:\n data (array or list of arrays)\n seq_test (test_method object): could be seq_test, matrix_test and other test_method objects\n Output:\n result (DataFrame): a dataframe contains important evaluation metrics for the test method \n \"\"\"\n if isinstance(data, list) == False:\n pred, consum, ind_con = seq_test(data, **kwargs)\n result = {'acc': np.mean(pred[:, 1] == data[:, 1]), 'sens':\n recall_score(data[:, 1], pred[:, 1]), 'spec': specificity_score\n (data[:, 1], pred[:, 1]), 'PPV': precision_score(data[:, 1],\n pred[:, 1]), 'NPV': npv_score(data[:, 1], pred[:, 1]),\n 'test_consum': consum, 'ind_consum': ind_con, 'batch_consum': \n consum - 
ind_con}\n return result\n else:\n length = len(data)\n acc = np.zeros(length)\n sens = np.zeros(length)\n spec = np.zeros(length)\n ppv = np.zeros(length)\n npv = np.zeros(length)\n test_consum = np.zeros(length)\n ind_consum = np.zeros(length)\n batch_consum = np.zeros(length)\n for i in range(length):\n pred, consum, ind_con = seq_test(data[i], **kwargs)\n acc[i] = np.mean(pred[:, 1] == data[i][:, 1])\n sens[i] = recall_score(data[i][:, 1], pred[:, 1])\n spec[i] = specificity_score(data[i][:, 1], pred[:, 1])\n ppv[i] = precision_score(data[i][:, 1], pred[:, 1])\n npv[i] = npv_score(data[i][:, 1], pred[:, 1])\n test_consum[i] = consum\n ind_consum[i] = ind_con\n batch_consum[i] = consum - ind_con\n result = {'acc': acc, 'sens': sens, 'spec': spec, 'PPV': ppv, 'NPV':\n npv, 'test_consum': test_consum, 'ind_consum': ind_consum,\n 'batch_consum': batch_consum}\n return pd.DataFrame(result)\n\n\n<mask token>\n\n\ndef parallel_batch_testing(subject_array, batch_size, typeII_error,\n typeI_error, parallel_num, ind_repeat, seq):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n parallel_num (int): the number of parallel testing for the batch testing\n ind_repeat (int): the number of potential individual testing for the positive batches\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n neg_batch = []\n pos_batch = []\n batch_consum = np.ceil(len(subject_array) / batch_size) * parallel_num\n for temp_batch in np.array_split(subject_array, np.ceil(len(\n subject_array) / batch_size)):\n random_table = np.random.uniform(0, 1, (1, parallel_num))\n if 1 in temp_batch[:, 1]:\n if random_table.max() > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table.min() < typeI_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n neg_batch[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_batch,\n typeII_error, typeI_error, repeat=ind_repeat, seq=seq)\n result = np.concatenate((individual_test, neg_batch))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, batch_consum + individual_con, individual_con\n\n\ndef fixed_batch_seq_test(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat, prob_threshold=0.3, seq=True):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of positive batches to enter the individual testing phase\n batch_size (int): batch size\n typeII_error (float): probability of 
type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of potential individual testing for the positive crossings\n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error)\n temp0 = {'data': np.random.permutation(temp0), 'NB_Num': i[\n 'NB_Num'] + 1, 'PB_Num': i['PB_Num'], 'p': p0, 'batch_size':\n batch_size}\n temp1 = {'data': np.random.permutation(temp1), 'NB_Num': i[\n 'NB_Num'], 'PB_Num': i['PB_Num'] + 1, 'p': p1, 'batch_size':\n batch_size}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\ndef name_fun(n):\n \"\"\"\n input: stopping rule\n output: finish nodes\n \"\"\"\n output = []\n temp = ['']\n for i in range(2 * n - 1):\n temp_cur = []\n for j in temp:\n candidate_pos = j + '+'\n candidate_neg = j + '-'\n if str.count(candidate_pos, '+') >= n:\n output.append(candidate_pos)\n else:\n temp_cur.append(candidate_pos)\n if str.count(candidate_neg, '-') >= n:\n output.append(candidate_neg)\n else:\n temp_cur.append(candidate_neg)\n temp = temp_cur\n neg_symbol = [x for x in output if str.count(x, '-') == n]\n pos_symbol = [x for x in output if str.count(x, '+') == n]\n return output, neg_symbol, pos_symbol\n\n\ndef seq_test_with_node(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat=1, prob_threshold=1, seq=True,\n batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 
0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n batch_num_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size, 'node': ''}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n neg_node = []\n pos_node = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0, 'node': i['node'] + '-'}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1, 'node': i['node'] +\n '+'}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n batch_num_list.append(consum)\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n temp = [[x, j['node']] for x in j['data'][:, 0]]\n neg_node.append(temp)\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n temp = [[x, k['node']] for x in k['data'][:, 0]]\n pos_node.append(temp)\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n pos_node.extend(neg_node)\n node = pos_node\n node = sum(node, [])\n node.sort()\n node = [x[1] for x in node]\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con, node, batch_num_list\n",
"step-4": "<mask token>\n\n\n@jit(parallel=True)\ndef conventional_test(subject_array, typeII_error, typeI_error, repeat=1,\n seq=True):\n \"\"\"\n A function gives the test results to a subject array given the probability of\n type II error, the probability of Type I error, and the number of repeatition,\n and setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n test_result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n \"\"\"\n if seq == True:\n consum = 0\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n j = 0\n subject = subject_array[i, 1]\n while j < repeat and temp == 0:\n random_num = random_table[i, j]\n consum += 1\n if subject == 1:\n temp = 1 if random_num > typeII_error else 0\n else:\n temp = 1 if random_num < typeI_error else 0\n j += 1\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, consum\n else:\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat)\n )\n for i in range(len(subject_array)):\n temp = 0\n for j in range(repeat):\n temp_random = random_table[i, j]\n if subject_array[i, 1] == 1:\n temp_1 = 1 if temp_random > typeII_error else 0\n elif subject_array[i, 1] == 0:\n temp_1 = 1 if temp_random < typeI_error else 0\n temp += temp_1\n temp = 1 if temp >= repeat / 2 else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * repeat\n\n\n@njit(parallel=True)\ndef parallel_test(subject_array, typeII_error, typeI_error, num):\n test_result = np.zeros(subject_array.shape, dtype=int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))\n for i in range(len(subject_array)):\n subject = subject_array[i, 1]\n if subject == 1:\n temp = 1 if max(random_table[i, :]) > typeII_error else 0\n elif subject == 0:\n temp = 1 if min(random_table[i, :]) < typeI_error else 0\n test_result[i, 0] = subject_array[i, 0]\n test_result[i, 1] = temp\n return test_result, len(subject_array) * num, len(subject_array) * num\n\n\ndef infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n \n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the negative batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the negative batch\n\n\n\n \"\"\"\n q = 1 - p\n r = typeII_error * (1 - q ** batch_size) / ((1 - typeI_error) * q **\n batch_size + typeII_error * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):\n \"\"\"\n Given infection rate, batch size, prob of 
type II error and prob of type I error, this\n function gives the infection rate on the positive batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the positive batch\n \"\"\"\n q = 1 - p\n r = (1 - typeII_error) * (1 - q ** batch_size) / (typeI_error * q **\n batch_size + (1 - typeII_error) * (1 - q ** batch_size))\n return p * r / (1 - q ** batch_size)\n\n\ndef one_batch_test_solver(prevalence_rate, typeII_error, typeI_error,\n n_initial_guess=2):\n \"\"\"\n A function gives (float) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess(float): the initial guess \n\n Output:\n (float): the optimal batch size\n\n \"\"\"\n q = 1 - prevalence_rate\n func = lambda n: n * q ** (n / 2) - (-(1 - typeII_error - typeI_error) *\n np.log(q)) ** (-1 / 2)\n n_solution = fsolve(func, n_initial_guess)\n return float(n_solution)\n\n\ndef one_batch_test_int_solver(prevalence_rate, typeII_error, typeI_error,\n batch_limit, n_initial_guess=2):\n \"\"\"\n A function gives (int) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n n_initial_guess(float): the initial guess \n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess:\n batch_limit (int): the upper limit of batch size\n\n Output:\n (int): the optimal batch size\n \"\"\"\n sol_float = one_batch_test_solver(prevalence_rate, typeII_error,\n typeI_error, n_initial_guess)\n floor, ceil = np.floor(sol_float), np.ceil(sol_float)\n func = lambda batch_size: 1 / batch_size + 1 - typeII_error - (1 -\n typeII_error - typeI_error) * (1 - prevalence_rate) ** batch_size\n if func(floor) < func(ceil):\n temp = int(floor)\n else:\n temp = int(ceil)\n if temp <= batch_limit:\n return temp\n else:\n return int(batch_limit)\n\n\ndef neg_pos_batch_split(subject_array, batch_size, typeII_error, typeI_error):\n \"\"\"\n A function gives a list of sujects on the negative batch(es),\n a list of subjects on the postive batch(es) and the test-kit \n consumption given the probability of type II error, the \n probability of Type I error.\n \n Input:\n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n \n\n Output:\n neg_batch (Numpy Array): an array of subjects on the negative batch(es)\n pos_batch (Numpy Array): an array of subjects on the postive batch(es)\n test_consum (int): the number of test-kit consumptions\n \n \"\"\"\n neg_batch = []\n pos_batch = []\n test_consum = np.ceil(len(subject_array) / batch_size)\n random_table = np.random.uniform(0, 1, int(test_consum))\n i = 0\n for temp_batch in np.array_split(subject_array, test_consum):\n if 1 in temp_batch[:, 1]:\n if random_table[i] > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table[i] > typeI_error:\n neg_batch.append(temp_batch)\n else:\n pos_batch.append(temp_batch)\n i += 1\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch 
= np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n return neg_batch, pos_batch, test_consum\n\n\ndef helpfunction(subject_array, p, batch_size, typeII_error, typeI_error,\n batch_limit):\n \"\"\"\n The helpfunction is a handy function to give the list of subjects on the\n negative batch(es), the list of subjects on the postive batch(es), the \n test-kit consumption, the infection rate on the negative batches, the \n infection rate on the positive batches, the optimal batch size for\n negative batches and the optimal batch size for positive batches.\n\n Input: \n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n p (float): Infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n batch_limit (int): batch size upper limit\n\n Output:\n temp0 (Numpy Array): an array of subjects on the negative batch(es)\n temp1 (Numpy Array): an array of subjects on the postive batch(es)\n temp_con (int): the number of test-kit consumptions\n p0 (float): the infection rate on the negative batches\n p1 (float): the infection rate on the positive batches\n n0 (float): the optimal batch size for the negative batches\n n1 (float): the optimal batch size for the positive batches\n \"\"\"\n batch_size = min(batch_size, batch_limit)\n p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error,\n typeI_error)\n p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error,\n typeI_error)\n n0 = one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)\n n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)\n if subject_array == np.array([]):\n return np.array([]), np.array([]), p0, p1, n0, n1\n temp0, temp1, temp_con = neg_pos_batch_split(subject_array, batch_size,\n typeII_error, typeI_error)\n return temp0, temp1, temp_con, p0, p1, n0, n1\n\n\ndef seq_test(subject_array, stop_rule, p, batch_size, typeII_error,\n typeI_error, repeat=1, prob_threshold=1, seq=True, batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\n<mask token>\n\n\ndef specificity_score(y_true, y_pred):\n \"\"\"\n A function provides specificty given the prediction and the truth \n \"\"\"\n tn, fp, _, _ = confusion_matrix(y_true=y_true, y_pred=y_pred).ravel()\n return tn / (tn + fp)\n\n\n@jit(parallel=True)\ndef data_gen(size, p):\n \"\"\"\n data_gen provides a faster way to generate a random population with\n infection rate p.\n Input:\n size (int): the size of population\n p (float): the infection rate\n Output:\n test_array (array): the first column is for id and the second column\n is the condition, where 1 stands for infection and 0 stands for uninfection\n\n \"\"\"\n random_table = np.random.binomial(size=size, p=p, n=1)\n test_array = np.zeros((size, 2), dtype=int)\n for i in range(size):\n test_array[i, 0] = i\n test_array[i, 1] = random_table[i]\n return test_array\n\n\ndef test_result(data, seq_test, **kwargs):\n \"\"\"\n a helper function provides convenient results for a given test method with its **kwargs\n\n Input:\n data (array or list of arrays)\n seq_test (test_method object): could be seq_test, matrix_test and other test_method objects\n Output:\n result (DataFrame): a dataframe contains important evaluation metrics for the test method \n \"\"\"\n if isinstance(data, list) == False:\n pred, consum, ind_con = seq_test(data, **kwargs)\n result = {'acc': np.mean(pred[:, 1] == data[:, 1]), 'sens':\n recall_score(data[:, 1], pred[:, 1]), 'spec': specificity_score\n (data[:, 1], pred[:, 1]), 'PPV': precision_score(data[:, 1],\n pred[:, 1]), 'NPV': npv_score(data[:, 1], pred[:, 1]),\n 'test_consum': consum, 'ind_consum': ind_con, 'batch_consum': \n consum - 
ind_con}\n return result\n else:\n length = len(data)\n acc = np.zeros(length)\n sens = np.zeros(length)\n spec = np.zeros(length)\n ppv = np.zeros(length)\n npv = np.zeros(length)\n test_consum = np.zeros(length)\n ind_consum = np.zeros(length)\n batch_consum = np.zeros(length)\n for i in range(length):\n pred, consum, ind_con = seq_test(data[i], **kwargs)\n acc[i] = np.mean(pred[:, 1] == data[i][:, 1])\n sens[i] = recall_score(data[i][:, 1], pred[:, 1])\n spec[i] = specificity_score(data[i][:, 1], pred[:, 1])\n ppv[i] = precision_score(data[i][:, 1], pred[:, 1])\n npv[i] = npv_score(data[i][:, 1], pred[:, 1])\n test_consum[i] = consum\n ind_consum[i] = ind_con\n batch_consum[i] = consum - ind_con\n result = {'acc': acc, 'sens': sens, 'spec': spec, 'PPV': ppv, 'NPV':\n npv, 'test_consum': test_consum, 'ind_consum': ind_consum,\n 'batch_consum': batch_consum}\n return pd.DataFrame(result)\n\n\ndef matrix_test(subject_array, side_length, typeII_error, typeI_error,\n sq_repeat=1, ind_repeat=1, seq=True):\n \"\"\"\n This function provides the matrix testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n side_length (int): the side length of the matrix testing\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n sq_repeat (int): the number of parallel testing for the column/row batch testing\n ind_repeat (int): the number of potential individual testing for the positive crossings\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n matrix_test_num = len(subject_array) // side_length ** 2\n matrix_test_array = subject_array[0:matrix_test_num * side_length ** 2, :]\n ind_test_array = subject_array[matrix_test_num * side_length ** 2:, :]\n ind_idx = []\n for temp_batch in np.array_split(matrix_test_array, matrix_test_num):\n temp_batch = temp_batch.reshape(side_length, side_length, 2)\n temp_row = []\n temp_col = []\n random_num_row = np.random.uniform(0, 1, sq_repeat)\n random_num_col = np.random.uniform(0, 1, sq_repeat)\n for i in range(side_length):\n if 1 in temp_batch[i, :, 1]:\n if max(random_num_row) > typeII_error:\n temp_row.append(temp_batch[i, :, 0])\n elif min(random_num_row) < typeI_error:\n temp_row.append(temp_batch[i, :, 0])\n if 1 in temp_batch[:, i, 1]:\n if max(random_num_col) > typeII_error:\n temp_col.append(temp_batch[:, i, 0])\n elif min(random_num_col) < typeI_error:\n temp_col.append(temp_batch[:, i, 0])\n ind_idx.append(np.intersect1d(temp_row, temp_col))\n ind_idx = np.concatenate(ind_idx)\n ind_idx = ind_idx.astype('int')\n if len(ind_idx) == 0:\n neg_array = matrix_test_array\n else:\n mask = np.zeros(subject_array.shape[0], dtype=bool)\n mask[ind_idx] = True\n mask[matrix_test_num * side_length ** 2:] = True\n ind_test_array = subject_array[mask, :]\n neg_array = subject_array[~mask, :]\n neg_array[:, 1] = 0\n ind_test, ind_con = conventional_test(ind_test_array, typeII_error,\n typeI_error, repeat=ind_repeat, seq=seq)\n batch_test_num = matrix_test_num * 2 * side_length * sq_repeat\n result = np.concatenate((neg_array, 
ind_test))\n result = result[result[:, 0].argsort()]\n return result, batch_test_num + ind_con, ind_con\n\n\ndef parallel_batch_testing(subject_array, batch_size, typeII_error,\n typeI_error, parallel_num, ind_repeat, seq):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n parallel_num (int): the number of parallel testing for the batch testing\n ind_repeat (int): the number of potential individual testing for the positive batches\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n neg_batch = []\n pos_batch = []\n batch_consum = np.ceil(len(subject_array) / batch_size) * parallel_num\n for temp_batch in np.array_split(subject_array, np.ceil(len(\n subject_array) / batch_size)):\n random_table = np.random.uniform(0, 1, (1, parallel_num))\n if 1 in temp_batch[:, 1]:\n if random_table.max() > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n elif random_table.min() < typeI_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([\n ])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([\n ])\n neg_batch[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_batch,\n typeII_error, typeI_error, repeat=ind_repeat, seq=seq)\n result = np.concatenate((individual_test, neg_batch))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, batch_consum + individual_con, individual_con\n\n\ndef fixed_batch_seq_test(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat, prob_threshold=0.3, seq=True):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of positive batches to enter the individual testing phase\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of potential individual testing for the positive crossings\n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error)\n temp0 = {'data': np.random.permutation(temp0), 'NB_Num': i[\n 'NB_Num'] + 1, 'PB_Num': i['PB_Num'], 'p': p0, 'batch_size':\n batch_size}\n temp1 = {'data': np.random.permutation(temp1), 'NB_Num': i[\n 'NB_Num'], 'PB_Num': i['PB_Num'] + 1, 'p': p1, 'batch_size':\n batch_size}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con\n\n\ndef name_fun(n):\n \"\"\"\n input: stopping rule\n output: finish nodes\n \"\"\"\n output = []\n temp = ['']\n for i in range(2 * n - 1):\n temp_cur = []\n for j in temp:\n candidate_pos = j + '+'\n candidate_neg = j + '-'\n if str.count(candidate_pos, '+') >= n:\n output.append(candidate_pos)\n else:\n temp_cur.append(candidate_pos)\n if str.count(candidate_neg, '-') >= n:\n output.append(candidate_neg)\n else:\n temp_cur.append(candidate_neg)\n temp = temp_cur\n neg_symbol = [x for x in output if str.count(x, '-') == n]\n pos_symbol = [x for x in output if str.count(x, '+') == n]\n return output, neg_symbol, pos_symbol\n\n\ndef seq_test_with_node(subject_array, stop_rule, p, batch_size,\n typeII_error, typeI_error, repeat=1, prob_threshold=1, seq=True,\n batch_limit=32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate 
of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n batch_num_list = []\n consum = 0\n temp = {'data': subject_array, 'NB_Num': 0, 'PB_Num': 0, 'p': p,\n 'batch_size': batch_size, 'node': ''}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n neg_node = []\n pos_node = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'],\n i['p'], i['batch_size'], typeII_error, typeI_error,\n batch_limit=batch_limit)\n temp0 = {'data': temp0, 'NB_Num': i['NB_Num'] + 1, 'PB_Num': i[\n 'PB_Num'], 'p': p0, 'batch_size': n0, 'node': i['node'] + '-'}\n temp1 = {'data': temp1, 'NB_Num': i['NB_Num'], 'PB_Num': i[\n 'PB_Num'] + 1, 'p': p1, 'batch_size': n1, 'node': i['node'] +\n '+'}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n if len(temp1['data']) > 0:\n if temp1['PB_Num'] >= stop_rule or temp1['p'\n ] >= prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n batch_num_list.append(consum)\n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n temp = [[x, j['node']] for x in j['data'][:, 0]]\n neg_node.append(temp)\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n temp = [[x, k['node']] for x in k['data'][:, 0]]\n pos_node.append(temp)\n pos_array = np.concatenate(pos_array)\n neg_array[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_array,\n typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n pos_node.extend(neg_node)\n node = pos_node\n node = sum(node, [])\n node.sort()\n node = [x[1] for x in node]\n result = result[result[:, 0].argsort()]\n result = result.astype('int64')\n return result, consum, individual_con, node, batch_num_list\n",
"step-5": "import numpy as np\nimport pandas as pd\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import classification_report\nfrom sklearn.metrics import precision_score, recall_score, f1_score\nfrom scipy.optimize import fsolve\nimport numba\nfrom numba import njit,jit\n#\n@jit(parallel = True)\ndef conventional_test(subject_array, typeII_error, typeI_error, repeat = 1,\nseq = True):\n\n\n \"\"\"\n A function gives the test results to a subject array given the probability of\n type II error, the probability of Type I error, and the number of repeatition,\n and setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n test_result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n \"\"\"\n\n\n # Sequential Testing\n if seq == True:\n consum = 0\n \n test_result = np.zeros(subject_array.shape, dtype = int)\n \n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat))\n for i in range(len(subject_array)):\n temp = 0\n j = 0\n subject = subject_array[i,1]\n while j < repeat and temp == 0:\n random_num = random_table[i, j]\n consum += 1\n if subject == 1:\n temp = 1 if random_num > typeII_error else 0\n else:\n temp = 1 if random_num < typeI_error else 0\n j += 1\n \n\n test_result[i,0] = subject_array[i,0]\n test_result[i,1] = temp\n \n return test_result, consum\n \n # Simultanous Testing \n else: \n test_result = np.zeros(subject_array.shape, dtype = int)\n \n\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], repeat))\n for i in range(len(subject_array)):\n temp = 0\n for j in range(repeat):\n temp_random = random_table[i, j]\n if subject_array[i, 1] == 1:\n temp_1 = 1 if temp_random > typeII_error else 0\n elif subject_array[i, 1] == 0:\n temp_1 = 1 if temp_random < typeI_error else 0\n temp += temp_1\n temp = 1 if temp >= repeat/2 else 0\n test_result[i,0] = subject_array[i,0]\n test_result[i,1] = temp\n \n return test_result, len(subject_array)*repeat\n\n\n@njit(parallel = True)\ndef parallel_test(subject_array, typeII_error, typeI_error, num):\n test_result = np.zeros(subject_array.shape, dtype = int)\n random_table = np.random.uniform(0, 1, (subject_array.shape[0], num))\n for i in range(len(subject_array)):\n subject = subject_array[i, 1]\n if subject == 1:\n temp = 1 if max(random_table[i,:]) > typeII_error else 0\n elif subject == 0:\n temp = 1 if min(random_table[i,:]) < typeI_error else 0\n\n test_result[i,0] = subject_array[i,0]\n test_result[i,1] = temp\n\n return test_result,len(subject_array)*num,len(subject_array)*num\n\n\ndef infection_rate_on_negative_batch(p,batch_size,typeII_error, typeI_error):\n \"\"\"\n \n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the negative batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the 
negative batch\n\n\n\n \"\"\"\n q = 1-p\n r = typeII_error * (1 - q ** batch_size)/((1 - typeI_error) * q ** batch_size + typeII_error *(1 - q**batch_size))\n return p*r/(1-q**batch_size)\n\n\ndef infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error):\n \n \"\"\"\n Given infection rate, batch size, prob of type II error and prob of type I error, this\n function gives the infection rate on the positive batch.\n \n Input:\n p (float): the infection rate\n batch_size (int): the batch size\n typeII_error (float): the prob of type II error\n typeI_error (float): the prob of type I error\n\n Output:\n (float): the infection rate on the positive batch\n \"\"\" \n\n q = 1-p\n r = (1 - typeII_error) * (1 - q ** batch_size)/(typeI_error * q ** batch_size + (1 - typeII_error) * (1 - q **batch_size))\n return p*r/(1 - q** batch_size)\n\n\ndef one_batch_test_solver(prevalence_rate,typeII_error, typeI_error,n_initial_guess = 2):\n \n \"\"\"\n A function gives (float) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess(float): the initial guess \n\n Output:\n (float): the optimal batch size\n\n \"\"\"\n q = 1- prevalence_rate # To consistent with the notation of our document\n func = lambda n : n*q**(n/2) - (-(1-typeII_error - typeI_error)*np.log(q))**(-1/2)\n # print(func(n_initial_guess))\n n_solution = fsolve(func, n_initial_guess)\n \n return float(n_solution)\n\ndef one_batch_test_int_solver(prevalence_rate,typeII_error, typeI_error,batch_limit,n_initial_guess = 2):\n \"\"\"\n A function gives (int) the best batch size for one batch test given the infection rate\n \n Inputs:\n prevalence_rate(float): infection rate\n n_initial_guess(float): the initial guess \n typeII_error(float): the prob of type II error\n typeI_error(float): the prob of type I error\n n_initial_guess:\n batch_limit (int): the upper limit of batch size\n\n Output:\n (int): the optimal batch size\n \"\"\"\n\n \n sol_float = one_batch_test_solver(prevalence_rate,typeII_error, typeI_error, n_initial_guess)\n floor, ceil = np.floor(sol_float), np.ceil(sol_float)\n func = lambda batch_size: 1/batch_size + 1 - typeII_error -(1 - typeII_error - typeI_error)*(1-prevalence_rate)**batch_size\n if func(floor) < func(ceil):\n temp = int(floor)\n else:\n temp = int(ceil)\n if temp <= batch_limit:\n return temp\n else:\n return int(batch_limit)\n\n\ndef neg_pos_batch_split(subject_array, batch_size, typeII_error, typeI_error):\n \"\"\"\n A function gives a list of sujects on the negative batch(es),\n a list of subjects on the postive batch(es) and the test-kit \n consumption given the probability of type II error, the \n probability of Type I error.\n \n Input:\n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n \n\n Output:\n neg_batch (Numpy Array): an array of subjects on the negative batch(es)\n pos_batch (Numpy Array): an array of subjects on the postive batch(es)\n test_consum (int): the number of test-kit consumptions\n \n \"\"\"\n neg_batch = []\n pos_batch = []\n test_consum = np.ceil(len(subject_array)/batch_size)\n random_table = np.random.uniform(0, 1, int(test_consum))\n i = 0\n for temp_batch in 
np.array_split(subject_array, test_consum):\n if 1 in (temp_batch[:,1]):\n if random_table[i] > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n else:\n if random_table[i] > typeI_error:\n neg_batch.append(temp_batch)\n else:\n pos_batch.append(temp_batch)\n i += 1\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([])\n return (neg_batch, pos_batch, test_consum)\n\ndef helpfunction(subject_array, p, batch_size ,typeII_error, typeI_error, batch_limit):\n \n \"\"\"\n The helpfunction is a handy function to give the list of subjects on the\n negative batch(es), the list of subjects on the postive batch(es), the \n test-kit consumption, the infection rate on the negative batches, the \n infection rate on the positive batches, the optimal batch size for\n negative batches and the optimal batch size for positive batches.\n\n Input: \n subject_array (Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n p (float): Infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n batch_limit (int): batch size upper limit\n\n Output:\n temp0 (Numpy Array): an array of subjects on the negative batch(es)\n temp1 (Numpy Array): an array of subjects on the postive batch(es)\n temp_con (int): the number of test-kit consumptions\n p0 (float): the infection rate on the negative batches\n p1 (float): the infection rate on the positive batches\n n0 (float): the optimal batch size for the negative batches\n n1 (float): the optimal batch size for the positive batches\n \"\"\"\n batch_size = min(batch_size, batch_limit)\n\n p0 = infection_rate_on_negative_batch(p, batch_size, typeII_error, typeI_error)\n p1 = infection_rate_on_positive_batch(p, batch_size, typeII_error, typeI_error)\n n0= one_batch_test_int_solver(p0, typeII_error, typeI_error, batch_limit)\n n1 = one_batch_test_int_solver(p1, typeII_error, typeI_error, batch_limit)\n if subject_array == np.array([]):\n return (np.array([]), np.array([]), p0, p1, n0, n1)\n temp0, temp1, temp_con = neg_pos_batch_split(subject_array,batch_size,typeII_error, typeI_error)\n return(temp0, temp1, temp_con, p0, p1, n0, n1)\n\ndef seq_test(subject_array,stop_rule,p, batch_size, typeII_error, typeI_error, repeat = 1, \nprob_threshold = 1, seq = True, batch_limit = 32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = [] #renamed to negativeInfoList\n pos_list = [] #renamed to positiveInfoList\n consum = 0\n temp = {'data': subject_array,\n 'NB_Num': 0,\n 'PB_Num': 0,\n 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = [] #renamed to negativeBatches\n pos_array = [] #renamed to positiveBatches\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],\n typeII_error, typeI_error, \n batch_limit = batch_limit)\n temp0 = {'data': temp0,\n 'NB_Num': i['NB_Num'] + 1,\n 'PB_Num': i['PB_Num'],\n 'p': p0,\n 'batch_size': n0}\n temp1 = {'data': temp1,\n 'NB_Num': i['NB_Num'],\n 'PB_Num': i['PB_Num'] + 1,\n 'p': p1,\n 'batch_size': n1}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n \n if len(temp1['data'])>0:\n if temp1['PB_Num'] >= stop_rule or temp1['p']>=prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con \n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n \n neg_array[:,1] = 0\n individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:,0].argsort()]\n result = result.astype('int64')\n return (result, consum, individual_con)\n\ndef npv_score(y_true, y_pred):\n \"\"\"\n A function provides npv given the prediction and the truth \n \"\"\"\n tn, _, fn, _ = confusion_matrix(y_true = y_true,\n y_pred = y_pred).ravel()\n return tn/(tn + fn)\n\ndef specificity_score(y_true, y_pred):\n \"\"\"\n A function provides specificty given the prediction and the truth \n \"\"\"\n tn, fp, _, _ = confusion_matrix(y_true = y_true,\n y_pred = y_pred).ravel()\n return tn/(tn + fp)\n\n@jit(parallel = True)\ndef data_gen(size, p):\n \"\"\"\n data_gen provides a faster way to generate a random population with\n infection rate p.\n Input:\n size (int): the size of population\n p (float): the infection rate\n Output:\n test_array (array): the first column is for id and the second column\n is the condition, where 1 stands for infection and 0 stands for uninfection\n\n \"\"\"\n #print(np.random.get_state()[1][0])\n random_table = np.random.binomial(size = size, p = p, n = 1)\n test_array = np.zeros((size, 2), dtype = int)\n for i in range(size):\n test_array[i,0] = i\n test_array[i,1] = random_table[i]\n return test_array\n\n\ndef test_result(data, seq_test, **kwargs):\n \"\"\"\n a helper function provides convenient results for a given test method with its **kwargs\n\n Input:\n data (array or list of arrays)\n seq_test (test_method object): could be seq_test, matrix_test and other test_method objects\n Output:\n result (DataFrame): a dataframe contains important evaluation metrics for the test method \n \"\"\"\n if isinstance(data, list) == False:\n \n 
pred,consum, ind_con = seq_test(data, **kwargs)\n result = {'acc': np.mean(pred[:,1] == data[:,1]),\n 'sens': recall_score(data[:,1], pred[:,1]),\n 'spec': specificity_score(data[:,1], pred[:,1]),\n 'PPV': precision_score(data[:, 1], pred[:,1]),\n 'NPV': npv_score(data[:, 1], pred[:,1]),\n 'test_consum': consum,\n 'ind_consum': ind_con,\n 'batch_consum': consum - ind_con}\n return result\n else:\n length = len(data)\n acc = np.zeros(length)\n sens = np.zeros(length)\n spec = np.zeros(length)\n ppv = np.zeros(length)\n npv = np.zeros(length)\n test_consum = np.zeros(length)\n ind_consum = np.zeros(length)\n batch_consum = np.zeros(length)\n for i in range(length):\n \n pred,consum, ind_con = seq_test(data[i], **kwargs)\n \n acc[i] = np.mean(pred[:,1] == data[i][:,1])\n sens[i] = recall_score(data[i][:,1], pred[:,1])\n spec[i] = specificity_score(data[i][:,1], pred[:,1])\n ppv[i] = precision_score(data[i][:,1], pred[:,1])\n npv[i] = npv_score(data[i][:,1], pred[:,1])\n test_consum[i] = consum\n ind_consum[i] = ind_con\n batch_consum[i] = consum-ind_con\n\n result = {'acc': acc,\n 'sens': sens,\n 'spec': spec,\n 'PPV': ppv,\n 'NPV': npv,\n 'test_consum': test_consum,\n 'ind_consum': ind_consum,\n 'batch_consum': batch_consum}\n return pd.DataFrame(result)\n\n\n\ndef matrix_test(subject_array, side_length, typeII_error, typeI_error, sq_repeat = 1 ,ind_repeat = 1, seq = True):\n\n \"\"\"\n This function provides the matrix testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n side_length (int): the side length of the matrix testing\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n sq_repeat (int): the number of parallel testing for the column/row batch testing\n ind_repeat (int): the number of potential individual testing for the positive crossings\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n\n\n\n matrix_test_num = len(subject_array)//(side_length**2)\n matrix_test_array = subject_array[0:matrix_test_num*side_length**2, :]\n ind_test_array = subject_array[matrix_test_num*side_length**2:, :]\n \n ind_idx = []\n \n for temp_batch in np.array_split(matrix_test_array, matrix_test_num):\n temp_batch = temp_batch.reshape(side_length, side_length, 2)\n temp_row = []\n temp_col = []\n random_num_row = np.random.uniform(0, 1, sq_repeat)\n random_num_col = np.random.uniform(0, 1, sq_repeat)\n for i in range(side_length):\n if 1 in (temp_batch[i,:,1]):\n if max(random_num_row) > typeII_error:\n temp_row.append(temp_batch[i,:,0])\n else:\n if min(random_num_row) < typeI_error:\n temp_row.append(temp_batch[i, :, 0])\n if 1 in (temp_batch[:,i,1]):\n if max(random_num_col) > typeII_error:\n temp_col.append(temp_batch[:,i,0])\n else:\n if min(random_num_col) < typeI_error:\n temp_col.append(temp_batch[:, i, 0])\n ind_idx.append(np.intersect1d(temp_row, temp_col))\n\n ind_idx = np.concatenate(ind_idx)\n ind_idx = ind_idx.astype('int')\n \n if len(ind_idx) == 0:\n neg_array = matrix_test_array\n else:\n mask = np.zeros(subject_array.shape[0], dtype = bool)\n mask[ind_idx] = True\n mask[matrix_test_num*side_length**2:] = True\n ind_test_array = subject_array[mask,:]\n \n \n neg_array = subject_array[~mask, :]\n \n\n \n \n neg_array[:, 1] = 0\n \n ind_test, ind_con = conventional_test(ind_test_array,\n typeII_error, typeI_error, repeat = ind_repeat, seq = seq)\n \n \n \n batch_test_num = matrix_test_num * 2 * side_length * sq_repeat\n result = np.concatenate((neg_array, ind_test))\n result = result[result[:, 0].argsort()]\n \n return (result, batch_test_num + ind_con, ind_con)\n\n\ndef parallel_batch_testing(subject_array, batch_size, typeII_error, typeI_error, parallel_num, ind_repeat, seq):\n\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n parallel_num (int): the number of parallel testing for the batch testing\n ind_repeat (int): the number of potential individual testing for the positive batches\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n\n\n\n neg_batch = []\n pos_batch = []\n batch_consum = np.ceil(len(subject_array)/batch_size)* parallel_num\n for temp_batch in np.array_split(subject_array, np.ceil(len(subject_array)/batch_size)):\n random_table = np.random.uniform(0, 1, (1, parallel_num))\n if 1 in (temp_batch[:, 1]):\n if random_table.max() > typeII_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n else:\n if random_table.min() < typeI_error:\n pos_batch.append(temp_batch)\n else:\n neg_batch.append(temp_batch)\n neg_batch = np.concatenate(neg_batch) if len(neg_batch) > 0 else np.array([])\n pos_batch = np.concatenate(pos_batch) if len(pos_batch) > 0 else np.array([])\n\n neg_batch[:, 1] = 0\n individual_test, individual_con = conventional_test(pos_batch, typeII_error, typeI_error,\n repeat = ind_repeat, seq = seq)\n result = np.concatenate((individual_test, neg_batch))\n result = result[result[:,0].argsort()]\n result = result.astype('int64')\n return (result, batch_consum+individual_con, individual_con)\n \n\ndef fixed_batch_seq_test(subject_array,stop_rule, p, batch_size, typeII_error, typeI_error, repeat, prob_threshold = 0.3, seq = True):\n \"\"\"\n This function provides the parallel batch testing results for a given subject array.\n\n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of positive batches to enter the individual testing phase\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of potential individual testing for the positive crossings\n prob_threshold (float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. 
The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n \"\"\"\n \n temp_list = []\n neg_list = []\n pos_list = []\n consum = 0\n temp = {'data': subject_array,\n 'NB_Num': 0,\n 'PB_Num': 0,\n 'p': p,\n 'batch_size': batch_size}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],\n typeII_error, typeI_error)\n temp0 = {'data': np.random.permutation(temp0),\n 'NB_Num': i['NB_Num'] + 1,\n 'PB_Num': i['PB_Num'],\n 'p': p0,\n 'batch_size': batch_size}\n temp1 = {'data': np.random.permutation(temp1),\n 'NB_Num': i['NB_Num'],\n 'PB_Num': i['PB_Num'] + 1,\n 'p': p1,\n 'batch_size': batch_size}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n \n if len(temp1['data'])>0:\n if temp1['PB_Num'] >= stop_rule or temp1['p']>=prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con \n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n neg_array = np.concatenate(neg_array)\n for k in pos_list:\n pos_array.append(k['data'])\n pos_array = np.concatenate(pos_array)\n \n neg_array[:,1] = 0\n individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n result = result[result[:,0].argsort()]\n result = result.astype('int64')\n return (result, consum, individual_con)\n\n\n \ndef name_fun(n):\n \"\"\"\n input: stopping rule\n output: finish nodes\n \"\"\"\n output = []\n temp = ['']\n for i in range(2*n-1):\n temp_cur = []\n for j in temp:\n candidate_pos = j + '+'\n candidate_neg = j + '-'\n if str.count(candidate_pos, '+') >= n:\n output.append(candidate_pos)\n else:\n temp_cur.append(candidate_pos)\n\n if str.count(candidate_neg, '-') >= n:\n output.append(candidate_neg)\n else:\n temp_cur.append(candidate_neg)\n\n temp = temp_cur\n\n neg_symbol = [x for x in output if str.count(x, '-') == n]\n pos_symbol = [x for x in output if str.count(x, '+') == n]\n\n return output, neg_symbol, pos_symbol\n\n\n\ndef seq_test_with_node(subject_array,stop_rule,p, batch_size, typeII_error, typeI_error, repeat = 1, \nprob_threshold = 1, seq = True, batch_limit = 32):\n \"\"\"\n A function gives the test results to a subject array and the total number of \n test-kit consumption and the individual testing number given the subject array,\n the stop rule, the batch size, the probability of type II error, the probability of \n Type I error, and the number of repeatition, the probability threshold, and \n setting of sequence testing or not.\n \n Input:\n subject_array(Numpy Array): an array contains subject id and subject's\n condition (1 stands for infection and 0 stands for uninfection)\n stop_rule (int): the number of postive batches to enter individual testing\n p (float): infection rate\n batch_size (int): batch size\n typeII_error (float): probability of type II error \n typeI_error (float): probability of type I error\n repeat (int): the number of repetition \n prob_threshold 
(float): if the infection rate of a batch is beyond prob_threshold, \n the subjects on that batch will enter individual testing phase\n seq (boolean): True stands for sequential testing. The test will end\n when the test result is positive or run up the number of repetition.\n False stands for simutanlous testing with majority voting.\n batch_limit (int):\n\n Output:\n result (Numpy Array): an array contains subjects' id and test results\n consum (int): the total test consumption\n individual_con (int): the test consumption for individual testings\n\n \"\"\"\n temp_list = []\n neg_list = []\n pos_list = []\n batch_num_list = []\n consum = 0\n temp = {'data': subject_array,\n 'NB_Num': 0,\n 'PB_Num': 0,\n 'p': p,\n 'batch_size': batch_size,\n 'node': ''}\n temp_list.append(temp)\n new_list = []\n neg_array = []\n neg_node = []\n pos_node = []\n pos_array = []\n while len(temp_list) > 0:\n for i in temp_list:\n temp0, temp1, temp_con, p0, p1, n0, n1 = helpfunction(i['data'], i['p'], i['batch_size'],\n typeII_error, typeI_error, \n batch_limit = batch_limit)\n temp0 = {'data': temp0,\n 'NB_Num': i['NB_Num'] + 1,\n 'PB_Num': i['PB_Num'],\n 'p': p0,\n 'batch_size': n0,\n 'node': i['node'] + '-'}\n temp1 = {'data': temp1,\n 'NB_Num': i['NB_Num'],\n 'PB_Num': i['PB_Num'] + 1,\n 'p': p1,\n 'batch_size': n1,\n 'node': i['node'] + '+'}\n if len(temp0['data']) > 0:\n if temp0['NB_Num'] >= stop_rule:\n neg_list.append(temp0)\n else:\n new_list.append(temp0)\n \n if len(temp1['data'])>0:\n if temp1['PB_Num'] >= stop_rule or temp1['p']>=prob_threshold:\n pos_list.append(temp1)\n else:\n new_list.append(temp1)\n consum += temp_con\n batch_num_list.append(consum) \n temp_list = new_list\n new_list = []\n for j in neg_list:\n neg_array.append(j['data'])\n temp = [[x, j['node']] for x in j['data'][:,0]]\n neg_node.append(temp)\n neg_array = np.concatenate(neg_array)\n #print(neg_array)\n #print(neg_node)\n #neg_node = np.concatenate(neg_node)\n\n for k in pos_list:\n pos_array.append(k['data'])\n #pos_node.append(k['node'])\n #pos_node.append(np.column_stack((k['data'][:,0],np.repeat(k['node'], len(k['data'])))))\n temp = [[x, k['node']] for x in k['data'][:,0]]\n pos_node.append(temp)\n pos_array = np.concatenate(pos_array)\n #pos_node = np.concatenate(pos_node)\n\n \n neg_array[:,1] = 0\n individual_test, individual_con = conventional_test(pos_array, typeII_error, typeI_error, repeat, seq)\n pos_array = individual_test\n consum += individual_con\n result = np.concatenate((pos_array, neg_array))\n #node = np.concatenate((pos_node, neg_node))\n pos_node.extend(neg_node)\n node = pos_node\n node = sum(node, [])\n node.sort()\n node = [x[1] for x in node]\n #node = node[node[:,0].argsort()]\n result = result[result[:,0].argsort()]\n result = result.astype('int64')\n return (result, consum, individual_con, node, batch_num_list)\n\n\n\n\n\n\n",
"step-ids": [
10,
14,
15,
17,
20
]
}
|
[
10,
14,
15,
17,
20
] |
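The record above repeatedly routes batches between a negative and a positive pile via helpfunction, which is defined earlier in the source. A minimal self-contained sketch of one such pooled-testing round; the per-batch error model here (miss with probability typeII_error, false alarm with probability typeI_error) is an assumption read off the parameter names:

import numpy as np

def one_pooled_round(subjects, batch_size, typeII_error, typeI_error, rng):
    # subjects: array of (id, status) rows; status 1 = infected, 0 = not infected
    neg_batches, pos_batches, consumption = [], [], 0
    for start in range(0, len(subjects), batch_size):
        batch = subjects[start:start + batch_size]
        truly_positive = bool(batch[:, 1].any())
        # one pooled test per batch, with the assumed error model
        if truly_positive:
            tests_positive = rng.random() > typeII_error
        else:
            tests_positive = rng.random() < typeI_error
        consumption += 1
        (pos_batches if tests_positive else neg_batches).append(batch)
    return neg_batches, pos_batches, consumption

rng = np.random.default_rng(0)
subjects = np.column_stack([np.arange(32), (rng.random(32) < 0.05).astype(int)])
neg, pos, used = one_pooled_round(subjects, 8, 0.05, 0.01, rng)
print(len(neg), 'negative batches,', len(pos), 'positive batches,', used, 'tests used')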
#Max Low
#9-25-17
#quiz2.py -- numbers , bigger smaller same, divisible by 3, product and correct person
numone = int(input('Enter a number: '))
numtwo = int(input('Enter a 2nd number: '))
if numone > numtwo:
print('The first number is bigger')
elif numtwo > numone:
print('The second number is bigger')
else:
print('The numbers are the same')
if numone % 3 == 0 and numtwo % 3 == 0:
print('They are both divisible by 3')
elif numone % 3 == 0:
print('Only the first number is divisible by three')
elif numtwo % 3 == 0:
print('Only the second number is divisible by three')
else:
print('Neither number is divisible by 3')
product = int(input('What is the product of your two numbers?: '))
if product == numone*numtwo:
print('correct')
else:
print('incorrect')
|
normal
|
{
"blob_id": "a67612e8301728d1fb366d7c8909fa830f04bf45",
"index": 9739,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\n<mask token>\nif product == numone * numtwo:\n print('correct')\nelse:\n print('incorrect')\n",
"step-3": "numone = int(input('Enter a number: '))\nnumtwo = int(input('Enter a 2nd number: '))\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\nproduct = int(input('What is the product of your two numbers?: '))\nif product == numone * numtwo:\n print('correct')\nelse:\n print('incorrect')\n",
"step-4": "#Max Low\n#9-25-17\n#quiz2.py -- numbers , bigger smaller same, divisible by 3, product and correct person\n\nnumone = int(input('Enter a number: '))\nnumtwo = int(input('Enter a 2nd number: '))\n\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\n\n \nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\n\nproduct = int(input('What is the product of your two numbers?: '))\nif product == numone*numtwo:\n print('correct')\nelse:\n print('incorrect')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
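The quiz above crashes on non-numeric input, since int(input(...)) raises ValueError; a small hedged variant of the same prompts with basic validation:

def read_int(prompt):
    # keep asking until the user types a whole number
    while True:
        try:
            return int(input(prompt))
        except ValueError:
            print('Please enter a whole number.')

numone = read_int('Enter a number: ')
numtwo = read_int('Enter a 2nd number: ')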
from botocore_eb.model import ServiceModel
from botocore_eb.exceptions import ParamValidationError
from botocore_eb.exceptions import DataNotFoundError
from botocore_eb.exceptions import OperationNotPageableError
from botocore_eb import xform_name
from botocore_eb.paginate import Paginator
import botocore_eb.validate
import botocore_eb.serialize
import botocore_eb.parsers  # needed below by botocore_eb.parsers.create_parser
class ClientError(Exception):
MSG_TEMPLATE = (
'An error occurred ({error_code}) when calling the {operation_name} '
'operation: {error_message}')
def __init__(self, error_response, operation_name):
msg = self.MSG_TEMPLATE.format(
error_code=error_response['Error']['Code'],
error_message=error_response['Error']['Message'],
operation_name=operation_name)
super(ClientError, self).__init__(msg)
self.response = error_response
class ClientCreator(object):
"""Creates client objects for a service."""
def __init__(self, loader, endpoint_creator):
self._loader = loader
self._endpoint_creator = endpoint_creator
def create_client(self, service_name, region_name, is_secure=True,
endpoint_url=None, verify=None):
service_model = self._load_service_model(service_name)
cls = self.create_client_class(service_name)
client_args = self._get_client_args(service_model, region_name, is_secure,
endpoint_url, verify)
return cls(**client_args)
def create_client_class(self, service_name):
service_model = self._load_service_model(service_name)
methods = self._create_methods(service_model)
py_name_to_operation_name = self._create_name_mapping(service_model)
self._add_pagination_methods(service_model, methods,
py_name_to_operation_name)
cls = type(service_name, (BaseClient,), methods)
return cls
def _add_pagination_methods(self, service_model, methods, name_mapping):
loader = self._loader
def get_paginator(self, operation_name):
"""Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you'd normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator("create_foo")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
"""
# Note that the 'self' in this method refers to the self on
# BaseClient, not on ClientCreator.
if not self.can_paginate(operation_name):
raise OperationNotPageableError(operation_name=operation_name)
else:
actual_operation_name = name_mapping[operation_name]
paginator = Paginator(
getattr(self, operation_name),
self._cache['page_config'][actual_operation_name])
return paginator
def can_paginate(self, operation_name):
"""Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you'd normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator("create_foo")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
"""
if 'page_config' not in self._cache:
try:
page_config = loader.load_data('aws/%s/%s.paginators' % (
service_model.endpoint_prefix,
service_model.api_version))['pagination']
self._cache['page_config'] = page_config
except DataNotFoundError:
self._cache['page_config'] = {}
actual_operation_name = name_mapping[operation_name]
return actual_operation_name in self._cache['page_config']
methods['get_paginator'] = get_paginator
methods['can_paginate'] = can_paginate
def _load_service_model(self, service_name):
json_model = self._loader.load_service_model('aws/%s' % service_name)
service_model = ServiceModel(json_model)
return service_model
def _get_client_args(self, service_model, region_name, is_secure,
endpoint_url, verify):
# A client needs:
#
# * serializer
# * endpoint
# * response parser
protocol = service_model.metadata['protocol']
serializer = botocore_eb.serialize.create_serializer(
protocol, include_validation=True)
endpoint = self._endpoint_creator.create_endpoint(
service_model, region_name, is_secure=is_secure,
endpoint_url=endpoint_url, verify=verify)
response_parser = botocore_eb.parsers.create_parser(protocol)
return {
'serializer': serializer,
'endpoint': endpoint,
'response_parser': response_parser
}
def _create_methods(self, service_model):
op_dict = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
op_dict[py_operation_name] = self._create_api_method(
py_operation_name, operation_name, service_model)
return op_dict
def _create_name_mapping(self, service_model):
# py_name -> OperationName
mapping = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
mapping[py_operation_name] = operation_name
return mapping
def _create_api_method(self, py_operation_name, operation_name,
service_model):
def _api_call(self, **kwargs):
operation_model = service_model.operation_model(operation_name)
request_dict = self._serializer.serialize_to_request(
kwargs, operation_model)
http, parsed_response = self._endpoint.make_request(
operation_model, request_dict)
if http.status_code >= 300:
raise ClientError(parsed_response, operation_name)
else:
return parsed_response
_api_call.__name__ = str(py_operation_name)
# TODO: docstrings.
return _api_call
class BaseClient(object):
def __init__(self, serializer, endpoint, response_parser):
self._serializer = serializer
self._endpoint = endpoint
self._response_parser = response_parser
self._cache = {}
|
normal
|
{
"blob_id": "829c833866198307d7d19c4a0cbe40299ee14eb9",
"index": 5288,
"step-1": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-2": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-3": "<mask token>\n\n\nclass ClientError(Exception):\n <mask token>\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-4": "<mask token>\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} operation: {error_message}'\n )\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-5": "from botocore_eb.model import ServiceModel\nfrom botocore_eb.exceptions import ParamValidationError\nfrom botocore_eb.exceptions import DataNotFoundError\nfrom botocore_eb.exceptions import OperationNotPageableError\nfrom botocore_eb import xform_name\nfrom botocore_eb.paginate import Paginator\nimport botocore_eb.validate\nimport botocore_eb.serialize\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} '\n 'operation: {error_message}')\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(\n error_code=error_response['Error']['Code'],\n error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name, is_secure,\n endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n # Note that the 'self' in this method refers to the self on\n # BaseClient, not on ClientCreator.\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(\n getattr(self, operation_name),\n self._cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' % (\n service_model.endpoint_prefix,\n service_model.api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n # A client needs:\n #\n # * serializer\n # * endpoint\n # * response parser\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(\n protocol, include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(\n service_model, region_name, is_secure=is_secure,\n endpoint_url=endpoint_url, verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {\n 'serializer': serializer,\n 'endpoint': endpoint,\n 'response_parser': response_parser\n }\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n # py_name -> OperationName\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(\n kwargs, operation_model)\n\n http, parsed_response = self._endpoint.make_request(\n operation_model, request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n\n _api_call.__name__ = str(py_operation_name)\n # TODO: docstrings.\n return _api_call\n\n\nclass BaseClient(object):\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-ids": [
6,
8,
15,
16,
18
]
}
|
[
6,
8,
15,
16,
18
] |
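The core trick in create_client_class above is building a class at runtime with type(name, (BaseClient,), methods). A standalone sketch of that pattern; the names here are made up for illustration and the factory closure avoids Python's late-binding pitfall in loops:

def make_client_class(name, operation_names):
    def make_method(op):
        def method(self, **kwargs):
            return '%s called with %r' % (op, kwargs)
        method.__name__ = str(op)
        return method
    # one generated method per operation name, attached via type()
    methods = {op: make_method(op) for op in operation_names}
    return type(name, (object,), methods)

FakeClient = make_client_class('FakeClient', ['create_foo', 'delete_foo'])
print(FakeClient().create_foo(Bucket='x'))  # create_foo called with {'Bucket': 'x'}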
from django.contrib import admin
from .models import Wbs, Equipment_Type
class WbsAdmin(admin.ModelAdmin):
list_display = ('code','description','equipment_type')
list_filter = ('code','description','equipment_type')
readonly_fields = ('code','description')
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = ('type',)
list_filter = ('type',)
admin.site.register(Wbs,WbsAdmin)
admin.site.register(Equipment_Type,Equipment_TypeAdmin)
|
normal
|
{
"blob_id": "292c66bd5b7f56ee8c27cabff01cd97ff36a79dc",
"index": 8885,
"step-1": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\nadmin.site.register(Wbs, WbsAdmin)\nadmin.site.register(Equipment_Type, Equipment_TypeAdmin)\n",
"step-4": "from django.contrib import admin\nfrom .models import Wbs, Equipment_Type\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\nadmin.site.register(Wbs, WbsAdmin)\nadmin.site.register(Equipment_Type, Equipment_TypeAdmin)\n",
"step-5": "from django.contrib import admin\r\nfrom .models import Wbs, Equipment_Type\r\n\r\nclass WbsAdmin(admin.ModelAdmin):\r\n list_display = ('code','description','equipment_type')\r\n list_filter = ('code','description','equipment_type')\r\n readonly_fields = ('code','description')\r\n\r\nclass Equipment_TypeAdmin(admin.ModelAdmin):\r\n list_display = ('type',)\r\n list_filter = ('type',)\r\n\r\nadmin.site.register(Wbs,WbsAdmin)\r\nadmin.site.register(Equipment_Type,Equipment_TypeAdmin)\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
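The same registration can be written with the @admin.register decorator (available since Django 1.7); a sketch assuming the Wbs model from the record above:

from django.contrib import admin
from .models import Wbs

@admin.register(Wbs)  # replaces the explicit admin.site.register(Wbs, WbsAdmin) call
class WbsAdmin(admin.ModelAdmin):
    list_display = ('code', 'description', 'equipment_type')
    list_filter = ('code', 'description', 'equipment_type')
    readonly_fields = ('code', 'description')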
#!/usr/bin/python
# sudo mn --custom _mininet_topo.py --topo mytopo,5
# sudo mn --custom _mininet_topo.py --topo mytopo,3 --test simpletest
# or just run this python file
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.util import dumpNodeConnections
from mininet.log import setLogLevel
from mininet.cli import CLI
class SingleSwitchTopo(Topo):
"Single switch connected to n hosts."
def build(self):
# switch = self.addSwitch('s1')
# # Python's range(N) generates 0..N-1
# for h in range(n):
# host = self.addHost('h%s' % (h + 1))
# self.addLink(host, switch)
s1 = self.addSwitch('s1')
h1 = self.addHost('h1')
h2 = self.addHost('h2')
h3 = self.addHost('h3')
h4 = self.addHost('h4')
h5 = self.addHost('h5')
h6 = self.addHost('h6')
self.addLink(h1, s1)
self.addLink(h2, s1)
self.addLink(h3, s1)
self.addLink(h4, s1)
self.addLink(h5, s1)
self.addLink(h6, s1)
#
def simpleTest():
"Create and test a simple network"
topo = SingleSwitchTopo()
net = Mininet(topo)
net.start()
print "Dumping host connections"
dumpNodeConnections(net.hosts)
print "Testing network connectivity"
net.pingAll()
# net.stop()
h1 = net.get('h1')
h2 = net.get('h2')
h3 = net.get('h3')
h4 = net.get('h4')
h5 = net.get('h5')
h6 = net.get('h6')
for host in [h1, h2, h3, h4, h5, h6]:
host.cmdPrint('cd /media/sf_DHT-Torrent')
h1.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 600 --ip ' + h1.IP() + ' \' > h1.sh')
h2.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 500 --ip ' + h2.IP() + " --nextpeerid 600 --nextpeerip " + h1.IP() + ' \' > h2.sh')
h3.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 400 --ip ' + h3.IP() + " --nextpeerid 500 --nextpeerip " + h2.IP() + ' \' > h3.sh')
h4.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 300 --ip ' + h4.IP() + " --nextpeerid 400 --nextpeerip " + h3.IP() + ' \' > h4.sh')
h5.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 200 --ip ' + h5.IP() + " --nextpeerid 300 --nextpeerip " + h4.IP() + ' \' > h5.sh')
h6.cmdPrint('echo \'python /media/sf_DHT-Torrent/start.py --static --id 100 --ip ' + h6.IP() + " --nextpeerid 200 --nextpeerip " + h5.IP() + ' \' > h6.sh')
# h1.cmdPrint('ls')
net.startTerms()
CLI(net)
# CLI(net).do_xterm(h1)
net.stopXterms()
net.stop()
if __name__ == '__main__':
# Tell mininet to print useful information
setLogLevel('info')
simpleTest()
topos = { 'mytopo': SingleSwitchTopo }
# tests = { 'mytest': simpleTest }
|
normal
|
{
"blob_id": "8fd74287fbc653ea3ed4aa76a272486aa29185cf",
"index": 1032,
"step-1": "# !/usr/bin/python\n\n# sudo mn --custom _mininet_topo.py --topo mytopo,5\n# sudo mn --custom _mininet_topo.py --topo mytopo,3 --test simpletest\n# or just run this python file\n\nfrom mininet.topo import Topo\nfrom mininet.net import Mininet\nfrom mininet.util import dumpNodeConnections\nfrom mininet.log import setLogLevel\nfrom mininet.cli import CLI\n\n\nclass SingleSwitchTopo(Topo):\n \"Single switch connected to n hosts.\"\n\n def build(self):\n # switch = self.addSwitch('s1')\n # # Python's range(N) generates 0..N-1\n # for h in range(n):\n # host = self.addHost('h%s' % (h + 1))\n # self.addLink(host, switch)\n\n s1 = self.addSwitch('s1')\n\n h1 = self.addHost('h1')\n h2 = self.addHost('h2')\n h3 = self.addHost('h3')\n h4 = self.addHost('h4')\n h5 = self.addHost('h5')\n h6 = self.addHost('h6')\n\n self.addLink(h1, s1)\n self.addLink(h2, s1)\n self.addLink(h3, s1)\n self.addLink(h4, s1)\n self.addLink(h5, s1)\n self.addLink(h6, s1)\n\n#\ndef simpleTest():\n \"Create and test a simple network\"\n topo = SingleSwitchTopo()\n net = Mininet(topo)\n net.start()\n print \"Dumping host connections\"\n dumpNodeConnections(net.hosts)\n print \"Testing network connectivity\"\n net.pingAll()\n # net.stop()\n\n h1 = net.get('h1')\n h2 = net.get('h2')\n h3 = net.get('h3')\n h4 = net.get('h4')\n h5 = net.get('h5')\n h6 = net.get('h6')\n\n\n for host in [h1, h2, h3, h4, h5, h6]:\n host.cmdPrint('cd /media/sf_DHT-Torrent')\n\n h1.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 600 --ip ' + h1.IP() + ' \\' > h1.sh')\n h2.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 500 --ip ' + h2.IP() + \" --nextpeerid 600 --nextpeerip \" + h1.IP() + ' \\' > h2.sh')\n h3.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 400 --ip ' + h3.IP() + \" --nextpeerid 500 --nextpeerip \" + h2.IP() + ' \\' > h3.sh')\n h4.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 300 --ip ' + h4.IP() + \" --nextpeerid 400 --nextpeerip \" + h3.IP() + ' \\' > h4.sh')\n h5.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 200 --ip ' + h5.IP() + \" --nextpeerid 300 --nextpeerip \" + h4.IP() + ' \\' > h5.sh')\n h6.cmdPrint('echo \\'python /media/sf_DHT-Torrent/start.py --static --id 100 --ip ' + h6.IP() + \" --nextpeerid 200 --nextpeerip \" + h5.IP() + ' \\' > h6.sh')\n\n # h1.cmdPrint('ls')\n\n net.startTerms()\n CLI(net)\n # CLI(net).do_xterm(h1)\n\n net.stopXterms()\n net.stop()\n\nif __name__ == '__main__':\n # Tell mininet to print useful information\n setLogLevel('info')\n simpleTest()\n\ntopos = { 'mytopo': SingleSwitchTopo }\n# tests = { 'mytest': simpleTest }",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
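The commented-out loop in build() generalizes the fixed six-host topology; a sketch of that parameterized form, assuming Mininet's topology API (the record itself targets Python 2, as the bare print statements show):

from mininet.topo import Topo

class ParamSwitchTopo(Topo):
    "Single switch connected to n hosts."
    def build(self, n=6):
        switch = self.addSwitch('s1')
        for h in range(n):
            # hosts are named h1..hn, each linked to the one switch
            self.addLink(self.addHost('h%s' % (h + 1)), switch)

topos = {'mytopo': ParamSwitchTopo}  # e.g. sudo mn --custom thisfile.py --topo mytopo,6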
class BucketSort:
def __init__(self, a):
self.a = a
def result(self, bucketCount=10):
buckets = [[] for i in range(bucketCount + 1)]
maxElement = max(self.a)
minElement = min(self.a)
bucketRange = (maxElement - minElement + 1) / bucketCount
for i in range(len(self.a)):
bucketIndex = int((self.a[i] - minElement) / bucketRange)
buckets[bucketIndex].append(self.a[i])
for i in range(len(buckets)):
buckets[i] = sorted(buckets[i])
self.a = []
for bucket in buckets:
self.a.extend(bucket)
return self.a
|
normal
|
{
"blob_id": "3b803850418638bf65528088044918e93ecabff6",
"index": 3085,
"step-1": "<mask token>\n",
"step-2": "class BucketSort:\n <mask token>\n <mask token>\n",
"step-3": "class BucketSort:\n <mask token>\n\n def result(self, bucketCount=10):\n buckets = [[] for i in range(bucketCount + 1)]\n maxElement = max(self.a)\n minElement = min(self.a)\n bucketRange = (maxElement - minElement + 1) / bucketCount\n for i in range(len(self.a)):\n bucketIndex = int((self.a[i] - minElement) / bucketRange)\n buckets[bucketIndex].append(self.a[i])\n for i in range(len(buckets)):\n buckets[i] = sorted(buckets[i])\n self.a = []\n for bucket in buckets:\n self.a.extend(bucket)\n return self.a\n",
"step-4": "class BucketSort:\n\n def __init__(self, a):\n self.a = a\n\n def result(self, bucketCount=10):\n buckets = [[] for i in range(bucketCount + 1)]\n maxElement = max(self.a)\n minElement = min(self.a)\n bucketRange = (maxElement - minElement + 1) / bucketCount\n for i in range(len(self.a)):\n bucketIndex = int((self.a[i] - minElement) / bucketRange)\n buckets[bucketIndex].append(self.a[i])\n for i in range(len(buckets)):\n buckets[i] = sorted(buckets[i])\n self.a = []\n for bucket in buckets:\n self.a.extend(bucket)\n return self.a\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
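A quick usage check for the BucketSort class above: with min 3, max 49 and the default ten buckets, bucketRange is (49 - 3 + 1) / 10 = 4.7, so each element x lands in bucket int((x - 3) / 4.7) before the per-bucket sorts are concatenated.

data = [29, 25, 3, 49, 9, 37, 21, 43]
print(BucketSort(data).result())  # [3, 9, 21, 25, 29, 37, 43, 49]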
# -*- coding: utf-8 -*-
from pathlib import Path
from ruamel.yaml import YAML
from .screen import color2sgr
def _get(d, *paths):
""" Query into configuration dictionary, return None on any error
usag:
_get(d, 'k1.2.k3.k4', 2, 'name')
"""
if d is None:
return None
if paths is None:
return None
for path in paths:
if path is None:
return None
path = path.split('.')
for key in path:
try:
i = int(key)
if i in d:
return d[i]
else:
return None
except BaseException:
d = d.get(key, None)
if d is None:
return None
return d
class _Settings:
def __init__(self):
self._loadConfigs()
self._loadSymbols()
self._loadColors()
# margin
v, d = self._valueAt('margin')
if isinstance(v, int) and v > 0:
self.margin = v
else:
self.margin = d
# symbolWidth
v, d = self._valueAt('symbols.width')
if isinstance(v, int) and v > 0:
self.symbolWidth = v
else:
self.symbolWidth = d
# sessionTimeLinePadding
v, d = self._valueAt('sessionTimeLinePadding')
if isinstance(v, int) and v > 0:
self.sessionTimeLinePadding = v
else:
self.sessionTimeLinePadding = d
# logTimeLinePadding
v, d = self._valueAt('logTimeLinePadding')
if isinstance(v, int) and v > 0:
self.logTimeLinePadding = v
else:
self.logTimeLinePadding = d
def _valueAt(self, *paths):
u = _get(self.userConfig, *paths)
d = _get(self.defaultConfig, *paths)
return u, d
def _loadConfigs(self):
yaml = YAML()
defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'
self.defaultConfig = yaml.load(defaultFile)
userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()
userFile.parent.mkdir(parents=True, exist_ok=True)
if not userFile.exists():
userFile.write_text(defaultFile.read_text())
self.userConfig = yaml.load(userFile)
def _loadSymbols(self):
use = _get(self.userConfig, 'symbols.use')
scheme = _get(self.userConfig, 'symbols.schemes', use)
default = _get(self.defaultConfig, 'symbols.schemes.default')
symbols = {}
for name in default:
v = _get(scheme, name)
d = default[name]
if isinstance(v, str):
symbols[name] = v[0]
else:
symbols[name] = d
self.symbols = symbols
def _loadColors(self):
# colors
use = _get(self.userConfig, 'colors.use')
scheme = _get(self.userConfig, 'colors.schemes', use)
default = _get(self.defaultConfig, 'colors.schemes.default')
colors = {}
for name in default:
colors[name] = color2sgr(_get(scheme, name)) \
or color2sgr(default[name])
self.colors = colors
settings = _Settings()
|
normal
|
{
"blob_id": "784159dfb2e85ca4634adf790e68129834155e4d",
"index": 2702,
"step-1": "<mask token>\n\n\nclass _Settings:\n <mask token>\n\n def _valueAt(self, *paths):\n u = _get(self.userConfig, *paths)\n d = _get(self.defaultConfig, *paths)\n return u, d\n\n def _loadConfigs(self):\n yaml = YAML()\n defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'\n self.defaultConfig = yaml.load(defaultFile)\n userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()\n userFile.parent.mkdir(parents=True, exist_ok=True)\n if not userFile.exists():\n userFile.write_text(defaultFile.read_text())\n self.userConfig = yaml.load(userFile)\n\n def _loadSymbols(self):\n use = _get(self.userConfig, 'symbols.use')\n scheme = _get(self.userConfig, 'symbols.schemes', use)\n default = _get(self.defaultConfig, 'symbols.schemes.default')\n symbols = {}\n for name in default:\n v = _get(scheme, name)\n d = default[name]\n if isinstance(v, str):\n symbols[name] = v[0]\n else:\n symbols[name] = d\n self.symbols = symbols\n\n def _loadColors(self):\n use = _get(self.userConfig, 'colors.use')\n scheme = _get(self.userConfig, 'colors.schemes', use)\n default = _get(self.defaultConfig, 'colors.schemes.default')\n colors = {}\n for name in default:\n colors[name] = color2sgr(_get(scheme, name)) or color2sgr(default\n [name])\n self.colors = colors\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass _Settings:\n\n def __init__(self):\n self._loadConfigs()\n self._loadSymbols()\n self._loadColors()\n v, d = self._valueAt('margin')\n if isinstance(v, int) and v > 0:\n self.margin = v\n else:\n self.margin = d\n v, d = self._valueAt('symbols.width')\n if isinstance(v, int) and v > 0:\n self.symbolWidth = v\n else:\n self.symbolWidth = d\n v, d = self._valueAt('sessionTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.sessionTimeLinePadding = v\n else:\n self.sessionTimeLinePadding = d\n v, d = self._valueAt('logTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.logTimeLinePadding = v\n else:\n self.logTimeLinePadding = d\n\n def _valueAt(self, *paths):\n u = _get(self.userConfig, *paths)\n d = _get(self.defaultConfig, *paths)\n return u, d\n\n def _loadConfigs(self):\n yaml = YAML()\n defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'\n self.defaultConfig = yaml.load(defaultFile)\n userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()\n userFile.parent.mkdir(parents=True, exist_ok=True)\n if not userFile.exists():\n userFile.write_text(defaultFile.read_text())\n self.userConfig = yaml.load(userFile)\n\n def _loadSymbols(self):\n use = _get(self.userConfig, 'symbols.use')\n scheme = _get(self.userConfig, 'symbols.schemes', use)\n default = _get(self.defaultConfig, 'symbols.schemes.default')\n symbols = {}\n for name in default:\n v = _get(scheme, name)\n d = default[name]\n if isinstance(v, str):\n symbols[name] = v[0]\n else:\n symbols[name] = d\n self.symbols = symbols\n\n def _loadColors(self):\n use = _get(self.userConfig, 'colors.use')\n scheme = _get(self.userConfig, 'colors.schemes', use)\n default = _get(self.defaultConfig, 'colors.schemes.default')\n colors = {}\n for name in default:\n colors[name] = color2sgr(_get(scheme, name)) or color2sgr(default\n [name])\n self.colors = colors\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _get(d, *paths):\n \"\"\" Query into configuration dictionary, return None on any error\n usag:\n _get(d, 'k1.2.k3.k4', 2, 'name')\n \"\"\"\n if d is None:\n return None\n if paths is None:\n return None\n for path in paths:\n if path is None:\n return None\n path = path.split('.')\n for key in path:\n try:\n i = int(key)\n if i in d:\n return d[i]\n else:\n return None\n except BaseException:\n d = d.get(key, None)\n if d is None:\n return None\n return d\n\n\nclass _Settings:\n\n def __init__(self):\n self._loadConfigs()\n self._loadSymbols()\n self._loadColors()\n v, d = self._valueAt('margin')\n if isinstance(v, int) and v > 0:\n self.margin = v\n else:\n self.margin = d\n v, d = self._valueAt('symbols.width')\n if isinstance(v, int) and v > 0:\n self.symbolWidth = v\n else:\n self.symbolWidth = d\n v, d = self._valueAt('sessionTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.sessionTimeLinePadding = v\n else:\n self.sessionTimeLinePadding = d\n v, d = self._valueAt('logTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.logTimeLinePadding = v\n else:\n self.logTimeLinePadding = d\n\n def _valueAt(self, *paths):\n u = _get(self.userConfig, *paths)\n d = _get(self.defaultConfig, *paths)\n return u, d\n\n def _loadConfigs(self):\n yaml = YAML()\n defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'\n self.defaultConfig = yaml.load(defaultFile)\n userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()\n userFile.parent.mkdir(parents=True, exist_ok=True)\n if not userFile.exists():\n userFile.write_text(defaultFile.read_text())\n self.userConfig = yaml.load(userFile)\n\n def _loadSymbols(self):\n use = _get(self.userConfig, 'symbols.use')\n scheme = _get(self.userConfig, 'symbols.schemes', use)\n default = _get(self.defaultConfig, 'symbols.schemes.default')\n symbols = {}\n for name in default:\n v = _get(scheme, name)\n d = default[name]\n if isinstance(v, str):\n symbols[name] = v[0]\n else:\n symbols[name] = d\n self.symbols = symbols\n\n def _loadColors(self):\n use = _get(self.userConfig, 'colors.use')\n scheme = _get(self.userConfig, 'colors.schemes', use)\n default = _get(self.defaultConfig, 'colors.schemes.default')\n colors = {}\n for name in default:\n colors[name] = color2sgr(_get(scheme, name)) or color2sgr(default\n [name])\n self.colors = colors\n\n\nsettings = _Settings()\n",
"step-4": "from pathlib import Path\nfrom ruamel.yaml import YAML\nfrom .screen import color2sgr\n\n\ndef _get(d, *paths):\n \"\"\" Query into configuration dictionary, return None on any error\n usag:\n _get(d, 'k1.2.k3.k4', 2, 'name')\n \"\"\"\n if d is None:\n return None\n if paths is None:\n return None\n for path in paths:\n if path is None:\n return None\n path = path.split('.')\n for key in path:\n try:\n i = int(key)\n if i in d:\n return d[i]\n else:\n return None\n except BaseException:\n d = d.get(key, None)\n if d is None:\n return None\n return d\n\n\nclass _Settings:\n\n def __init__(self):\n self._loadConfigs()\n self._loadSymbols()\n self._loadColors()\n v, d = self._valueAt('margin')\n if isinstance(v, int) and v > 0:\n self.margin = v\n else:\n self.margin = d\n v, d = self._valueAt('symbols.width')\n if isinstance(v, int) and v > 0:\n self.symbolWidth = v\n else:\n self.symbolWidth = d\n v, d = self._valueAt('sessionTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.sessionTimeLinePadding = v\n else:\n self.sessionTimeLinePadding = d\n v, d = self._valueAt('logTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.logTimeLinePadding = v\n else:\n self.logTimeLinePadding = d\n\n def _valueAt(self, *paths):\n u = _get(self.userConfig, *paths)\n d = _get(self.defaultConfig, *paths)\n return u, d\n\n def _loadConfigs(self):\n yaml = YAML()\n defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'\n self.defaultConfig = yaml.load(defaultFile)\n userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()\n userFile.parent.mkdir(parents=True, exist_ok=True)\n if not userFile.exists():\n userFile.write_text(defaultFile.read_text())\n self.userConfig = yaml.load(userFile)\n\n def _loadSymbols(self):\n use = _get(self.userConfig, 'symbols.use')\n scheme = _get(self.userConfig, 'symbols.schemes', use)\n default = _get(self.defaultConfig, 'symbols.schemes.default')\n symbols = {}\n for name in default:\n v = _get(scheme, name)\n d = default[name]\n if isinstance(v, str):\n symbols[name] = v[0]\n else:\n symbols[name] = d\n self.symbols = symbols\n\n def _loadColors(self):\n use = _get(self.userConfig, 'colors.use')\n scheme = _get(self.userConfig, 'colors.schemes', use)\n default = _get(self.defaultConfig, 'colors.schemes.default')\n colors = {}\n for name in default:\n colors[name] = color2sgr(_get(scheme, name)) or color2sgr(default\n [name])\n self.colors = colors\n\n\nsettings = _Settings()\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom pathlib import Path\n\nfrom ruamel.yaml import YAML\n\nfrom .screen import color2sgr\n\n\ndef _get(d, *paths):\n \"\"\" Query into configuration dictionary, return None on any error\n usag:\n _get(d, 'k1.2.k3.k4', 2, 'name')\n \"\"\"\n if d is None:\n return None\n\n if paths is None:\n return None\n\n for path in paths:\n if path is None:\n return None\n\n path = path.split('.')\n for key in path:\n try:\n i = int(key)\n if i in d:\n return d[i]\n else:\n return None\n\n except BaseException:\n d = d.get(key, None)\n if d is None:\n return None\n\n return d\n\n\nclass _Settings:\n\n def __init__(self):\n self._loadConfigs()\n self._loadSymbols()\n self._loadColors()\n\n # margin\n v, d = self._valueAt('margin')\n if isinstance(v, int) and v > 0:\n self.margin = v\n else:\n self.margin = d\n\n # symbolWidth\n v, d = self._valueAt('symbols.width')\n if isinstance(v, int) and v > 0:\n self.symbolWidth = v\n else:\n self.symbolWidth = d\n\n # sessionTimeLinePadding\n v, d = self._valueAt('sessionTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.sessionTimeLinePadding = v\n else:\n self.sessionTimeLinePadding = d\n\n # logTimeLinePadding\n v, d = self._valueAt('logTimeLinePadding')\n if isinstance(v, int) and v > 0:\n self.logTimeLinePadding = v\n else:\n self.logTimeLinePadding = d\n\n def _valueAt(self, *paths):\n u = _get(self.userConfig, *paths)\n d = _get(self.defaultConfig, *paths)\n return u, d\n\n def _loadConfigs(self):\n yaml = YAML()\n\n defaultFile = Path(__file__).parent / 'resources' / 'jaclog.yml'\n self.defaultConfig = yaml.load(defaultFile)\n\n userFile = Path('~/.config/jaclog/jaclog.yml').expanduser()\n userFile.parent.mkdir(parents=True, exist_ok=True)\n if not userFile.exists():\n userFile.write_text(defaultFile.read_text())\n self.userConfig = yaml.load(userFile)\n\n def _loadSymbols(self):\n use = _get(self.userConfig, 'symbols.use')\n scheme = _get(self.userConfig, 'symbols.schemes', use)\n default = _get(self.defaultConfig, 'symbols.schemes.default')\n\n symbols = {}\n for name in default:\n v = _get(scheme, name)\n d = default[name]\n\n if isinstance(v, str):\n symbols[name] = v[0]\n else:\n symbols[name] = d\n\n self.symbols = symbols\n\n def _loadColors(self):\n # colors\n use = _get(self.userConfig, 'colors.use')\n scheme = _get(self.userConfig, 'colors.schemes', use)\n default = _get(self.defaultConfig, 'colors.schemes.default')\n\n colors = {}\n for name in default:\n colors[name] = color2sgr(_get(scheme, name)) \\\n or color2sgr(default[name])\n\n self.colors = colors\n\n\nsettings = _Settings()\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
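The dotted-path lookup in _get can be exercised on its own (assuming the module-private _get from the record above is importable); missing keys at any depth fall through to None:

d = {'colors': {'schemes': {'default': {'error': 'red'}}}}
print(_get(d, 'colors.schemes.default.error'))  # red
print(_get(d, 'colors.schemes.nope.error'))     # None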
from django.urls import path
from rest_framework.routers import DefaultRouter
from . import views
app_name = "rooms"
router = DefaultRouter()
router.register("", views.RoomViewSet)
urlpatterns = router.urls
#
# urlpatterns = [
# # path("list/", views.ListRoomsView.as_view()),
# # path("list/", views.rooms_view),
# path("list/",views.RoomsView.as_view()),
# path('<int:pk>/',views.RoomView.as_view()),
# path('search/',views.room_search)
# ]
|
normal
|
{
"blob_id": "96708216c5ffa56a60475b295c21b18225e6eed9",
"index": 6056,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrouter.register('', views.RoomViewSet)\n<mask token>\n",
"step-3": "<mask token>\napp_name = 'rooms'\nrouter = DefaultRouter()\nrouter.register('', views.RoomViewSet)\nurlpatterns = router.urls\n",
"step-4": "from django.urls import path\nfrom rest_framework.routers import DefaultRouter\nfrom . import views\napp_name = 'rooms'\nrouter = DefaultRouter()\nrouter.register('', views.RoomViewSet)\nurlpatterns = router.urls\n",
"step-5": "from django.urls import path\nfrom rest_framework.routers import DefaultRouter\nfrom . import views\n\napp_name = \"rooms\"\nrouter = DefaultRouter()\nrouter.register(\"\", views.RoomViewSet)\n\nurlpatterns = router.urls\n#\n# urlpatterns = [\n# # path(\"list/\", views.ListRoomsView.as_view()),\n# # path(\"list/\", views.rooms_view),\n# path(\"list/\",views.RoomsView.as_view()),\n# path('<int:pk>/',views.RoomView.as_view()),\n# path('search/',views.room_search)\n# ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
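For reference, registering a ViewSet at the root prefix as above makes DefaultRouter emit the standard list/detail route pair. A sketch with the basename written out explicitly (when omitted, DRF derives it from the ViewSet's queryset):

from rest_framework.routers import DefaultRouter
from . import views

router = DefaultRouter()
router.register('', views.RoomViewSet, basename='room')
# generated patterns (roughly):
#   ''      -> 'room-list'    GET=list, POST=create
#   '<pk>/' -> 'room-detail'  GET=retrieve, PUT/PATCH=update, DELETE=destroy
urlpatterns = router.urls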
import sklearn
import pandas as pd
import numpy as np
from sklearn import datasets, ensemble
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
import statistics as st
import itertools
from sklearn.model_selection import cross_val_score
from sklearn.experimental import enable_hist_gradient_boosting
from sklearn.ensemble import HistGradientBoostingRegressor
from sklearn.ensemble import HistGradientBoostingClassifier
from statsmodels import regression as reg
import statsmodels.api as regMods
from scipy.stats import norm
from scipy.stats import gamma
from scipy.stats import expon
from scipy.stats import poisson
from scipy.stats import binom
from scipy.stats import t
import plotly.express as px
import plotly.figure_factory as ff
heart=pd.read_csv(r"C:\Users\fredr\Documents\StatTool\BZAN540\Homework\HW6\HeartDisease.csv")
heart.columns
train, test = train_test_split(heart[['x1', 'x2', 'x3', 'x4', 'x5','HeartDisease']], test_size=0.2)
y_train=train['HeartDisease']
x_train=train[['x1', 'x2', 'x3', 'x4', 'x5']]
x_test=test[['x1', 'x2', 'x3', 'x4', 'x5']]
y_test=test['HeartDisease']
#boosting to predict heart disease
#make expand grid function to get all combos of the parameters
def expandgrid(*itrs):
product = list(itertools.product(*itrs))
return {'Var{}'.format(i+1):[x[i] for x in product] for i in range(len(itrs))}
#set the range for the parameter values:
n_estimators=np.arange(300, 450, 50) #the number of trees to fit
max_depth=np.arange(3, 5, 1)
min_samples_split=np.arange(3,4,1)
learning_rate=np.arange(0.001,0.004,0.001)
a=expandgrid(n_estimators,max_depth, min_samples_split,learning_rate)
params=pd.DataFrame.from_dict(a)
len(params)
#time the code ???
#looping through the possible parameters for the model and storing the estimated validation accuracy
ValAcc=list(range(0,len(params)))
for i in range(0,len(params)):
scores = cross_val_score(HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].iloc[i],
max_depth=params['Var2'].iloc[i],
learning_rate=params['Var4'].iloc[i],max_iter=params['Var1'].iloc[i]).fit(x_train, y_train),
x_train, y_train, cv=4,scoring='accuracy')
acc=st.mean(scores)
ValAcc[i]=acc
ValAcc
max(ValAcc)
pars=list(params.iloc[np.array(ValAcc)==max(ValAcc)].iloc[0]) #the boolean mask must be an array; a plain list compares as a scalar and silently picks row 0
pars.append(max(ValAcc))
pars
bestPos=np.array(np.where(np.array(ValAcc)==max(ValAcc))).tolist()[0][0]
#fit the best model on train, then predict on test; if test accuracy is close to the validation accuracy, fit on the entire data
bestPos
bestMod=HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].iloc[bestPos],
max_depth=params['Var2'].iloc[bestPos],
learning_rate=params['Var4'].iloc[bestPos],max_iter=params['Var1'].iloc[bestPos]).fit(x_train, y_train)
#gets the predicted values on the test data
bestMod.predict(x_test)
len(y_test[bestMod.predict(x_test)==y_test])/len(y_test) #67% acc on test
#create a one-row dataframe whose columns are the independent variables from the model fit above, fill it with input values per variable, then predict y on those values
df_i=pd.DataFrame({'x1':np.mean(heart['x1']), 'x2':np.mean(heart['x2']),'x3':np.mean(heart['x3']),'x4':np.mean(heart['x4']),'x5':np.mean(heart['x5'])},index=[0])
if(bestMod.predict(df_i)==0):
print('Predicted: No Heart Disease')
else:
print('Predicted: Has Heart Disease')
#for each variable, plot two densities: one centered on the variable's mean and one on the selected value
#start by treating each variable as a normal distro: plot a density curve whose mean is the variable's mean,
#and a second curve on the same plot whose mean is the selected input value, with sd set to the variable's sd for both
#for both curves, generate random values the size of the data; except for history of heart disease, treat as beta with p=actual prob for the var and p=a random value
#greater than .5
#generates random values from a normal distro with mean=loc and sd=scale
norm.rvs(size=10000,loc=3,scale=8)
#x1:
x1=190
mean=np.mean(heart['x1'])
sd=np.std(heart['x1'])
meanx1_2=x1
xActual=norm.rvs(size=len(heart),loc=mean,scale=sd)
xInput=norm.rvs(size=len(heart),loc=meanx1_2,scale=sd)
group_labels = ['actual','center_selected']
hist_data=[xActual,xInput]
fig = ff.create_distplot(hist_data,group_labels)
fig.show()
|
normal
|
{
"blob_id": "0d862715524bd35347626e7708c7c8f8b370bb3a",
"index": 7769,
"step-1": "<mask token>\n\n\ndef expandgrid(*itrs):\n product = list(itertools.product(*itrs))\n return {'Var{}'.format(i + 1): [x[i] for x in product] for i in range(\n len(itrs))}\n\n\n<mask token>\n",
"step-2": "<mask token>\nheart.columns\n<mask token>\n\n\ndef expandgrid(*itrs):\n product = list(itertools.product(*itrs))\n return {'Var{}'.format(i + 1): [x[i] for x in product] for i in range(\n len(itrs))}\n\n\n<mask token>\nlen(params)\n<mask token>\nfor i in range(0, len(params)):\n scores = cross_val_score(HistGradientBoostingClassifier(\n min_samples_leaf=params['Var3'].iloc[i], max_depth=params['Var2'].\n iloc[i], learning_rate=params['Var4'].iloc[i], max_iter=params[\n 'Var1'].iloc[i]).fit(x_train, y_train), x_train, y_train, cv=4,\n scoring='accuracy')\n acc = st.mean(scores)\n ValAcc[i] = acc\nValAcc\nmax(ValAcc)\n<mask token>\npars.append(max(ValAcc))\npars\n<mask token>\nbestPos\n<mask token>\nbestMod.predict(x_test)\nlen(y_test[bestMod.predict(x_test) == y_test]) / len(y_test)\n<mask token>\nif bestMod.predict(df_i) == 0:\n print('Predicted: No Heart Disease')\nelse:\n print('Predicted: Has Heart Disease')\nnorm.rvs(size=10000, loc=3, scale=8)\n<mask token>\nfig.show()\n",
"step-3": "<mask token>\nheart = pd.read_csv(\n 'C:\\\\Users\\\\fredr\\\\Documents\\\\StatTool\\\\BZAN540\\\\Homework\\\\HW6\\\\HeartDisease.csv'\n )\nheart.columns\ntrain, test = train_test_split(heart[['x1', 'x2', 'x3', 'x4', 'x5',\n 'HeartDisease']], test_size=0.2)\ny_train = train['HeartDisease']\nx_train = train[['x1', 'x2', 'x3', 'x4', 'x5']]\nx_test = test[['x1', 'x2', 'x3', 'x4', 'x5']]\ny_test = test['HeartDisease']\n\n\ndef expandgrid(*itrs):\n product = list(itertools.product(*itrs))\n return {'Var{}'.format(i + 1): [x[i] for x in product] for i in range(\n len(itrs))}\n\n\nn_estimators = np.arange(300, 450, 50)\nmax_depth = np.arange(3, 5, 1)\nmin_samples_split = np.arange(3, 4, 1)\nlearning_rate = np.arange(0.001, 0.004, 0.001)\na = expandgrid(n_estimators, max_depth, min_samples_split, learning_rate)\nparams = pd.DataFrame.from_dict(a)\nlen(params)\nValAcc = list(range(0, len(params)))\nfor i in range(0, len(params)):\n scores = cross_val_score(HistGradientBoostingClassifier(\n min_samples_leaf=params['Var3'].iloc[i], max_depth=params['Var2'].\n iloc[i], learning_rate=params['Var4'].iloc[i], max_iter=params[\n 'Var1'].iloc[i]).fit(x_train, y_train), x_train, y_train, cv=4,\n scoring='accuracy')\n acc = st.mean(scores)\n ValAcc[i] = acc\nValAcc\nmax(ValAcc)\npars = list(params.iloc[ValAcc == max(ValAcc)].iloc[0])\npars.append(max(ValAcc))\npars\nbestPos = np.array(np.where(np.array(ValAcc) == max(ValAcc))).tolist()[0][0]\nbestPos\nbestMod = HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].\n iloc[bestPos], max_depth=params['Var2'].iloc[bestPos], learning_rate=\n params['Var4'].iloc[bestPos], max_iter=params['Var1'].iloc[bestPos]).fit(\n x_train, y_train)\nbestMod.predict(x_test)\nlen(y_test[bestMod.predict(x_test) == y_test]) / len(y_test)\ndf_i = pd.DataFrame({'x1': np.mean(heart['x1']), 'x2': np.mean(heart['x2']),\n 'x3': np.mean(heart['x3']), 'x4': np.mean(heart['x4']), 'x5': np.mean(\n heart['x5'])}, index=[0])\nif bestMod.predict(df_i) == 0:\n print('Predicted: No Heart Disease')\nelse:\n print('Predicted: Has Heart Disease')\nnorm.rvs(size=10000, loc=3, scale=8)\nx1 = 190\nmean = np.mean(heart['x1'])\nsd = np.std(heart['x1'])\nmeanx1_2 = x1\nxActual = norm.rvs(size=len(heart), loc=mean, scale=sd)\nxInput = norm.rvs(size=len(heart), loc=meanx1_2, scale=sd)\ngroup_labels = ['actual', 'center_selected']\nhist_data = [xActual, xInput]\nfig = ff.create_distplot(hist_data, group_labels)\nfig.show()\n",
"step-4": "import sklearn\nimport pandas as pd\nimport numpy as np\nfrom sklearn import datasets, ensemble\nfrom sklearn.metrics import mean_squared_error\nfrom sklearn.model_selection import train_test_split\nimport statistics as st\nimport itertools\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn.experimental import enable_hist_gradient_boosting\nfrom sklearn.ensemble import HistGradientBoostingRegressor\nfrom sklearn.ensemble import HistGradientBoostingClassifier\nfrom statsmodels import regression as reg\nimport statsmodels.api as regMods\nfrom scipy.stats import norm\nfrom scipy.stats import gamma\nfrom scipy.stats import expon\nfrom scipy.stats import poisson\nfrom scipy.stats import binom\nfrom scipy.stats import t\nimport plotly.express as px\nimport plotly.figure_factory as ff\nheart = pd.read_csv(\n 'C:\\\\Users\\\\fredr\\\\Documents\\\\StatTool\\\\BZAN540\\\\Homework\\\\HW6\\\\HeartDisease.csv'\n )\nheart.columns\ntrain, test = train_test_split(heart[['x1', 'x2', 'x3', 'x4', 'x5',\n 'HeartDisease']], test_size=0.2)\ny_train = train['HeartDisease']\nx_train = train[['x1', 'x2', 'x3', 'x4', 'x5']]\nx_test = test[['x1', 'x2', 'x3', 'x4', 'x5']]\ny_test = test['HeartDisease']\n\n\ndef expandgrid(*itrs):\n product = list(itertools.product(*itrs))\n return {'Var{}'.format(i + 1): [x[i] for x in product] for i in range(\n len(itrs))}\n\n\nn_estimators = np.arange(300, 450, 50)\nmax_depth = np.arange(3, 5, 1)\nmin_samples_split = np.arange(3, 4, 1)\nlearning_rate = np.arange(0.001, 0.004, 0.001)\na = expandgrid(n_estimators, max_depth, min_samples_split, learning_rate)\nparams = pd.DataFrame.from_dict(a)\nlen(params)\nValAcc = list(range(0, len(params)))\nfor i in range(0, len(params)):\n scores = cross_val_score(HistGradientBoostingClassifier(\n min_samples_leaf=params['Var3'].iloc[i], max_depth=params['Var2'].\n iloc[i], learning_rate=params['Var4'].iloc[i], max_iter=params[\n 'Var1'].iloc[i]).fit(x_train, y_train), x_train, y_train, cv=4,\n scoring='accuracy')\n acc = st.mean(scores)\n ValAcc[i] = acc\nValAcc\nmax(ValAcc)\npars = list(params.iloc[ValAcc == max(ValAcc)].iloc[0])\npars.append(max(ValAcc))\npars\nbestPos = np.array(np.where(np.array(ValAcc) == max(ValAcc))).tolist()[0][0]\nbestPos\nbestMod = HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].\n iloc[bestPos], max_depth=params['Var2'].iloc[bestPos], learning_rate=\n params['Var4'].iloc[bestPos], max_iter=params['Var1'].iloc[bestPos]).fit(\n x_train, y_train)\nbestMod.predict(x_test)\nlen(y_test[bestMod.predict(x_test) == y_test]) / len(y_test)\ndf_i = pd.DataFrame({'x1': np.mean(heart['x1']), 'x2': np.mean(heart['x2']),\n 'x3': np.mean(heart['x3']), 'x4': np.mean(heart['x4']), 'x5': np.mean(\n heart['x5'])}, index=[0])\nif bestMod.predict(df_i) == 0:\n print('Predicted: No Heart Disease')\nelse:\n print('Predicted: Has Heart Disease')\nnorm.rvs(size=10000, loc=3, scale=8)\nx1 = 190\nmean = np.mean(heart['x1'])\nsd = np.std(heart['x1'])\nmeanx1_2 = x1\nxActual = norm.rvs(size=len(heart), loc=mean, scale=sd)\nxInput = norm.rvs(size=len(heart), loc=meanx1_2, scale=sd)\ngroup_labels = ['actual', 'center_selected']\nhist_data = [xActual, xInput]\nfig = ff.create_distplot(hist_data, group_labels)\nfig.show()\n",
"step-5": "import sklearn\r\nimport pandas as pd \r\n\r\nimport numpy as np\r\nfrom sklearn import datasets, ensemble\r\nfrom sklearn.metrics import mean_squared_error\r\nfrom sklearn.model_selection import train_test_split\r\nimport statistics as st\r\nimport itertools\r\nfrom sklearn.model_selection import cross_val_score\r\nfrom sklearn.experimental import enable_hist_gradient_boosting \r\nfrom sklearn.ensemble import HistGradientBoostingRegressor\r\nfrom sklearn.ensemble import HistGradientBoostingClassifier\r\nfrom statsmodels import regression as reg\r\nimport statsmodels.api as regMods \r\nfrom scipy.stats import norm\r\nfrom scipy.stats import gamma\r\nfrom scipy.stats import expon\r\nfrom scipy.stats import poisson\r\nfrom scipy.stats import binom\r\nfrom scipy.stats import t \r\nimport plotly.express as px\r\nimport plotly.figure_factory as ff\r\n\r\nheart=pd.read_csv(r\"C:\\Users\\fredr\\Documents\\StatTool\\BZAN540\\Homework\\HW6\\HeartDisease.csv\") \r\nheart.columns\r\n\r\ntrain, test = train_test_split(heart[['x1', 'x2', 'x3', 'x4', 'x5','HeartDisease']], test_size=0.2)\r\ny_train=train['HeartDisease']\r\nx_train=train[['x1', 'x2', 'x3', 'x4', 'x5']]\r\n\r\nx_test=test[['x1', 'x2', 'x3', 'x4', 'x5']]\r\ny_test=test['HeartDisease']\r\n\r\n#boosting to predict heart disease \r\n\r\n#make expand grid function to get all combos of the parameters \r\ndef expandgrid(*itrs):\r\n product = list(itertools.product(*itrs))\r\n return {'Var{}'.format(i+1):[x[i] for x in product] for i in range(len(itrs))}\r\n\r\n#set the range for the parameter values:\r\nn_estimators=np.arange(300, 450, 50) #the number of trees to fit \r\nmax_depth=np.arange(3, 5, 1)\r\nmin_samples_split=np.arange(3,4,1)\r\nlearning_rate=np.arange(0.001,0.004,0.001)\r\na=expandgrid(n_estimators,max_depth, min_samples_split,learning_rate)\r\nparams=pd.DataFrame.from_dict(a)\r\nlen(params)\r\n\r\n#time the code ??? 
\r\n#looping through the possible parameters for the model and store the estimated validation rmse\r\nValAcc=list(range(0,len(params)))\r\nfor i in range(0,len(params)):\r\n scores = cross_val_score(HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].iloc[i],\r\n max_depth=params['Var2'].iloc[i],\r\n learning_rate=params['Var4'].iloc[i],max_iter=params['Var1'].iloc[i]).fit(x_train, y_train), \r\n x_train, y_train, cv=4,scoring='accuracy')\r\n acc=st.mean(scores)\r\n ValAcc[i]=acc\r\n\r\nValAcc\r\nmax(ValAcc)\r\npars=list(params.iloc[ValAcc==max(ValAcc)].iloc[0])\r\npars.append(max(ValAcc))\r\npars\r\nbestPos=np.array(np.where(np.array(ValAcc)==max(ValAcc))).tolist()[0][0]\r\n#fit the best model on Train then predict on Test if mean acc close to val then fit on entire data \r\nbestPos\r\n\r\nbestMod=HistGradientBoostingClassifier(min_samples_leaf=params['Var3'].iloc[bestPos],\r\n max_depth=params['Var2'].iloc[bestPos],\r\n learning_rate=params['Var4'].iloc[bestPos],max_iter=params['Var1'].iloc[bestPos]).fit(x_train, y_train)\r\n\r\n#gets the predicted values on the test data \r\nbestMod.predict(x_test)\r\nlen(y_test[bestMod.predict(x_test)==y_test])/len(y_test) #67% acc on test \r\n#create a dataset with one row and each col is a ind var from model fit above, then input data per var to fill df then predict y on the values in this df \r\ndf_i=pd.DataFrame({'x1':np.mean(heart['x1']), 'x2':np.mean(heart['x2']),'x3':np.mean(heart['x3']),'x4':np.mean(heart['x4']),'x5':np.mean(heart['x5'])},index=[0])\r\n\r\nif(bestMod.predict(df_i)==0):\r\n print('Predicted: No Heart Disease')\r\nelse:\r\n print('Predicted: Has Heart Disease')\r\n\r\n\r\n#plot two densities centered on the mean of the var and the selected value of the var for all vars \r\n#start with treating each var as a normal distro then plot a density curve where the \r\n#mean is the mean of the var and another curve on same plot where the mean is the selected value from the input of a normal distro set sd to the sd of the var \r\n#for both in the plots, generate random vars the size of the data, except for history heart disease treat as beta with p=actuap prob for var and p=random value that is\r\n#greater than .5 \r\n\r\n#generates random values from a normal distro with mean=loc and sd=scale \r\nnorm.rvs(size=10000,loc=3,scale=8)\r\n#x1:\r\nx1=190\r\nmean=np.mean(heart['x1'])\r\nsd=np.std(heart['x1'])\r\nmeanx1_2=x1\r\nxActual=norm.rvs(size=len(heart),loc=mean,scale=sd)\r\nxInput=norm.rvs(size=len(heart),loc=meanx1_2,scale=sd)\r\n\r\ngroup_labels = ['actual','center_selected']\r\nhist_data=[xActual,xInput]\r\nfig = ff.create_distplot(hist_data,group_labels)\r\nfig.show()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#Uses python3
import sys
def lcs2(a, b):
    #classic O(len(a)*len(b)) dynamic programme; the stray c[z-1] test (left over
    #from a three-sequence version) is dropped and list lengths are used for the bounds
    dp_result = [[0 for j in range(len(b)+1)] for i in range(len(a)+1)]
    for x in range(1, len(a)+1):
        for y in range(1, len(b)+1):
            if a[x-1] == b[y-1]:
                dp_result[x][y] = dp_result[x-1][y-1] + 1
            else:
                dp_result[x][y] = max(dp_result[x-1][y], dp_result[x][y-1])
    return dp_result[len(a)][len(b)]
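
#Quick check (illustrative): lcs2([1, 2, 3], [2, 3, 4]) == 2, i.e. the
#length of the common subsequence [2, 3]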
if __name__ == '__main__':
input = sys.stdin.read()
data = list(map(int, input.split()))
n = data[0]
data = data[1:]
a = data[:n]
data = data[n:]
m = data[0]
data = data[1:]
b = data[:m]
print(lcs2(a, b))
|
normal
|
{
"blob_id": "d20b336c6588c3cfc4393256b660d6e4ff56b84e",
"index": 1543,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef lcs2(a, b):\n dp_result = [[(0) for j in range(b + 1)] for i in range(a + 1)]\n for x in range(1, a + 1):\n for y in range(1, b + 1):\n if a[x - 1] == b[y - 1] and b[y - 1] == c[z - 1]:\n dp_result[x][y] = dp_result[x - 1][y - 1] + 1\n else:\n dp_result[x][y] = max(dp_result[x - 1][y], dp_result[x][y -\n 1], dp_result[x][y])\n return dp_result\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef lcs2(a, b):\n dp_result = [[(0) for j in range(b + 1)] for i in range(a + 1)]\n for x in range(1, a + 1):\n for y in range(1, b + 1):\n if a[x - 1] == b[y - 1] and b[y - 1] == c[z - 1]:\n dp_result[x][y] = dp_result[x - 1][y - 1] + 1\n else:\n dp_result[x][y] = max(dp_result[x - 1][y], dp_result[x][y -\n 1], dp_result[x][y])\n return dp_result\n\n\nif __name__ == '__main__':\n input = sys.stdin.read()\n data = list(map(int, input.split()))\n n = data[0]\n data = data[1:]\n a = data[:n]\n data = data[n:]\n m = data[0]\n data = data[1:]\n b = data[:m]\n print(lcs2(a, b))\n",
"step-4": "import sys\n\n\ndef lcs2(a, b):\n dp_result = [[(0) for j in range(b + 1)] for i in range(a + 1)]\n for x in range(1, a + 1):\n for y in range(1, b + 1):\n if a[x - 1] == b[y - 1] and b[y - 1] == c[z - 1]:\n dp_result[x][y] = dp_result[x - 1][y - 1] + 1\n else:\n dp_result[x][y] = max(dp_result[x - 1][y], dp_result[x][y -\n 1], dp_result[x][y])\n return dp_result\n\n\nif __name__ == '__main__':\n input = sys.stdin.read()\n data = list(map(int, input.split()))\n n = data[0]\n data = data[1:]\n a = data[:n]\n data = data[n:]\n m = data[0]\n data = data[1:]\n b = data[:m]\n print(lcs2(a, b))\n",
"step-5": "#Uses python3\n\nimport sys\n\ndef lcs2(a, b): \n dp_result = [[0 for j in range(b+1)] for i in range(a+1)]\n for x in range(1, a+1):\n for y in range(1, b+1):\n if a[x-1] == b[y-1] and b[y-1] == c[z-1]: \n dp_result[x][y] = dp_result[x-1][y-1] + 1\n else:\n dp_result[x][y] = max(dp_result[x-1][y], dp_result[x][y-1], dp_result[x][y])\n\n return dp_result\n\n\nif __name__ == '__main__':\n input = sys.stdin.read()\n data = list(map(int, input.split()))\n\n n = data[0]\n data = data[1:]\n a = data[:n]\n\n data = data[n:]\n m = data[0]\n data = data[1:]\n b = data[:m]\n\n print(lcs2(a, b))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
#
# Dividend!
#
import os
import sys
import urllib2
import math
import numpy
from pylab import *
#
# Dividend adjusted!
#
Use_Dividend_Adjusted = True
if ( Use_Dividend_Adjusted ):
readinIndex = 6
else:
readinIndex = 4
# Subplots in total
nsubplots = 5
iprice = 1
imacd = 2
#icci = 4
idmi = 3
ibalance = 4
igain = 5
# CCI parameters!!!
CCI_period = 20
# DMI parameters!!!
DMI_period = 14
name = str(sys.argv[1])
period = str(sys.argv[2])
time_range = int(sys.argv[3])
predict = bool(int(sys.argv[4]))
if (period == 'd'):
periodText = "days"
if (period == 'w'):
periodText = "weeks"
if (period == 'm'):
periodText = "months"
def Average(list):
r=0.0
for i in list:
r+=float(i)
return r/len(list)
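# Note: despite its name, EMA below is a linearly weighted average (the first
# element gets the largest weight); it is defined but never called in this script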
def EMA(list):
r = 0.0
f = 0
for i in range(1, len(list) + 1):
r = r + float(list[i-1]) * ( len(list) + 1 - i )
f = f + i
return r / f
response = urllib2.urlopen('http://table.finance.yahoo.com/table.csv?s='+name+'&d=12&e=29&f=3014&g='+period+'&a=3&b=23&c=1000&ignore=.csv')
if not os.path.exists( 'figures' ):
os.makedirs( 'figures' )
html = response.read()
#print html
a = html.split('\n')
dmax = len(a) - 2 #careful: one header line plus one empty line at the end
if ( dmax < time_range ):
time_range = dmax - 1
a200 = []
date200 = []
avg12 = []
avg26 = []
dif = []
TP = []
TR = []
TR14 = []
HighP = []
LowP = []
DM_positive = []
DM_negative = []
DM14_positive = []
DM14_negative = []
DI14_positive = []
DI14_negative = []
DX = []
ADX = []
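# The loop below walks the Yahoo CSV oldest-first (the feed is newest-first) and
# applies Wilder smoothing to TR14/DM14/ADX: new = prev*(n-1)/n + current/n, n = DMI_period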
for i in range(dmax, 0, -1):
date200.append(a[i].split(',')[0])
a200.append(float(a[i].split(',')[readinIndex]))
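    # CSV columns: 2=High, 3=Low, 4=Close, 6=AdjClose; High/Low are scaled by
    # AdjClose/Close below so the intraday range matches the dividend-adjusted closes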
# HighP.append( float(a[i].split(',')[2]) )
HighP.append( float(a[i].split(',')[2]) / float(a[i].split(',')[4]) * float(a[i].split(',')[6]) )
# LowP.append( float(a[i].split(',')[3]) )
LowP.append( float(a[i].split(',')[3]) / float(a[i].split(',')[4]) * float(a[i].split(',')[6]) )
CloseP = float(a[i].split(',')[readinIndex])
TP.append( (HighP[dmax - i] + LowP[dmax - i] + CloseP) / 3.0 )
if ( i < dmax ):
TR.append( max(HighP[dmax - i], a200[dmax - i - 1]) - min(LowP[dmax - i], a200[dmax - i - 1]) )
TR14.append( TR14[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + TR[dmax - i] / float(DMI_period) )
DM_positive.append( max(0, HighP[dmax - i] - HighP[dmax - i - 1]) )
DM_negative.append( max(0, LowP[dmax - i - 1] - LowP[dmax - i]) )
DM14_positive.append( DM14_positive[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DM_positive[dmax - i] / float(DMI_period) )
DM14_negative.append( DM14_negative[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DM_negative[dmax - i] / float(DMI_period) )
if ( TR14[dmax - i] == 0 ):
DI14_positive.append(0)
DI14_negative.append(0)
else:
DI14_positive.append( DM14_positive[dmax - i] / TR14[dmax - i] * 100 )
DI14_negative.append( DM14_negative[dmax - i] / TR14[dmax - i] * 100 )
if ( DI14_positive[dmax - i] + DI14_negative[dmax - i] == 0 ):
DX.append(0)
else:
DX.append( abs( DI14_positive[dmax - i] - DI14_negative[dmax - i] ) / ( DI14_positive[dmax - i] + DI14_negative[dmax - i] ) * 100 )
ADX.append( ADX[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DX[dmax - i] / float(DMI_period) )
else:
TR.append( HighP[dmax - i] - LowP[dmax - i] )
TR14.append( TR[dmax - i] )
DM_positive.append(0)
DM_negative.append(0)
DM14_positive.append( DM_positive[dmax - i] )
DM14_negative.append( DM_negative[dmax - i] )
if ( TR14[dmax - i] == 0 ):
DI14_positive.append(0)
DI14_negative.append(0)
else:
DI14_positive.append( DM14_positive[dmax - i] / TR14[dmax - i] * 100 )
DI14_negative.append( DM14_negative[dmax - i] / TR14[dmax - i] * 100 )
if ( DI14_positive[dmax - i] + DI14_negative[dmax - i] == 0 ):
DX.append(0)
else:
DX.append( abs( DI14_positive[dmax - i] - DI14_negative[dmax - i] ) / ( DI14_positive[dmax - i] + DI14_negative[dmax - i] ) * 100 )
ADX.append( DX[dmax - i] )
# print HighP, LowP, CloseP
#a200.reverse()
#date200.reverse()
#TP.reverse()
a300 = []
for i in range(0, len(a200) ):
a200[i] = float(a200[i])
#print max(a200)
EMA12 = a200[0]
EMA26 = a200[0]
DIF = 0.0
DEA_old = 0.0
DEA_new = 0.0
DIF_array = []
DEA_array = []
#print html
MA_array = []
CCI_array = []
figure(1,(12,15))
# CCI Part
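# CCI_20 = (TP - SMA_20(TP)) / (0.015 * mean(|TP - SMA_20(TP)|)); the first
# CCI_period-1 entries are left at 0 while the averaging window fills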
for i in range(0, dmax):
if ( i < CCI_period - 1 ):
MA = Average( TP[:i+1] )
MA_array.append(MA)
# MD = Average( [abs(x - y) for x, y in zip(MA_array[:i+1], TP[:i+1])] )
MD = Average( [abs(x - MA) for x in TP[:i+1]] )
else:
MA = Average( TP[i-19:i+1] )
MA_array.append(MA)
# MD = Average( [abs(x - y) for x, y in zip(MA_array[i-19:i+1], TP[i-19:i+1])] )
MD = Average( [abs(x - MA) for x in TP[i-19:i+1]] )
if ( i < CCI_period - 1 ):
CCI_array.append(0)
else:
CCI_array.append ( ( TP[i] - MA ) / MD / 0.015 )
# print TP[i], MA
# MACD Part
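# Standard [12, 26, 9] recurrences: EMA_n = (2*price + (n-1)*EMA_n_prev)/(n+1),
# DIF = EMA12 - EMA26, and DEA (the signal line) is a 9-period EMA of DIF (2/10 weight)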
for i in range(1, dmax):
EMA12 = ( 2 * float(a200[i]) + 11 * EMA12 ) / 13
EMA26 = ( 2 * float(a200[i]) + 25 * EMA26 ) / 27
DIF = EMA12 - EMA26
DEA_new = DEA_old * 8 / 10 + DIF * 2 / 10
DIF_array.append(DIF)
DEA_array.append(DEA_new)
DEA_old = DEA_new
x = arange(1, dmax, 1)
#print len(x)
#DIF_array = x
#plot(x[400:], DIF_array[400:], x[400:], DEA_array[400:])
subplot(nsubplots,1,iprice)
plot(x[dmax-time_range-1:]-(dmax-time_range-1), a200[dmax - time_range:], 'k')
grid(True)
xindex = []
xdate = []
xinterval = 5
for i in range( 0, xinterval ):
xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )
xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )
xindex.append( time_range )
xdate.append( str( date200[dmax - 1] ) )
xticks(xindex, xdate)
ylabel('PRICE (USD)', fontsize=16)
title(name.upper() + ' Price and Indices in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 )
# Plot CCI
#subplot(nsubplots,1,icci)
#plot(x[dmax-time_range-1:]-(dmax-time_range-1), CCI_array[dmax - time_range:], 'k')
#grid(True)
#xticks(xindex, xdate)
#ylabel('CCI_20', fontsize=16)
# Plot DMI
subplot(nsubplots,1,idmi)
plot(x[dmax-time_range-1:]-(dmax-time_range-1), DI14_positive[dmax - time_range:], 'b',linestyle=':')
plot(x[dmax-time_range-1:]-(dmax-time_range-1), DI14_negative[dmax - time_range:], 'm', linestyle='--')
#plot(x[dmax-time_range-1:]-(dmax-time_range-1), DX[dmax - time_range:], 'g')
plot(x[dmax-time_range-1:]-(dmax-time_range-1), ADX[dmax - time_range:], 'k', linestyle='-')
grid(True)
xticks(xindex, xdate)
ylabel('DMI_14', fontsize=16)
lg = legend(['DI+', 'DI-', 'ADX'], loc='upper center', bbox_to_anchor=(1.049, 1.05))
subplot(nsubplots,1,imacd)
plot(x[dmax-time_range-1:]-(dmax-time_range-1), DIF_array[dmax-time_range-1:], 'b')
plot(x[dmax-time_range-1:]-(dmax-time_range-1), DEA_array[dmax-time_range-1:], 'r')
#xlabel('Date', fontsize=16)
ylabel('MACD (USD)', fontsize=16)
globalmin = min([min(DIF_array[dmax-time_range-1:]), min(DEA_array[dmax-time_range-1:])])
globalmax = max([max(DIF_array[dmax-time_range-1:]), max(DEA_array[dmax-time_range-1:])])
#for j in range( 0, 5):
# text(time_range - j * xinterval - float(time_range) / 40.0, globalmin - (globalmax - globalmin) * 0.2, date200[dmax-1-j * xinterval],color='blue')
lg = legend(['DIF', 'MACD'], loc='upper center')
lg.draw_frame(False)
grid(True)
xticks(xindex, xdate)
#xticks([i * 5 for i in range(1, time_range / 5)])
#title('[12, 26, 9] MACD Curves for ' + name.upper() + ' in the recent ' + str(time_range) + ' ' + periodText )
if ( predict == True):
cash = 1.0
ns = 0
nborrow = 0
ntrade = 0
ngain = 0
nloss = 0
total_gain = 0.0
total_loss = 0.0
top = []
itop = []
bottom = []
ibottom = []
iabove = 1
ibelow = 1
imax = 1
maxd = -99999.0
imin = 1
mind = 99999.0
imaxprice = 1
maxprice = -99999.0
iminprice = 1
minprice = 99999.0
above_active = False
below_active = False
found_low_MACD = False
found_low_ADX = False
last_low_MACD = 0
last_low_ADX = 0
real_high = False
total_vector = []
gain_result_vector = []
for i in range( dmax - 1 - time_range, dmax - 1):
total = cash + ns * float(a200[i+1]) - nborrow * float(a200[i+1])
total_vector.append( total )
gain_result = 0.0
# print i, " ", a200[i+1], " ", total, date200[i+1]
correct = False
buy = False
sell = False
DIF_slope = DIF_array[i] - DIF_array[i-1]
DEA_slope = DEA_array[i] - DEA_array[i-1]
if ( DIF_array[i-1] < DEA_array[i-1] and DIF_array[i-2] > DIF_array[i-1] and DIF_array[i] > DIF_array[i-1] ):
found_low_MACD = True
last_low_MACD = i
if ( DIF_slope < 0 and DIF_array[i-1] > DEA_array[i-1] and DIF_array[i] < DEA_array[i] ):
sell = True
subplot(nsubplots,1,imacd)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# Make decision based on CCI
# if ( CCI_array[i] < 100 and CCI_array[i+1] >= 100 ):
# buy = True
# subplot(nsubplots,1,icci)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
# if ( CCI_array[i] > -100 and CCI_array[i+1] <= -100 ):
# sell = True
# subplot(nsubplots,1,icci)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# Make decision based on DMI
if ( ADX[i+1] < ADX[i] and ADX[i-1] < ADX[i] ):
found_low_ADX = True
if ( i - last_low_MACD <= 3 ):
buy = True
subplot(nsubplots,1,imacd)
axvline(x = last_low_MACD - (dmax-time_range-1) + 1, linewidth=1, color='g')
subplot(nsubplots,1,idmi)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
# if ( DI14_positive[i] > DI14_negative[i] and DI14_positive[i+1] < DI14_negative[i+1] and ADX[i+1] >= 25 ):
# sell = True
# subplot(nsubplots,1,idmi)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
if ( buy ):
if ( nborrow > 0 ):
subplot(nsubplots,1,iprice)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
ntrade = ntrade + 1
cash = cash - nborrow * float(a200[i+1])
if ( float(a200[i+1]) < borrow_price ):
ngain = ngain + 1
gain = nborrow * (borrow_price - float(a200[i+1]))
gain_result = gain
total_gain = total_gain + gain
# file.write(str(ntrade) + ' ' + str(gain) + '\n')
else:
nloss = nloss + 1
loss = nborrow * (borrow_price - float(a200[i+1]))
gain_result = loss
total_loss = total_loss + loss
# file.write(str(ntrade) + ' ' + str(loss) + '\n')
nborrow = 0
if ( ns == 0 ):
subplot(nsubplots,1,iprice)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
# subplot(nsubplots,1,iprice)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
# subplot(nsubplots,1,icci)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')
ns = cash / float(a200[i+1])
if ( ns > 0 ):
cash = cash - ns * float(a200[i+1])
buy_price = float(a200[i+1])
buy_date = i - (dmax-time_range-1) + 1
if ( sell ):
if ( ns > 0 ):
subplot(nsubplots,1,iprice)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# subplot(nsubplots,1,iprice)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# subplot(nsubplots,1,icci)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# print 'Bought on ', date200[(dmax-time_range-1) + buy_date], ' @ ', buy_price, '; Sell on ', date200[i+1], ' @ ', a200[i+1]
ntrade = ntrade + 1
cash = cash + ns * float(a200[i+1])
if ( float(a200[i+1]) > buy_price ):
ngain = ngain + 1
gain = ns * (float(a200[i+1]) - buy_price)
gain_result = gain
total_gain = total_gain + gain
# file.write(str(ntrade) + ' ' + str(gain) + '\n')
else:
nloss = nloss + 1
loss = ns * (float(a200[i+1]) - buy_price)
gain_result = loss
total_loss = total_loss + loss
# file.write(str(ntrade) + ' ' + str(loss) + '\n')
ns = 0
if ( nborrow == 0 ):
subplot(nsubplots,1,iprice)
axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# subplot(nsubplots,1,iprice)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
# subplot(nsubplots,1,icci)
# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')
nborrow = cash / float(a200[i+1])
if ( nborrow > 0 ):
cash = cash + nborrow * float(a200[i+1])
borrow_price = float(a200[i+1])
borrow_date = i - (dmax-time_range-1) + 1
gain_result_vector.append( gain_result )
# file.close()
ref_total = 1.0 / float(a200[dmax - 1 - time_range + 1]) * (-float(a200[dmax - 1 - time_range + 1]) + float(a200[dmax - 1])) + 1.0
if ( ngain == 0 ):
avg_gain = 'NA'
else:
avg_gain = total_gain / ngain
if ( nloss == 0 ):
avg_loss = 'NA'
else:
avg_loss = total_loss / nloss
print ntrade, ' ', ngain, ' ', nloss, ' ', avg_gain, ' ', avg_loss, total, ref_total, (total-ref_total)/ref_total*100
#figure()
x = arange(1, time_range + 1, 1)
subplot(nsubplots,1,ibalance)
xlim([1,time_range])
plot( x, total_vector )
#title(name.upper() + ' Balance and Gain/Loss in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 )
xindex = []
xdate = []
xinterval = 5
for i in range( 0, xinterval ):
xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )
# print int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) )
xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )
xindex.append( time_range )
xdate.append( str( date200[dmax - 1] ) )
xticks(xindex, xdate, fontsize=12)
ylabel('Balance (USD)', fontsize=16)
grid(True)
subplot(nsubplots,1,igain)
xlim([1,time_range])
vlines( x, [0], gain_result_vector, lw=4 )
axhline(0, color='black')
xticks(xindex, xdate, fontsize=12)
xlabel('Date', fontsize=16)
ylabel('Gain (USD)', fontsize=16)
grid(True)
#figure()
#x = arange(1, time_range + 1, 1)
#subplot(nsubplots,1,3)
#xlim([1,time_range])
#plot( x, total_vector )
#title(name.upper() + ' Balance and Gain/Loss in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 )
#xindex = []
#xdate = []
#xinterval = 5
#for i in range( 0, xinterval ):
# xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )
# print int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) )
# xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )
#xindex.append( time_range )
#xdate.append( str( date200[dmax - 1] ) )
#xticks(xindex, xdate, fontsize=12)
#ylabel('Balance (USD)', fontsize=16)
#grid(True)
#subplot(nsubplots,1,4)
#xlim([1,time_range])
#vlines( x, [0], gain_result_vector, lw=4 )
#axhline(0, color='black')
#xticks(xindex, xdate, fontsize=12)
#xlabel('Date', fontsize=16)
#ylabel('Gain (USD)', fontsize=16)
#grid(True)
savefig( './figures/' + name.upper() + '_' + periodText + '.pdf' )
|
normal
|
{
"blob_id": "6454790c98b254edeead4e68ef7f5760c9105a57",
"index": 433,
"step-1": "#!/usr/bin/python\n#\n# Dividend!\n#\n\nimport os\nimport sys\nimport urllib2\nimport math\nimport numpy\nfrom pylab import *\n\n# \n# Dividend adjusted! \n#\n \nUse_Dividend_Adjusted = True\n\nif ( Use_Dividend_Adjusted ):\n readinIndex = 6\nelse:\n readinIndex = 4\n\n# Subplots in total\nnsubplots = 5\niprice = 1\nimacd = 2\n#icci = 4\nidmi = 3\nibalance = 4\nigain = 5\n\n\n# CCI parameters!!! \nCCI_period = 20\n\n# DMI parameters!!!\nDMI_period = 14\n\nname = str(sys.argv[1])\nperiod = str(sys.argv[2])\ntime_range = int(sys.argv[3]) \npredict = bool(int(sys.argv[4]))\n\nif (period == 'd'):\n periodText = \"days\"\nif (period == 'w'):\n periodText = \"weeks\"\nif (period == 'm'):\n periodText = \"months\"\n\ndef Average(list):\n r=0.0\n for i in list:\n r+=float(i)\n return r/len(list)\n\ndef EMA(list):\n r = 0.0\n f = 0\n for i in range(1, len(list) + 1): \n r = r + float(list[i-1]) * ( len(list) + 1 - i )\n f = f + i\n return r / f\n\nresponse = urllib2.urlopen('http://table.finance.yahoo.com/table.csv?s='+name+'&d=12&e=29&f=3014&g='+period+'&a=3&b=23&c=1000&ignore=.csv')\n\nif not os.path.exists( 'figures' ):\n os.makedirs( 'figures' )\n\nhtml = response.read()\n#print html\na = html.split('\\n')\n\ndmax = len(a) - 2 #Be careful here! One header line and One empty line in the end\nif ( dmax < time_range ):\n time_range = dmax - 1\na200 = []\ndate200 = []\navg12 = []\navg26 = []\ndif = []\nTP = []\nTR = []\nTR14 = []\nHighP = []\nLowP = []\n\nDM_positive = []\nDM_negative = []\nDM14_positive = []\nDM14_negative = []\n\nDI14_positive = []\nDI14_negative = []\n\nDX = []\nADX = []\n\nfor i in range(dmax, 0, -1):\n date200.append(a[i].split(',')[0])\n a200.append(float(a[i].split(',')[readinIndex]))\n# HighP.append( float(a[i].split(',')[2]) )\n HighP.append( float(a[i].split(',')[2]) / float(a[i].split(',')[4]) * float(a[i].split(',')[6]) )\n# LowP.append( float(a[i].split(',')[3]) )\n LowP.append( float(a[i].split(',')[3]) / float(a[i].split(',')[4]) * float(a[i].split(',')[6]) )\n CloseP = float(a[i].split(',')[readinIndex]) \n TP.append( (HighP[dmax - i] + LowP[dmax - i] + CloseP) / 3.0 )\n if ( i < dmax ):\n TR.append( max(HighP[dmax - i], a200[dmax - i - 1]) - min(LowP[dmax - i], a200[dmax - i - 1]) )\n TR14.append( TR14[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + TR[dmax - i] / float(DMI_period) ) \n DM_positive.append( max(0, HighP[dmax - i] - HighP[dmax - i - 1]) )\n DM_negative.append( max(0, LowP[dmax - i - 1] - LowP[dmax - i]) )\n DM14_positive.append( DM14_positive[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DM_positive[dmax - i] / float(DMI_period) )\n DM14_negative.append( DM14_negative[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DM_negative[dmax - i] / float(DMI_period) )\n if ( TR14[dmax - i] == 0 ):\n DI14_positive.append(0)\n DI14_negative.append(0)\n else:\n DI14_positive.append( DM14_positive[dmax - i] / TR14[dmax - i] * 100 )\n DI14_negative.append( DM14_negative[dmax - i] / TR14[dmax - i] * 100 )\n if ( DI14_positive[dmax - i] + DI14_negative[dmax - i] == 0 ):\n DX.append(0)\n else:\n DX.append( abs( DI14_positive[dmax - i] - DI14_negative[dmax - i] ) / ( DI14_positive[dmax - i] + DI14_negative[dmax - i] ) * 100 )\n ADX.append( ADX[dmax - i - 1] * float(DMI_period - 1) / float(DMI_period) + DX[dmax - i] / float(DMI_period) )\n else:\n TR.append( HighP[dmax - i] - LowP[dmax - i] )\n TR14.append( TR[dmax - i] )\n DM_positive.append(0)\n DM_negative.append(0)\n DM14_positive.append( DM_positive[dmax - i] )\n 
DM14_negative.append( DM_negative[dmax - i] )\n if ( TR14[dmax - i] == 0 ):\n DI14_positive.append(0)\n DI14_negative.append(0)\n else:\n DI14_positive.append( DM14_positive[dmax - i] / TR14[dmax - i] * 100 )\n DI14_negative.append( DM14_negative[dmax - i] / TR14[dmax - i] * 100 )\n if ( DI14_positive[dmax - i] + DI14_negative[dmax - i] == 0 ):\n DX.append(0)\n else:\n DX.append( abs( DI14_positive[dmax - i] - DI14_negative[dmax - i] ) / ( DI14_positive[dmax - i] + DI14_negative[dmax - i] ) * 100 )\n ADX.append( DX[dmax - i] )\n\n# print HighP, LowP, CloseP\n#a200.reverse()\n#date200.reverse()\n#TP.reverse()\n\na300 = []\nfor i in range(0, len(a200) ):\n a200[i] = float(a200[i])\n#print max(a200)\nEMA12 = a200[0]\nEMA26 = a200[0]\nDIF = 0.0\nDEA_old = 0.0\nDEA_new = 0.0\nDIF_array = []\nDEA_array = []\n#print html\n\nMA_array = []\nCCI_array = []\n\nfigure(1,(12,15)) \n\n # CCI Part\nfor i in range(0, dmax):\n if ( i < CCI_period - 1 ):\n MA = Average( TP[:i+1] )\n MA_array.append(MA)\n# MD = Average( [abs(x - y) for x, y in zip(MA_array[:i+1], TP[:i+1])] )\n MD = Average( [abs(x - MA) for x in TP[:i+1]] )\n else:\n MA = Average( TP[i-19:i+1] )\n MA_array.append(MA)\n# MD = Average( [abs(x - y) for x, y in zip(MA_array[i-19:i+1], TP[i-19:i+1])] )\n MD = Average( [abs(x - MA) for x in TP[i-19:i+1]] )\n if ( i < CCI_period - 1 ):\n CCI_array.append(0)\n else:\n CCI_array.append ( ( TP[i] - MA ) / MD / 0.015 )\n# print TP[i], MA\n\n # MACD Part\nfor i in range(1, dmax):\n EMA12 = ( 2 * float(a200[i]) + 11 * EMA12 ) / 13\n EMA26 = ( 2 * float(a200[i]) + 25 * EMA26 ) / 27\n DIF = EMA12 - EMA26\n DEA_new = DEA_old * 8 / 10 + DIF * 2 / 10\n DIF_array.append(DIF)\n DEA_array.append(DEA_new)\n DEA_old = DEA_new\n\nx = arange(1, dmax, 1)\n#print len(x)\n#DIF_array = x\n#plot(x[400:], DIF_array[400:], x[400:], DEA_array[400:])\nsubplot(nsubplots,1,iprice)\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), a200[dmax - time_range:], 'k')\ngrid(True)\nxindex = []\nxdate = []\nxinterval = 5\nfor i in range( 0, xinterval ):\n xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )\n xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )\nxindex.append( time_range )\nxdate.append( str( date200[dmax - 1] ) )\nxticks(xindex, xdate)\nylabel('PRICE (USD)', fontsize=16)\ntitle(name.upper() + ' Price and Indices in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 ) \n\n# Plot CCI\n#subplot(nsubplots,1,icci)\n#plot(x[dmax-time_range-1:]-(dmax-time_range-1), CCI_array[dmax - time_range:], 'k')\n#grid(True)\n#xticks(xindex, xdate)\n#ylabel('CCI_20', fontsize=16)\n\n# Plot DMI\nsubplot(nsubplots,1,idmi)\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), DI14_positive[dmax - time_range:], 'b',linestyle=':')\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), DI14_negative[dmax - time_range:], 'm', linestyle='--')\n#plot(x[dmax-time_range-1:]-(dmax-time_range-1), DX[dmax - time_range:], 'g')\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), ADX[dmax - time_range:], 'k', linestyle='-')\ngrid(True)\nxticks(xindex, xdate)\nylabel('DMI_14', fontsize=16)\nlg = legend(['DI+', 'DI-', 'ADX'], loc='upper center', bbox_to_anchor=(1.049, 1.05))\n\nsubplot(nsubplots,1,imacd)\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), DIF_array[dmax-time_range-1:], 'b')\nplot(x[dmax-time_range-1:]-(dmax-time_range-1), DEA_array[dmax-time_range-1:], 'r')\n#xlabel('Date', fontsize=16)\nylabel('MACD (USD)', fontsize=16)\nglobalmin = 
min([min(DIF_array[dmax-time_range-1:]), min(DEA_array[dmax-time_range-1:])])\nglobalmax = max([max(DIF_array[dmax-time_range-1:]), max(DEA_array[dmax-time_range-1:])])\n#for j in range( 0, 5):\n# text(time_range - j * xinterval - float(time_range) / 40.0, globalmin - (globalmax - globalmin) * 0.2, date200[dmax-1-j * xinterval],color='blue')\nlg = legend(['DIF', 'MACD'], loc='upper center')\nlg.draw_frame(False)\ngrid(True)\nxticks(xindex, xdate)\n#xticks([i * 5 for i in range(1, time_range / 5)])\n#title('[12, 26, 9] MACD Curves for ' + name.upper() + ' in the recent ' + str(time_range) + ' ' + periodText )\nif ( predict == True):\n cash = 1.0\n ns = 0\n nborrow = 0\n ntrade = 0\n ngain = 0\n nloss = 0\n total_gain = 0.0\n total_loss = 0.0\n top = []\n itop = []\n bottom = []\n ibottom = []\n iabove = 1\n ibelow = 1\n imax = 1\n maxd = -99999.0\n imin = 1\n mind = 99999.0\n imaxprice = 1\n maxprice = -99999.0\n iminprice = 1\n minprice = 99999.0\n above_active = False\n below_active = False\n\n found_low_MACD = False\n found_low_ADX = False\n last_low_MACD = 0\n last_low_ADX = 0\n\n real_high = False\n\n total_vector = []\n gain_result_vector = []\n\n for i in range( dmax - 1 - time_range, dmax - 1):\n total = cash + ns * float(a200[i+1]) - nborrow * float(a200[i+1])\n total_vector.append( total )\n gain_result = 0.0\n# print i, \" \", a200[i+1], \" \", total, date200[i+1]\n correct = False\n buy = False\n sell = False\n DIF_slope = DIF_array[i] - DIF_array[i-1]\n DEA_slope = DEA_array[i] - DEA_array[i-1]\n\n if ( DIF_array[i-1] < DEA_array[i-1] and DIF_array[i-2] > DIF_array[i-1] and DIF_array[i] > DIF_array[i-1] ):\n found_low_MACD = True\n last_low_MACD = i\n \n if ( DIF_slope < 0 and DIF_array[i-1] > DEA_array[i-1] and DIF_array[i] < DEA_array[i] ):\n sell = True\n subplot(nsubplots,1,imacd)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n\n # Make decision based on CCI\n# if ( CCI_array[i] < 100 and CCI_array[i+1] >= 100 ):\n# buy = True\n# subplot(nsubplots,1,icci)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n\n# if ( CCI_array[i] > -100 and CCI_array[i+1] <= -100 ):\n# sell = True\n# subplot(nsubplots,1,icci) \n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed') \n\n # Make decision based on DMI\n if ( ADX[i+1] < ADX[i] and ADX[i-1] < ADX[i] ):\n found_low_ADX = True\n if ( i - last_low_MACD <= 3 ):\n buy = True\n subplot(nsubplots,1,imacd)\n axvline(x = last_low_MACD - (dmax-time_range-1) + 1, linewidth=1, color='g')\n subplot(nsubplots,1,idmi)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n\n# if ( DI14_positive[i] > DI14_negative[i] and DI14_positive[i+1] < DI14_negative[i+1] and ADX[i+1] >= 25 ):\n# sell = True\n# subplot(nsubplots,1,idmi)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n\n if ( buy ):\n if ( nborrow > 0 ):\n subplot(nsubplots,1,iprice)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n ntrade = ntrade + 1\n cash = cash - nborrow * float(a200[i+1])\n if ( float(a200[i+1]) < borrow_price ):\n ngain = ngain + 1\n gain = nborrow * (borrow_price - float(a200[i+1]))\n gain_result = gain\n total_gain = total_gain + gain\n# file.write(str(ntrade) + ' ' + str(gain) + '\\n')\n else:\n nloss = nloss + 1\n loss = nborrow * (borrow_price - float(a200[i+1]))\n gain_result = loss\n total_loss = total_loss + loss\n# file.write(str(ntrade) + ' ' + str(loss) + '\\n')\n nborrow = 0\n if ( ns == 0 ):\n 
subplot(nsubplots,1,iprice)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n# subplot(nsubplots,1,iprice)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n# subplot(nsubplots,1,icci)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='g')\n ns = cash / float(a200[i+1])\n if ( ns > 0 ):\n cash = cash - ns * float(a200[i+1])\n buy_price = float(a200[i+1])\n buy_date = i - (dmax-time_range-1) + 1\n\n if ( sell ): \n if ( ns > 0 ):\n subplot(nsubplots,1,iprice)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n# subplot(nsubplots,1,iprice)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n# subplot(nsubplots,1,icci)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n# print 'Bought on ', date200[(dmax-time_range-1) + buy_date], ' @ ', buy_price, '; Sell on ', date200[i+1], ' @ ', a200[i+1]\n ntrade = ntrade + 1\n cash = cash + ns * float(a200[i+1])\n if ( float(a200[i+1]) > buy_price ):\n ngain = ngain + 1\n gain = ns * (float(a200[i+1]) - buy_price)\n gain_result = gain\n total_gain = total_gain + gain\n# file.write(str(ntrade) + ' ' + str(gain) + '\\n')\n else:\n nloss = nloss + 1\n loss = ns * (float(a200[i+1]) - buy_price)\n gain_result = loss\n total_loss = total_loss + loss\n# file.write(str(ntrade) + ' ' + str(loss) + '\\n')\n ns = 0\n if ( nborrow == 0 ):\n subplot(nsubplots,1,iprice)\n axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n# subplot(nsubplots,1,iprice)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n# subplot(nsubplots,1,icci)\n# axvline(x = i - (dmax-time_range-1) + 1, linewidth=1, color='r',linestyle='dashed')\n nborrow = cash / float(a200[i+1])\n if ( nborrow > 0 ):\n cash = cash + nborrow * float(a200[i+1])\n borrow_price = float(a200[i+1])\n borrow_date = i - (dmax-time_range-1) + 1\n gain_result_vector.append( gain_result )\n# file.close()\n ref_total = 1.0 / float(a200[dmax - 1 - time_range + 1]) * (-float(a200[dmax - 1 - time_range + 1]) + float(a200[dmax - 1])) + 1.0\n if ( ngain == 0 ):\n avg_gain = 'NA'\n else:\n avg_gain = total_gain / ngain\n if ( nloss == 0 ):\n avg_loss = 'NA'\n else:\n avg_loss = total_loss / nloss\n print ntrade, ' ', ngain, ' ', nloss, ' ', avg_gain, ' ', avg_loss, total, ref_total, (total-ref_total)/ref_total*100\n\n#figure()\nx = arange(1, time_range + 1, 1)\nsubplot(nsubplots,1,ibalance)\nxlim([1,time_range])\nplot( x, total_vector )\n#title(name.upper() + ' Balance and Gain/Loss in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 )\nxindex = []\nxdate = []\nxinterval = 5\nfor i in range( 0, xinterval ):\n xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )\n# print int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) )\n xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )\nxindex.append( time_range )\nxdate.append( str( date200[dmax - 1] ) )\nxticks(xindex, xdate, fontsize=12)\nylabel('Balance (USD)', fontsize=16)\ngrid(True)\nsubplot(nsubplots,1,igain)\nxlim([1,time_range])\nvlines( x, [0], gain_result_vector, lw=4 )\naxhline(0, color='black')\nxticks(xindex, xdate, fontsize=12)\nxlabel('Date', fontsize=16)\nylabel('Gain (USD)', fontsize=16)\ngrid(True)\n\n#figure()\n#x = arange(1, time_range + 1, 1)\n#subplot(nsubplots,1,3)\n#xlim([1,time_range])\n#plot( x, total_vector 
)\n#title(name.upper() + ' Balance and Gain/Loss in the past ' + str(time_range) + ' ' + periodText, fontsize = 18 )\n#xindex = []\n#xdate = []\n#xinterval = 5\n#for i in range( 0, xinterval ):\n# xindex.append( int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1 )\n# print int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) )\n# xdate.append( str( date200[dmax - 1 - time_range + int ( math.ceil( float(i) * ( time_range - 1 ) / xinterval ) ) + 1] ) )\n#xindex.append( time_range )\n#xdate.append( str( date200[dmax - 1] ) )\n#xticks(xindex, xdate, fontsize=12)\n#ylabel('Balance (USD)', fontsize=16)\n#grid(True)\n#subplot(nsubplots,1,4)\n#xlim([1,time_range])\n#vlines( x, [0], gain_result_vector, lw=4 )\n#axhline(0, color='black')\n#xticks(xindex, xdate, fontsize=12)\n#xlabel('Date', fontsize=16)\n#ylabel('Gain (USD)', fontsize=16)\n#grid(True)\nsavefig( './figures/' + name.upper() + '_' + periodText + '.pdf' )\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#Use exception handling so that an error message is printed when the input is not an integer (error message: "not an integer")
try:
    x = int(input('정수를 입력하세요: '))  # "Enter an integer: "
    print(x)
except ValueError:
    print('정수가 아닙니다.')  # "Not an integer."
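# Example run (illustrative): typing "abc" makes int() raise ValueError, so the
# except branch prints the error message instead of crashing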
|
normal
|
{
"blob_id": "906265182a9776fec5bad41bfc9ee68b36873d1e",
"index": 573,
"step-1": "<mask token>\n",
"step-2": "try:\n x = int(input('정수를 입력하세요: '))\n print(x)\nexcept:\n print('정수가 아닙니다.')\n",
"step-3": "#예외처리 문법을 활용하여 정수가 아닌 숫자를 입력했을때 에러문구가나오도록 작성.(에러문구:정수가아닙니다)\n\ntry:\n x = int(input('정수를 입력하세요: '))\n print(x)\n \nexcept:\n print('정수가 아닙니다.')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
""" Utility functions and classes for SRP
Context : SRP
Module : Statistics
Version : 1.0.0
Author : Stefano Covino
Date : 04/04/2013
E-mail : [email protected]
URL: : http://www.merate.mi.astro.it/utenti/covino
Usage : to be imported
Remarks : inputs are 1D vectors to be cross-correlated. Optionally you can
            give a vector of x-axis units. It returns the offset (in x-axis
            units) at which the cross-correlation peaks.
History : (04/04/2013) First version.
"""
import numpy
def XCorr_1D (data, refdata, xdata=None):
if data.ndim == 1 and refdata.ndim == 1:
ycorr = numpy.correlate(data, refdata, mode="full")
xcorr = numpy.arange(ycorr.size)
lags = xcorr - (data.size-1)
        if xdata is None:  # "== None" on a numpy array would give an element-wise result
distPerLag = 1.
elif xdata.ndim == 1:
distPerLag = (xdata[-1] - xdata[0])/float(xdata.size)
else:
return None
#
offsets = -lags*distPerLag
#
mx = ycorr.argmax()
ox = offsets[mx]
return ox
else:
return None
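
# Minimal usage sketch (hypothetical data, not part of the module): with two unit
# pulses 10 samples apart, the returned offset is the shift that aligns data with
# refdata, in sample units since xdata is omitted:
#
#   a = numpy.zeros(100); a[40] = 1.0
#   b = numpy.zeros(100); b[50] = 1.0
#   XCorr_1D(a, b)   # -> 10.0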
|
normal
|
{
"blob_id": "c62ffcaa9095d772e51be086be349d200346bc22",
"index": 9662,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef XCorr_1D(data, refdata, xdata=None):\n if data.ndim == 1 and refdata.ndim == 1:\n ycorr = numpy.correlate(data, refdata, mode='full')\n xcorr = numpy.arange(ycorr.size)\n lags = xcorr - (data.size - 1)\n if xdata == None:\n distPerLag = 1.0\n elif xdata.ndim == 1:\n distPerLag = (xdata[-1] - xdata[0]) / float(xdata.size)\n else:\n return None\n offsets = -lags * distPerLag\n mx = ycorr.argmax()\n ox = offsets[mx]\n return ox\n else:\n return None\n",
"step-3": "<mask token>\nimport numpy\n\n\ndef XCorr_1D(data, refdata, xdata=None):\n if data.ndim == 1 and refdata.ndim == 1:\n ycorr = numpy.correlate(data, refdata, mode='full')\n xcorr = numpy.arange(ycorr.size)\n lags = xcorr - (data.size - 1)\n if xdata == None:\n distPerLag = 1.0\n elif xdata.ndim == 1:\n distPerLag = (xdata[-1] - xdata[0]) / float(xdata.size)\n else:\n return None\n offsets = -lags * distPerLag\n mx = ycorr.argmax()\n ox = offsets[mx]\n return ox\n else:\n return None\n",
"step-4": "\"\"\" Utility functions and classes for SRP\n\nContext : SRP\nModule : Statsistics\nVersion : 1.0.0\nAuthor : Stefano Covino\nDate : 04/04/2013\nE-mail : [email protected]\nURL: : http://www.merate.mi.astro.it/utenti/covino\n\nUsage : to be imported\n\nRemarks : inputs are a 1D vectors to be cross-correlated. Optionally you can\n give a vector of x-axis units. It returns the cross-correlation \n value.\n\nHistory : (04/04/2013) First version.\n\n\"\"\"\n\nimport numpy\n\n\ndef XCorr_1D (data, refdata, xdata=None):\n if data.ndim == 1 and refdata.ndim == 1:\n ycorr = numpy.correlate(data, refdata, mode=\"full\")\n xcorr = numpy.arange(ycorr.size)\n lags = xcorr - (data.size-1)\n if xdata == None:\n distPerLag = 1.\n elif xdata.ndim == 1:\n distPerLag = (xdata[-1] - xdata[0])/float(xdata.size)\n else:\n return None\n #\n offsets = -lags*distPerLag\n #\n mx = ycorr.argmax()\n ox = offsets[mx]\n return ox\n else:\n return None\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import tkinter as tk
import Widgets as wg
import Logic as lgc
from tkinter.ttk import Separator
from tkinter.messagebox import showerror, showinfo
# Fonts that we can utilise
FONTS = {"large":("Helvetica", 20), "medium":("Helvetica", 16), "small":("Helvetica", 12)}
class Handler: # Handles the window and the Game interaction
def __init__(self):
# Game Handle
self.Game = None
self.GameParams = {}
# Window Handle
self.Window = Window(self)
self.Window.mainloop()
def Replay (self): # Reset attributes and classes
self.GameParams = {}
del self.Game
self.Game = None
def Is_Running (self):
return self.Game.Running
def Start_Game(self): # Begin the game, run the updates needed.
self.Game = lgc.Game(**self.GameParams)
self.Game.Start_Game()
# Update Game page
self.Update_Game()
self.Window.Pages["Game"].Update_Game_Type()
def Get_Current_Player(self) -> str: # get the current player whose turn it is
if self.Game.Running:
if self.Game.Current_Player == "B":
return "black"
else:
return "white"
else:
return "None"
def Get_Game_Type(self) -> str: # Get the game rule type
g = self.Game.Game_Type
if g == 1:
return "SIMPLE"
else:
return "FULL"
def Get_Score(self) -> tuple: # Get the current score
s = self.Game.Get_Discs()
return s[0], s[1] # b, w
    def Move(self, x: int, y: int) -> bool: # Make a move on a given place
        complete = self.Game.Next_Move(x, y)
        self.Update_Game()
        self.Game_Complete_Check()
        return complete
def Get_Winner(self) -> tuple: # Gets the winner of the game
return self.Game.Check_Winner()
def Game_Complete_Check(self): # Check if the game is over and act accordingly
if self.Is_Running() == False:
# Run Game Over feature here
self.Window.showPage("Postgame")
# Update the post page
self.Window.Pages["Postgame"].Update()
def Update_Game(self): # Run a full update on the game
self.Window.Pages["Game"].Full_Update()
class Window (tk.Tk): # This will be the main window of the GUI
def __init__ (self, controller, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.Handler = controller # This is handler between the game and window
# Root attributes
self.title("Othello")
        try:
            self.iconbitmap("Icon.ico")
        except tk.TclError: # a missing icon file is not fatal
            pass
self.minsize(600, 600)
#self.maxsize(1000,1000)
# Master frame
self.container = tk.Frame(self)
self.container.pack(side="top", fill="both", expand=True)
self.container.grid_rowconfigure(0, weight=1)
self.container.grid_columnconfigure(0, weight=1)
# Set up the pages
self.Pages = {}
for page in (Pregame, Custom_Board, Game, Postgame):
# Initiate each page and add them to the dictionary
# Dictionary will use the name of the class so that it can be accessed
# without the knowledge of the clas name
new = page(self.container, self)
self.Pages[page.FrameName] = new
new.grid(row=0, column=0, sticky="nsew")
# Show the initial page
self.showPage("Pregame")
# Window
def showPage(self, pagename: str): # Show a chosen page
page = self.Pages[pagename]
page.tkraise()
# Game
def Begin_Game(self): # Start the game
self.Handler.Start_Game()
def Get_Current_Player (self) -> str: # Get the current player
return self.Handler.Get_Current_Player()
def Replay(self): # Clean up the old game, start an new one
self.Pages["Pregame"].__GUI_Reset__()
self.Pages["Game"].Reset_Game()
self.Handler.Replay()
self.showPage("Pregame")
class Pregame (tk.Frame): # The 'home' screen
FrameName = "Pregame"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
self.set_vals = []
self.__GUI_Reset__()
def __GUI_Reset__(self): # This will clean the screen and then recreate it, this is essential for replaying the game
for widget in self.winfo_children():
widget.destroy()
# Title Banner
tk.Label(self, text="Otello", font=FONTS["large"], bg="white").pack(side="top")
Separator(self, orient="horizontal").pack(side="top", fill="x", padx=10)
# Rule Set
rule_set_frame = tk.Frame(self, bg="white")
rule_set_frame.pack(pady=10)
# Subheading
self.rs_label = tk.Label(rule_set_frame, text="Rule Set", font=FONTS["medium"], bg="white")
self.rs_label.pack(side="top")
self.full_btn = tk.Button(rule_set_frame, text="FULL", font=FONTS["medium"], bg="#bbbbbb",
command=lambda:self.Select_Rule_Set("full"))
self.full_btn.pack()
self.simple_btn = tk.Button(rule_set_frame, text="SIMPLE", font=FONTS["medium"], bg="#bbbbbb",
command=lambda:self.Select_Rule_Set("simple"))
self.simple_btn.pack()
# Row Size
row_frame = tk.Frame(self, bg="white")
row_frame.pack(pady=10)
self.row_label = tk.Label(row_frame, text="Board Rows", font=FONTS["medium"], bg="white")
self.row_label.grid(row=0, column=0, columnspan=7)
self.Rows_Buttons = []
place = 0
for rows in [4, 6, 8, 10, 12, 14, 16]:
x = tk.Button(row_frame, text=str(rows), font=FONTS["small"], bg="#bbbbbb",
command=lambda rows=rows: self.Select_Rows(rows))
x.grid(row=1, column=place)
self.Rows_Buttons.append(x)
place += 1
# Column Size
col_frame = tk.Frame(self, bg="white")
col_frame.pack(pady=10)
self.col_label = tk.Label(col_frame, text="Board Columns", font=FONTS["medium"], bg="white")
self.col_label.grid(row=0, column=0, columnspan=7)
self.Cols_Buttons = []
place = 0
for cols in [4, 6, 8, 10, 12, 14, 16]:
x = tk.Button(col_frame, text=str(cols), font=FONTS["small"], bg="#bbbbbb",
command=lambda cols=cols: self.Select_Cols(cols))
x.grid(row=1, column=place)
self.Cols_Buttons.append(x)
place += 1
# First to Move
first_move_frame = tk.Frame(self, bg="white")
first_move_frame.pack(pady=10)
self.first_move_label = tk.Label(first_move_frame, text="First to move", bg="white", font=FONTS["medium"])
self.first_move_label.grid(row=0, column=0, columnspan=2)
self.black_btn = tk.Button(first_move_frame, text="Black", bg="#bbbbbb", font=FONTS["medium"],
command=lambda:self.Select_First_Move("black"))
self.black_btn.grid(row=1, column=0)
self.white_btn = tk.Button(first_move_frame, text="White", bg="#bbbbbb", font=FONTS["medium"],
command=lambda:self.Select_First_Move("white"))
self.white_btn.grid(row=1, column=1)
# How to win
condition_frame = tk.Frame(self, bg="white")
condition_frame.pack(pady=10)
self.condition_label = tk.Label(condition_frame, text="The winner is, the player with..",
bg="white", font=FONTS["medium"])
self.condition_label.grid(row=0, column=0, columnspan=2)
self.greater_score = tk.Button(condition_frame, text="more discs.", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Select_Condition(">"))
self.greater_score.grid(row=1, column=0)
self.lesser_score = tk.Button(condition_frame, text="less discs.", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Select_Condition("<"))
self.lesser_score.grid(row=1, column=1)
# Start the game button
self.Start_Game_Btn = tk.Button(self, text="Start", bg="#ff2222", activebackground="#992222",
font=FONTS["medium"])
self.Start_Game_Btn.pack(side="bottom")
def Select_Rule_Set(self, _set: str): # sets the rule set of the game
if _set == "simple":
self.controller.Handler.GameParams["game_type"] = 1 # Corresponds to the game logic
else:
self.controller.Handler.GameParams["game_type"] = 2
self.full_btn.destroy()
self.simple_btn.destroy()
self.rs_label.configure(text="Rule Set: " + _set.upper())
self.set_vals.append("rules")
self.Check_Can_Start()
def Select_Rows(self, rows: int): # Sets the rows of the board
self.controller.Handler.GameParams["y_size"] = rows
for button in self.Rows_Buttons:
button.destroy()
self.row_label.configure(text="Board Rows: " + str(rows))
self.set_vals.append("rows")
self.Check_Can_Start()
def Select_Cols(self, cols: int): # sets the columns of the board
self.controller.Handler.GameParams["x_size"] = cols
for button in self.Cols_Buttons:
button.destroy()
self.col_label.configure(text="Board Columns: " + str(cols))
self.set_vals.append("cols")
self.Check_Can_Start()
def Select_First_Move (self, mover: str): # Sets the first player to make a move
if mover == "black":
self.controller.Handler.GameParams["first_move"] = "B"
else:
self.controller.Handler.GameParams["first_move"] = "W"
self.black_btn.destroy()
self.white_btn.destroy()
self.first_move_label.configure(text="First to move: " + mover)
self.set_vals.append("move")
self.Check_Can_Start()
    def Select_Condition(self, condition: str): # This will set the game win condition
        self.controller.Handler.GameParams["game_winner"] = condition
        if condition == ">":
            self.condition_label.configure(text="The winner is the player with more discs.")
        else:
            self.condition_label.configure(text="The winner is the player with fewer discs.")
self.lesser_score.destroy()
self.greater_score.destroy()
self.set_vals.append("win")
self.Check_Can_Start()
    def Check_Can_Start (self): # Enable the Start button once every setting has been chosen
        if {"rules", "rows", "cols", "move", "win"} <= set(self.set_vals):
self.Start_Game_Btn.configure(bg="#22ff22", activebackground="#229922",
command=lambda: self.Start_Custom_Board())
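    # Sketch of the gate: once set_vals holds all five keys, e.g.
    # ["rules", "rows", "cols", "move", "win"], the subset test passes and
    # the Start button is armed with Start_Custom_Board.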
def Start_Custom_Board (self):
self.controller.Pages["Setup_Board"].Setup_Board()
self.controller.showPage("Setup_Board")
self.controller.Pages["Setup_Board"].Instructions_Display()
class Custom_Board (tk.Frame):
FrameName = "Setup_Board"
def __init__ (self, parent, controller):
tk.Frame.__init__ (self, parent)
self.controller = controller
self.configure(bg="white")
# Title bar
self.Title_Frame = tk.Frame(self, bg="white")
self.Title_Frame.pack(side="top", fill="x")
# Title
tk.Label(self.Title_Frame, text="Create Custom Board", bg="white", font=FONTS["medium"]).pack(side="left")
# Start Button
start = tk.Button(self.Title_Frame, text="Play", bg="#22ff22", activebackground="#229922", font=FONTS["medium"],
command=lambda: self.Start())
start.pack(side="right")
# Use custom Board check button
self.Use_Board = tk.IntVar()
        Use_Board = tk.Checkbutton(self.Title_Frame, text="Use custom board", font=FONTS["medium"],
                                   bg="white", activebackground="white",
                                   variable=self.Use_Board, onvalue=1, offvalue=0)
Use_Board.pack(side="right", padx=10)
# Board
self.Board_Area = tk.Frame(self, bg="#009900")
self.Board_Area.pack(side="top", fill="both", expand=True)
self.Board = []
def Setup_Board (self):
for widget in self.Board_Area.winfo_children():
widget.destroy()
self.Board = []
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                # Diameter corresponds to the length of the shortest side of the board
height = self.Board_Area.winfo_height()
width = self.Board_Area.winfo_width()
if height > width:
diameter = width/self.controller.Handler.GameParams["x_size"]
else:
diameter = height/self.controller.Handler.GameParams["y_size"]
self.Board_Area.grid_columnconfigure(x, weight=1)
self.Board_Area.grid_rowconfigure(y, weight=1)
disc = wg.Disc(self.Board_Area, self.controller, diameter=diameter, mode="setup")
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
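    # Sizing example (assumed numbers): for a 600x400 pixel board area and
    # an 8x8 board, the height (400) is the shorter side, so every disc
    # gets a diameter of 400 / 8 = 50 pixels.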
def Parse_Board (self) -> list: # This will parse the GUI board and create a board that will work for the Game()
new_board = []
for row in self.Board:
new_row = []
for disc in row:
if disc.Current_Color == "white":
new_row.append("W")
elif disc.Current_Color == "black":
new_row.append("B")
else:
new_row.append(None)
new_board.append(new_row)
return new_board
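    # Example: parsing a 4x4 setup whose two centre rows were clicked to
    # the usual Othello opening would return
    #   [[None, None, None, None],
    #    [None, "W",  "B",  None],
    #    [None, "B",  "W",  None],
    #    [None, None, None, None]]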
def Instructions_Display(self):
showinfo("How to use", "Click on a tile to cycle between white, black or empty. Check the \"Use Custom Board\" box to use this board!")
    def Start (self): # This will check if the user wants to use a custom board and, if so, set the Game board to the user's selection
if self.Use_Board.get():
self.controller.Handler.GameParams["board"] = self.Parse_Board()
self.controller.Begin_Game()
self.controller.Pages["Game"].__GUI_init__()
self.controller.Pages["Game"].Update_Board()
self.controller.showPage("Game")
class Game (tk.Frame): # This is the 'stage' where the game will be played.
FrameName = "Game"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
# Status Bar
self.Status_Bar = tk.Frame(self, bg="white")
self.Status_Bar.pack(side="top", fill="x")
self.Status_Bar.grid_columnconfigure(0, weight=1)
self.Status_Bar.grid_columnconfigure(1, weight=1)
self.Status_Bar.grid_columnconfigure(2, weight=1)
self.Status_Bar.grid_rowconfigure(0, weight=1)
self.Current_Player = tk.Label(self.Status_Bar, text="None", bg="white", font=FONTS["medium"])
self.Current_Player.grid(row=0, column=0)
self.Game_Type = tk.Label(self.Status_Bar, text="FULL", bg="white", font=FONTS["medium"])
self.Game_Type.grid(row=0, column=1)
self.Score = tk.Label(self.Status_Bar, text="Black: 2 | 2:White", bg="white", font=FONTS["medium"])
self.Score.grid(row=0, column=2)
# Board
self.Board_Area = tk.Frame(self, bg="#009900")
self.Board_Area.pack(side="top", fill="both", expand=True)
self.Board = []
    def __GUI_init__ (self): # This will initiate the game board once all the data is provided.
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                # Diameter corresponds to the length of the shortest side of the board
height = self.Board_Area.winfo_height()
width = self.Board_Area.winfo_width()
if height > width:
diameter = width/self.controller.Handler.GameParams["x_size"]
else:
diameter = height/self.controller.Handler.GameParams["y_size"]
self.Board_Area.grid_columnconfigure(x, weight=1)
self.Board_Area.grid_rowconfigure(y, weight=1)
disc = wg.Disc(self.Board_Area, self.controller, diameter=diameter,
command= lambda x=x, y=y: self.Disc_Function(x, y))
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
self.Update_Board()
    def Reset_Game(self): # This will reset the game board to its initial state
self.Board = []
for widget in self.Board_Area.winfo_children():
widget.destroy()
def Disc_Function (self, x: int, y: int): # This is the function run when the player clicks a disc slot/disc
if not self.controller.Handler.Move(x+1, y+1): # Try run the Move function on the Handler
self.Invalid_Move()
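    # The GUI grid is 0-indexed while the game logic expects 1-based
    # coordinates (hence the x+1 / y+1 above): clicking the top-left slot
    # calls Handler.Move(1, 1).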
    def Invalid_Move(self): # This command will run when a player tries to make a move that's not possible
showerror("Invalid Move", "You cannot move there!")
    def Update_Board (self): # Update the board to match the Game() board
for y in range(len(self.Board)):
for x in range(len(self.Board[y])):
game_piece = self.controller.Handler.Game.Board[y][x]
                if game_piece is None:
pass
elif game_piece == "B":
if self.Board[y][x].Current_Color != "black":
self.Board[y][x].Set_Piece_Color("black")
elif game_piece == "W":
if self.Board[y][x].Current_Color != "white":
self.Board[y][x].Set_Piece_Color("white")
def Update_Current_Player (self): # Update the current player identifier
self.Current_Player.config(text="Turn: " + self.controller.Get_Current_Player())
def Update_Game_Type(self): # Update the game type identifier
g_type = self.controller.Handler.Get_Game_Type()
self.Game_Type.configure(text="Rules: " + g_type)
def Update_Score (self): # Update the score identifier
b, w = self.controller.Handler.Get_Score()
self.Score.configure(text="Black: {0!s} | {1!s} :White".format(b, w))
def Full_Update(self): # Run a full update on the graphics
self.Update_Score()
self.Update_Current_Player()
self.Update_Board()
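    # Full_Update is the one-call refresh the Handler can use after each
    # move: score, turn indicator and board are synchronised with the game
    # state together (presumably invoked from the Handler's update step).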
class Postgame (tk.Frame): # The 'end game' screen
FrameName = "Postgame"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
# Set a page title
self.Title = tk.Label(self, text="Game Over!", bg="white", font=FONTS["large"])
self.Title.pack(side="top")
Separator(self, orient="horizontal").pack(side="top", fill="x", padx=10)
# Set the winner text object
self.Winner = tk.Label(self, text="The winner is black-discs.", bg="white", font=FONTS["medium"])
self.Winner.pack(side="top")
# Create the replay and exit buttons
self.Buttons = tk.Frame(self, bg="white")
self.Buttons.pack()
Replay = tk.Button(self.Buttons, text="Replay", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Replay())
Replay.grid(row=0, column=0)
Quit = tk.Button(self.Buttons, text="Quit", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Quit())
Quit.grid(row=0, column=1)
# the area for the board output
self.Board_Area = tk.Frame(self, bg="white")
self.Board_Area.pack(side="bottom")
# Score text
self.Score = tk.Label(self.Board_Area, text="", bg="white", font=FONTS["medium"])
self.Score.pack()
# The display for the board
self.Board_Display = tk.Frame(self.Board_Area, bg="green")
self.Board_Display.pack()
self.Board = []
def Replay(self): # Initiate the Replay
self.controller.Replay()
def Quit(self): # Kill the game
self.controller.destroy()
exit()
    def Update_Board (self): # Update the game board display, kill old, create new
        for widget in self.Board_Display.winfo_children():
            widget.destroy()
        self.Board = []  # reset so rows do not accumulate across replays
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                self.Board_Display.grid_columnconfigure(x, weight=1)
                self.Board_Display.grid_rowconfigure(y, weight=1)
col = None
place_col = self.controller.Handler.Game.Board[y][x]
if place_col == "B":
col = "black"
elif place_col == "W":
col = "white"
disc = wg.Disc(self.Board_Display, self.controller, col=col, diameter=50)
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
def Update(self): # Update the whole page
winner, scores = self.controller.Handler.Get_Winner()
if winner.lower() == "b":
winner = "black-discs"
elif winner.lower() == "w":
winner = "white-discs"
        else:
            winner = "no one"
self.Winner.configure(text="The winner is " + winner)
self.Score.configure(text="Black: {0!s} | {1!s}:White".format(scores[0], scores[1]))
self.Update_Board()
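# Hypothetical usage sketch: once the Handler detects a finished game it
# can raise and refresh this page, e.g.
#   window.showPage("Postgame")
#   window.Pages["Postgame"].Update()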
if __name__ == "__main__":
Window = Handler()
self.configure(bg='white')\n self.Title = tk.Label(self, text='Game Over!', bg='white', font=\n FONTS['large'])\n self.Title.pack(side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n self.Winner = tk.Label(self, text='The winner is black-discs.', bg=\n 'white', font=FONTS['medium'])\n self.Winner.pack(side='top')\n self.Buttons = tk.Frame(self, bg='white')\n self.Buttons.pack()\n Replay = tk.Button(self.Buttons, text='Replay', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Replay())\n Replay.grid(row=0, column=0)\n Quit = tk.Button(self.Buttons, text='Quit', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Quit())\n Quit.grid(row=0, column=1)\n self.Board_Area = tk.Frame(self, bg='white')\n self.Board_Area.pack(side='bottom')\n self.Score = tk.Label(self.Board_Area, text='', bg='white', font=\n FONTS['medium'])\n self.Score.pack()\n self.Board_Display = tk.Frame(self.Board_Area, bg='green')\n self.Board_Display.pack()\n self.Board = []\n\n def Replay(self):\n self.controller.Replay()\n\n def Quit(self):\n self.controller.destroy()\n exit()\n\n def Update_Board(self):\n for widget in self.Board_Display.winfo_children():\n widget.destroy()\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n col = None\n place_col = self.controller.Handler.Game.Board[y][x]\n if place_col == 'B':\n col = 'black'\n elif place_col == 'W':\n col = 'white'\n disc = wg.Disc(self.Board_Display, self.controller, col=col,\n diameter=50)\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Update(self):\n winner, scores = self.controller.Handler.Get_Winner()\n if winner.lower() == 'b':\n winner = 'black-discs'\n elif winner.lower() == 'w':\n winner = 'white-discs'\n else:\n winner == 'no one'\n self.Winner.configure(text='The winner is ' + winner)\n self.Score.configure(text='Black: {0!s} | {1!s}:White'.format(\n scores[0], scores[1]))\n self.Update_Board()\n\n\n<mask token>\n",
"step-5": "import tkinter \t\tas tk\nimport Widgets \t\tas wg\nimport Logic \t\tas lgc\nfrom tkinter.ttk \timport Separator\nfrom tkinter.messagebox import showerror, showinfo\n\n# Fonts that we can utilise\nFONTS = {\"large\":(\"Helvetica\", 20), \"medium\":(\"Helvetica\", 16), \"small\":(\"Helvetica\", 12)}\n\nclass Handler: # Handles the window and the Game interaction\n\tdef __init__(self):\n\n\t\t# Game Handle\n\t\tself.Game = None\n\t\tself.GameParams = {}\n\n\t\t# Window Handle\n\t\tself.Window = Window(self)\n\t\tself.Window.mainloop()\n\n\tdef Replay (self): # Reset attributes and classes\n\t\tself.GameParams = {}\n\t\tdel self.Game\n\t\tself.Game = None\n\t\t\n\tdef Is_Running (self):\n\t\treturn self.Game.Running\n\n\tdef Start_Game(self): # Begin the game, run the updates needed.\n\t\tself.Game = lgc.Game(**self.GameParams)\n\t\tself.Game.Start_Game()\n\n\t\t# Update Game page\n\t\tself.Update_Game()\n\t\tself.Window.Pages[\"Game\"].Update_Game_Type()\n\n\tdef Get_Current_Player(self) -> str: # get the current player whose turn it is\n\t\tif self.Game.Running:\n\t\t\tif self.Game.Current_Player == \"B\":\n\t\t\t\treturn \"black\"\n\t\t\telse:\n\t\t\t\treturn \"white\"\n\t\telse:\n\t\t\treturn \"None\"\n\n\tdef Get_Game_Type(self) -> str: # Get the game rule type\n\t\tg = self.Game.Game_Type\n\t\tif g == 1:\n\t\t\treturn \"SIMPLE\"\n\t\telse:\n\t\t\treturn \"FULL\"\n\n\tdef Get_Score(self) -> tuple: # Get the current score\n\t\ts = self.Game.Get_Discs()\n\t\treturn s[0], s[1] # b, w\n\n\tdef Move(self, x: int, y: int) -> bool: # Make a move on a given place\n\t\tcomplete = self.Game.Next_Move(x, y)\n\t\tif complete:\n\t\t\tself.Update_Game()\n\t\t\tself.Game_Complete_Check()\n\t\t\treturn True\n\t\tself.Update_Game()\n\t\tself.Game_Complete_Check()\n\t\treturn False\n\n\tdef Get_Winner(self) -> tuple: # Gets the winner of the game\n\t\treturn self.Game.Check_Winner()\n\n\tdef Game_Complete_Check(self): # Check if the game is over and act accordingly\n\t\tif self.Is_Running() == False:\n\t\t\t# Run Game Over feature here\n\t\t\tself.Window.showPage(\"Postgame\")\n\t\t\t# Update the post page\n\t\t\tself.Window.Pages[\"Postgame\"].Update()\n\n\tdef Update_Game(self): # Run a full update on the game\n\t\tself.Window.Pages[\"Game\"].Full_Update()\n\nclass Window (tk.Tk): # This will be the main window of the GUI\n\tdef __init__ (self, controller, *args, **kwargs):\n\t\ttk.Tk.__init__(self, *args, **kwargs)\n\n\t\tself.Handler = controller # This is handler between the game and window\n\n\t\t# Root attributes\n\t\tself.title(\"Othello\")\n\t\t\n\t\ttry:\n\t\t\tself.iconbitmap(\"Icon.ico\")\n\t\texcept:\n\t\t\tpass\n\n\t\tself.minsize(600, 600)\n\t\t#self.maxsize(1000,1000)\n\n\t\t# Master frame\n\t\tself.container = tk.Frame(self)\n\t\tself.container.pack(side=\"top\", fill=\"both\", expand=True)\n\t\tself.container.grid_rowconfigure(0, weight=1)\n\t\tself.container.grid_columnconfigure(0, weight=1)\n\n\t\t# Set up the pages\n\t\tself.Pages = {}\n\t\tfor page in (Pregame, Custom_Board, Game, Postgame):\n\t\t\t# Initiate each page and add them to the dictionary\n\t\t\t# Dictionary will use the name of the class so that it can be accessed\n\t\t\t# without the knowledge of the clas name\n\t\t\tnew = page(self.container, self)\n\t\t\tself.Pages[page.FrameName] = new\n\t\t\tnew.grid(row=0, column=0, sticky=\"nsew\")\n\n\t\t# Show the initial page\n\t\tself.showPage(\"Pregame\")\n\n\t# Window\n\n\tdef showPage(self, pagename: str): # Show a chosen page\n\t\tpage = 
self.Pages[pagename]\n\t\tpage.tkraise()\n\n\t# Game\n\tdef Begin_Game(self): # Start the game\n\t\tself.Handler.Start_Game()\n\n\tdef Get_Current_Player (self) -> str: # Get the current player\n\t\treturn self.Handler.Get_Current_Player()\n\n\tdef Replay(self): # Clean up the old game, start an new one\n\t\tself.Pages[\"Pregame\"].__GUI_Reset__()\n\t\tself.Pages[\"Game\"].Reset_Game()\n\t\tself.Handler.Replay()\n\t\tself.showPage(\"Pregame\")\n\nclass Pregame (tk.Frame): # The 'home' screen\n\tFrameName = \"Pregame\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\t\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\tself.set_vals = []\n\n\t\tself.__GUI_Reset__()\n\n\tdef __GUI_Reset__(self): # This will clean the screen and then recreate it, this is essential for replaying the game\n\t\tfor widget in self.winfo_children():\n\t\t\twidget.destroy()\n\n\t\t# Title Banner\n\t\ttk.Label(self, text=\"Otello\", font=FONTS[\"large\"], bg=\"white\").pack(side=\"top\")\n\t\tSeparator(self, orient=\"horizontal\").pack(side=\"top\", fill=\"x\", padx=10)\n\n\t\t# Rule Set\n\t\trule_set_frame = tk.Frame(self, bg=\"white\")\n\t\trule_set_frame.pack(pady=10)\n\t\t# Subheading\n\t\tself.rs_label = tk.Label(rule_set_frame, text=\"Rule Set\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.rs_label.pack(side=\"top\")\n\n\t\tself.full_btn = tk.Button(rule_set_frame, text=\"FULL\", font=FONTS[\"medium\"], bg=\"#bbbbbb\",\n\t\t\tcommand=lambda:self.Select_Rule_Set(\"full\"))\n\t\tself.full_btn.pack()\n\n\t\tself.simple_btn = tk.Button(rule_set_frame, text=\"SIMPLE\", font=FONTS[\"medium\"], bg=\"#bbbbbb\",\n\t\t\tcommand=lambda:self.Select_Rule_Set(\"simple\"))\n\t\tself.simple_btn.pack()\n\n\t\t# Row Size\n\t\trow_frame = tk.Frame(self, bg=\"white\")\n\t\trow_frame.pack(pady=10)\n\n\t\tself.row_label = tk.Label(row_frame, text=\"Board Rows\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.row_label.grid(row=0, column=0, columnspan=7)\n\n\t\tself.Rows_Buttons = []\n\n\t\tplace = 0\n\t\tfor rows in [4, 6, 8, 10, 12, 14, 16]:\n\t\t\tx = tk.Button(row_frame, text=str(rows), font=FONTS[\"small\"], bg=\"#bbbbbb\",\n\t\t\t\tcommand=lambda rows=rows: self.Select_Rows(rows))\n\t\t\tx.grid(row=1, column=place)\n\t\t\tself.Rows_Buttons.append(x)\n\t\t\tplace += 1\n\n\t\t# Column Size\n\t\tcol_frame = tk.Frame(self, bg=\"white\")\n\t\tcol_frame.pack(pady=10)\n\n\t\tself.col_label = tk.Label(col_frame, text=\"Board Columns\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.col_label.grid(row=0, column=0, columnspan=7)\n\n\t\tself.Cols_Buttons = []\n\n\t\tplace = 0\n\t\tfor cols in [4, 6, 8, 10, 12, 14, 16]:\n\t\t\tx = tk.Button(col_frame, text=str(cols), font=FONTS[\"small\"], bg=\"#bbbbbb\",\n\t\t\t\tcommand=lambda cols=cols: self.Select_Cols(cols))\n\t\t\tx.grid(row=1, column=place)\n\t\t\tself.Cols_Buttons.append(x)\n\t\t\tplace += 1\n\n\t\t# First to Move\n\t\tfirst_move_frame = tk.Frame(self, bg=\"white\")\n\t\tfirst_move_frame.pack(pady=10)\n\n\t\tself.first_move_label = tk.Label(first_move_frame, text=\"First to move\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.first_move_label.grid(row=0, column=0, columnspan=2)\n\n\t\tself.black_btn = tk.Button(first_move_frame, text=\"Black\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda:self.Select_First_Move(\"black\"))\n\t\tself.black_btn.grid(row=1, column=0)\n\n\t\tself.white_btn = tk.Button(first_move_frame, text=\"White\", bg=\"#bbbbbb\", 
font=FONTS[\"medium\"],\n\t\t\tcommand=lambda:self.Select_First_Move(\"white\"))\n\t\tself.white_btn.grid(row=1, column=1)\n\n\t\t# How to win\n\t\tcondition_frame = tk.Frame(self, bg=\"white\")\n\t\tcondition_frame.pack(pady=10)\n\n\t\tself.condition_label = tk.Label(condition_frame, text=\"The winner is, the player with..\",\n\t\t\tbg=\"white\", font=FONTS[\"medium\"])\n\t\tself.condition_label.grid(row=0, column=0, columnspan=2)\n\n\t\tself.greater_score = tk.Button(condition_frame, text=\"more discs.\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Select_Condition(\">\"))\n\t\tself.greater_score.grid(row=1, column=0)\n\n\t\tself.lesser_score = tk.Button(condition_frame, text=\"less discs.\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Select_Condition(\"<\"))\n\t\tself.lesser_score.grid(row=1, column=1)\n\n\n\t\t# Start the game button\n\t\tself.Start_Game_Btn = tk.Button(self, text=\"Start\", bg=\"#ff2222\", activebackground=\"#992222\",\n\t\t\t\t\t\t\t\t\tfont=FONTS[\"medium\"])\n\t\tself.Start_Game_Btn.pack(side=\"bottom\")\n\n\tdef Select_Rule_Set(self, _set: str): # sets the rule set of the game\n\t\tif _set == \"simple\":\n\t\t\tself.controller.Handler.GameParams[\"game_type\"] = 1 # Corresponds to the game logic\n\t\telse:\n\t\t\tself.controller.Handler.GameParams[\"game_type\"] = 2\n\n\t\tself.full_btn.destroy()\n\t\tself.simple_btn.destroy()\n\t\tself.rs_label.configure(text=\"Rule Set: \" + _set.upper())\n\n\t\tself.set_vals.append(\"rules\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Rows(self, rows: int): # Sets the rows of the board\n\t\tself.controller.Handler.GameParams[\"y_size\"] = rows\n\n\t\tfor button in self.Rows_Buttons:\n\t\t\tbutton.destroy()\n\n\t\tself.row_label.configure(text=\"Board Rows: \" + str(rows))\n\n\t\tself.set_vals.append(\"rows\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Cols(self, cols: int): # sets the columns of the board\n\t\tself.controller.Handler.GameParams[\"x_size\"] = cols\n\n\t\tfor button in self.Cols_Buttons:\n\t\t\tbutton.destroy()\n\n\t\tself.col_label.configure(text=\"Board Columns: \" + str(cols))\n\t\t\n\t\tself.set_vals.append(\"cols\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_First_Move (self, mover: str): # Sets the first player to make a move\n\t\tif mover == \"black\":\n\t\t\tself.controller.Handler.GameParams[\"first_move\"] = \"B\"\n\t\telse:\n\t\t\tself.controller.Handler.GameParams[\"first_move\"] = \"W\"\n\n\t\tself.black_btn.destroy()\n\t\tself.white_btn.destroy()\n\n\t\tself.first_move_label.configure(text=\"First to move: \" + mover)\n\n\t\tself.set_vals.append(\"move\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Condition(self, condition: str):# This will set the game win condition\n\t\tself.controller.Handler.GameParams[\"game_winner\"] = condition\n\n\t\tif condition == \">\":\n\t\t\tself.condition_label.configure(text=\"The winner is, the player with more discs.\")\n\t\telse:\n\t\t\tself.condition_label.configure(text=\"The winner is, the player with less discs.\")\n\n\t\tself.lesser_score.destroy()\n\t\tself.greater_score.destroy()\n\n\t\tself.set_vals.append(\"win\")\n\t\tself.Check_Can_Start()\n\n\tdef Check_Can_Start (self): # This will start the game if the game can be started\n\t\tif \"rules\" in self.set_vals and\\\n\t\t \"rows\" in self.set_vals and\\\n\t\t \"cols\" in self.set_vals and\\\n\t\t \"move\" in self.set_vals and\\\n\t\t \"win\" in self.set_vals:\n\t\t self.Start_Game_Btn.configure(bg=\"#22ff22\", activebackground=\"#229922\",\n\t\t 
\tcommand=lambda: self.Start_Custom_Board())\n\n\tdef Start_Custom_Board (self):\n\t\tself.controller.Pages[\"Setup_Board\"].Setup_Board()\n\t\tself.controller.showPage(\"Setup_Board\")\n\t\tself.controller.Pages[\"Setup_Board\"].Instructions_Display()\n\nclass Custom_Board (tk.Frame):\n\tFrameName = \"Setup_Board\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__ (self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Title bar\n\t\tself.Title_Frame = tk.Frame(self, bg=\"white\")\n\t\tself.Title_Frame.pack(side=\"top\", fill=\"x\")\n\n\t\t# Title\n\t\ttk.Label(self.Title_Frame, text=\"Create Custom Board\", bg=\"white\", font=FONTS[\"medium\"]).pack(side=\"left\")\n\n\t\t# Start Button\n\t\tstart = tk.Button(self.Title_Frame, text=\"Play\", bg=\"#22ff22\", activebackground=\"#229922\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Start())\n\t\tstart.pack(side=\"right\")\t\t\n\n\n\t\t# Use custom Board check button\n\t\tself.Use_Board = tk.IntVar()\n\n\t\tUse_Board = tk.Checkbutton(self.Title_Frame, text=\"Use custom board\", font=FONTS[\"medium\"],\n\t\t\tbg=\"white\", activebackground=\"white\",\n\t\t\tvar=self.Use_Board, onvalue=1, offvalue=0)\n\t\tUse_Board.pack(side=\"right\", padx=10)\n\n\t\t\n\t\t# Board\n\t\tself.Board_Area = tk.Frame(self, bg=\"#009900\")\n\t\tself.Board_Area.pack(side=\"top\", fill=\"both\", expand=True)\n\n\t\tself.Board = []\n\n\tdef Setup_Board (self):\n\t\tfor widget in self.Board_Area.winfo_children():\n\t\t\twidget.destroy()\n\t\tself.Board = []\n\n\t\t\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\t# Diameter with respond to the length of the shortest side of the board\n\t\t\t\theight = self.Board_Area.winfo_height()\n\t\t\t\twidth = self.Board_Area.winfo_width()\n\n\t\t\t\tif height > width:\n\t\t\t\t\tdiameter = width/self.controller.Handler.GameParams[\"x_size\"]\n\t\t\t\telse:\n\t\t\t\t\tdiameter = height/self.controller.Handler.GameParams[\"y_size\"]\n\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tdisc = wg.Disc(self.Board_Area, self.controller, diameter=diameter, mode=\"setup\")\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\tdef Parse_Board (self) -> list: # This will parse the GUI board and create a board that will work for the Game()\n\t\tnew_board = []\n\t\tfor row in self.Board:\n\t\t\tnew_row = []\n\t\t\tfor disc in row:\n\t\t\t\tif disc.Current_Color == \"white\":\n\t\t\t\t\tnew_row.append(\"W\")\n\t\t\t\telif disc.Current_Color == \"black\":\n\t\t\t\t\tnew_row.append(\"B\")\n\t\t\t\telse:\n\t\t\t\t\tnew_row.append(None)\n\t\t\tnew_board.append(new_row)\n\n\t\treturn new_board\n\n\tdef Instructions_Display(self):\n\t\tshowinfo(\"How to use\", \"Click on a tile to cycle between white, black or empty. 
Check the \\\"Use Custom Board\\\" box to use this board!\")\n\n\tdef Start (self): # This will check if the user wants to use a custom board and then will set Game board to be the users selection\n\t\tif self.Use_Board.get():\n\t\t\tself.controller.Handler.GameParams[\"board\"] = self.Parse_Board()\n\t\tself.controller.Begin_Game()\n\t\tself.controller.Pages[\"Game\"].__GUI_init__()\n\t\tself.controller.Pages[\"Game\"].Update_Board()\n\t\tself.controller.showPage(\"Game\")\n\nclass Game (tk.Frame): # This is the 'stage' where the game will be played.\n\tFrameName = \"Game\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Status Bar\n\t\tself.Status_Bar = tk.Frame(self, bg=\"white\")\n\t\tself.Status_Bar.pack(side=\"top\", fill=\"x\")\n\n\t\tself.Status_Bar.grid_columnconfigure(0, weight=1)\n\t\tself.Status_Bar.grid_columnconfigure(1, weight=1)\n\t\tself.Status_Bar.grid_columnconfigure(2, weight=1)\n\t\tself.Status_Bar.grid_rowconfigure(0, weight=1)\n\n\t\tself.Current_Player = tk.Label(self.Status_Bar, text=\"None\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Current_Player.grid(row=0, column=0)\n\n\t\tself.Game_Type = tk.Label(self.Status_Bar, text=\"FULL\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Game_Type.grid(row=0, column=1)\n\n\t\tself.Score = tk.Label(self.Status_Bar, text=\"Black: 2 | 2:White\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Score.grid(row=0, column=2)\n\n\t\t# Board\n\t\tself.Board_Area = tk.Frame(self, bg=\"#009900\")\n\t\tself.Board_Area.pack(side=\"top\", fill=\"both\", expand=True)\n\n\t\tself.Board = []\n\n\tdef __GUI_init__ (self): # This will initiate the game board once all the datya is provided.\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\t# Diameter with respond to the length of the shortest side of the board\n\t\t\t\theight = self.Board_Area.winfo_height()\n\t\t\t\twidth = self.Board_Area.winfo_width()\n\n\t\t\t\tif height > width:\n\t\t\t\t\tdiameter = width/self.controller.Handler.GameParams[\"x_size\"]\n\t\t\t\telse:\n\t\t\t\t\tdiameter = height/self.controller.Handler.GameParams[\"y_size\"]\n\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tdisc = wg.Disc(self.Board_Area, self.controller, diameter=diameter,\n\t\t\t\t\tcommand= lambda x=x, y=y: self.Disc_Function(x, y))\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\t\tself.Update_Board()\n\n\tdef Reset_Game(self): #This will reset the game board to its initial state\n\t\tself.Board = []\n\t\tfor widget in self.Board_Area.winfo_children():\n\t\t\twidget.destroy()\n\n\tdef Disc_Function (self, x: int, y: int): # This is the function run when the player clicks a disc slot/disc\n\t\tif not self.controller.Handler.Move(x+1, y+1): # Try run the Move function on the Handler\n\t\t\tself.Invalid_Move()\n\n\tdef Invalid_Move(self): # This command will run when a player tries to make a move thats not possible\n\t\tshowerror(\"Invalid Move\", \"You cannot move there!\")\n\n\tdef Update_Board (self): # Update the board to mathe the Game() board\n\t\tfor y in range(len(self.Board)):\n\t\t\tfor x in range(len(self.Board[y])):\n\t\t\t\tgame_piece = self.controller.Handler.Game.Board[y][x]\n\t\t\t\tif game_piece == 
None:\n\t\t\t\t\tpass\n\t\t\t\telif game_piece == \"B\":\n\t\t\t\t\tif self.Board[y][x].Current_Color != \"black\":\n\t\t\t\t\t\tself.Board[y][x].Set_Piece_Color(\"black\")\n\t\t\t\telif game_piece == \"W\":\n\t\t\t\t\tif self.Board[y][x].Current_Color != \"white\":\n\t\t\t\t\t\tself.Board[y][x].Set_Piece_Color(\"white\")\n\n\tdef Update_Current_Player (self): # Update the current player identifier\n\t\tself.Current_Player.config(text=\"Turn: \" + self.controller.Get_Current_Player())\n\n\tdef Update_Game_Type(self): # Update the game type identifier\n\t\tg_type = self.controller.Handler.Get_Game_Type()\n\t\tself.Game_Type.configure(text=\"Rules: \" + g_type)\n\n\tdef Update_Score (self): # Update the score identifier\n\t\tb, w = self.controller.Handler.Get_Score()\n\t\tself.Score.configure(text=\"Black: {0!s} | {1!s} :White\".format(b, w))\n\n\tdef Full_Update(self): # Run a full update on the graphics\n\t\tself.Update_Score()\n\t\tself.Update_Current_Player()\n\t\tself.Update_Board()\n\nclass Postgame (tk.Frame): # The 'end game' screen\n\tFrameName = \"Postgame\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Set a page title\n\t\tself.Title = tk.Label(self, text=\"Game Over!\", bg=\"white\", font=FONTS[\"large\"])\n\t\tself.Title.pack(side=\"top\")\n\n\t\tSeparator(self, orient=\"horizontal\").pack(side=\"top\", fill=\"x\", padx=10)\n\n\t\t# Set the winner text object\n\t\tself.Winner = tk.Label(self, text=\"The winner is black-discs.\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Winner.pack(side=\"top\")\n\n\t\t# Create the replay and exit buttons\n\t\tself.Buttons = tk.Frame(self, bg=\"white\")\n\t\tself.Buttons.pack()\n\n\t\tReplay = tk.Button(self.Buttons, text=\"Replay\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Replay())\n\t\tReplay.grid(row=0, column=0)\n\n\t\tQuit = tk.Button(self.Buttons, text=\"Quit\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Quit())\n\t\tQuit.grid(row=0, column=1)\n\n\t\t# the area for the board output\n\t\tself.Board_Area = tk.Frame(self, bg=\"white\")\n\t\tself.Board_Area.pack(side=\"bottom\")\n\n\t\t# Score text\n\t\tself.Score = tk.Label(self.Board_Area, text=\"\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Score.pack()\n\n\t\t# The display for the board\n\t\tself.Board_Display = tk.Frame(self.Board_Area, bg=\"green\")\n\t\tself.Board_Display.pack()\n\n\t\tself.Board = []\n\n\tdef Replay(self): # Initiate the Replay\n\t\tself.controller.Replay()\n\n\tdef Quit(self): # Kill the game\n\t\tself.controller.destroy()\n\t\texit()\n\n\tdef Update_Board (self): # Update the game board display, kill old, create new\n\t\tfor widget in self.Board_Display.winfo_children():\n\t\t\twidget.destroy()\n\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tcol = None\n\t\t\t\tplace_col = self.controller.Handler.Game.Board[y][x]\n\t\t\t\tif place_col == \"B\":\n\t\t\t\t\tcol = \"black\"\n\t\t\t\telif place_col == \"W\":\n\t\t\t\t\tcol = \"white\"\n\n\t\t\t\tdisc = wg.Disc(self.Board_Display, self.controller, col=col, diameter=50)\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\tdef Update(self): # Update the 
whole page\n\t\twinner, scores = self.controller.Handler.Get_Winner() \n\t\tif winner.lower() == \"b\":\n\t\t\twinner = \"black-discs\"\n\t\telif winner.lower() == \"w\":\n\t\t\twinner = \"white-discs\"\n\t\telse:\n\t\t\twinner == \"no one\"\n\t\tself.Winner.configure(text=\"The winner is \" + winner)\n\t\tself.Score.configure(text=\"Black: {0!s} | {1!s}:White\".format(scores[0], scores[1]))\n\t\tself.Update_Board()\n\nif __name__ == \"__main__\":\n\tWindow = Handler()\n",
"step-ids": [
39,
40,
41,
52,
59
]
}
|
[
39,
40,
41,
52,
59
] |
# %%
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.manifold import TSNE
from sklearn.decomposition import PCA, TruncatedSVD
import matplotlib.patches as mpatches
import time
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
import collections
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from imblearn.pipeline import make_pipeline as imbalanced_make_pipeline
from imblearn.over_sampling import SMOTE
from imblearn.under_sampling import NearMiss
from imblearn.metrics import classification_report_imbalanced
from sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score, accuracy_score, classification_report, confusion_matrix, plot_confusion_matrix
from collections import Counter
from sklearn.model_selection import KFold, StratifiedKFold, train_test_split, cross_val_score, GridSearchCV, cross_val_predict
from sklearn.preprocessing import RobustScaler
from scipy.stats import norm
import keras
from keras import backend as K
from keras.models import Sequential
from keras.layers import Activation, Dense
from keras.optimizers import Adam
from keras.metrics import categorical_crossentropy
from keras.callbacks import ModelCheckpoint
import itertools
# %%
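# Load the credit-card transactions; 'Class' marks fraud (1) vs normal (0).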
dataset = pd.read_csv('./dataset/creditcard.csv')
dataset.head()
# %%
dataset.describe()
# %%
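# RobustScaler centres on the median and scales by the IQR, so the heavy-tailed
# Amount and Time columns are less distorted by extreme values than with
# standard z-scoring.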
robustScaler = RobustScaler()
dataset['scaled_amount'] = robustScaler.fit_transform(
dataset['Amount'].values.reshape(-1, 1))
dataset['scaled_time'] = robustScaler.fit_transform(
dataset['Time'].values.reshape(-1, 1))
# %%
dataset.drop(['Amount', 'Time'], axis=1, inplace=True)
dataset.head()
# %%
X = dataset.drop(['Class'], axis=1)
Y = dataset['Class']
# %%
# Stratified folds preserve the fraud/normal ratio in every split; note that
# the loop overwrites the variables on each iteration, so only the *last*
# fold's train/test split is actually used below.
# shuffle=True is required by newer scikit-learn whenever random_state is set.
SKfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)
for train_index, test_index in SKfold.split(X, Y):
    og_X_train, og_X_test = X.iloc[train_index], X.iloc[test_index]
    og_Y_train, og_Y_test = Y.iloc[train_index], Y.iloc[test_index]
# %%
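# Keep plain NumPy copies of the stratified split; og_X_test / og_Y_test are
# the untouched, imbalanced hold-out used to evaluate every model below.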
og_X_train = og_X_train.values
og_X_test = og_X_test.values
og_Y_train = og_Y_train.values
og_Y_test = og_Y_test.values
# %%
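# Random undersampling: keep all 492 fraud rows plus the first 492 normal rows
# of the shuffled frame, giving a balanced 1:1 training dataset.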
dataset = dataset.sample(frac=1, random_state=42)
fraud = dataset.loc[dataset['Class'] == 1]
normal = dataset.loc[dataset['Class'] == 0][:492]
nd_dataset = pd.concat([fraud, normal])
nd_dataset = nd_dataset.sample(frac=1, random_state=42)
nd_dataset.head()
# %%
nd_X = nd_dataset.drop("Class", axis=1)
nd_Y = nd_dataset["Class"]
# %%
nd_Xtrain, nd_Xtest, nd_Ytrain, nd_Ytest = train_test_split(
nd_X, nd_Y, random_state=42, test_size=0.2)
nd_Xtrain = nd_Xtrain.values
nd_Xtest = nd_Xtest.values
nd_Ytrain = nd_Ytrain.values
nd_Ytest = nd_Ytest.values
# %%
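# Small feed-forward classifier: an input-sized dense layer, one 32-unit
# hidden layer, and a 2-way softmax over (normal, fraud).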
n_inputs = nd_Xtrain.shape[1]
undersample_model = Sequential([
Dense(n_inputs, input_shape=(n_inputs,), activation="relu"),
Dense(32, activation="relu"),
Dense(2, activation="softmax")
])
# %%
undersample_model.summary()
# %%
undersample_model.compile(
    Adam(lr=0.001), loss='sparse_categorical_crossentropy', metrics=["accuracy"])
# Note: Adam(lr=...) and the 'val_acc' monitor key assume standalone Keras 2.x;
# newer tf.keras expects learning_rate= and monitor='val_accuracy'.
modelcheckpoint = ModelCheckpoint(
    "models/undersample_model.h5", save_best_only=True, monitor="val_acc")
undersample_model.fit(nd_Xtrain, nd_Ytrain, validation_split=0.2, epochs=20,
                      batch_size=25, shuffle=True, verbose=2, callbacks=[modelcheckpoint])
# %%
undersample_pred = undersample_model.predict(og_X_test, verbose=2)
# %%
# predict_classes exists on Sequential in standalone Keras; in newer tf.keras
# use np.argmax(model.predict(...), axis=1) instead.
undersample_pred_classes = undersample_model.predict_classes(
    og_X_test, verbose=2)
# %%
confmat = confusion_matrix(og_Y_test, undersample_pred_classes)
print(confmat)
# %%
def plotTensorflowConfmat(confmat, classes):
    """Render a confusion matrix as a heatmap with per-cell counts."""
    plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)
    plt.title("Confusion Matrix")
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    plt.tight_layout()
    plt.ylabel("True label")
    plt.xlabel("Predicted label")
    # confusion_matrix returns integer counts, so format them as integers
    for i, j in itertools.product(range(confmat.shape[0]), range(confmat.shape[1])):
        plt.text(j, i, format(confmat[i, j], 'd'),
                 horizontalalignment='center', color='black')
# %%
classes = ["Normal", "Fraud"]
plotTensorflowConfmat(confmat, classes)
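# %%
# Extra check (not in the original flow): ROC AUC from the softmax fraud
# probability, using the roc_auc_score already imported above.
print('Undersample ROC AUC:', roc_auc_score(og_Y_test, undersample_pred[:, 1]))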
# %%
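# SMOTE oversamples the minority (fraud) class by synthesizing new points
# along lines between a sample and its nearest minority neighbours; it is
# applied to the training split only, so the hold-out fold stays untouched.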
sm = SMOTE(sampling_strategy="minority", random_state=42)
# fit_sample is the legacy name; newer imblearn versions expose it as fit_resample
sm_X_train, sm_Y_train = sm.fit_resample(og_X_train, og_Y_train)
# %%
sm_X_train.shape
# %%
n_inputs = sm_X_train.shape[1]
smote_model = Sequential([
Dense(n_inputs, input_shape=(n_inputs,), activation='relu'),
Dense(32, activation='relu'),
Dense(2, activation='softmax')
])
# %%
smote_model.summary()
# %%
smote_model.compile(
Adam(lr=0.001), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
modelcheckpoint = ModelCheckpoint(
'models/smote_model.h5', save_best_only=True, monitor='val_acc')
smote_model.fit(sm_X_train, sm_Y_train, validation_split=0.2, batch_size=25,
epochs=20, verbose=2, shuffle=True, callbacks=[modelcheckpoint])
# %%
smote_model.save('models/smote_model.h5')
# %%
smote_pred_classes = smote_model.predict_classes(og_X_test)
# %%
confmat = confusion_matrix(og_Y_test, smote_pred_classes)
print(confmat)
# %%
plotTensorflowConfmat(confmat, classes)
# %%
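# Re-run SMOTE, but keep the result as a DataFrame this time so the balanced
# training set can be inspected for correlations and outliers below.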
sm2 = SMOTE(sampling_strategy="minority", random_state=42)
# %%
sm2_X_train, sm2_Y_train = sm2.fit_resample(og_X_train, og_Y_train)
sm2_X_train = pd.DataFrame(sm2_X_train)
sm2_X_train.head()
# %%
sm2_Y_train = pd.DataFrame(sm2_Y_train, columns=["Class"])
sm2_Y_train.head()
# %%
smote_df = pd.concat([sm2_X_train, sm2_Y_train], axis=1)
smote_df.head()
# %%
smote_df = smote_df.sample(frac=1, random_state=42)
# %%
corr = smote_df.corr()
# annot_kws only takes effect with annot=True, and per-cell numbers are
# unreadable on a 30x30 grid, so draw the plain heatmap
sns.heatmap(corr, cmap='coolwarm_r')
plt.show()
# %%
corr["Class"].sort_values()
# %%
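# After resampling, the DataFrame columns are plain integer positions. With
# the original column order (V1..V28, then scaled_amount, scaled_time), index
# 13 is V14, 11 is V12, 9 is V10, 15 is V16, 3 is V4 and 10 is V11 --
# assuming no columns were reordered upstream.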
negative_corr = [13, 11, 9, 15]
positive_corr = [3, 10]
# %%
f, axes = plt.subplots(ncols=4, figsize=(20, 4))
f.suptitle("Negative Corr")
for i, feature in enumerate(negative_corr):
sns.boxplot(x="Class", y=feature, data=smote_df, ax=axes[i])
axes[i].set_title(feature)
# %%
f, axes = plt.subplots(ncols=2, figsize=(20, 4))
f.suptitle("Positive Corr")
for i, feature in enumerate(positive_corr):
sns.boxplot(x="Class", y=feature, data=smote_df, ax=axes[i])
axes[i].set_title(feature)
# %%
# Tukey's IQR rule on the fraud-class distribution of each correlated feature:
# points beyond Q1 - 1.5*IQR or Q3 + 1.5*IQR count as outliers, and the
# corresponding rows are dropped from the SMOTE training frame.
for feature in negative_corr + positive_corr:
    fraud_dist = smote_df[feature].loc[smote_df["Class"] == 1].values
    q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)
    iqr = q75 - q25
    cutoff = iqr * 1.5
    upper_limit, lower_limit = q75 + cutoff, q25 - cutoff
    outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit]
    smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (
        smote_df[feature] < lower_limit)].index)
    print(f"outliers removed {len(outlier_list)}")
# %%
smote_df.shape
# %%
smote_X_train = smote_df.drop(["Class"], axis=1)
smote_Y_train = smote_df["Class"]
# %%
n_inputs = smote_X_train.shape[1]
smote_model = Sequential([
Dense(n_inputs, input_shape=(n_inputs,), activation='relu'),
Dense(64, activation='relu'),
Dense(32, activation='relu'),
Dense(32, activation='relu'),
Dense(2, activation='softmax')
])
# %%
smote_model.summary()
# %%
smote_model.compile(
Adam(lr=0.001), loss="sparse_categorical_crossentropy", metrics=["accuracy"])
modelcheckpoint = ModelCheckpoint(
"models/smote_outliers_removed.h5", save_best_only=True, monitor="val_acc")
smote_model.fit(smote_X_train, smote_Y_train, validation_split=0.2,
shuffle=True, batch_size=25, epochs=20, callbacks=[modelcheckpoint])
# %%
smote_model.save("models/smote_outliers_removed.h5")
# %%
smote_pred_classes = smote_model.predict_classes(og_X_test)
# %%
confmat = confusion_matrix(og_Y_test, smote_pred_classes)
print(confmat)
# %%
classes = ["normal", "fraud"]
plotTensorflowConfmat(confmat, classes)
# %%
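# Extra check (not in the original flow): per-class precision/recall for the
# final SMOTE model on the held-out fold, via the already-imported
# classification_report; fraud-class recall is the number that matters here.
print(classification_report(og_Y_test, smote_pred_classes,
                            target_names=classes))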
|
normal
|
{
"blob_id": "3923aed29006b4290437f2b0e11667c702da3241",
"index": 4605,
"step-1": "<mask token>\n\n\ndef plotTensorflowConfmat(confmat, classes):\n plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)\n plt.title('Confusion Matrix')\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n plt.tight_layout()\n plt.ylabel('True label')\n plt.xlabel('Predicted label')\n for i, j in itertools.product(range(confmat.shape[0]), range(confmat.\n shape[1])):\n plt.text(j, i, format(confmat[i, j], '.2f'), horizontalalignment=\n 'center', color='black')\n\n\n<mask token>\n",
"step-2": "<mask token>\ndataset.head()\ndataset.describe()\n<mask token>\ndataset.drop(['Amount', 'Time'], axis=1, inplace=True)\ndataset.head()\n<mask token>\nfor train_index, test_index in SKfold.split(X, Y):\n og_X_train, og_X_test = X.iloc[train_index], X.iloc[test_index]\n og_Y_train, og_Y_test = Y.iloc[train_index], Y.iloc[test_index]\n<mask token>\nnd_dataset.head()\n<mask token>\nundersample_model.summary()\nundersample_model.compile(Adam(lr=0.001), loss=\n 'sparse_categorical_crossentropy', metrics=['accuracy'])\n<mask token>\nundersample_model.fit(nd_Xtrain, nd_Ytrain, validation_split=0.2, epochs=20,\n batch_size=25, shuffle=True, verbose=2, callbacks=[modelcheckpoint])\n<mask token>\nprint(confmat)\n\n\ndef plotTensorflowConfmat(confmat, classes):\n plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)\n plt.title('Confusion Matrix')\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n plt.tight_layout()\n plt.ylabel('True label')\n plt.xlabel('Predicted label')\n for i, j in itertools.product(range(confmat.shape[0]), range(confmat.\n shape[1])):\n plt.text(j, i, format(confmat[i, j], '.2f'), horizontalalignment=\n 'center', color='black')\n\n\n<mask token>\nplotTensorflowConfmat(confmat, classes)\n<mask token>\nsm_X_train.shape\n<mask token>\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\n<mask token>\nsmote_model.fit(sm_X_train, sm_Y_train, validation_split=0.2, batch_size=25,\n epochs=20, verbose=2, shuffle=True, callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_model.h5')\n<mask token>\nprint(confmat)\nplotTensorflowConfmat(confmat, classes)\n<mask token>\nsm2_X_train.head()\n<mask token>\nsm2_Y_train.head()\n<mask token>\nsmote_df.head()\n<mask token>\nsns.heatmap(corr, cmap='coolwarm_r', annot_kws={'size': 20})\nplt.show()\ncorr['Class'].sort_values()\n<mask token>\nf.suptitle('Negative Corr')\nfor i, feature in enumerate(negative_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\n<mask token>\nf.suptitle('Positive Corr')\nfor i, feature in enumerate(positive_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\nfor i, feature in enumerate(negative_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nfor i, feature in enumerate(positive_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nsmote_df.shape\n<mask token>\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n 
metrics=['accuracy'])\n<mask token>\nsmote_model.fit(smote_X_train, smote_Y_train, validation_split=0.2, shuffle\n =True, batch_size=25, epochs=20, callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_outliers_removed.h5')\n<mask token>\nprint(confmat)\n<mask token>\nplotTensorflowConfmat(confmat, classes)\n",
"step-3": "<mask token>\ndataset = pd.read_csv('./dataset/creditcard.csv')\ndataset.head()\ndataset.describe()\nrobustScaler = RobustScaler()\ndataset['scaled_amount'] = robustScaler.fit_transform(dataset['Amount'].\n values.reshape(-1, 1))\ndataset['scaled_time'] = robustScaler.fit_transform(dataset['Time'].values.\n reshape(-1, 1))\ndataset.drop(['Amount', 'Time'], axis=1, inplace=True)\ndataset.head()\nX = dataset.drop(['Class'], axis=1)\nY = dataset['Class']\nSKfold = StratifiedKFold(random_state=42)\nfor train_index, test_index in SKfold.split(X, Y):\n og_X_train, og_X_test = X.iloc[train_index], X.iloc[test_index]\n og_Y_train, og_Y_test = Y.iloc[train_index], Y.iloc[test_index]\nog_X_train = og_X_train.values\nog_X_test = og_X_test.values\nog_Y_train = og_Y_train.values\nog_Y_test = og_Y_test.values\ndataset = dataset.sample(frac=1, random_state=42)\nfraud = dataset.loc[dataset['Class'] == 1]\nnormal = dataset.loc[dataset['Class'] == 0][:492]\nnd_dataset = pd.concat([fraud, normal])\nnd_dataset = nd_dataset.sample(frac=1, random_state=42)\nnd_dataset.head()\nnd_X = nd_dataset.drop('Class', axis=1)\nnd_Y = nd_dataset['Class']\nnd_Xtrain, nd_Xtest, nd_Ytrain, nd_Ytest = train_test_split(nd_X, nd_Y,\n random_state=42, test_size=0.2)\nnd_Xtrain = nd_Xtrain.values\nnd_Xtest = nd_Xtest.values\nnd_Ytrain = nd_Ytrain.values\nnd_Ytest = nd_Ytest.values\nn_inputs = nd_Xtrain.shape[1]\nundersample_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(32, activation='relu'), Dense(2, activation=\n 'softmax')])\nundersample_model.summary()\nundersample_model.compile(Adam(lr=0.001), loss=\n 'sparse_categorical_crossentropy', metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/undersample_model.h5',\n save_best_only=True, monitor='val_acc')\nundersample_model.fit(nd_Xtrain, nd_Ytrain, validation_split=0.2, epochs=20,\n batch_size=25, shuffle=True, verbose=2, callbacks=[modelcheckpoint])\nundersample_pred = undersample_model.predict(og_X_test, verbose=2)\nundersample_pred_classes = undersample_model.predict_classes(og_X_test,\n verbose=2)\nconfmat = confusion_matrix(og_Y_test, undersample_pred_classes)\nprint(confmat)\n\n\ndef plotTensorflowConfmat(confmat, classes):\n plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)\n plt.title('Confusion Matrix')\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n plt.tight_layout()\n plt.ylabel('True label')\n plt.xlabel('Predicted label')\n for i, j in itertools.product(range(confmat.shape[0]), range(confmat.\n shape[1])):\n plt.text(j, i, format(confmat[i, j], '.2f'), horizontalalignment=\n 'center', color='black')\n\n\nclasses = ['Normal', 'Fraud']\nplotTensorflowConfmat(confmat, classes)\nsm = SMOTE(sampling_strategy='minority', random_state=42)\nsm_X_train, sm_Y_train = sm.fit_sample(og_X_train, og_Y_train)\nsm_X_train.shape\nn_inputs = sm_X_train.shape[1]\nsmote_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(32, activation='relu'), Dense(2, activation=\n 'softmax')])\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/smote_model.h5', save_best_only=\n True, monitor='val_acc')\nsmote_model.fit(sm_X_train, sm_Y_train, validation_split=0.2, batch_size=25,\n epochs=20, verbose=2, shuffle=True, 
callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_model.h5')\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\nplotTensorflowConfmat(confmat, classes)\nsm2 = SMOTE(sampling_strategy='minority', random_state=42)\nsm2_X_train, sm2_Y_train = sm2.fit_sample(og_X_train, og_Y_train)\nsm2_X_train = pd.DataFrame(sm2_X_train)\nsm2_X_train.head()\nsm2_Y_train = pd.DataFrame(sm2_Y_train, columns=['Class'])\nsm2_Y_train.head()\nsmote_df = pd.concat([sm2_X_train, sm2_Y_train], axis=1)\nsmote_df.head()\nsmote_df = smote_df.sample(frac=1, random_state=42)\ncorr = smote_df.corr()\nsns.heatmap(corr, cmap='coolwarm_r', annot_kws={'size': 20})\nplt.show()\ncorr['Class'].sort_values()\nnegative_corr = [13, 11, 9, 15]\npositive_corr = [3, 10]\nf, axes = plt.subplots(ncols=4, figsize=(20, 4))\nf.suptitle('Negative Corr')\nfor i, feature in enumerate(negative_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\nf, axes = plt.subplots(ncols=2, figsize=(20, 4))\nf.suptitle('Positive Corr')\nfor i, feature in enumerate(positive_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\nfor i, feature in enumerate(negative_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nfor i, feature in enumerate(positive_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nsmote_df.shape\nsmote_X_train = smote_df.drop(['Class'], axis=1)\nsmote_Y_train = smote_df['Class']\nn_inputs = smote_X_train.shape[1]\nsmote_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(64, activation='relu'), Dense(32, activation=\n 'relu'), Dense(32, activation='relu'), Dense(2, activation='softmax')])\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/smote_outliers_removed.h5',\n save_best_only=True, monitor='val_acc')\nsmote_model.fit(smote_X_train, smote_Y_train, validation_split=0.2, shuffle\n =True, batch_size=25, epochs=20, callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_outliers_removed.h5')\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\nclasses = ['normal', 'fraud']\nplotTensorflowConfmat(confmat, classes)\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport tensorflow as tf\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.manifold import TSNE\nfrom sklearn.decomposition import PCA, TruncatedSVD\nimport matplotlib.patches as mpatches\nimport time\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.svm import SVC\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import RandomForestClassifier\nimport collections\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.pipeline import make_pipeline\nfrom imblearn.pipeline import make_pipeline as imbalanced_make_pipeline\nfrom imblearn.over_sampling import SMOTE\nfrom imblearn.under_sampling import NearMiss\nfrom imblearn.metrics import classification_report_imbalanced\nfrom sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score, accuracy_score, classification_report, confusion_matrix, plot_confusion_matrix\nfrom collections import Counter\nfrom sklearn.model_selection import KFold, StratifiedKFold, train_test_split, cross_val_score, GridSearchCV, cross_val_predict\nfrom sklearn.preprocessing import RobustScaler\nfrom scipy.stats import norm\nimport keras\nfrom keras import backend as K\nfrom keras.models import Sequential\nfrom keras.layers import Activation, Dense\nfrom keras.optimizers import Adam\nfrom keras.metrics import categorical_crossentropy\nfrom keras.callbacks import ModelCheckpoint\nimport itertools\ndataset = pd.read_csv('./dataset/creditcard.csv')\ndataset.head()\ndataset.describe()\nrobustScaler = RobustScaler()\ndataset['scaled_amount'] = robustScaler.fit_transform(dataset['Amount'].\n values.reshape(-1, 1))\ndataset['scaled_time'] = robustScaler.fit_transform(dataset['Time'].values.\n reshape(-1, 1))\ndataset.drop(['Amount', 'Time'], axis=1, inplace=True)\ndataset.head()\nX = dataset.drop(['Class'], axis=1)\nY = dataset['Class']\nSKfold = StratifiedKFold(random_state=42)\nfor train_index, test_index in SKfold.split(X, Y):\n og_X_train, og_X_test = X.iloc[train_index], X.iloc[test_index]\n og_Y_train, og_Y_test = Y.iloc[train_index], Y.iloc[test_index]\nog_X_train = og_X_train.values\nog_X_test = og_X_test.values\nog_Y_train = og_Y_train.values\nog_Y_test = og_Y_test.values\ndataset = dataset.sample(frac=1, random_state=42)\nfraud = dataset.loc[dataset['Class'] == 1]\nnormal = dataset.loc[dataset['Class'] == 0][:492]\nnd_dataset = pd.concat([fraud, normal])\nnd_dataset = nd_dataset.sample(frac=1, random_state=42)\nnd_dataset.head()\nnd_X = nd_dataset.drop('Class', axis=1)\nnd_Y = nd_dataset['Class']\nnd_Xtrain, nd_Xtest, nd_Ytrain, nd_Ytest = train_test_split(nd_X, nd_Y,\n random_state=42, test_size=0.2)\nnd_Xtrain = nd_Xtrain.values\nnd_Xtest = nd_Xtest.values\nnd_Ytrain = nd_Ytrain.values\nnd_Ytest = nd_Ytest.values\nn_inputs = nd_Xtrain.shape[1]\nundersample_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(32, activation='relu'), Dense(2, activation=\n 'softmax')])\nundersample_model.summary()\nundersample_model.compile(Adam(lr=0.001), loss=\n 'sparse_categorical_crossentropy', metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/undersample_model.h5',\n save_best_only=True, monitor='val_acc')\nundersample_model.fit(nd_Xtrain, nd_Ytrain, validation_split=0.2, epochs=20,\n batch_size=25, shuffle=True, verbose=2, callbacks=[modelcheckpoint])\nundersample_pred = undersample_model.predict(og_X_test, 
verbose=2)\nundersample_pred_classes = undersample_model.predict_classes(og_X_test,\n verbose=2)\nconfmat = confusion_matrix(og_Y_test, undersample_pred_classes)\nprint(confmat)\n\n\ndef plotTensorflowConfmat(confmat, classes):\n plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)\n plt.title('Confusion Matrix')\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n plt.tight_layout()\n plt.ylabel('True label')\n plt.xlabel('Predicted label')\n for i, j in itertools.product(range(confmat.shape[0]), range(confmat.\n shape[1])):\n plt.text(j, i, format(confmat[i, j], '.2f'), horizontalalignment=\n 'center', color='black')\n\n\nclasses = ['Normal', 'Fraud']\nplotTensorflowConfmat(confmat, classes)\nsm = SMOTE(sampling_strategy='minority', random_state=42)\nsm_X_train, sm_Y_train = sm.fit_sample(og_X_train, og_Y_train)\nsm_X_train.shape\nn_inputs = sm_X_train.shape[1]\nsmote_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(32, activation='relu'), Dense(2, activation=\n 'softmax')])\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/smote_model.h5', save_best_only=\n True, monitor='val_acc')\nsmote_model.fit(sm_X_train, sm_Y_train, validation_split=0.2, batch_size=25,\n epochs=20, verbose=2, shuffle=True, callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_model.h5')\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\nplotTensorflowConfmat(confmat, classes)\nsm2 = SMOTE(sampling_strategy='minority', random_state=42)\nsm2_X_train, sm2_Y_train = sm2.fit_sample(og_X_train, og_Y_train)\nsm2_X_train = pd.DataFrame(sm2_X_train)\nsm2_X_train.head()\nsm2_Y_train = pd.DataFrame(sm2_Y_train, columns=['Class'])\nsm2_Y_train.head()\nsmote_df = pd.concat([sm2_X_train, sm2_Y_train], axis=1)\nsmote_df.head()\nsmote_df = smote_df.sample(frac=1, random_state=42)\ncorr = smote_df.corr()\nsns.heatmap(corr, cmap='coolwarm_r', annot_kws={'size': 20})\nplt.show()\ncorr['Class'].sort_values()\nnegative_corr = [13, 11, 9, 15]\npositive_corr = [3, 10]\nf, axes = plt.subplots(ncols=4, figsize=(20, 4))\nf.suptitle('Negative Corr')\nfor i, feature in enumerate(negative_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\nf, axes = plt.subplots(ncols=2, figsize=(20, 4))\nf.suptitle('Positive Corr')\nfor i, feature in enumerate(positive_corr):\n sns.boxplot(x='Class', y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\nfor i, feature in enumerate(negative_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nfor i, feature in enumerate(positive_corr):\n fraud_dist = smote_df[feature].loc[smote_df['Class'] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75 - q25\n cutoff = iqr * 1.5\n upper_limit, lower_limit = q75 + cutoff, q25 - cutoff\n 
outlier_list = [x for x in fraud_dist if x < lower_limit or x > upper_limit\n ]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f'outliers removed {len(outlier_list)}')\nsmote_df.shape\nsmote_X_train = smote_df.drop(['Class'], axis=1)\nsmote_Y_train = smote_df['Class']\nn_inputs = smote_X_train.shape[1]\nsmote_model = Sequential([Dense(n_inputs, input_shape=(n_inputs,),\n activation='relu'), Dense(64, activation='relu'), Dense(32, activation=\n 'relu'), Dense(32, activation='relu'), Dense(2, activation='softmax')])\nsmote_model.summary()\nsmote_model.compile(Adam(lr=0.001), loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint('models/smote_outliers_removed.h5',\n save_best_only=True, monitor='val_acc')\nsmote_model.fit(smote_X_train, smote_Y_train, validation_split=0.2, shuffle\n =True, batch_size=25, epochs=20, callbacks=[modelcheckpoint])\nsmote_model.save('models/smote_outliers_removed.h5')\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\nclasses = ['normal', 'fraud']\nplotTensorflowConfmat(confmat, classes)\n",
"step-5": "# %%\nimport numpy as np\nimport pandas as pd\nimport tensorflow as tf\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.manifold import TSNE\nfrom sklearn.decomposition import PCA, TruncatedSVD\nimport matplotlib.patches as mpatches\nimport time\n\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.svm import SVC\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import RandomForestClassifier\nimport collections\n\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.pipeline import make_pipeline\nfrom imblearn.pipeline import make_pipeline as imbalanced_make_pipeline\nfrom imblearn.over_sampling import SMOTE\nfrom imblearn.under_sampling import NearMiss\nfrom imblearn.metrics import classification_report_imbalanced\nfrom sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score, accuracy_score, classification_report, confusion_matrix, plot_confusion_matrix\nfrom collections import Counter\nfrom sklearn.model_selection import KFold, StratifiedKFold, train_test_split, cross_val_score, GridSearchCV, cross_val_predict\nfrom sklearn.preprocessing import RobustScaler\nfrom scipy.stats import norm\n\nimport keras\nfrom keras import backend as K\nfrom keras.models import Sequential\nfrom keras.layers import Activation, Dense\nfrom keras.optimizers import Adam\nfrom keras.metrics import categorical_crossentropy\nfrom keras.callbacks import ModelCheckpoint\n\nimport itertools\n# %%\ndataset = pd.read_csv('./dataset/creditcard.csv')\ndataset.head()\n# %%\ndataset.describe()\n# %%\nrobustScaler = RobustScaler()\ndataset['scaled_amount'] = robustScaler.fit_transform(\n dataset['Amount'].values.reshape(-1, 1))\ndataset['scaled_time'] = robustScaler.fit_transform(\n dataset['Time'].values.reshape(-1, 1))\n# %%\ndataset.drop(['Amount', 'Time'], axis=1, inplace=True)\ndataset.head()\n# %%\nX = dataset.drop(['Class'], axis=1)\nY = dataset['Class']\n# %%\nSKfold = StratifiedKFold(random_state=42)\nfor train_index, test_index in SKfold.split(X, Y):\n og_X_train, og_X_test = X.iloc[train_index], X.iloc[test_index]\n og_Y_train, og_Y_test = Y.iloc[train_index], Y.iloc[test_index]\n\n# %%\nog_X_train = og_X_train.values\nog_X_test = og_X_test.values\nog_Y_train = og_Y_train.values\nog_Y_test = og_Y_test.values\n# %%\ndataset = dataset.sample(frac=1, random_state=42)\nfraud = dataset.loc[dataset['Class'] == 1]\nnormal = dataset.loc[dataset['Class'] == 0][:492]\nnd_dataset = pd.concat([fraud, normal])\nnd_dataset = nd_dataset.sample(frac=1, random_state=42)\nnd_dataset.head()\n# %%\nnd_X = nd_dataset.drop(\"Class\", axis=1)\nnd_Y = nd_dataset[\"Class\"]\n\n# %%\nnd_Xtrain, nd_Xtest, nd_Ytrain, nd_Ytest = train_test_split(\n nd_X, nd_Y, random_state=42, test_size=0.2)\nnd_Xtrain = nd_Xtrain.values\nnd_Xtest = nd_Xtest.values\nnd_Ytrain = nd_Ytrain.values\nnd_Ytest = nd_Ytest.values\n\n# %%\nn_inputs = nd_Xtrain.shape[1]\nundersample_model = Sequential([\n Dense(n_inputs, input_shape=(n_inputs,), activation=\"relu\"),\n Dense(32, activation=\"relu\"),\n Dense(2, activation=\"softmax\")\n])\n# %%\nundersample_model.summary()\n# %%\nundersample_model.compile(\n Adam(lr=0.001), loss='sparse_categorical_crossentropy', metrics=[\"accuracy\"])\nmodelcheckpoint = ModelCheckpoint(\n \"models/undersample_model.h5\", save_best_only=True, monitor=\"val_acc\")\nundersample_model.fit(nd_Xtrain, nd_Ytrain, validation_split=0.2, epochs=20,\n batch_size=25, shuffle=True, 
verbose=2, callbacks=[modelcheckpoint])\n\n# %%\nundersample_pred = undersample_model.predict(og_X_test, verbose=2)\n# %%\nundersample_pred_classes = undersample_model.predict_classes(\n og_X_test, verbose=2)\n# %%\nconfmat = confusion_matrix(og_Y_test, undersample_pred_classes)\nprint(confmat)\n# %%\n\n\ndef plotTensorflowConfmat(confmat, classes):\n plt.imshow(confmat, interpolation='nearest', cmap=plt.cm.Blues)\n plt.title(\"Confusion Matrix\")\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n plt.tight_layout()\n plt.ylabel(\"True label\")\n plt.xlabel(\"Predicted label\")\n for i, j in itertools.product(range(confmat.shape[0]), range(confmat.shape[1])):\n plt.text(j, i, format(confmat[i, j], '.2f'),\n horizontalalignment='center', color='black')\n\n\n# %%\nclasses = [\"Normal\", \"Fraud\"]\nplotTensorflowConfmat(confmat, classes)\n\n# %%\nsm = SMOTE(sampling_strategy=\"minority\", random_state=42)\nsm_X_train, sm_Y_train = sm.fit_sample(og_X_train, og_Y_train)\n# %%\nsm_X_train.shape\n# %%\nn_inputs = sm_X_train.shape[1]\nsmote_model = Sequential([\n Dense(n_inputs, input_shape=(n_inputs,), activation='relu'),\n Dense(32, activation='relu'),\n Dense(2, activation='softmax')\n])\n# %%\nsmote_model.summary()\n# %%\nsmote_model.compile(\n Adam(lr=0.001), loss='sparse_categorical_crossentropy', metrics=['accuracy'])\nmodelcheckpoint = ModelCheckpoint(\n 'models/smote_model.h5', save_best_only=True, monitor='val_acc')\nsmote_model.fit(sm_X_train, sm_Y_train, validation_split=0.2, batch_size=25,\n epochs=20, verbose=2, shuffle=True, callbacks=[modelcheckpoint])\n# %%\nsmote_model.save('models/smote_model.h5')\n# %%\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\n# %%\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\n# %%\nplotTensorflowConfmat(confmat, classes)\n# %%\nsm2 = SMOTE(sampling_strategy=\"minority\", random_state=42)\n# %%\nsm2_X_train, sm2_Y_train = sm2.fit_sample(og_X_train, og_Y_train)\nsm2_X_train = pd.DataFrame(sm2_X_train)\nsm2_X_train.head()\n# %%\nsm2_Y_train = pd.DataFrame(sm2_Y_train, columns=[\"Class\"])\nsm2_Y_train.head()\n# %%\nsmote_df = pd.concat([sm2_X_train, sm2_Y_train], axis=1)\nsmote_df.head()\n\n# %%\nsmote_df = smote_df.sample(frac=1, random_state=42)\n# %%\ncorr = smote_df.corr()\nsns.heatmap(corr, cmap='coolwarm_r', annot_kws={'size': 20})\nplt.show()\n# %%\ncorr[\"Class\"].sort_values()\n# %%\nnegative_corr = [13, 11, 9, 15]\npositive_corr = [3, 10]\n# %%\nf, axes = plt.subplots(ncols=4, figsize=(20, 4))\nf.suptitle(\"Negative Corr\")\nfor i, feature in enumerate(negative_corr):\n sns.boxplot(x=\"Class\", y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\n# %%\nf, axes = plt.subplots(ncols=2, figsize=(20, 4))\nf.suptitle(\"Positive Corr\")\nfor i, feature in enumerate(positive_corr):\n sns.boxplot(x=\"Class\", y=feature, data=smote_df, ax=axes[i])\n axes[i].set_title(feature)\n# %%\nfor i, feature in enumerate(negative_corr):\n fraud_dist = smote_df[feature].loc[smote_df[\"Class\"] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75-q25\n cutoff = iqr*1.5\n upper_limit, lower_limit = q75+cutoff, q25-cutoff\n outlier_list = [x for x in fraud_dist if x <\n lower_limit or x > upper_limit]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f\"outliers removed {len(outlier_list)}\")\n\n# %%\nfor 
i, feature in enumerate(positive_corr):\n fraud_dist = smote_df[feature].loc[smote_df[\"Class\"] == 1].values\n q25, q75 = np.percentile(fraud_dist, 25), np.percentile(fraud_dist, 75)\n iqr = q75-q25\n cutoff = iqr*1.5\n upper_limit, lower_limit = q75+cutoff, q25-cutoff\n outlier_list = [x for x in fraud_dist if x <\n lower_limit or x > upper_limit]\n smote_df = smote_df.drop(smote_df[(smote_df[feature] > upper_limit) | (\n smote_df[feature] < lower_limit)].index)\n print(f\"outliers removed {len(outlier_list)}\")\n# %%\nsmote_df.shape\n# %%\nsmote_X_train = smote_df.drop([\"Class\"], axis=1)\nsmote_Y_train = smote_df[\"Class\"]\n# %%\nn_inputs = smote_X_train.shape[1]\nsmote_model = Sequential([\n Dense(n_inputs, input_shape=(n_inputs,), activation='relu'),\n Dense(64, activation='relu'),\n Dense(32, activation='relu'),\n Dense(32, activation='relu'),\n Dense(2, activation='softmax')\n])\n# %%\nsmote_model.summary()\n# %%\nsmote_model.compile(\n Adam(lr=0.001), loss=\"sparse_categorical_crossentropy\", metrics=[\"accuracy\"])\nmodelcheckpoint = ModelCheckpoint(\n \"models/smote_outliers_removed.h5\", save_best_only=True, monitor=\"val_acc\")\nsmote_model.fit(smote_X_train, smote_Y_train, validation_split=0.2,\n shuffle=True, batch_size=25, epochs=20, callbacks=[modelcheckpoint])\n\n# %%\nsmote_model.save(\"models/smote_outliers_removed.h5\")\n# %%\nsmote_pred_classes = smote_model.predict_classes(og_X_test)\n# %%\nconfmat = confusion_matrix(og_Y_test, smote_pred_classes)\nprint(confmat)\n# %%\nclasses = [\"normal\", \"fraud\"]\nplotTensorflowConfmat(confmat, classes)\n# %%\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
###
### Copyright 2009 The Chicago Independent Radio Project
### All Rights Reserved.
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
"""CHIRP authentication system."""
import base64
import logging
import os
import time
from common import in_prod
from common.autoretry import AutoRetry
# TODO(trow): This is a work-around for problems with PyCrypto on the Mac.
# For more information, see
# http://code.google.com/p/googleappengine/issues/detail?id=1627
_DISABLE_CRYPTO = False
try:
from Crypto.Cipher import AES
from Crypto.Hash import HMAC
except ImportError:
# Only allow crypto to be disabled if we are running in a local
# development environment.
if in_prod():
raise
_DISABLE_CRYPTO = True
logging.warn("PyCrypto not found! Operating in insecure mode!")
from django import http
from auth.models import User, KeyStorage
from auth import roles
# Our logout URL.
LOGOUT_URL = "/auth/goodbye/"
# Users are ultimately redirected to the URL after logging out.
_FINAL_LOGOUT_URL = '/auth/hello/'
# The name of the cookie used to store our security token.
_CHIRP_SECURITY_TOKEN_COOKIE = 'chirp_security_token'
# Our security tokens expire after 24 hours.
# TODO(kumar) set this back to two hours after
# all CHIRP volunteers have set initial password?
_TOKEN_TIMEOUT_S = 24 * 60 * 60
class UserNotAllowedError(Exception):
"""Raised when the user is recognized but forbidden from entering."""
class _Credentials(object):
email = None
security_token_is_stale = False
def _create_security_token(user):
"""Create a CHIRP security token.
Args:
user: A User object.
Returns:
A string containing an encrypted security token that encodes
the user's email address as well as a timestamp.
"""
timestamp = int(time.time())
plaintext = "%x %s" % (timestamp, user.email)
nearest_mult_of_16 = 16 * ((len(plaintext) + 15) // 16)
    # Pad plaintext with whitespace to make the length a multiple of 16,
    # as this is a requirement of AES encryption.
plaintext = plaintext.rjust(nearest_mult_of_16, ' ')
if _DISABLE_CRYPTO:
body = plaintext
sig = "sig"
else:
key_storage = KeyStorage.get()
body = AES.new(key_storage.aes_key, AES.MODE_CBC).encrypt(plaintext)
hmac_key = key_storage.hmac_key
if type(hmac_key) == unicode:
# Crypto requires byte strings
hmac_key = hmac_key.encode('utf8')
sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()
return '%s:%s' % (sig, body)
def _parse_security_token(token):
"""Parse a CHIRP security token.
Returns:
A Credentials object, or None if the token is not valid.
If a Credentials object is returned, its "user" field will not
be set.
"""
if not token:
return None
if ':' not in token:
logging.warn('Malformed token: no signature separator')
return None
sig, body = token.split(':', 1)
if _DISABLE_CRYPTO:
plaintext = body
else:
key_storage = KeyStorage.get()
hmac_key = key_storage.hmac_key
if type(hmac_key) == unicode:
# Crypto requires byte strings
hmac_key = hmac_key.encode('utf8')
computed_sig = HMAC.HMAC(key=hmac_key,
msg=body).hexdigest()
if sig != computed_sig:
logging.warn('Malformed token: invalid signature')
return None
try:
plaintext = AES.new(key_storage.aes_key,
AES.MODE_CBC).decrypt(body)
except ValueError:
logging.warn('Malformed token: wrong size')
return None
# Remove excess whitespace.
plaintext = plaintext.strip()
# The plaintext should contain at least one space.
if ' ' not in plaintext:
logging.warn('Malformed token: bad contents')
return None
parts = plaintext.split(' ')
if len(parts) != 2:
logging.warn('Malformed token: bad structure')
return None
timestamp, email = parts
try:
timestamp = int(timestamp, 16)
except ValueError:
logging.warn('Malformed token: bad timestamp')
return None
# Reject tokens that are too old or which have time-traveled. We
# allow for 1s of clock skew.
age_s = time.time() - timestamp
if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:
logging.warn('Malformed token: expired (age=%ds)', age_s)
return None
cred = _Credentials()
cred.email = email
cred.security_token_is_stale = (age_s > 0.5 * _TOKEN_TIMEOUT_S)
return cred
def attach_credentials(response, user):
"""Attach a user's credentials to a response.
Args:
response: An HttpResponse object.
user: A User object.
"""
response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,
_create_security_token(user))
def get_current_user(request):
"""Get the current logged-in user's.
Returns:
A User object, or None if the user is not logged in.
Raises:
UserNotAllowedError if the user is prohibited from accessing
the site.
"""
cred = None
token = request.COOKIES.get(_CHIRP_SECURITY_TOKEN_COOKIE)
if token:
cred = _parse_security_token(token)
# If this is a POST, look for a base64-encoded security token in
# the CHIRP_Auth variable.
if cred is None and request.method == 'POST':
token = request.POST.get("CHIRP_Auth")
if token:
try:
token = base64.urlsafe_b64decode(token)
except TypeError:
token = None
if token:
cred = _parse_security_token(token)
# No valid token? This is hopeless!
if cred is None:
return None
# Try to find a user for this email address.
user = User.get_by_email(cred.email)
if user is None:
return None
# Reject inactive users.
if not user.is_active:
logging.info('Rejected inactive user %s', user.email)
raise UserNotAllowedError
user._credentials = cred
return user
def create_login_url(path):
"""Returns the URL of a login page that redirects to 'path' on success."""
return "/auth/hello?redirect=%s" % path
def logout(redirect=None):
"""Create an HTTP response that will log a user out.
The redirect param can be a relative URL in which case
the user will go back to the same page when logging in.
This is useful for switching users like on the playlist
tracker page.
Returns:
An HttpResponse object that will log the user out.
"""
# If the user was signed in and has a cookie, clear it.
logout_url = _FINAL_LOGOUT_URL
if redirect:
logout_url = '%s?redirect=%s' % (logout_url, redirect)
response = http.HttpResponseRedirect(logout_url)
response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')
return response
def get_password_reset_token(user):
"""A URL-safe token that authenticates a user for a password reset."""
return base64.urlsafe_b64encode(_create_security_token(user))
def parse_password_reset_token(token):
"""Extracts an email address from a valid password reset token."""
try:
token = base64.urlsafe_b64decode(str(token))
except TypeError:
return None
cred = _parse_security_token(token)
return cred and cred.email
|
normal
|
{
"blob_id": "d077f32061b87a4bfd6a0ac226730957a4000804",
"index": 5859,
"step-1": "<mask token>\n\n\nclass UserNotAllowedError(Exception):\n \"\"\"Raised when the user is recognized but forbidden from entering.\"\"\"\n\n\nclass _Credentials(object):\n email = None\n security_token_is_stale = False\n\n\n<mask token>\n\n\ndef _parse_security_token(token):\n \"\"\"Parse a CHIRP security token.\n\n Returns:\n A Credentials object, or None if the token is not valid.\n If a Credentials object is returned, its \"user\" field will not\n be set.\n \"\"\"\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key, AES.MODE_CBC).decrypt(body\n )\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n plaintext = plaintext.strip()\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = age_s > 0.5 * _TOKEN_TIMEOUT_S\n return cred\n\n\ndef attach_credentials(response, user):\n \"\"\"Attach a user's credentials to a response.\n\n Args:\n response: An HttpResponse object.\n user: A User object.\n \"\"\"\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,\n _create_security_token(user))\n\n\n<mask token>\n\n\ndef create_login_url(path):\n \"\"\"Returns the URL of a login page that redirects to 'path' on success.\"\"\"\n return '/auth/hello?redirect=%s' % path\n\n\ndef logout(redirect=None):\n \"\"\"Create an HTTP response that will log a user out.\n \n The redirect param can be a relative URL in which case \n the user will go back to the same page when logging in.\n This is useful for switching users like on the playlist \n tracker page.\n \n Returns:\n An HttpResponse object that will log the user out.\n \"\"\"\n logout_url = _FINAL_LOGOUT_URL\n if redirect:\n logout_url = '%s?redirect=%s' % (logout_url, redirect)\n response = http.HttpResponseRedirect(logout_url)\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')\n return response\n\n\ndef get_password_reset_token(user):\n \"\"\"A URL-safe token that authenticates a user for a password reset.\"\"\"\n return base64.urlsafe_b64encode(_create_security_token(user))\n\n\ndef parse_password_reset_token(token):\n \"\"\"Extracts an email address from a valid password reset token.\"\"\"\n try:\n token = base64.urlsafe_b64decode(str(token))\n except TypeError:\n return None\n cred = _parse_security_token(token)\n return cred and cred.email\n",
"step-2": "<mask token>\n\n\nclass UserNotAllowedError(Exception):\n \"\"\"Raised when the user is recognized but forbidden from entering.\"\"\"\n\n\nclass _Credentials(object):\n email = None\n security_token_is_stale = False\n\n\ndef _create_security_token(user):\n \"\"\"Create a CHIRP security token.\n\n Args:\n user: A User object.\n\n Returns:\n A string containing an encrypted security token that encodes\n the user's email address as well as a timestamp.\n \"\"\"\n timestamp = int(time.time())\n plaintext = '%x %s' % (timestamp, user.email)\n nearest_mult_of_16 = 16 * ((len(plaintext) + 15) // 16)\n plaintext = plaintext.rjust(nearest_mult_of_16, ' ')\n if _DISABLE_CRYPTO:\n body = plaintext\n sig = 'sig'\n else:\n key_storage = KeyStorage.get()\n body = AES.new(key_storage.aes_key, AES.MODE_CBC).encrypt(plaintext)\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n return '%s:%s' % (sig, body)\n\n\ndef _parse_security_token(token):\n \"\"\"Parse a CHIRP security token.\n\n Returns:\n A Credentials object, or None if the token is not valid.\n If a Credentials object is returned, its \"user\" field will not\n be set.\n \"\"\"\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key, AES.MODE_CBC).decrypt(body\n )\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n plaintext = plaintext.strip()\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = age_s > 0.5 * _TOKEN_TIMEOUT_S\n return cred\n\n\ndef attach_credentials(response, user):\n \"\"\"Attach a user's credentials to a response.\n\n Args:\n response: An HttpResponse object.\n user: A User object.\n \"\"\"\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,\n _create_security_token(user))\n\n\n<mask token>\n\n\ndef create_login_url(path):\n \"\"\"Returns the URL of a login page that redirects to 'path' on success.\"\"\"\n return '/auth/hello?redirect=%s' % path\n\n\ndef logout(redirect=None):\n \"\"\"Create an HTTP response that will log a user out.\n \n The redirect param can be a relative URL in which case \n the user will go back to the same page when logging in.\n This is useful for switching users like on the playlist \n tracker page.\n \n Returns:\n An HttpResponse object that will log the user out.\n \"\"\"\n logout_url = _FINAL_LOGOUT_URL\n if redirect:\n logout_url = '%s?redirect=%s' % (logout_url, redirect)\n response = http.HttpResponseRedirect(logout_url)\n 
response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')\n return response\n\n\ndef get_password_reset_token(user):\n \"\"\"A URL-safe token that authenticates a user for a password reset.\"\"\"\n return base64.urlsafe_b64encode(_create_security_token(user))\n\n\ndef parse_password_reset_token(token):\n \"\"\"Extracts an email address from a valid password reset token.\"\"\"\n try:\n token = base64.urlsafe_b64decode(str(token))\n except TypeError:\n return None\n cred = _parse_security_token(token)\n return cred and cred.email\n",
"step-3": "<mask token>\n\n\nclass UserNotAllowedError(Exception):\n \"\"\"Raised when the user is recognized but forbidden from entering.\"\"\"\n\n\nclass _Credentials(object):\n email = None\n security_token_is_stale = False\n\n\ndef _create_security_token(user):\n \"\"\"Create a CHIRP security token.\n\n Args:\n user: A User object.\n\n Returns:\n A string containing an encrypted security token that encodes\n the user's email address as well as a timestamp.\n \"\"\"\n timestamp = int(time.time())\n plaintext = '%x %s' % (timestamp, user.email)\n nearest_mult_of_16 = 16 * ((len(plaintext) + 15) // 16)\n plaintext = plaintext.rjust(nearest_mult_of_16, ' ')\n if _DISABLE_CRYPTO:\n body = plaintext\n sig = 'sig'\n else:\n key_storage = KeyStorage.get()\n body = AES.new(key_storage.aes_key, AES.MODE_CBC).encrypt(plaintext)\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n return '%s:%s' % (sig, body)\n\n\ndef _parse_security_token(token):\n \"\"\"Parse a CHIRP security token.\n\n Returns:\n A Credentials object, or None if the token is not valid.\n If a Credentials object is returned, its \"user\" field will not\n be set.\n \"\"\"\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key, AES.MODE_CBC).decrypt(body\n )\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n plaintext = plaintext.strip()\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = age_s > 0.5 * _TOKEN_TIMEOUT_S\n return cred\n\n\ndef attach_credentials(response, user):\n \"\"\"Attach a user's credentials to a response.\n\n Args:\n response: An HttpResponse object.\n user: A User object.\n \"\"\"\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,\n _create_security_token(user))\n\n\ndef get_current_user(request):\n \"\"\"Get the current logged-in user's.\n\n Returns:\n A User object, or None if the user is not logged in.\n\n Raises:\n UserNotAllowedError if the user is prohibited from accessing\n the site.\n \"\"\"\n cred = None\n token = request.COOKIES.get(_CHIRP_SECURITY_TOKEN_COOKIE)\n if token:\n cred = _parse_security_token(token)\n if cred is None and request.method == 'POST':\n token = request.POST.get('CHIRP_Auth')\n if token:\n try:\n token = base64.urlsafe_b64decode(token)\n except TypeError:\n token = None\n if token:\n cred = _parse_security_token(token)\n if cred is None:\n return None\n user = User.get_by_email(cred.email)\n if user is None:\n return None\n if not 
user.is_active:\n logging.info('Rejected inactive user %s', user.email)\n raise UserNotAllowedError\n user._credentials = cred\n return user\n\n\ndef create_login_url(path):\n \"\"\"Returns the URL of a login page that redirects to 'path' on success.\"\"\"\n return '/auth/hello?redirect=%s' % path\n\n\ndef logout(redirect=None):\n \"\"\"Create an HTTP response that will log a user out.\n \n The redirect param can be a relative URL in which case \n the user will go back to the same page when logging in.\n This is useful for switching users like on the playlist \n tracker page.\n \n Returns:\n An HttpResponse object that will log the user out.\n \"\"\"\n logout_url = _FINAL_LOGOUT_URL\n if redirect:\n logout_url = '%s?redirect=%s' % (logout_url, redirect)\n response = http.HttpResponseRedirect(logout_url)\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')\n return response\n\n\ndef get_password_reset_token(user):\n \"\"\"A URL-safe token that authenticates a user for a password reset.\"\"\"\n return base64.urlsafe_b64encode(_create_security_token(user))\n\n\ndef parse_password_reset_token(token):\n \"\"\"Extracts an email address from a valid password reset token.\"\"\"\n try:\n token = base64.urlsafe_b64decode(str(token))\n except TypeError:\n return None\n cred = _parse_security_token(token)\n return cred and cred.email\n",
"step-4": "<mask token>\ntry:\n from Crypto.Cipher import AES\n from Crypto.Hash import HMAC\nexcept ImportError:\n if in_prod():\n raise\n _DISABLE_CRYPTO = True\n logging.warn('PyCrypto not found! Operating in insecure mode!')\n<mask token>\n\n\nclass UserNotAllowedError(Exception):\n \"\"\"Raised when the user is recognized but forbidden from entering.\"\"\"\n\n\nclass _Credentials(object):\n email = None\n security_token_is_stale = False\n\n\ndef _create_security_token(user):\n \"\"\"Create a CHIRP security token.\n\n Args:\n user: A User object.\n\n Returns:\n A string containing an encrypted security token that encodes\n the user's email address as well as a timestamp.\n \"\"\"\n timestamp = int(time.time())\n plaintext = '%x %s' % (timestamp, user.email)\n nearest_mult_of_16 = 16 * ((len(plaintext) + 15) // 16)\n plaintext = plaintext.rjust(nearest_mult_of_16, ' ')\n if _DISABLE_CRYPTO:\n body = plaintext\n sig = 'sig'\n else:\n key_storage = KeyStorage.get()\n body = AES.new(key_storage.aes_key, AES.MODE_CBC).encrypt(plaintext)\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n return '%s:%s' % (sig, body)\n\n\ndef _parse_security_token(token):\n \"\"\"Parse a CHIRP security token.\n\n Returns:\n A Credentials object, or None if the token is not valid.\n If a Credentials object is returned, its \"user\" field will not\n be set.\n \"\"\"\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key, AES.MODE_CBC).decrypt(body\n )\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n plaintext = plaintext.strip()\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = age_s > 0.5 * _TOKEN_TIMEOUT_S\n return cred\n\n\ndef attach_credentials(response, user):\n \"\"\"Attach a user's credentials to a response.\n\n Args:\n response: An HttpResponse object.\n user: A User object.\n \"\"\"\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,\n _create_security_token(user))\n\n\ndef get_current_user(request):\n \"\"\"Get the current logged-in user's.\n\n Returns:\n A User object, or None if the user is not logged in.\n\n Raises:\n UserNotAllowedError if the user is prohibited from accessing\n the site.\n \"\"\"\n cred = None\n token = request.COOKIES.get(_CHIRP_SECURITY_TOKEN_COOKIE)\n if token:\n cred = _parse_security_token(token)\n if cred is None and request.method == 'POST':\n token = request.POST.get('CHIRP_Auth')\n if token:\n try:\n token = 
base64.urlsafe_b64decode(token)\n except TypeError:\n token = None\n if token:\n cred = _parse_security_token(token)\n if cred is None:\n return None\n user = User.get_by_email(cred.email)\n if user is None:\n return None\n if not user.is_active:\n logging.info('Rejected inactive user %s', user.email)\n raise UserNotAllowedError\n user._credentials = cred\n return user\n\n\ndef create_login_url(path):\n \"\"\"Returns the URL of a login page that redirects to 'path' on success.\"\"\"\n return '/auth/hello?redirect=%s' % path\n\n\ndef logout(redirect=None):\n \"\"\"Create an HTTP response that will log a user out.\n \n The redirect param can be a relative URL in which case \n the user will go back to the same page when logging in.\n This is useful for switching users like on the playlist \n tracker page.\n \n Returns:\n An HttpResponse object that will log the user out.\n \"\"\"\n logout_url = _FINAL_LOGOUT_URL\n if redirect:\n logout_url = '%s?redirect=%s' % (logout_url, redirect)\n response = http.HttpResponseRedirect(logout_url)\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')\n return response\n\n\ndef get_password_reset_token(user):\n \"\"\"A URL-safe token that authenticates a user for a password reset.\"\"\"\n return base64.urlsafe_b64encode(_create_security_token(user))\n\n\ndef parse_password_reset_token(token):\n \"\"\"Extracts an email address from a valid password reset token.\"\"\"\n try:\n token = base64.urlsafe_b64decode(str(token))\n except TypeError:\n return None\n cred = _parse_security_token(token)\n return cred and cred.email\n",
"step-5": "###\n### Copyright 2009 The Chicago Independent Radio Project\n### All Rights Reserved.\n###\n### Licensed under the Apache License, Version 2.0 (the \"License\");\n### you may not use this file except in compliance with the License.\n### You may obtain a copy of the License at\n###\n### http://www.apache.org/licenses/LICENSE-2.0\n###\n### Unless required by applicable law or agreed to in writing, software\n### distributed under the License is distributed on an \"AS IS\" BASIS,\n### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n### See the License for the specific language governing permissions and\n### limitations under the License.\n###\n\n\"\"\"CHIRP authentication system.\"\"\"\n\nimport base64\nimport logging\nimport os\nimport time\n\nfrom common import in_prod\nfrom common.autoretry import AutoRetry\n\n# TODO(trow): This is a work-around for problems with PyCrypto on the Mac.\n# For more information, see\n# http://code.google.com/p/googleappengine/issues/detail?id=1627\n_DISABLE_CRYPTO = False\ntry:\n from Crypto.Cipher import AES\n from Crypto.Hash import HMAC\nexcept ImportError:\n # Only allow crypto to be disabled if we are running in a local\n # development environment.\n if in_prod():\n raise\n _DISABLE_CRYPTO = True\n logging.warn(\"PyCrypto not found! Operating in insecure mode!\")\n \nfrom django import http\nfrom auth.models import User, KeyStorage\nfrom auth import roles\n\n# Our logout URL.\nLOGOUT_URL = \"/auth/goodbye/\"\n\n# Users are ultimately redirected to the URL after logging out.\n_FINAL_LOGOUT_URL = '/auth/hello/'\n\n# The name of the cookie used to store our security token.\n_CHIRP_SECURITY_TOKEN_COOKIE = 'chirp_security_token'\n\n# Our security tokens expire after 24 hours.\n# TODO(kumar) set this back to two hours after \n# all CHIRP volunteers have set initial password?\n_TOKEN_TIMEOUT_S = 24 * 60 * 60\n\n\nclass UserNotAllowedError(Exception):\n \"\"\"Raised when the user is recognized but forbidden from entering.\"\"\"\n\n\nclass _Credentials(object):\n email = None\n security_token_is_stale = False\n\n\ndef _create_security_token(user):\n \"\"\"Create a CHIRP security token.\n\n Args:\n user: A User object.\n\n Returns:\n A string containing an encrypted security token that encodes\n the user's email address as well as a timestamp.\n \"\"\"\n timestamp = int(time.time())\n plaintext = \"%x %s\" % (timestamp, user.email)\n nearest_mult_of_16 = 16 * ((len(plaintext) + 15) // 16)\n # Pad plaintest with whitespace to make the length a multiple of 16,\n # as this is a requirement of AES encryption.\n plaintext = plaintext.rjust(nearest_mult_of_16, ' ')\n if _DISABLE_CRYPTO:\n body = plaintext\n sig = \"sig\"\n else:\n key_storage = KeyStorage.get()\n body = AES.new(key_storage.aes_key, AES.MODE_CBC).encrypt(plaintext)\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n # Crypto requires byte strings\n hmac_key = hmac_key.encode('utf8')\n sig = HMAC.HMAC(key=hmac_key, msg=body).hexdigest()\n return '%s:%s' % (sig, body)\n\ndef _parse_security_token(token):\n \"\"\"Parse a CHIRP security token.\n\n Returns:\n A Credentials object, or None if the token is not valid.\n If a Credentials object is returned, its \"user\" field will not\n be set.\n \"\"\"\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = 
key_storage.hmac_key\n if type(hmac_key) == unicode:\n # Crypto requires byte strings\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key,\n msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key,\n AES.MODE_CBC).decrypt(body)\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n # Remove excess whitespace.\n plaintext = plaintext.strip()\n # The plaintext should contain at least one space.\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n # Reject tokens that are too old or which have time-traveled. We\n # allow for 1s of clock skew.\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = (age_s > 0.5 * _TOKEN_TIMEOUT_S)\n return cred\n\n\ndef attach_credentials(response, user):\n \"\"\"Attach a user's credentials to a response.\n\n Args:\n response: An HttpResponse object.\n user: A User object.\n \"\"\"\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE,\n _create_security_token(user))\n\n\ndef get_current_user(request):\n \"\"\"Get the current logged-in user's.\n\n Returns:\n A User object, or None if the user is not logged in.\n\n Raises:\n UserNotAllowedError if the user is prohibited from accessing\n the site.\n \"\"\"\n cred = None\n token = request.COOKIES.get(_CHIRP_SECURITY_TOKEN_COOKIE)\n if token:\n cred = _parse_security_token(token)\n # If this is a POST, look for a base64-encoded security token in\n # the CHIRP_Auth variable.\n if cred is None and request.method == 'POST':\n token = request.POST.get(\"CHIRP_Auth\")\n if token:\n try:\n token = base64.urlsafe_b64decode(token)\n except TypeError:\n token = None\n if token:\n cred = _parse_security_token(token)\n # No valid token? 
This is hopeless!\n if cred is None:\n return None\n # Try to find a user for this email address.\n user = User.get_by_email(cred.email)\n if user is None:\n return None\n # Reject inactive users.\n if not user.is_active:\n logging.info('Rejected inactive user %s', user.email)\n raise UserNotAllowedError\n user._credentials = cred\n return user\n\n\ndef create_login_url(path):\n \"\"\"Returns the URL of a login page that redirects to 'path' on success.\"\"\"\n return \"/auth/hello?redirect=%s\" % path\n\n\ndef logout(redirect=None):\n \"\"\"Create an HTTP response that will log a user out.\n \n The redirect param can be a relative URL in which case \n the user will go back to the same page when logging in.\n This is useful for switching users like on the playlist \n tracker page.\n \n Returns:\n An HttpResponse object that will log the user out.\n \"\"\"\n # If the user was signed in and has a cookie, clear it.\n logout_url = _FINAL_LOGOUT_URL\n if redirect:\n logout_url = '%s?redirect=%s' % (logout_url, redirect)\n response = http.HttpResponseRedirect(logout_url)\n response.set_cookie(_CHIRP_SECURITY_TOKEN_COOKIE, '')\n return response\n\n\ndef get_password_reset_token(user):\n \"\"\"A URL-safe token that authenticates a user for a password reset.\"\"\"\n return base64.urlsafe_b64encode(_create_security_token(user))\n\n\ndef parse_password_reset_token(token):\n \"\"\"Extracts an email address from a valid password reset token.\"\"\"\n try:\n token = base64.urlsafe_b64decode(str(token))\n except TypeError:\n return None\n cred = _parse_security_token(token)\n return cred and cred.email\n",
"step-ids": [
10,
11,
12,
13,
16
]
}
|
[
10,
11,
12,
13,
16
] |
#!/usr/bin/env python
#_*_coding:utf-8_*_
# Author: Paul哥
from fabric.api import settings,run,cd,env,hosts
from fabric.colors import *

env.hosts = ['192.168.75.130:22']
env.password = 'hello123'
env.user = 'root'

def test():
    with cd('/home'):
        print(yellow(run('ls -l')))

test()
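
# --- Illustrative appendix (not part of the original script) ---
# A rough equivalent using paramiko directly (the library Fabric builds on);
# the host, user, and password are the values configured above. This is an
# added sketch under those assumptions, not the original tooling.
def test_paramiko():
    import paramiko
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect('192.168.75.130', port=22, username='root', password='hello123')
    _, stdout, _ = client.exec_command('ls -l /home')
    print(stdout.read().decode())
    client.close()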
|
normal
|
{
"blob_id": "6b45541c54f1a4ce94d6bd457701ecd1b90a4c4c",
"index": 1129,
"step-1": "#!/usr/bin/env python\n#_*_coding:utf-8_*_\n#作者:Paul哥\n\n\n\nfrom fabric.api import settings,run,cd,env,hosts\nfrom fabric.colors import *\n\nenv.hosts=['192.168.75.130:22']\nenv.password='hello123'\nenv.user='root'\ndef test():\n\twith cd('/home'):\n\t\tprint yellow(run('ls -l'))\n\ntest()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
/home/khang/anaconda3/lib/python3.6/tempfile.py
|
normal
|
{
"blob_id": "399a22450d215638051a7d643fb6d391156779c5",
"index": 5855,
"step-1": "/home/khang/anaconda3/lib/python3.6/tempfile.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# The player (attack power) attacks the enemy (HP); the enemy takes damage (loses HP) and may die (play a death animation)
# The enemy attacks the player; the player takes damage (loses HP, cracked-screen effect) and may die (game over)
# class Player:
#     def __init__(self,name,hp,atk):
#         self.name = name
#         self.hp = hp
#         self.atk = atk
#
#     @property
#     def hp(self):
#         return self.__hp
#     @hp.setter
#     def hp(self,value):
#         if 0<=value<=100:
#             self.__hp = value
#         else:
#             raise ValueError('HP is out of range')
#
#     @property
#     def atk(self):
#         return self.__atk
#
#     @atk.setter
#     def atk(self, value):
#         if 0 <= value <= 50:
#             self.__atk = value
#         else:
#             raise ValueError('attack power is out of range')
#
#
# class Enemy:
#     def __init__(self, e_name, e_hp, e_atk):
#         self.e_name = e_name
#         self.e_hp = e_hp
#         self.e_atk = e_atk
#
#     @property
#     def e_hp(self):
#         return self.__e_hp
#
#     @e_hp.setter
#     def e_hp(self, value):
#         if 0 <= value <= 100:
#             self.__e_hp = value
#         else:
#             raise ValueError('HP is out of range')
#
#     @property
#     def e_atk(self):
#         return self.__e_atk
#
#     @e_atk.setter
#     def e_atk(self, value):
#         if 0 <= value <= 20:
#             self.__e_atk = value
#         else:
#             raise ValueError('attack power is out of range')
#
#
#
# p1 = Player('悟空',100,20)
# e1 = Enemy('Monster',40,10)
#
# # 1. The player (attack power) attacks the enemy (HP); the enemy takes damage and may die (play animation)
# print('1. Player attacks enemy:')
# def p_atk_e():
#     count = 0
#     while True:
#         e1.e_hp -= p1.atk
#         count += 1
#         if e1.e_hp >0:
#             print('Player attacked %d times; enemy HP is down to %d' %
#                   (count,e1.e_hp))
#         elif e1.e_hp == 0:
#             print('Player attacked %d times; the enemy died, play the animation' % count)
#             break
#
# p_atk_e()
#
# # 2. The enemy attacks the player; the player takes damage (cracked screen) and may die (game over)
# print('2. Enemy attacks player:')
# def e_atk_p():
#     count = 0
#     while True:
#         p1.hp -= e1.e_atk
#         count += 1
#         if p1.hp >0:
#             print('Enemy attacked %d times; player HP is down to %d' %
#                   (count,p1.hp))
#         elif p1.hp == 0:
#             print('Enemy attacked %d times; the player died, game over' % count)
#             break
# e_atk_p()
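
# --- Illustrative aside (not part of the original exercise) ---
# A compact form of the @property validation pattern used in the
# commented-out draft above, reusing the draft's 0-100 HP range:
class GuardedHP(object):
    def __init__(self, hp):
        self.hp = hp  # goes through the setter below

    @property
    def hp(self):
        return self.__hp

    @hp.setter
    def hp(self, value):
        if 0 <= value <= 100:
            self.__hp = value
        else:
            raise ValueError('HP is out of range')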
# Player class
class Player:
    def __init__(self, hp=100, atk=100):
        self.hp = hp
        self.atk = atk

    def attack(self, enemy):
        print('Computer: the player attacks the enemy')
        enemy.damage(self.atk)

    def damage(self, value):
        print('Player: Ouch!')
        # the player loses HP
        self.hp -= value
        # and may die
        if self.hp <= 0:
            print("Enemy: You're terrible at this")

# Enemy class
class Enemy:
    def __init__(self, hp=100, atk=99):
        self.hp = hp
        self.atk = atk

    def damage(self, value):
        print('Enemy: Argh!')
        # the enemy loses HP
        self.hp -= value
        # and may die
        if self.hp <= 0:
            print('Computer: the enemy died, play the death animation')

    def attack(self, player):
        print('Computer: the enemy attacks the player')
        player.damage(self.atk)

p01 = Player()
e01 = Enemy()
p01.attack(e01)
e01.attack(p01)
e01.attack(p01)
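
# --- Illustrative aside (not part of the original exercise) ---
# A possible driver loop for the classes above: the two sides trade
# blows until one runs out of HP. Added sketch only.
def battle(player, enemy):
    while player.hp > 0 and enemy.hp > 0:
        player.attack(enemy)
        if enemy.hp <= 0:
            break
        enemy.attack(player)

battle(Player(), Enemy())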
|
normal
|
{
"blob_id": "3065c87f79433e9fbbd2ff45c2915dfd5b1fa7cc",
"index": 8427,
"step-1": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n <mask token>\n <mask token>\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-2": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n <mask token>\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-3": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n\n def damage(self, value):\n print('玩家:我去')\n self.hp -= value\n if self.hp <= 0:\n print('敌人:你真菜')\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\n",
"step-4": "class Player:\n\n def __init__(self, hp=100, atk=100):\n self.hp = hp\n self.atk = atk\n\n def attack(self, enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n\n def damage(self, value):\n print('玩家:我去')\n self.hp -= value\n if self.hp <= 0:\n print('敌人:你真菜')\n\n\nclass Enemy:\n\n def __init__(self, hp=100, atk=99):\n self.hp = hp\n self.atk = atk\n\n def damage(self, value):\n print('敌人:啊')\n self.hp -= value\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n\n def attack(self, player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\n\n<mask token>\np01.attack(e01)\ne01.attack(p01)\ne01.attack(p01)\n",
"step-5": "# 玩家(攻击力)攻击敌人(血量)敌人受伤(减血)可能死亡(播放动画)\n# 敌人攻击玩家 玩家受伤(减血 碎屏) 可能死亡(游戏结束)\n\n# class Player:\n# def __init__(self,name,hp,atk):\n# self.name = name\n# self.hp = hp\n# self.atk = atk\n#\n# @property\n# def hp(self):\n# return self.__hp\n# @hp.setter\n# def hp(self,value):\n# if 0<=value<=100:\n# self.__hp = value\n# else:\n# raise ValueError('血量不在区间内')\n#\n# @property\n# def atk(self):\n# return self.__atk\n#\n# @atk.setter\n# def atk(self, value):\n# if 0 <= value <= 50:\n# self.__atk = value\n# else:\n# raise ValueError('攻击力不在区间内')\n#\n#\n# class Enemy:\n# def __init__(self, e_name, e_hp, e_atk):\n# self.e_name = e_name\n# self.e_hp = e_hp\n# self.e_atk = e_atk\n#\n# @property\n# def e_hp(self):\n# return self.__e_hp\n#\n# @e_hp.setter\n# def e_hp(self, value):\n# if 0 <= value <= 100:\n# self.__e_hp = value\n# else:\n# raise ValueError('血量不在区间内')\n#\n# @property\n# def e_atk(self):\n# return self.__e_atk\n#\n# @e_atk.setter\n# def e_atk(self, value):\n# if 0 <= value <= 20:\n# self.__e_atk = value\n# else:\n# raise ValueError('攻击力不在区间内')\n#\n#\n#\n# p1 = Player('悟空',100,20)\n# e1 = Enemy('妖怪',40,10)\n#\n# #1.玩家(攻击力)攻击敌人(血量)敌人受伤(减血)可能死亡(播放动画)\n# print('1.玩家攻击敌人:')\n# def p_atk_e():\n# count = 0\n# while True:\n# e1.e_hp -= p1.atk\n# count += 1\n# if e1.e_hp >0:\n# print('玩家攻击%d次,敌人血量减少到%d' %\n# (count,e1.e_hp))\n# elif e1.e_hp == 0:\n# print('玩家攻击%d次,敌人死亡,播放动画' % count)\n# break\n#\n# p_atk_e()\n#\n# # 2.敌人攻击玩家 玩家受伤(减血 碎屏) 可能死亡(游戏结束)\n# print('2.敌人攻击玩家:')\n# def e_atk_p():\n# count = 0\n# while True:\n# p1.hp -= e1.e_atk\n# count += 1\n# if p1.hp >0:\n# print('敌人攻击%d次,玩家血量减少到%d' %\n# (count,p1.hp))\n# elif p1.hp == 0:\n# print('敌人攻击%d次,玩家死亡,游戏结束' % count)\n# break\n# e_atk_p()\n\n\n#玩家类\nclass Player:\n def __init__(self,hp = 100,atk = 100):\n self.hp = hp\n self.atk = atk\n def attack(self,enemy):\n print('电脑:玩家攻击敌人')\n enemy.damage(self.atk)\n def damage(self,value):\n print('玩家:我去')\n #敌人减血\n self.hp -= value\n #可能死亡\n if self.hp <= 0:\n print('敌人:你真菜')\n\n#敌人类\nclass Enemy:\n def __init__(self,hp = 100,atk = 99):\n self.hp = hp\n self.atk = atk\n def damage(self,value):\n print('敌人:啊')\n #玩家减血\n self.hp -= value\n #可能死亡\n if self.hp <= 0:\n print('电脑:敌人死亡,播放动画')\n def attack(self,player):\n print('电脑:敌人攻击玩家')\n player.damage(self.atk)\n\np01 = Player()\ne01 = Enemy()\np01.attack(e01)\ne01.attack(p01)\ne01.attack(p01)\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
#------------------------------------------------------------------------
#
# @Author : EV2 CHEVALLIER
#
# @Date : 16.09.20
# @Location : École Navale / Chaire de Cyberdéfense des systèmes navals
# @Project : Final-Year Project (Projet de Fin d'Études)
# @Subject : Real-time detection of cyber anomalies on an NMEA network using machine learning methods
#
#------------------------------------------------------------------------
# @Title : Training
#------------------------------------------------------------------------
# @Description : This program loads the training dataset, extracts the features of interest
# (mean and standard deviation of the variations of latitude, longitude, heading and distance),
# stores them in a Python dictionary, and saves that dictionary to a binary file with the pickle module.
#------------------------------------------------------------------------
import traitement as tr
import pickle as pk
import model as md
def training(dataset):
    # "dataset" maps speed -> heading -> path of a JSON training file
    model = {}
    model["µ"] = {}
    model["sigma"] = {}

    for x in dataset:  # loop over speeds
        model["µ"][x] = {}
        model["sigma"][x] = {}

        for y in dataset[x]:  # loop over headings

            model["µ"][x][y] = {}
            model["sigma"][x][y] = {}

            doc = tr.load(dataset[x][y])  # open the json file

            phi_l = doc[0]
            g_l = doc[1]   # lists of phi, g, t
            t_l = doc[2]

            dphi_l = tr.delta(phi_l, t_l)  # compute the successive differences
            dg_l = tr.delta(g_l, t_l)
            dheading_l = tr.delta(tr.heading(phi_l, g_l), t_l)
            d_distance = tr.delta_distance(phi_l, g_l)

            # build the model from the statistical values of the features:
            # variation of latitude, longitude, heading and distance
            model["µ"][x][y]["phi"] = tr.parameters(dphi_l)["mean"]
            model["µ"][x][y]["g"] = tr.parameters(dg_l)["mean"]  # update the model

            model["sigma"][x][y]["phi"] = tr.parameters(dphi_l)["standard_deviation"]
            model["sigma"][x][y]["g"] = tr.parameters(dg_l)["standard_deviation"]  # use the deltas, matching the mean above

            model["µ"][x][y]["heading"] = tr.parameters(dheading_l)["mean"]
            model["µ"][x][y]["distance"] = tr.parameters(d_distance)["mean"]

            model["sigma"][x][y]["heading"] = tr.parameters(dheading_l)["standard_deviation"]
            model["sigma"][x][y]["distance"] = tr.parameters(d_distance)["standard_deviation"]

    with open('model.sauv', 'wb') as model_sauv_file:
        pk.dump(model, model_sauv_file)  # save the model in a binary file

    return model

training(md.model())
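
# --- Illustrative appendix (not part of the original program) ---
# One way the saved model could be consulted at detection time: load the
# pickle and flag an observed delta whose distance from µ exceeds k*sigma.
# The 3-sigma default and the example keys are assumptions made for this
# sketch, not the project's actual detection rule.
def is_anomalous(model, speed, heading, feature, observed_delta, k=3.0):
    mu = model['µ'][speed][heading][feature]
    sigma = model['sigma'][speed][heading][feature]
    return abs(observed_delta - mu) > k * sigma

# e.g. (hypothetical keys):
# with open('model.sauv', 'rb') as model_file:
#     m = pk.load(model_file)
# print(is_anomalous(m, '10kn', '090', 'phi', 0.0005))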
|
normal
|
{
"blob_id": "6726c8f1b3ef9a0df74c25c1921203af3aaacb12",
"index": 8758,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef training(dict):\n model = {}\n model['µ'] = {}\n model['sigma'] = {}\n for x in dict:\n model['µ'][x] = {}\n model['sigma'][x] = {}\n for y in dict[x]:\n model['µ'][x][y] = {}\n model['sigma'][x][y] = {}\n doc = tr.load(dict[x][y])\n phi_l = doc[0]\n g_l = doc[1]\n t_l = doc[2]\n dphi_l = tr.delta(phi_l, t_l)\n dg_l = tr.delta(g_l, t_l)\n dheading_l = tr.delta(tr.heading(phi_l, g_l), t_l)\n d_distance = tr.delta_distance(phi_l, g_l)\n model['µ'][x][y]['phi'] = tr.parameters(dphi_l)['mean']\n model['µ'][x][y]['g'] = tr.parameters(dg_l)['mean']\n model['sigma'][x][y]['phi'] = tr.parameters(dphi_l)[\n 'standard_deviation']\n model['sigma'][x][y]['g'] = tr.parameters(g_l)['standard_deviation'\n ]\n model['µ'][x][y]['heading'] = tr.parameters(dheading_l)['mean']\n model['µ'][x][y]['distance'] = tr.parameters(d_distance)['mean']\n model['sigma'][x][y]['heading'] = tr.parameters(dheading_l)[\n 'standard_deviation']\n model['sigma'][x][y]['distance'] = tr.parameters(d_distance)[\n 'standard_deviation']\n with open('model.sauv', 'wb') as model_sauv_file:\n pk.dump(model, model_sauv_file)\n return model\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef training(dict):\n model = {}\n model['µ'] = {}\n model['sigma'] = {}\n for x in dict:\n model['µ'][x] = {}\n model['sigma'][x] = {}\n for y in dict[x]:\n model['µ'][x][y] = {}\n model['sigma'][x][y] = {}\n doc = tr.load(dict[x][y])\n phi_l = doc[0]\n g_l = doc[1]\n t_l = doc[2]\n dphi_l = tr.delta(phi_l, t_l)\n dg_l = tr.delta(g_l, t_l)\n dheading_l = tr.delta(tr.heading(phi_l, g_l), t_l)\n d_distance = tr.delta_distance(phi_l, g_l)\n model['µ'][x][y]['phi'] = tr.parameters(dphi_l)['mean']\n model['µ'][x][y]['g'] = tr.parameters(dg_l)['mean']\n model['sigma'][x][y]['phi'] = tr.parameters(dphi_l)[\n 'standard_deviation']\n model['sigma'][x][y]['g'] = tr.parameters(g_l)['standard_deviation'\n ]\n model['µ'][x][y]['heading'] = tr.parameters(dheading_l)['mean']\n model['µ'][x][y]['distance'] = tr.parameters(d_distance)['mean']\n model['sigma'][x][y]['heading'] = tr.parameters(dheading_l)[\n 'standard_deviation']\n model['sigma'][x][y]['distance'] = tr.parameters(d_distance)[\n 'standard_deviation']\n with open('model.sauv', 'wb') as model_sauv_file:\n pk.dump(model, model_sauv_file)\n return model\n\n\ntraining(md.model())\n",
"step-4": "import traitement as tr\nimport pickle as pk\nimport model as md\n\n\ndef training(dict):\n model = {}\n model['µ'] = {}\n model['sigma'] = {}\n for x in dict:\n model['µ'][x] = {}\n model['sigma'][x] = {}\n for y in dict[x]:\n model['µ'][x][y] = {}\n model['sigma'][x][y] = {}\n doc = tr.load(dict[x][y])\n phi_l = doc[0]\n g_l = doc[1]\n t_l = doc[2]\n dphi_l = tr.delta(phi_l, t_l)\n dg_l = tr.delta(g_l, t_l)\n dheading_l = tr.delta(tr.heading(phi_l, g_l), t_l)\n d_distance = tr.delta_distance(phi_l, g_l)\n model['µ'][x][y]['phi'] = tr.parameters(dphi_l)['mean']\n model['µ'][x][y]['g'] = tr.parameters(dg_l)['mean']\n model['sigma'][x][y]['phi'] = tr.parameters(dphi_l)[\n 'standard_deviation']\n model['sigma'][x][y]['g'] = tr.parameters(g_l)['standard_deviation'\n ]\n model['µ'][x][y]['heading'] = tr.parameters(dheading_l)['mean']\n model['µ'][x][y]['distance'] = tr.parameters(d_distance)['mean']\n model['sigma'][x][y]['heading'] = tr.parameters(dheading_l)[\n 'standard_deviation']\n model['sigma'][x][y]['distance'] = tr.parameters(d_distance)[\n 'standard_deviation']\n with open('model.sauv', 'wb') as model_sauv_file:\n pk.dump(model, model_sauv_file)\n return model\n\n\ntraining(md.model())\n",
"step-5": "#------------------------------------------------------------------------\n#\n# @Author : EV2 CHEVALLIER \n#\n# @Date : 16.09.20\n# @Location : École Navale / Chaire de Cyberdéfense des systèmes navals\n# @Project : Projet de Fin d'Études\n# @Subject : # Real time detection of cyber anomalies upon a NMEA network by using machine learning methods\n#\n#------------------------------------------------------------------------\n# @Title : Training\n#------------------------------------------------------------------------\n# @Description : # This programm get the training dataset, extract the interesting features ( mean and standard deviation of variations of latitude, \n# longitude, heading and distance )\n# and put it in a python dictionnary and save it in a binary file with the pickle module.\n\n#------------------------------------------------------------------------\n\n\nimport traitement as tr\nimport pickle as pk\nimport model as md\n\ndef training(dict):\n\n\n model={}\n model[\"µ\"]={}\n model[\"sigma\"]={}\n\n for x in dict: # loop with speed\n model[\"µ\"][x]={}\n model[\"sigma\"][x]={}\n\n for y in dict[x]: # loop with heading\n\n model[\"µ\"][x][y] = {}\n model[\"sigma\"][x][y] = {}\n\n doc=tr.load(dict[x][y]) # open the json file\n\n phi_l=doc[0]\n g_l=doc[1] # get a list of phi,g,t\n t_l=doc[2]\n\n dphi_l=tr.delta(phi_l,t_l) # compute the differences\n dg_l=tr.delta(g_l,t_l)\n dheading_l=tr.delta(tr.heading(phi_l,g_l),t_l)\n d_distance=tr.delta_distance(phi_l,g_l)\n\n# we build a model with the statistical values of the features : variation of latitude, longitude, heading and distance\n\n model[\"µ\"][x][y][\"phi\"] = tr.parameters(dphi_l)[\"mean\"]\n model[\"µ\"][x][y][\"g\"] = tr.parameters(dg_l)[\"mean\"] # met à jour le modele\n\n model[\"sigma\"][x][y][\"phi\"] = tr.parameters(dphi_l)[\"standard_deviation\"]\n model[\"sigma\"][x][y][\"g\"] = tr.parameters(g_l)[\"standard_deviation\"]\n\n\n model[\"µ\"][x][y][\"heading\"] = tr.parameters(dheading_l)[\"mean\"]\n model[\"µ\"][x][y][\"distance\"] = tr.parameters(d_distance)[\"mean\"]\n\n model[\"sigma\"][x][y][\"heading\"] = tr.parameters(dheading_l)[\"standard_deviation\"]\n model[\"sigma\"][x][y][\"distance\"] = tr.parameters(d_distance)[\"standard_deviation\"]\n\n with open('model.sauv','wb' ) as model_sauv_file: \n pk.dump(model, model_sauv_file) # save the model in a binary file\n\n return model\n\ntraining(md.model())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import webbrowser as wb
import time as t
import pyautogui as pg

points = 0
name = pg.prompt("What is your name? ").title()
pg.alert(name)
if name == "Caroline":
pg.alert ("Hi " + name)
points += 5
t.sleep(1)
wb.open ("https://www.textgiraffe.com/Caroline/Page2/")
elif name == "Bob":
pg.alert (name + ",you are a great person!")
points += 3
t.sleep(1)
wb.open("http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg")
elif name == "Catherine":
pg.alert (name + "I like you already.")
points += 2
t.sleep(2)
wb.open ("https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW")
elif name == "James":
pg.alert ("nice to meet you" + name)
points += 1
t.sleep(1)
wb.open ("https://www.youtube.com/watch?v=uV9LYMAEnRA")
elif name == "Kate":
pg.alert ("Hello!")
points += 2
t.sleep (1)
wb.open ("https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:")
elif name == "Will":
pg.alert ("Coool!")
    points += 3
t.sleep (2)
wb.open ("https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:")
else:
pg.alert ("I don't know you!")
points += 0
t.sleep(2)
wb.open ("https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657")
color = pg.prompt ("what is your favorite color? ").title()
if color == "Blue":
pg.alert ("mine too!")
points += 5
t.sleep(1)
wb.open ("https://www.youtube.com/watch?v=SoIKv3xxuMA")
elif color == "Pink":
pg.alert ("Do you like unicorns too?")
points += 2
t.sleep(2)
wb.open ("https://www.youtube.com/watch?v=a-xWhG4UU_Y")
elif color == "Purple":
pg.alert ("cool!")
points += 3
t.sleep(1)
wb.open ("https://www.youtube.com/watch?v=TvnYmWpD_T8")
elif color == "Black":
pg.alert ("ok...")
points -= 2
t.sleep(2)
wb.open ("https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:")
elif color == "Yellow":
pg.alert ("Like a sunflower!")
points += 1
t.sleep (1)
wb.open ("https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:")
elif color == "Brown":
pg.alert ("wow.")
points -= 5
t.sleep (1)
wb.open ("https://www.youtube.com/watch?v=dsJtgmAhFF4")
else:
pg.alert("nice")
points += 1
t.sleep(2)
wb.open ("https://giphy.com/explore/rainbow")
sport = pg.prompt ("What is your favorite sport? ").title()
if sport == "Hockey":
pg.alert ("yep, I guess your cool")
points += 5
t.sleep(2)
wb.open ("https://www.youtube.com/watch?v=JDnZTUkCOBQ")
elif sport == "Soccer":
pg.alert ("you mean futbol...")
points += 5
t.sleep(2)
wb.open ("https://www.youtube.com/watch?v=K-U1ZgrsGGg")
elif sport == "Lacrosse":
pg.alert (" I used to play..")
points += 2
t.sleep(2)
wb.open ("https://www.youtube.com/watch?v=o5hsPBsGD44")
elif sport == "Football":
pg.alert ("that cool.")
points += 4
t.sleep(3)
wb.open ("https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:")
elif sport == "Field Hockey":
pg.alert ("Nice!")
points += 2
t.sleep(3)
wb.open ("https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:")
elif sport == "Surfing":
pg.alert ("WOAH")
points += 7
t.sleep(1)
wb.open ("https://www.youtube.com/watch?v=HBklS2vYEPo")
else:
pg.alert ("cool")
points += 0
t.sleep(2)
wb.open ("https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:")
subject = pg.prompt ("What is your favorite subject?").title()
if subject == "Math":
pg.alert ("so your a mathmatician")
points += 2
t.sleep(3)
wb.open ("https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:")
elif subject == "Computer science":
pg.alert ("nice")
points += 9
t.sleep(3)
wb.open ("https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657")
elif subject == "English":
pg.alert ("I like it too.")
points += 3
t.sleep(3)
wb.open ("https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0")
elif subject == "Science":
pg.alert ("Bill Nye the Science Guy.")
points += 3
t.sleep(2)
wb.open("https://www.youtube.com/watch?v=nDN7M0J3HXc")
elif subject == "Spanish":
pg.alert ("Hola! Como estas?")
points += 3
t.sleep(2)
wb.open ("https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:")
elif subject == "History":
pg.alert ("In 1492 Christopher Columbus sailed the ocean blue")
points += 3
t.sleep(2)
wb.open ("https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC")
else:
pg.alert ("cool")
points += 1
t.sleep(2)
wb.open ("https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:")
food = pg.prompt ("What is your favorite food?").title()
if food == "Pizza":
pg.alert ("Pizza Hut? Dominos?")
points += 2
t.sleep(2)
wb.open ("https://cooking.nytimes.com/guides/1-how-to-make-pizza")
elif food == "Chocolate cake":
pg.alert ("Now I want one")
points += 9
t.sleep(3)
wb.open ("https://www.youtube.com/watch?v=dsJtgmAhFF4")
elif food == "Pasta":
pg.alert ("I like pasta!")
points += 3
t.sleep(3)
wb.open ("https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639")
elif food == "Ice cream":
pg.alert ("What kind? I like cookie monster.")
points += 3
t.sleep(2)
wb.open("https://barefeetinthekitchen.com/homemade-ice-cream-recipe/")
elif food == "Fruit":
pg.alert ("Refreshing!")
points += 3
t.sleep(2)
wb.open ("https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:")
elif food == "Chicken":
pg.alert ("Yum!")
points += 2
t.sleep(2)
wb.open ("https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657")
else:
pg.alert ("YUUMMM")
points += 1
t.sleep(2)
wb.open ("https://www.youtube.com/watch?v=11HK5EuYwSk")
movie = pg.prompt ("What is your favorite movie series?").title()
if "Divergent" in movie:
number = pg.prompt("Which movie is your favorite").title()
if number == "1":
pg.alert("Nice!")
ice_cream = pg.confirm("Which of these flavors is your favorite?", "Choose one", ["chocolate", "vanilla", "cookies and cream"])
if ice_cream == "cookies and cream":
pg.alert("YES")
pg.alert ("Your final score is " + str(points))
|
normal
|
{
"blob_id": "16e10db90a0a0d8ee7ca5b0c7f86cc81432d87d1",
"index": 4391,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npg.alert(name)\nif name == 'Caroline':\n pg.alert('Hi ' + name)\n points += 5\n t.sleep(1)\n wb.open('https://www.textgiraffe.com/Caroline/Page2/')\nelif name == 'Bob':\n pg.alert(name + ',you are a great person!')\n points += 3\n t.sleep(1)\n wb.open('http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg')\nelif name == 'Catherine':\n pg.alert(name + 'I like you already.')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW'\n )\nelif name == 'James':\n pg.alert('nice to meet you' + name)\n points += 1\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=uV9LYMAEnRA')\nelif name == 'Kate':\n pg.alert('Hello!')\n points += 2\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:'\n )\nelif name == 'Will':\n pg.alert('Coool!')\n ponts += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:'\n )\nelse:\n pg.alert(\"I don't know you!\")\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657'\n )\n<mask token>\nif color == 'Blue':\n pg.alert('mine too!')\n points += 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=SoIKv3xxuMA')\nelif color == 'Pink':\n pg.alert('Do you like unicorns too?')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=a-xWhG4UU_Y')\nelif color == 'Purple':\n pg.alert('cool!')\n points += 3\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=TvnYmWpD_T8')\nelif color == 'Black':\n pg.alert('ok...')\n points -= 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:'\n )\nelif color == 'Yellow':\n pg.alert('Like a sunflower!')\n points += 1\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:'\n )\nelif color == 'Brown':\n pg.alert('wow.')\n points -= 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelse:\n pg.alert('nice')\n points += 1\n t.sleep(2)\n wb.open('https://giphy.com/explore/rainbow')\n<mask token>\nif sport == 'Hockey':\n pg.alert('yep, I guess your cool')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=JDnZTUkCOBQ')\nelif sport == 'Soccer':\n pg.alert('you mean futbol...')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=K-U1ZgrsGGg')\nelif sport == 'Lacrosse':\n pg.alert(' I used to play..')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=o5hsPBsGD44')\nelif sport == 'Football':\n pg.alert('that cool.')\n points += 4\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:'\n )\nelif sport == 'Field Hockey':\n pg.alert('Nice!')\n points += 2\n t.sleep(3)\n wb.open(\n 
'https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:'\n )\nelif sport == 'Surfing':\n pg.alert('WOAH')\n points += 7\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=HBklS2vYEPo')\nelse:\n pg.alert('cool')\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:'\n )\n<mask token>\nif subject == 'Math':\n pg.alert('so your a mathmatician')\n points += 2\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:'\n )\nelif subject == 'Computer science':\n pg.alert('nice')\n points += 9\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657'\n )\nelif subject == 'English':\n pg.alert('I like it too.')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0'\n )\nelif subject == 'Science':\n pg.alert('Bill Nye the Science Guy.')\n points += 3\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=nDN7M0J3HXc')\nelif subject == 'Spanish':\n pg.alert('Hola! Como estas?')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:'\n )\nelif subject == 'History':\n pg.alert('In 1492 Christopher Columbus sailed the ocean blue')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC'\n )\nelse:\n pg.alert('cool')\n points += 1\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:'\n )\n<mask token>\nif food == 'Pizza':\n pg.alert('Pizza Hut? Dominos?')\n points += 2\n t.sleep(2)\n wb.open('https://cooking.nytimes.com/guides/1-how-to-make-pizza')\nelif food == 'Chocolate cake':\n pg.alert('Now I want one')\n points += 9\n t.sleep(3)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelif food == 'Pasta':\n pg.alert('I like pasta!')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639'\n )\nelif food == 'Ice cream':\n pg.alert('What kind? 
I like cookie monster.')\n points += 3\n t.sleep(2)\n wb.open('https://barefeetinthekitchen.com/homemade-ice-cream-recipe/')\nelif food == 'Fruit':\n pg.alert('Refreshing!')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:'\n )\nelif food == 'Chicken':\n pg.alert('Yum!')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657'\n )\nelse:\n pg.alert('YUUMMM')\n points += 1\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=11HK5EuYwSk')\n<mask token>\nif 'Divergent' in movie:\n number = pg.prompt('Which movie is your favorite').title()\n if number == '1':\n pg.alert('Nice!')\n<mask token>\nif ice_cream == 'cookies and cream':\n pg.alert('YES')\npg.alert('Your final score is ' + str(points))\n",
"step-3": "<mask token>\npoints = 0\n<mask token>\nname = pg.prompt('What is your name? ').title()\npg.alert(name)\nif name == 'Caroline':\n pg.alert('Hi ' + name)\n points += 5\n t.sleep(1)\n wb.open('https://www.textgiraffe.com/Caroline/Page2/')\nelif name == 'Bob':\n pg.alert(name + ',you are a great person!')\n points += 3\n t.sleep(1)\n wb.open('http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg')\nelif name == 'Catherine':\n pg.alert(name + 'I like you already.')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW'\n )\nelif name == 'James':\n pg.alert('nice to meet you' + name)\n points += 1\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=uV9LYMAEnRA')\nelif name == 'Kate':\n pg.alert('Hello!')\n points += 2\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:'\n )\nelif name == 'Will':\n pg.alert('Coool!')\n ponts += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:'\n )\nelse:\n pg.alert(\"I don't know you!\")\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657'\n )\ncolor = pg.prompt('what is your favorite color? ').title()\nif color == 'Blue':\n pg.alert('mine too!')\n points += 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=SoIKv3xxuMA')\nelif color == 'Pink':\n pg.alert('Do you like unicorns too?')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=a-xWhG4UU_Y')\nelif color == 'Purple':\n pg.alert('cool!')\n points += 3\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=TvnYmWpD_T8')\nelif color == 'Black':\n pg.alert('ok...')\n points -= 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:'\n )\nelif color == 'Yellow':\n pg.alert('Like a sunflower!')\n points += 1\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:'\n )\nelif color == 'Brown':\n pg.alert('wow.')\n points -= 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelse:\n pg.alert('nice')\n points += 1\n t.sleep(2)\n wb.open('https://giphy.com/explore/rainbow')\nsport = pg.prompt('What is your favorite sport? 
').title()\nif sport == 'Hockey':\n pg.alert('yep, I guess your cool')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=JDnZTUkCOBQ')\nelif sport == 'Soccer':\n pg.alert('you mean futbol...')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=K-U1ZgrsGGg')\nelif sport == 'Lacrosse':\n pg.alert(' I used to play..')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=o5hsPBsGD44')\nelif sport == 'Football':\n pg.alert('that cool.')\n points += 4\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:'\n )\nelif sport == 'Field Hockey':\n pg.alert('Nice!')\n points += 2\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:'\n )\nelif sport == 'Surfing':\n pg.alert('WOAH')\n points += 7\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=HBklS2vYEPo')\nelse:\n pg.alert('cool')\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:'\n )\nsubject = pg.prompt('What is your favorite subject?').title()\nif subject == 'Math':\n pg.alert('so your a mathmatician')\n points += 2\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:'\n )\nelif subject == 'Computer science':\n pg.alert('nice')\n points += 9\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657'\n )\nelif subject == 'English':\n pg.alert('I like it too.')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0'\n )\nelif subject == 'Science':\n pg.alert('Bill Nye the Science Guy.')\n points += 3\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=nDN7M0J3HXc')\nelif subject == 'Spanish':\n pg.alert('Hola! 
Como estas?')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:'\n )\nelif subject == 'History':\n pg.alert('In 1492 Christopher Columbus sailed the ocean blue')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC'\n )\nelse:\n pg.alert('cool')\n points += 1\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:'\n )\nfood = pg.prompt('What is your favorite food?').title()\nif food == 'Pizza':\n pg.alert('Pizza Hut? Dominos?')\n points += 2\n t.sleep(2)\n wb.open('https://cooking.nytimes.com/guides/1-how-to-make-pizza')\nelif food == 'Chocolate cake':\n pg.alert('Now I want one')\n points += 9\n t.sleep(3)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelif food == 'Pasta':\n pg.alert('I like pasta!')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639'\n )\nelif food == 'Ice cream':\n pg.alert('What kind? I like cookie monster.')\n points += 3\n t.sleep(2)\n wb.open('https://barefeetinthekitchen.com/homemade-ice-cream-recipe/')\nelif food == 'Fruit':\n pg.alert('Refreshing!')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:'\n )\nelif food == 'Chicken':\n pg.alert('Yum!')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657'\n )\nelse:\n pg.alert('YUUMMM')\n points += 1\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=11HK5EuYwSk')\nmovie = pg.prompt('What is your favorite movie series?').title()\nif 'Divergent' in movie:\n number = pg.prompt('Which movie is your favorite').title()\n if number == '1':\n pg.alert('Nice!')\nice_cream = pg.confirm('Which of these flavors is your favorite?',\n 'Choose one', ['chocolate', 'vanilla', 'cookies and cream'])\nif ice_cream == 'cookies and cream':\n pg.alert('YES')\npg.alert('Your final score is ' + str(points))\n",
"step-4": "import webbrowser as wb\npoints = 0\nimport time as t\nimport pyautogui as pg\nname = pg.prompt('What is your name? ').title()\npg.alert(name)\nif name == 'Caroline':\n pg.alert('Hi ' + name)\n points += 5\n t.sleep(1)\n wb.open('https://www.textgiraffe.com/Caroline/Page2/')\nelif name == 'Bob':\n pg.alert(name + ',you are a great person!')\n points += 3\n t.sleep(1)\n wb.open('http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg')\nelif name == 'Catherine':\n pg.alert(name + 'I like you already.')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW'\n )\nelif name == 'James':\n pg.alert('nice to meet you' + name)\n points += 1\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=uV9LYMAEnRA')\nelif name == 'Kate':\n pg.alert('Hello!')\n points += 2\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:'\n )\nelif name == 'Will':\n pg.alert('Coool!')\n ponts += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:'\n )\nelse:\n pg.alert(\"I don't know you!\")\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657'\n )\ncolor = pg.prompt('what is your favorite color? ').title()\nif color == 'Blue':\n pg.alert('mine too!')\n points += 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=SoIKv3xxuMA')\nelif color == 'Pink':\n pg.alert('Do you like unicorns too?')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=a-xWhG4UU_Y')\nelif color == 'Purple':\n pg.alert('cool!')\n points += 3\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=TvnYmWpD_T8')\nelif color == 'Black':\n pg.alert('ok...')\n points -= 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:'\n )\nelif color == 'Yellow':\n pg.alert('Like a sunflower!')\n points += 1\n t.sleep(1)\n wb.open(\n 'https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:'\n )\nelif color == 'Brown':\n pg.alert('wow.')\n points -= 5\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelse:\n pg.alert('nice')\n points += 1\n t.sleep(2)\n wb.open('https://giphy.com/explore/rainbow')\nsport = pg.prompt('What is your favorite sport? 
').title()\nif sport == 'Hockey':\n pg.alert('yep, I guess your cool')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=JDnZTUkCOBQ')\nelif sport == 'Soccer':\n pg.alert('you mean futbol...')\n points += 5\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=K-U1ZgrsGGg')\nelif sport == 'Lacrosse':\n pg.alert(' I used to play..')\n points += 2\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=o5hsPBsGD44')\nelif sport == 'Football':\n pg.alert('that cool.')\n points += 4\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:'\n )\nelif sport == 'Field Hockey':\n pg.alert('Nice!')\n points += 2\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:'\n )\nelif sport == 'Surfing':\n pg.alert('WOAH')\n points += 7\n t.sleep(1)\n wb.open('https://www.youtube.com/watch?v=HBklS2vYEPo')\nelse:\n pg.alert('cool')\n points += 0\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:'\n )\nsubject = pg.prompt('What is your favorite subject?').title()\nif subject == 'Math':\n pg.alert('so your a mathmatician')\n points += 2\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:'\n )\nelif subject == 'Computer science':\n pg.alert('nice')\n points += 9\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657'\n )\nelif subject == 'English':\n pg.alert('I like it too.')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0'\n )\nelif subject == 'Science':\n pg.alert('Bill Nye the Science Guy.')\n points += 3\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=nDN7M0J3HXc')\nelif subject == 'Spanish':\n pg.alert('Hola! 
Como estas?')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:'\n )\nelif subject == 'History':\n pg.alert('In 1492 Christopher Columbus sailed the ocean blue')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC'\n )\nelse:\n pg.alert('cool')\n points += 1\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:'\n )\nfood = pg.prompt('What is your favorite food?').title()\nif food == 'Pizza':\n pg.alert('Pizza Hut? Dominos?')\n points += 2\n t.sleep(2)\n wb.open('https://cooking.nytimes.com/guides/1-how-to-make-pizza')\nelif food == 'Chocolate cake':\n pg.alert('Now I want one')\n points += 9\n t.sleep(3)\n wb.open('https://www.youtube.com/watch?v=dsJtgmAhFF4')\nelif food == 'Pasta':\n pg.alert('I like pasta!')\n points += 3\n t.sleep(3)\n wb.open(\n 'https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639'\n )\nelif food == 'Ice cream':\n pg.alert('What kind? I like cookie monster.')\n points += 3\n t.sleep(2)\n wb.open('https://barefeetinthekitchen.com/homemade-ice-cream-recipe/')\nelif food == 'Fruit':\n pg.alert('Refreshing!')\n points += 3\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:'\n )\nelif food == 'Chicken':\n pg.alert('Yum!')\n points += 2\n t.sleep(2)\n wb.open(\n 'https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657'\n )\nelse:\n pg.alert('YUUMMM')\n points += 1\n t.sleep(2)\n wb.open('https://www.youtube.com/watch?v=11HK5EuYwSk')\nmovie = pg.prompt('What is your favorite movie series?').title()\nif 'Divergent' in movie:\n number = pg.prompt('Which movie is your favorite').title()\n if number == '1':\n pg.alert('Nice!')\nice_cream = pg.confirm('Which of these flavors is your favorite?',\n 'Choose one', ['chocolate', 'vanilla', 'cookies and cream'])\nif ice_cream == 'cookies and cream':\n pg.alert('YES')\npg.alert('Your final score is ' + str(points))\n",
"step-5": "import webbrowser as wb\r\npoints = 0\r\nimport time as t\r\nimport pyautogui as pg\r\n\r\n\r\nname = pg.prompt(\"What is your name? \").title()\r\n\r\npg.alert(name)\r\nif name == \"Caroline\":\r\n pg.alert (\"Hi \" + name)\r\n points += 5\r\n t.sleep(1) \r\n wb.open (\"https://www.textgiraffe.com/Caroline/Page2/\")\r\nelif name == \"Bob\":\r\n pg.alert (name + \",you are a great person!\")\r\n points += 3\r\n t.sleep(1)\r\n wb.open(\"http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg\")\r\nelif name == \"Catherine\":\r\n pg.alert (name + \"I like you already.\")\r\n points += 2\r\n t.sleep(2)\r\n wb.open (\"https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW\")\r\nelif name == \"James\":\r\n pg.alert (\"nice to meet you\" + name)\r\n points += 1\r\n t.sleep(1)\r\n wb.open (\"https://www.youtube.com/watch?v=uV9LYMAEnRA\")\r\nelif name == \"Kate\":\r\n pg.alert (\"Hello!\")\r\n points += 2\r\n t.sleep (1)\r\n wb.open (\"https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:\")\r\nelif name == \"Will\":\r\n pg.alert (\"Coool!\")\r\n ponts += 3\r\n t.sleep (2)\r\n wb.open (\"https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:\")\r\nelse:\r\n pg.alert (\"I don't know you!\")\r\n points += 0\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657\")\r\ncolor = pg.prompt (\"what is your favorite color? \").title()\r\nif color == \"Blue\":\r\n pg.alert (\"mine too!\")\r\n points += 5\r\n t.sleep(1)\r\n wb.open (\"https://www.youtube.com/watch?v=SoIKv3xxuMA\")\r\nelif color == \"Pink\":\r\n pg.alert (\"Do you like unicorns too?\")\r\n points += 2\r\n t.sleep(2)\r\n wb.open (\"https://www.youtube.com/watch?v=a-xWhG4UU_Y\")\r\nelif color == \"Purple\":\r\n pg.alert (\"cool!\")\r\n points += 3\r\n t.sleep(1)\r\n wb.open (\"https://www.youtube.com/watch?v=TvnYmWpD_T8\")\r\nelif color == \"Black\":\r\n pg.alert (\"ok...\")\r\n points -= 2\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:\")\r\nelif color == \"Yellow\":\r\n pg.alert (\"Like a sunflower!\")\r\n points += 1\r\n t.sleep (1)\r\n wb.open (\"https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:\")\r\nelif color == \"Brown\":\r\n pg.alert (\"wow.\")\r\n points -= 5\r\n t.sleep (1)\r\n wb.open (\"https://www.youtube.com/watch?v=dsJtgmAhFF4\")\r\nelse:\r\n pg.alert(\"nice\")\r\n points += 1\r\n t.sleep(2)\r\n wb.open (\"https://giphy.com/explore/rainbow\")\r\nsport = pg.prompt (\"What is your favorite sport? 
\").title()\r\nif sport == \"Hockey\":\r\n pg.alert (\"yep, I guess your cool\")\r\n points += 5\r\n t.sleep(2)\r\n wb.open (\"https://www.youtube.com/watch?v=JDnZTUkCOBQ\")\r\nelif sport == \"Soccer\":\r\n pg.alert (\"you mean futbol...\")\r\n points += 5\r\n t.sleep(2)\r\n wb.open (\"https://www.youtube.com/watch?v=K-U1ZgrsGGg\")\r\nelif sport == \"Lacrosse\":\r\n pg.alert (\" I used to play..\")\r\n points += 2\r\n t.sleep(2)\r\n wb.open (\"https://www.youtube.com/watch?v=o5hsPBsGD44\")\r\nelif sport == \"Football\":\r\n pg.alert (\"that cool.\")\r\n points += 4\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:\")\r\nelif sport == \"Field Hockey\":\r\n pg.alert (\"Nice!\")\r\n points += 2\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:\")\r\nelif sport == \"Surfing\":\r\n pg.alert (\"WOAH\")\r\n points += 7\r\n t.sleep(1)\r\n wb.open (\"https://www.youtube.com/watch?v=HBklS2vYEPo\")\r\nelse:\r\n pg.alert (\"cool\")\r\n points += 0\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:\")\r\nsubject = pg.prompt (\"What is your favorite subject?\").title()\r\nif subject == \"Math\":\r\n pg.alert (\"so your a mathmatician\")\r\n points += 2\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:\")\r\nelif subject == \"Computer science\":\r\n pg.alert (\"nice\")\r\n points += 9\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657\")\r\nelif subject == \"English\":\r\n pg.alert (\"I like it too.\")\r\n points += 3\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0\")\r\nelif subject == \"Science\":\r\n pg.alert (\"Bill Nye the Science Guy.\")\r\n points += 3\r\n t.sleep(2)\r\n wb.open(\"https://www.youtube.com/watch?v=nDN7M0J3HXc\")\r\nelif subject == \"Spanish\":\r\n pg.alert (\"Hola! 
Como estas?\")\r\n points += 3\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:\")\r\nelif subject == \"History\":\r\n pg.alert (\"In 1492 Christopher Columbus sailed the ocean blue\")\r\n points += 3\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC\")\r\nelse:\r\n pg.alert (\"cool\")\r\n points += 1\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:\")\r\n\r\nfood = pg.prompt (\"What is your favorite food?\").title()\r\nif food == \"Pizza\":\r\n pg.alert (\"Pizza Hut? Dominos?\")\r\n points += 2\r\n t.sleep(2)\r\n wb.open (\"https://cooking.nytimes.com/guides/1-how-to-make-pizza\")\r\nelif food == \"Chocolate cake\":\r\n pg.alert (\"Now I want one\")\r\n points += 9\r\n t.sleep(3)\r\n wb.open (\"https://www.youtube.com/watch?v=dsJtgmAhFF4\")\r\nelif food == \"Pasta\":\r\n pg.alert (\"I like pasta!\")\r\n points += 3\r\n t.sleep(3)\r\n wb.open (\"https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639\")\r\nelif food == \"Ice cream\":\r\n pg.alert (\"What kind? I like cookie monster.\")\r\n points += 3\r\n t.sleep(2)\r\n wb.open(\"https://barefeetinthekitchen.com/homemade-ice-cream-recipe/\")\r\nelif food == \"Fruit\":\r\n pg.alert (\"Refreshing!\")\r\n points += 3\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:\")\r\nelif food == \"Chicken\":\r\n pg.alert (\"Yum!\")\r\n points += 2\r\n t.sleep(2)\r\n wb.open (\"https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657\")\r\nelse:\r\n pg.alert (\"YUUMMM\")\r\n points += 1\r\n t.sleep(2)\r\n wb.open (\"https://www.youtube.com/watch?v=11HK5EuYwSk\")\r\n\r\nmovie = pg.prompt (\"What is your favorite movie series?\").title()\r\nif \"Divergent\" in movie:\r\n number = pg.prompt(\"Which movie is your favorite\").title()\r\n\r\n if number == \"1\":\r\n pg.alert(\"Nice!\")\r\n\r\nice_cream = pg.confirm(\"Which of these flavors is your favorite?\", \"Choose one\", [\"chocolate\", \"vanilla\", \"cookies and cream\"])\r\nif ice_cream == \"cookies and cream\":\r\n pg.alert(\"YES\")\r\n\r\npg.alert (\"Your final score is \" + str(points))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from flask import jsonify
from flask.views import MethodView
class Users(MethodView):
def get(self):
return jsonify(
{
'status': 'OK',
'users': [
{'name': 'Pepe', 'age': 35, 'ocupation': "Engineer"},
{'name': 'Bob', 'age': 20, 'ocupation': "Student"}
]
}
)
def post(self):
# create user
pass
def put(self):
# update user
pass
def delete(self):
# delete user
pass
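
# A minimal registration sketch (not part of the original file): a Flask
# MethodView only handles requests once it is wired to a URL rule during
# app setup; the '/users' path and 'users' endpoint name are illustrative.
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.add_url_rule('/users', view_func=Users.as_view('users'))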
|
normal
|
{
"blob_id": "781ce153d5053078ee11cecc13d055a67999a651",
"index": 3800,
"step-1": "<mask token>\n\n\nclass Users(MethodView):\n <mask token>\n <mask token>\n\n def put(self):\n pass\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n\n def delete(self):\n pass\n",
"step-4": "from flask import jsonify\nfrom flask.views import MethodView\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n\n def delete(self):\n pass\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom flask import jsonify\nfrom flask.views import MethodView\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify(\n {\n 'status': 'OK',\n 'users': [\n {'name': 'Pepe', 'age': 35, 'ocupation': \"Engineer\"},\n {'name': 'Bob', 'age': 20, 'ocupation': \"Student\"}\n ]\n }\n )\n\n def post(self):\n # create user\n pass\n\n def put(self):\n # update user\n pass\n\n def delete(self):\n # delete user\n pass\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# I Have Created this file -Nabeel
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return render(request,'index.html')
def aboutme(request):
return HttpResponse (" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>")
def contact(request):
return HttpResponse ("<a href='https://nb786.github.io/Ncoder/contact.html' > contact us </a>")
def analyze(request):
#get the text
djtext = request.POST.get('text', 'default')
#check checkbox value
removepunc = request.POST.get('removepunc', 'off') #on & off
fullcaps = request.POST.get('fullcaps','off')
newlineremover = request.POST.get('newlineremover','off')
extraspaceremover = request.POST.get('extraspaceremover', 'off')
charcount = request.POST.get('charcount', 'off')
print(removepunc)
#check which checkbox is on
if removepunc == "on":
punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_~'''
analyzed=""
for char in djtext:
if char not in punctuations:
analyzed=analyzed + char
dics = {'purpose':'Removed Punctuations' , 'analyzed_text':analyzed}
djtext=analyzed
#return render(request,'analyze.html',dics)
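        # note: djtext is reassigned after each transformation so that any
        # later checkbox blocks operate on the already-transformed text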
if (fullcaps == "on"):
analyzed = ""
for char in djtext:
analyzed = analyzed + char.upper()
dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}
# Analyze the text
djtext = analyzed
# return render(request, 'analyze.html', dics)
if (newlineremover == "on"):
analyzed = ""
for char in djtext:
if char != "\n" and char != "\r":
analyzed = analyzed + char
else:
print("no")
print("pre", analyzed)
dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}
djtext=analyzed
# Analyze the text
#return render(request, 'analyze.html', dics)
if (extraspaceremover == "on"):
analyzed = ""
for index, char in enumerate(djtext):
            # keep a character unless it is a space followed by another space
            if not (djtext[index] == " " and index + 1 < len(djtext) and djtext[index + 1] == " "):
analyzed = analyzed + char
dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text': analyzed}
djtext = analyzed
#return render(request, 'analyze.html', dics)
if (charcount == "on"):
        analyzed = len(djtext)
dics = {'purpose': 'Total no. of Character in your text are', 'analyzed_text': analyzed}
    if (removepunc != "on" and fullcaps != "on" and newlineremover != "on" and extraspaceremover != "on" and charcount != "on"):
return HttpResponse("Please Select Any Function And Try Again!")
return render(request, 'analyze.html', dics)
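

# A minimal sketch of wiring these views in urls.py (the paths and names
# below are illustrative, not taken from the original project):
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('', views.index, name='index'),
#       path('about', views.aboutme, name='about'),
#       path('contact', views.contact, name='contact'),
#       path('analyze', views.analyze, name='analyze'),
#   ]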
|
normal
|
{
"blob_id": "512d0a293b0cc3e6f7d84bb6958dc6693acde680",
"index": 1612,
"step-1": "<mask token>\n\n\ndef aboutme(request):\n return HttpResponse(\n \" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>\")\n\n\n<mask token>\n\n\ndef analyze(request):\n djtext = request.POST.get('text', 'default')\n removepunc = request.POST.get('removepunc', 'off')\n fullcaps = request.POST.get('fullcaps', 'off')\n newlineremover = request.POST.get('newlineremover', 'off')\n extraspaceremover = request.POST.get('extraspaceremover', 'off')\n charcount = request.POST.get('charcount', 'off')\n print(removepunc)\n if removepunc == 'on':\n punctuations = '!()-[]{};:\\'\"\\\\,<>./?@#$%^&*_~'\n analyzed = ''\n for char in djtext:\n if char not in punctuations:\n analyzed = analyzed + char\n dics = {'purpose': 'Removed Punctuations', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if fullcaps == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = analyzed + char.upper()\n dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}\n djtext = analyzed\n if newlineremover == 'on':\n analyzed = ''\n for char in djtext:\n if char != '\\n' and char != '\\r':\n analyzed = analyzed + char\n else:\n print('no')\n print('pre', analyzed)\n dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}\n djtext = analyzed\n if extraspaceremover == 'on':\n analyzed = ''\n for index, char in enumerate(djtext):\n if not (djtext[index] == '' and djtext[index + 1] == ''):\n analyzed = analyzed + char\n dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if charcount == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = len(djtext)\n dics = {'purpose': 'Total no. of Character in your text are',\n 'analyzed_text': analyzed}\n if (removepunc != 'on' and fullcaps != 'on' and newlineremover != 'on' and\n extraspaceremover != 'on' and charcount != 'on'):\n return HttpResponse('Please Select Any Function And Try Again!')\n return render(request, 'analyze.html', dics)\n",
"step-2": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef aboutme(request):\n return HttpResponse(\n \" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>\")\n\n\n<mask token>\n\n\ndef analyze(request):\n djtext = request.POST.get('text', 'default')\n removepunc = request.POST.get('removepunc', 'off')\n fullcaps = request.POST.get('fullcaps', 'off')\n newlineremover = request.POST.get('newlineremover', 'off')\n extraspaceremover = request.POST.get('extraspaceremover', 'off')\n charcount = request.POST.get('charcount', 'off')\n print(removepunc)\n if removepunc == 'on':\n punctuations = '!()-[]{};:\\'\"\\\\,<>./?@#$%^&*_~'\n analyzed = ''\n for char in djtext:\n if char not in punctuations:\n analyzed = analyzed + char\n dics = {'purpose': 'Removed Punctuations', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if fullcaps == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = analyzed + char.upper()\n dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}\n djtext = analyzed\n if newlineremover == 'on':\n analyzed = ''\n for char in djtext:\n if char != '\\n' and char != '\\r':\n analyzed = analyzed + char\n else:\n print('no')\n print('pre', analyzed)\n dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}\n djtext = analyzed\n if extraspaceremover == 'on':\n analyzed = ''\n for index, char in enumerate(djtext):\n if not (djtext[index] == '' and djtext[index + 1] == ''):\n analyzed = analyzed + char\n dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if charcount == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = len(djtext)\n dics = {'purpose': 'Total no. of Character in your text are',\n 'analyzed_text': analyzed}\n if (removepunc != 'on' and fullcaps != 'on' and newlineremover != 'on' and\n extraspaceremover != 'on' and charcount != 'on'):\n return HttpResponse('Please Select Any Function And Try Again!')\n return render(request, 'analyze.html', dics)\n",
"step-3": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef aboutme(request):\n return HttpResponse(\n \" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>\")\n\n\ndef contact(request):\n return HttpResponse(\n \"<a href='https://nb786.github.io/Ncoder/contact.html' > contact us </a>\"\n )\n\n\ndef analyze(request):\n djtext = request.POST.get('text', 'default')\n removepunc = request.POST.get('removepunc', 'off')\n fullcaps = request.POST.get('fullcaps', 'off')\n newlineremover = request.POST.get('newlineremover', 'off')\n extraspaceremover = request.POST.get('extraspaceremover', 'off')\n charcount = request.POST.get('charcount', 'off')\n print(removepunc)\n if removepunc == 'on':\n punctuations = '!()-[]{};:\\'\"\\\\,<>./?@#$%^&*_~'\n analyzed = ''\n for char in djtext:\n if char not in punctuations:\n analyzed = analyzed + char\n dics = {'purpose': 'Removed Punctuations', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if fullcaps == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = analyzed + char.upper()\n dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}\n djtext = analyzed\n if newlineremover == 'on':\n analyzed = ''\n for char in djtext:\n if char != '\\n' and char != '\\r':\n analyzed = analyzed + char\n else:\n print('no')\n print('pre', analyzed)\n dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}\n djtext = analyzed\n if extraspaceremover == 'on':\n analyzed = ''\n for index, char in enumerate(djtext):\n if not (djtext[index] == '' and djtext[index + 1] == ''):\n analyzed = analyzed + char\n dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if charcount == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = len(djtext)\n dics = {'purpose': 'Total no. of Character in your text are',\n 'analyzed_text': analyzed}\n if (removepunc != 'on' and fullcaps != 'on' and newlineremover != 'on' and\n extraspaceremover != 'on' and charcount != 'on'):\n return HttpResponse('Please Select Any Function And Try Again!')\n return render(request, 'analyze.html', dics)\n",
"step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef aboutme(request):\n return HttpResponse(\n \" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>\")\n\n\ndef contact(request):\n return HttpResponse(\n \"<a href='https://nb786.github.io/Ncoder/contact.html' > contact us </a>\"\n )\n\n\ndef analyze(request):\n djtext = request.POST.get('text', 'default')\n removepunc = request.POST.get('removepunc', 'off')\n fullcaps = request.POST.get('fullcaps', 'off')\n newlineremover = request.POST.get('newlineremover', 'off')\n extraspaceremover = request.POST.get('extraspaceremover', 'off')\n charcount = request.POST.get('charcount', 'off')\n print(removepunc)\n if removepunc == 'on':\n punctuations = '!()-[]{};:\\'\"\\\\,<>./?@#$%^&*_~'\n analyzed = ''\n for char in djtext:\n if char not in punctuations:\n analyzed = analyzed + char\n dics = {'purpose': 'Removed Punctuations', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if fullcaps == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = analyzed + char.upper()\n dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}\n djtext = analyzed\n if newlineremover == 'on':\n analyzed = ''\n for char in djtext:\n if char != '\\n' and char != '\\r':\n analyzed = analyzed + char\n else:\n print('no')\n print('pre', analyzed)\n dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}\n djtext = analyzed\n if extraspaceremover == 'on':\n analyzed = ''\n for index, char in enumerate(djtext):\n if not (djtext[index] == '' and djtext[index + 1] == ''):\n analyzed = analyzed + char\n dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text':\n analyzed}\n djtext = analyzed\n if charcount == 'on':\n analyzed = ''\n for char in djtext:\n analyzed = len(djtext)\n dics = {'purpose': 'Total no. of Character in your text are',\n 'analyzed_text': analyzed}\n if (removepunc != 'on' and fullcaps != 'on' and newlineremover != 'on' and\n extraspaceremover != 'on' and charcount != 'on'):\n return HttpResponse('Please Select Any Function And Try Again!')\n return render(request, 'analyze.html', dics)\n",
"step-5": "# I Have Created this file -Nabeel\n\nfrom django.http import HttpResponse\nfrom django.shortcuts import render\n\ndef index(request):\n return render(request,'index.html')\n\n\ndef aboutme(request):\n return HttpResponse (\" <a href='https://nb786.github.io/Ncoder/about.html' > Aboutme</a>\")\n\ndef contact(request):\n return HttpResponse (\"<a href='https://nb786.github.io/Ncoder/contact.html' > contact us </a>\")\n\ndef analyze(request):\n #get the text\n djtext = request.POST.get('text', 'default')\n #check checkbox value\n removepunc = request.POST.get('removepunc', 'off') #on & off\n fullcaps = request.POST.get('fullcaps','off')\n newlineremover = request.POST.get('newlineremover','off')\n extraspaceremover = request.POST.get('extraspaceremover', 'off')\n charcount = request.POST.get('charcount', 'off')\n print(removepunc)\n\n #check which checkbox is on\n if removepunc == \"on\":\n punctuations = '''!()-[]{};:'\"\\,<>./?@#$%^&*_~'''\n analyzed=\"\"\n for char in djtext:\n if char not in punctuations:\n analyzed=analyzed + char\n dics = {'purpose':'Removed Punctuations' , 'analyzed_text':analyzed}\n djtext=analyzed\n #return render(request,'analyze.html',dics)\n\n\n\n if (fullcaps == \"on\"):\n analyzed = \"\"\n for char in djtext:\n analyzed = analyzed + char.upper()\n\n dics = {'purpose': 'Changed to Uppercase', 'analyzed_text': analyzed}\n # Analyze the text\n djtext = analyzed\n # return render(request, 'analyze.html', dics)\n\n if (newlineremover == \"on\"):\n analyzed = \"\"\n for char in djtext:\n if char != \"\\n\" and char != \"\\r\":\n analyzed = analyzed + char\n else:\n print(\"no\")\n print(\"pre\", analyzed)\n dics = {'purpose': 'Removed NewLines', 'analyzed_text': analyzed}\n djtext=analyzed\n # Analyze the text\n #return render(request, 'analyze.html', dics)\n\n\n\n if (extraspaceremover == \"on\"):\n analyzed = \"\"\n for index, char in enumerate(djtext):\n if not (djtext[index] == \"\" and djtext[index+1] == \"\"):\n analyzed = analyzed + char\n\n dics = {'purpose': 'Removed the Extra Spaces', 'analyzed_text': analyzed}\n djtext = analyzed\n #return render(request, 'analyze.html', dics)\n\n if (charcount == \"on\"):\n analyzed = \"\"\n for char in djtext:\n analyzed = len(djtext)\n dics = {'purpose': 'Total no. of Character in your text are', 'analyzed_text': analyzed}\n if (removepunc != \"on\" and fullcaps != \"on\" and newlineremover != \"on\" and extraspaceremover != \"on\" and charcount!= \"on\"):\n\n return HttpResponse(\"Please Select Any Function And Try Again!\")\n\n return render(request, 'analyze.html', dics)\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""Usage:
sharedprint.py INPUT [--output=out.mrc]
sharedprint.py INPUT [--csv=greenglass.csv]
Process Koha MARC export for SCELC Shared Print.
The two uses above either 1) create a subset of the MARC input that's limited to
circulating items only or 2) perform a comparison between what's in the catalog
and what's in GreenGlass, i.e. how many records were added and weeded.
Arguments:
INPUT MARC records (.mrc file)
Options:
-h --help show this usage information
--debug show debug information as the script runs
--output=FILE output records to this file [default: out.mrc]
--csv=CSV GreenGlass CSV to compare input MARC file against
"""
import csv
from docopt import docopt
from pymarc import MARCReader, MARCWriter
# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOST
lost_codes = {
"0": "",
"1": "Lost",
"2": "Long Overdue (Lost)",
"3": "Lost and Paid For",
"4": "Missing",
"5": "Lost (On Search)",
"6": "Claims Returned",
}
# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=NOT_LOAN
notforloan_codes = {
"-3": "Repair",
"-2": "In Processing",
"-1": "Ordered",
"0": "",
"1": "Library Use Only",
"2": "Staff Collection",
"3": "Bindery",
"4": "By Appointment",
"5": "On display",
}
# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOC
valid_locations = [
"CART",
"FACDEV",
"MAIN",
"NEWBOOK",
"DISPLAY",
]
# https://library-staff.cca.edu/cgi-bin/koha/admin/itemtypes.pl
valid_types = [
"BOOK",
"SUPPL",
]
# name of column in the GreenGlass spreadsheet that contains the bib record ID
GG_ID_COLUMN = 'Bib Record Number'
# field and subfield in MARC record that contains the bib record ID
# Koha appears to store it in both 999$c & $d
MARC_ID_FIELD = '999'
MARC_ID_SUBFIELD = 'c'
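# Koha 952 item subfields read by validate_item below (per Koha's default
# MARC framework; summarized here for reference):
#   $q due date (set when checked out), $7 not-for-loan code, $4 damaged,
#   $1 lost code, $0 withdrawn, $c shelving location, $y item type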
def validate_item(item, record=None):
# "item status" is an agglomeration of several things
status = []
# whether the _item_ we're looking at should be included
valid = True
# checked out, will be a date if item is checked out
if item['q'] and item['q'] != "0":
status.append('checked out')
# "not for loan", variety of reasons why an item might not circ
if item['7'] and item['7'] != "0":
status.append(notforloan_codes[item['7']])
valid = False
    # 1 if an item is damaged
if item['4'] and item['4'] != "0":
status.append('damaged')
valid = False
# lost, variety of codes
if item['1'] and item['1'] != "0":
status.append(lost_codes[item['1']])
valid = False
# 1 if an item has been withdrawn
if item['0'] and item['0'] != "0":
status.append('withdrawn')
valid = False
# filter items based on location & type
if item['c'] not in valid_locations:
valid = False
if item['y'] not in valid_types:
valid = False
    if len(status) > 0 and options.get('--debug'):
        title = record.title() if record is not None else '(unknown title)'
        print('"' + title + '" item status: ' + ', '.join(status))
return valid
def main():
total_count = 0
valid_count = 0
with open(options['INPUT'], 'rb') as fh:
reader = MARCReader(fh, to_unicode=True, force_utf8=True)
# 1) first mode: write a MARC output file
if not options['--csv']:
            writer = MARCWriter(open(options['--output'], 'wb'))
for record in reader:
# whether we'll include the _bib_ record in export file
include_record = False
# Koha stores item data in 952 fields, one per item
for item in record.get_fields('952'):
                    valid = validate_item(item, record)
total_count += 1
if valid is True:
valid_count += 1
# if there's any valid item then the bib should be included
include_record = True
if include_record is True:
writer.write(record)
            writer.close()
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
elif options['--csv']:
koha_record_ids = set()
for record in reader:
total_count += 1
for item in record.get_fields('952'):
                    valid = validate_item(item, record)
if valid:
id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
koha_record_ids.add(id)
# stop looking at items after we find the first valid one
break
csvreader = csv.DictReader(open(options['--csv'], 'r'))
gg_record_ids = set()
for row in csvreader:
gg_record_ids.add(row[GG_ID_COLUMN])
print('Total Koha Bibs: %i' % total_count)
print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))
if __name__ == '__main__':
options = docopt(__doc__)
# print(options)
main()
|
normal
|
{
"blob_id": "c6cce2edafd7683af766b932d90ca170359e648a",
"index": 679,
"step-1": "<mask token>\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n main()\n",
"step-4": "<mask token>\nimport csv\nfrom docopt import docopt\nfrom pymarc import MARCReader, MARCWriter\nlost_codes = {'0': '', '1': 'Lost', '2': 'Long Overdue (Lost)', '3':\n 'Lost and Paid For', '4': 'Missing', '5': 'Lost (On Search)', '6':\n 'Claims Returned'}\nnotforloan_codes = {'-3': 'Repair', '-2': 'In Processing', '-1': 'Ordered',\n '0': '', '1': 'Library Use Only', '2': 'Staff Collection', '3':\n 'Bindery', '4': 'By Appointment', '5': 'On display'}\nvalid_locations = ['CART', 'FACDEV', 'MAIN', 'NEWBOOK', 'DISPLAY']\nvalid_types = ['BOOK', 'SUPPL']\nGG_ID_COLUMN = 'Bib Record Number'\nMARC_ID_FIELD = '999'\nMARC_ID_SUBFIELD = 'c'\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n main()\n",
"step-5": "\"\"\"Usage:\n sharedprint.py INPUT [--output=out.mrc]\n sharedprint.py INPUT [--csv=greenglass.csv]\n\nProcess Koha MARC export for SCELC Shared Print.\n\nThe two uses above either 1) create a subset of the MARC input that's limited to\ncirculating items only or 2) performs a comparison between what's in the catalog\nand what's in GreenGlass i.e. how many records were added and weeded.\n\nArguments:\n INPUT MARC records (.mrc file)\n\nOptions:\n -h --help show this usage information\n --debug show debug information as the script runs\n --output=FILE output records to this file [default: out.mrc]\n --csv=CSV GreenGlass CSV to compare input MARC file against\n\"\"\"\nimport csv\n\nfrom docopt import docopt\nfrom pymarc import MARCReader, MARCWriter\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOST\nlost_codes = {\n \"0\": \"\",\n \"1\": \"Lost\",\n \"2\": \"Long Overdue (Lost)\",\n \"3\": \"Lost and Paid For\",\n \"4\": \"Missing\",\n \"5\": \"Lost (On Search)\",\n \"6\": \"Claims Returned\",\n}\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=NOT_LOAN\nnotforloan_codes = {\n \"-3\":\t\"Repair\",\n \"-2\":\t\"In Processing\",\n \"-1\":\t\"Ordered\",\n \"0\":\t\"\",\n \"1\":\t\"Library Use Only\",\n \"2\":\t\"Staff Collection\",\n \"3\":\t\"Bindery\",\n \"4\":\t\"By Appointment\",\n \"5\":\t\"On display\",\n}\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOC\nvalid_locations = [\n \"CART\",\n \"FACDEV\",\n \"MAIN\",\n \"NEWBOOK\",\n \"DISPLAY\",\n]\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/itemtypes.pl\nvalid_types = [\n \"BOOK\",\n \"SUPPL\",\n]\n\n# name of column in the GreenGlass spreadsheet that contains the bib record ID\nGG_ID_COLUMN = 'Bib Record Number'\n# field and subfield in MARC record that contains the bib record ID\n# Koha appears to store it in both 999$c & $d\nMARC_ID_FIELD = '999'\nMARC_ID_SUBFIELD = 'c'\n\ndef validate_item(item):\n # \"item status\" is an agglomeration of several things\n status = []\n # whether the _item_ we're looking at should be included\n valid = True\n\n # checked out, will be a date if item is checked out\n if item['q'] and item['q'] != \"0\":\n status.append('checked out')\n\n # \"not for loan\", variety of reasons why an item might not circ\n if item['7'] and item['7'] != \"0\":\n status.append(notforloan_codes[item['7']])\n valid = False\n\n # 1 is an item is damanged\n if item['4'] and item['4'] != \"0\":\n status.append('damaged')\n valid = False\n\n # lost, variety of codes\n if item['1'] and item['1'] != \"0\":\n status.append(lost_codes[item['1']])\n valid = False\n\n # 1 if an item has been withdrawn\n if item['0'] and item['0'] != \"0\":\n status.append('withdrawn')\n valid = False\n\n # filter items based on location & type\n if item['c'] not in valid_locations:\n valid = False\n\n if item['y'] not in valid_types:\n valid = False\n\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n # 1) first mode: write a MARC output file\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n # whether we'll include the _bib_ record in export file\n include_record = False\n # Koha stores item data in 952 
fields, one per item\n for item in record.get_fields('952'):\n valid = validate_item(item)\n\n total_count += 1\n if valid is True:\n valid_count += 1\n # if there's any valid item then the bib should be included\n include_record = True\n\n if include_record is True:\n writer.write(record)\n\n print('Total items: %i | Items included: %i' % (total_count, valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n # stop looking at items after we find the first valid one\n break\n\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n # print(options)\n main()\n",
"step-ids": [
1,
2,
3,
5,
6
]
}
|
[
1,
2,
3,
5,
6
] |
import numpy as np
import cv2
import glob
from scipy.spatial.transform import Rotation
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import mpl_toolkits.mplot3d.art3d as art3d
from matplotlib.patches import Rectangle
import celluloid
from celluloid import Camera # couldn't save animation with ArtistAnimation, TO DO
# datadir = 'data/'
# detected_corners = np.loadtxt(datadir + 'detected_corners.txt') # pixel coords (u,v) of detected corners
# K = np.loadtxt(datadir + 'K.txt') # camera matrix
# Pw_corners = .01 * np.loadtxt('data/p_W_corners.txt', delimiter=',') # [12x3] world coords of detected corners in centimeters
class DLT(object):
def __init__(self, K, detected_corners, Pw_corners, reproject_points=False):
self.K = K
self.p = detected_corners
self.Pw = Pw_corners
self.reproject_points = reproject_points
def getimg(self, idx):
images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))
return cv2.imread(images[idx])
# def currFrame(detected_corners, K, Pw_corners, frame_idx):
def currFrame(self, frame_idx):
# get normalized coordinates [x;y;1]
u = self.p[frame_idx][0:-1:2]
v = self.p[frame_idx][1::2]
p = np.linalg.inv(self.K) @ np.vstack((u,v,np.ones(u.shape[0])))
# get 3d world coordinates [X; Y; Z; 1]
P = np.vstack((self.Pw.T, np.ones(self.Pw.shape[0])))
return p, P
    def estimatePoseDLT(self, p, P, idx):
'''
DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis
        Solves for the projection matrix M = [R|t], given n 2D-3D point correspondences (p_i, P_i)
***Note: Matrix Q is built using the /normalized/ coordinates of p_i
SVD returns V already transposed
Args:
p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image
P = given position coordinates of the n reference 3D points given in the world coordinates
K = given camera matrix
Returns:
M = The solved projection matrix (for normalized coordinates)
'''
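        # Sketch of the linear system being built (standard DLT, stated here
        # for reference): each correspondence p_i = (x, y) <-> P_i contributes
        # two rows in the 12 unknown entries m of M (stacked row-major):
        #   [ P_i^T   0^T    -x * P_i^T ] m = 0
        #   [ 0^T     P_i^T  -y * P_i^T ] m = 0
        # so Q m = 0, solved below via the right-singular vector of Q with
        # the smallest singular value.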
# construct Q matrix
for col_idx in range(0, P.shape[1]):
if col_idx == 0:
Q = np.array([
[P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, 0, 0, 0, 0, -p[0, col_idx]*P[0,col_idx], -p[0, col_idx]*P[1,col_idx], -p[0, col_idx]*P[2,col_idx], -p[0, col_idx]],
[0, 0, 0, 0, P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, -p[1, col_idx]*P[0,col_idx], -p[1, col_idx]*P[1,col_idx], -p[1, col_idx]*P[2,col_idx], -p[1, col_idx]]
])
else:
currQ = np.array([
[P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, 0, 0, 0, 0, -p[0, col_idx]*P[0,col_idx], -p[0, col_idx]*P[1,col_idx], -p[0, col_idx]*P[2,col_idx], -p[0, col_idx]],
[0, 0, 0, 0, P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, -p[1, col_idx]*P[0,col_idx], -p[1, col_idx]*P[1,col_idx], -p[1, col_idx]*P[2,col_idx], -p[1, col_idx]]
])
Q = np.vstack((Q,currQ)).astype(np.float32)
U, S, V = np.linalg.svd(Q, full_matrices=True)
M = V[-1:]
M = M.reshape((3,4)) # reshape to true projection matrix
if np.linalg.det(M[:,:3]) < 0:
M = -M
'''
Orthogonal Procrustes problem:
        The DLT imposes no constraint that R in M = [R|t] is actually a rotation matrix;
        compute R_tilde, the rotation matrix closest to R in the sense of the Frobenius norm
'''
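        # Standard closed form: with R = U S Vh (SVD, numpy returns Vh), the
        # nearest rotation in Frobenius norm is R_tilde = U @ Vh, computed below.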
R = M[:,:3] # rotation matrix
U,S,V = np.linalg.svd(R)
R_tilde = U @ V
# M is not true M in this case, but alpha*M where alpha is the scale
alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord='fro')
M = np.hstack((R_tilde, alpha*M[:,-1].reshape((3,1))))
return M
def reprojectPoints(self, P, M):
'''
Reprojects the 3D points P_i in the current image using the estimated projection matrix M
and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to
points p_i.
Args:
P = referenced 3D world coordinates
M = Projection matrix solved from estimatePoseDLT
org_image = the original image, needed to project points onto
Returns:
reprojected_pts = self-explanatory
'''
        homo_mtx = (self.K @ M @ P).T
homo_mtx[:,0] = homo_mtx[:,0] / homo_mtx[:,2]
homo_mtx[:,1] = homo_mtx[:,1] / homo_mtx[:,2]
reprojected_pts = homo_mtx[:,:2]
# print(reprojected_pts)
return reprojected_pts
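
# A quick sanity check one might add (a sketch, not part of the original
# script): mean pixel reprojection error against the detected corners,
# assuming `p_uv` is an (n, 2) array of the detected (u, v) points:
#   err = np.linalg.norm(reprojected_pts - p_uv, axis=1).mean()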
def plotTrajectory3D(M, fig, output_filename='motion.avi'):
R = M[:,:3].T
t = M[:,-1]
rotMat = Rotation.from_matrix(R) # Rotation object instance
quat = rotMat.as_quat()
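    # scipy returns quaternions as (x, y, z, w); np.roll reorders to (w, x, y, z)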
quat = np.roll(quat, 1)
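    # R = M[:,:3].T is the world-from-camera rotation, so -R @ t is the camera center in world coordinates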
transl = -R @ t
# prelims
plt.clf()
# ax = fig.add_subplot(111, projection='3d')
ax = plt.axes(projection='3d')
camera = Camera(fig)
ax.set(xlim=(-.1,.4), ylim=(-.2,.3), zlim=(-.3, 0))
ax.set_xlabel('Z')
ax.set_ylabel('X')
ax.set_zlabel('Y')
ax.scatter(-Pw_corners[:,2], Pw_corners[:,0], -Pw_corners[:,1]) # draw given corners
# draw rectangles at corners
r = Rectangle((0, -.22), width=.105, height=.14, color='blue', fill=False, hatch='/')
ax.add_patch(r)
art3d.pathpatch_2d_to_3d(r, z=0, zdir='x')
r1 = Rectangle((.11,-.25), width=.13, height=.1, color='red', fill=False, hatch='/')
ax.add_patch(r1)
art3d.pathpatch_2d_to_3d(r1, z=.2, zdir='y')
r2 = Rectangle((.11, 0), width=.13, height=.11, color='green', fill=False, hatch='/')
ax.add_patch(r2)
art3d.pathpatch_2d_to_3d(r2, z=-.265, zdir='z')
# draw camera coordinate frame onto image
rotMat = rotMat.as_matrix()
ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,0], rotMat[0,0], -rotMat[1,0], color='red', length=.1)
ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,1], rotMat[0,1], -rotMat[1,1], color='green', length=.1)
    ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,2], rotMat[0,2], -rotMat[1,2], color='blue', length=.1)
# print([-transl[2], transl[0], -transl[1], -rotMat[2,0], rotMat[0,0], -rotMat[1,0]])
camera.snap()
if __name__ == "__main__":
# Given info
datadir = 'data/'
detected_corners = np.loadtxt(datadir + 'detected_corners.txt') # pixel coords (u,v) of detected corners
K = np.loadtxt(datadir + 'K.txt') # camera matrix
Pw_corners = .01 * np.loadtxt('data/p_W_corners.txt', delimiter=',') # [12x3] world coords of detected corners in centimeters
# Iterate through each picture
file_list = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))
# num_images = len(glob.glob(datadir + 'images_undistorted/*.jpg'))
num_images = len(file_list)
projection = DLT(K, detected_corners, Pw_corners, reproject_points=False)
fig = plt.figure()
for img_idx in range(0, num_images):
image = projection.getimg(img_idx) # get current image in directory
p, P = projection.currFrame(img_idx) # get normalized 2D pixel points and 3D world points in correct format
M = projection.estimatePoseDLT(p, P, img_idx) # get projection matrix M = [R|t]
reprojected_pts = projection.reprojectPoints(P, M) # reproject P_i onto image
if projection.reproject_points:
# show reprojected points on image
for point in reprojected_pts:
                estimate = np.rint(point).astype(int) # my estimated points; cv2.circle needs integer pixel coords
                cv2.circle(image, tuple(estimate), radius=5, color=(0,0,255), thickness=2)
for u, v in zip(detected_corners[img_idx][0::2], detected_corners[img_idx][1::2]):
                u = int(round(u))
                v = int(round(v))
                cv2.circle(image, (u,v), radius=5, color=(0,255,0), thickness=2)
cv2.imshow('img', image)
cv2.waitKey(34) # 30 FPS
plotTrajectory3D(M, fig)
fname = 'my_results' + '/' + file_list[img_idx][28:28+4] + '.png'
plt.savefig(fname) # to create animation, save all of the figures into my_results directory, then run animate.py, which will produce a video
|
normal
|
{
"blob_id": "50ae47c88bbc0f281ef75784377fb65192e257b0",
"index": 1206,
"step-1": "<mask token>\n\n\nclass DLT(object):\n <mask token>\n\n def getimg(self, idx):\n images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n return cv2.imread(images[idx])\n <mask token>\n\n def estimatePoseDLT(self, p, P, idx):\n \"\"\"\n DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis \n Solves for projection matrix M = [R|t], given the n 2D-3D points corresponding to p_i and P_i\n ***Note: Matrix Q is built using the /normalized/ coordinates of p_i \n SVD returns V already transposed\n Args: \n p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image\n P = given position coordinates of the n reference 3D points given in the world coordinates \n K = given camera matrix \n Returns: \n M = The solved projection matrix (for normalized coordinates)\n \"\"\"\n for col_idx in range(0, P.shape[1]):\n if col_idx == 0:\n Q = np.array([[P[0, col_idx], P[1, col_idx], P[2, col_idx],\n 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx], -p[0,\n col_idx] * P[1, col_idx], -p[0, col_idx] * P[2, col_idx\n ], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx], P[1,\n col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n else:\n currQ = np.array([[P[0, col_idx], P[1, col_idx], P[2,\n col_idx], 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx],\n -p[0, col_idx] * P[1, col_idx], -p[0, col_idx] * P[2,\n col_idx], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx],\n P[1, col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n Q = np.vstack((Q, currQ)).astype(np.float32)\n U, S, V = np.linalg.svd(Q, full_matrices=True)\n M = V[-1:]\n M = M.reshape((3, 4))\n if np.linalg.det(M[:, :3]) < 0:\n M = -M\n \"\"\"\n Orthogonal Procrustes problem: \n Did not impose any constraints on R from M = [R|t] is actually a rotation matrix; \n Need to compute matrix R_tilde, the matrix closest to the true \"R\" in the sense of Frobenius norm \n \"\"\"\n R = M[:, :3]\n U, S, V = np.linalg.svd(R)\n R_tilde = U @ V\n alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord=\n 'fro')\n M = np.hstack((R_tilde, alpha * M[:, -1].reshape((3, 1))))\n return M\n\n def reprojectPoints(self, P, M):\n \"\"\" \n Reprojects the 3D points P_i in the current image using the estimated projection matrix M \n and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to \n points p_i. \n Args: \n P = referenced 3D world coordinates \n M = Projection matrix solved from estimatePoseDLT\n org_image = the original image, needed to project points onto \n Returns: \n reprojected_pts = self-explanatory \n \"\"\"\n homo_mtx = (K @ M @ P).T\n homo_mtx[:, 0] = homo_mtx[:, 0] / homo_mtx[:, 2]\n homo_mtx[:, 1] = homo_mtx[:, 1] / homo_mtx[:, 2]\n reprojected_pts = homo_mtx[:, :2]\n return reprojected_pts\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DLT(object):\n\n def __init__(self, K, detected_corners, Pw_corners, reproject_points=False\n ):\n self.K = K\n self.p = detected_corners\n self.Pw = Pw_corners\n self.reproject_points = reproject_points\n\n def getimg(self, idx):\n images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n return cv2.imread(images[idx])\n\n def currFrame(self, frame_idx):\n u = self.p[frame_idx][0:-1:2]\n v = self.p[frame_idx][1::2]\n p = np.linalg.inv(self.K) @ np.vstack((u, v, np.ones(u.shape[0])))\n P = np.vstack((self.Pw.T, np.ones(self.Pw.shape[0])))\n return p, P\n\n def estimatePoseDLT(self, p, P, idx):\n \"\"\"\n DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis \n Solves for projection matrix M = [R|t], given the n 2D-3D points corresponding to p_i and P_i\n ***Note: Matrix Q is built using the /normalized/ coordinates of p_i \n SVD returns V already transposed\n Args: \n p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image\n P = given position coordinates of the n reference 3D points given in the world coordinates \n K = given camera matrix \n Returns: \n M = The solved projection matrix (for normalized coordinates)\n \"\"\"\n for col_idx in range(0, P.shape[1]):\n if col_idx == 0:\n Q = np.array([[P[0, col_idx], P[1, col_idx], P[2, col_idx],\n 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx], -p[0,\n col_idx] * P[1, col_idx], -p[0, col_idx] * P[2, col_idx\n ], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx], P[1,\n col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n else:\n currQ = np.array([[P[0, col_idx], P[1, col_idx], P[2,\n col_idx], 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx],\n -p[0, col_idx] * P[1, col_idx], -p[0, col_idx] * P[2,\n col_idx], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx],\n P[1, col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n Q = np.vstack((Q, currQ)).astype(np.float32)\n U, S, V = np.linalg.svd(Q, full_matrices=True)\n M = V[-1:]\n M = M.reshape((3, 4))\n if np.linalg.det(M[:, :3]) < 0:\n M = -M\n \"\"\"\n Orthogonal Procrustes problem: \n Did not impose any constraints on R from M = [R|t] is actually a rotation matrix; \n Need to compute matrix R_tilde, the matrix closest to the true \"R\" in the sense of Frobenius norm \n \"\"\"\n R = M[:, :3]\n U, S, V = np.linalg.svd(R)\n R_tilde = U @ V\n alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord=\n 'fro')\n M = np.hstack((R_tilde, alpha * M[:, -1].reshape((3, 1))))\n return M\n\n def reprojectPoints(self, P, M):\n \"\"\" \n Reprojects the 3D points P_i in the current image using the estimated projection matrix M \n and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to \n points p_i. \n Args: \n P = referenced 3D world coordinates \n M = Projection matrix solved from estimatePoseDLT\n org_image = the original image, needed to project points onto \n Returns: \n reprojected_pts = self-explanatory \n \"\"\"\n homo_mtx = (K @ M @ P).T\n homo_mtx[:, 0] = homo_mtx[:, 0] / homo_mtx[:, 2]\n homo_mtx[:, 1] = homo_mtx[:, 1] / homo_mtx[:, 2]\n reprojected_pts = homo_mtx[:, :2]\n return reprojected_pts\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass DLT(object):\n\n def __init__(self, K, detected_corners, Pw_corners, reproject_points=False\n ):\n self.K = K\n self.p = detected_corners\n self.Pw = Pw_corners\n self.reproject_points = reproject_points\n\n def getimg(self, idx):\n images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n return cv2.imread(images[idx])\n\n def currFrame(self, frame_idx):\n u = self.p[frame_idx][0:-1:2]\n v = self.p[frame_idx][1::2]\n p = np.linalg.inv(self.K) @ np.vstack((u, v, np.ones(u.shape[0])))\n P = np.vstack((self.Pw.T, np.ones(self.Pw.shape[0])))\n return p, P\n\n def estimatePoseDLT(self, p, P, idx):\n \"\"\"\n DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis \n Solves for projection matrix M = [R|t], given the n 2D-3D points corresponding to p_i and P_i\n ***Note: Matrix Q is built using the /normalized/ coordinates of p_i \n SVD returns V already transposed\n Args: \n p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image\n P = given position coordinates of the n reference 3D points given in the world coordinates \n K = given camera matrix \n Returns: \n M = The solved projection matrix (for normalized coordinates)\n \"\"\"\n for col_idx in range(0, P.shape[1]):\n if col_idx == 0:\n Q = np.array([[P[0, col_idx], P[1, col_idx], P[2, col_idx],\n 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx], -p[0,\n col_idx] * P[1, col_idx], -p[0, col_idx] * P[2, col_idx\n ], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx], P[1,\n col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n else:\n currQ = np.array([[P[0, col_idx], P[1, col_idx], P[2,\n col_idx], 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx],\n -p[0, col_idx] * P[1, col_idx], -p[0, col_idx] * P[2,\n col_idx], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx],\n P[1, col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n Q = np.vstack((Q, currQ)).astype(np.float32)\n U, S, V = np.linalg.svd(Q, full_matrices=True)\n M = V[-1:]\n M = M.reshape((3, 4))\n if np.linalg.det(M[:, :3]) < 0:\n M = -M\n \"\"\"\n Orthogonal Procrustes problem: \n Did not impose any constraints on R from M = [R|t] is actually a rotation matrix; \n Need to compute matrix R_tilde, the matrix closest to the true \"R\" in the sense of Frobenius norm \n \"\"\"\n R = M[:, :3]\n U, S, V = np.linalg.svd(R)\n R_tilde = U @ V\n alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord=\n 'fro')\n M = np.hstack((R_tilde, alpha * M[:, -1].reshape((3, 1))))\n return M\n\n def reprojectPoints(self, P, M):\n \"\"\" \n Reprojects the 3D points P_i in the current image using the estimated projection matrix M \n and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to \n points p_i. 
\n Args: \n P = referenced 3D world coordinates \n M = Projection matrix solved from estimatePoseDLT\n org_image = the original image, needed to project points onto \n Returns: \n reprojected_pts = self-explanatory \n \"\"\"\n homo_mtx = (K @ M @ P).T\n homo_mtx[:, 0] = homo_mtx[:, 0] / homo_mtx[:, 2]\n homo_mtx[:, 1] = homo_mtx[:, 1] / homo_mtx[:, 2]\n reprojected_pts = homo_mtx[:, :2]\n return reprojected_pts\n\n\ndef plotTrajectory3D(M, fig, output_filename='motion.avi'):\n R = M[:, :3].T\n t = M[:, -1]\n rotMat = Rotation.from_matrix(R)\n quat = rotMat.as_quat()\n quat = np.roll(quat, 1)\n transl = -R @ t\n plt.clf()\n ax = plt.axes(projection='3d')\n camera = Camera(fig)\n ax.set(xlim=(-0.1, 0.4), ylim=(-0.2, 0.3), zlim=(-0.3, 0))\n ax.set_xlabel('Z')\n ax.set_ylabel('X')\n ax.set_zlabel('Y')\n ax.scatter(-Pw_corners[:, 2], Pw_corners[:, 0], -Pw_corners[:, 1])\n r = Rectangle((0, -0.22), width=0.105, height=0.14, color='blue', fill=\n False, hatch='/')\n ax.add_patch(r)\n art3d.pathpatch_2d_to_3d(r, z=0, zdir='x')\n r1 = Rectangle((0.11, -0.25), width=0.13, height=0.1, color='red', fill\n =False, hatch='/')\n ax.add_patch(r1)\n art3d.pathpatch_2d_to_3d(r1, z=0.2, zdir='y')\n r2 = Rectangle((0.11, 0), width=0.13, height=0.11, color='green', fill=\n False, hatch='/')\n ax.add_patch(r2)\n art3d.pathpatch_2d_to_3d(r2, z=-0.265, zdir='z')\n rotMat = rotMat.as_matrix()\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 0], rotMat[0, 0\n ], -rotMat[1, 0], color='red', length=0.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 1], rotMat[0, 1\n ], -rotMat[1, 1], color='green', length=0.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 2], rotMat[1, 2\n ], -rotMat[1, 2], color='blue', length=0.1)\n camera.snap()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass DLT(object):\n\n def __init__(self, K, detected_corners, Pw_corners, reproject_points=False\n ):\n self.K = K\n self.p = detected_corners\n self.Pw = Pw_corners\n self.reproject_points = reproject_points\n\n def getimg(self, idx):\n images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n return cv2.imread(images[idx])\n\n def currFrame(self, frame_idx):\n u = self.p[frame_idx][0:-1:2]\n v = self.p[frame_idx][1::2]\n p = np.linalg.inv(self.K) @ np.vstack((u, v, np.ones(u.shape[0])))\n P = np.vstack((self.Pw.T, np.ones(self.Pw.shape[0])))\n return p, P\n\n def estimatePoseDLT(self, p, P, idx):\n \"\"\"\n DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis \n Solves for projection matrix M = [R|t], given the n 2D-3D points corresponding to p_i and P_i\n ***Note: Matrix Q is built using the /normalized/ coordinates of p_i \n SVD returns V already transposed\n Args: \n p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image\n P = given position coordinates of the n reference 3D points given in the world coordinates \n K = given camera matrix \n Returns: \n M = The solved projection matrix (for normalized coordinates)\n \"\"\"\n for col_idx in range(0, P.shape[1]):\n if col_idx == 0:\n Q = np.array([[P[0, col_idx], P[1, col_idx], P[2, col_idx],\n 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx], -p[0,\n col_idx] * P[1, col_idx], -p[0, col_idx] * P[2, col_idx\n ], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx], P[1,\n col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n else:\n currQ = np.array([[P[0, col_idx], P[1, col_idx], P[2,\n col_idx], 1, 0, 0, 0, 0, -p[0, col_idx] * P[0, col_idx],\n -p[0, col_idx] * P[1, col_idx], -p[0, col_idx] * P[2,\n col_idx], -p[0, col_idx]], [0, 0, 0, 0, P[0, col_idx],\n P[1, col_idx], P[2, col_idx], 1, -p[1, col_idx] * P[0,\n col_idx], -p[1, col_idx] * P[1, col_idx], -p[1, col_idx\n ] * P[2, col_idx], -p[1, col_idx]]])\n Q = np.vstack((Q, currQ)).astype(np.float32)\n U, S, V = np.linalg.svd(Q, full_matrices=True)\n M = V[-1:]\n M = M.reshape((3, 4))\n if np.linalg.det(M[:, :3]) < 0:\n M = -M\n \"\"\"\n Orthogonal Procrustes problem: \n Did not impose any constraints on R from M = [R|t] is actually a rotation matrix; \n Need to compute matrix R_tilde, the matrix closest to the true \"R\" in the sense of Frobenius norm \n \"\"\"\n R = M[:, :3]\n U, S, V = np.linalg.svd(R)\n R_tilde = U @ V\n alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord=\n 'fro')\n M = np.hstack((R_tilde, alpha * M[:, -1].reshape((3, 1))))\n return M\n\n def reprojectPoints(self, P, M):\n \"\"\" \n Reprojects the 3D points P_i in the current image using the estimated projection matrix M \n and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to \n points p_i. 
\n Args: \n P = referenced 3D world coordinates \n M = Projection matrix solved from estimatePoseDLT\n org_image = the original image, needed to project points onto \n Returns: \n reprojected_pts = self-explanatory \n \"\"\"\n homo_mtx = (K @ M @ P).T\n homo_mtx[:, 0] = homo_mtx[:, 0] / homo_mtx[:, 2]\n homo_mtx[:, 1] = homo_mtx[:, 1] / homo_mtx[:, 2]\n reprojected_pts = homo_mtx[:, :2]\n return reprojected_pts\n\n\ndef plotTrajectory3D(M, fig, output_filename='motion.avi'):\n R = M[:, :3].T\n t = M[:, -1]\n rotMat = Rotation.from_matrix(R)\n quat = rotMat.as_quat()\n quat = np.roll(quat, 1)\n transl = -R @ t\n plt.clf()\n ax = plt.axes(projection='3d')\n camera = Camera(fig)\n ax.set(xlim=(-0.1, 0.4), ylim=(-0.2, 0.3), zlim=(-0.3, 0))\n ax.set_xlabel('Z')\n ax.set_ylabel('X')\n ax.set_zlabel('Y')\n ax.scatter(-Pw_corners[:, 2], Pw_corners[:, 0], -Pw_corners[:, 1])\n r = Rectangle((0, -0.22), width=0.105, height=0.14, color='blue', fill=\n False, hatch='/')\n ax.add_patch(r)\n art3d.pathpatch_2d_to_3d(r, z=0, zdir='x')\n r1 = Rectangle((0.11, -0.25), width=0.13, height=0.1, color='red', fill\n =False, hatch='/')\n ax.add_patch(r1)\n art3d.pathpatch_2d_to_3d(r1, z=0.2, zdir='y')\n r2 = Rectangle((0.11, 0), width=0.13, height=0.11, color='green', fill=\n False, hatch='/')\n ax.add_patch(r2)\n art3d.pathpatch_2d_to_3d(r2, z=-0.265, zdir='z')\n rotMat = rotMat.as_matrix()\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 0], rotMat[0, 0\n ], -rotMat[1, 0], color='red', length=0.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 1], rotMat[0, 1\n ], -rotMat[1, 1], color='green', length=0.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2, 2], rotMat[1, 2\n ], -rotMat[1, 2], color='blue', length=0.1)\n camera.snap()\n\n\nif __name__ == '__main__':\n datadir = 'data/'\n detected_corners = np.loadtxt(datadir + 'detected_corners.txt')\n K = np.loadtxt(datadir + 'K.txt')\n Pw_corners = 0.01 * np.loadtxt('data/p_W_corners.txt', delimiter=',')\n file_list = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n num_images = len(file_list)\n projection = DLT(K, detected_corners, Pw_corners, reproject_points=False)\n fig = plt.figure()\n for img_idx in range(0, num_images):\n image = projection.getimg(img_idx)\n p, P = projection.currFrame(img_idx)\n M = projection.estimatePoseDLT(p, P, img_idx)\n reprojected_pts = projection.reprojectPoints(P, M)\n if projection.reproject_points:\n for point in reprojected_pts:\n estimate = point.astype(np.float32)\n cv2.circle(image, tuple(estimate), radius=5, color=(0, 0, \n 255), thickness=2)\n for u, v in zip(detected_corners[img_idx][0::2],\n detected_corners[img_idx][1::2]):\n u = u.astype(np.float32)\n v = v.astype(np.float32)\n cv2.circle(image, (u, v), radius=5, color=(0, 255, 0),\n thickness=2)\n cv2.imshow('img', image)\n cv2.waitKey(34)\n plotTrajectory3D(M, fig)\n fname = 'my_results' + '/' + file_list[img_idx][28:28 + 4] + '.png'\n plt.savefig(fname)\n",
"step-5": "import numpy as np \nimport cv2 \nimport glob\nfrom scipy.spatial.transform import Rotation \nimport matplotlib.pyplot as plt \nfrom mpl_toolkits.mplot3d import Axes3D\nimport mpl_toolkits.mplot3d.art3d as art3d\nfrom matplotlib.patches import Rectangle\nimport celluloid \nfrom celluloid import Camera # couldn't save animation with ArtisticAnimation, TO DO\n\n# datadir = 'data/'\n# detected_corners = np.loadtxt(datadir + 'detected_corners.txt') # pixel coords (u,v) of detected corners \n# K = np.loadtxt(datadir + 'K.txt') # camera matrix \n# Pw_corners = .01 * np.loadtxt('data/p_W_corners.txt', delimiter=',') # [12x3] world coords of detected corners in centimeters\n\nclass DLT(object): \n def __init__(self, K, detected_corners, Pw_corners, reproject_points=False): \n self.K = K \n self.p = detected_corners\n self.Pw = Pw_corners\n self.reproject_points = reproject_points\n\n\n def getimg(self, idx): \n images = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n return cv2.imread(images[idx])\n\n\n # def currFrame(detected_corners, K, Pw_corners, frame_idx): \n def currFrame(self, frame_idx):\n # get normalized coordinates [x;y;1]\n u = self.p[frame_idx][0:-1:2]\n v = self.p[frame_idx][1::2]\n p = np.linalg.inv(self.K) @ np.vstack((u,v,np.ones(u.shape[0])))\n\n # get 3d world coordinates [X; Y; Z; 1]\n P = np.vstack((self.Pw.T, np.ones(self.Pw.shape[0])))\n\n return p, P\n\n\n def estimatePoseDLT(self, p, P,idx): \n '''\n DLT algorithm. Refer to http://www.kwon3d.com/theory/dlt/dlt.html for in-depth analysis \n Solves for projection matrix M = [R|t], given the n 2D-3D points corresponding to p_i and P_i\n ***Note: Matrix Q is built using the /normalized/ coordinates of p_i \n SVD returns V already transposed\n Args: \n p = given 2D coordinates (u,v) of the projections of the referenced 3D points in the undistorted image\n P = given position coordinates of the n reference 3D points given in the world coordinates \n K = given camera matrix \n Returns: \n M = The solved projection matrix (for normalized coordinates)\n '''\n # construct Q matrix \n for col_idx in range(0, P.shape[1]):\n if col_idx == 0:\n Q = np.array([\n [P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, 0, 0, 0, 0, -p[0, col_idx]*P[0,col_idx], -p[0, col_idx]*P[1,col_idx], -p[0, col_idx]*P[2,col_idx], -p[0, col_idx]], \n [0, 0, 0, 0, P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, -p[1, col_idx]*P[0,col_idx], -p[1, col_idx]*P[1,col_idx], -p[1, col_idx]*P[2,col_idx], -p[1, col_idx]]\n ])\n else: \n currQ = np.array([\n [P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, 0, 0, 0, 0, -p[0, col_idx]*P[0,col_idx], -p[0, col_idx]*P[1,col_idx], -p[0, col_idx]*P[2,col_idx], -p[0, col_idx]], \n [0, 0, 0, 0, P[0,col_idx], P[1,col_idx], P[2,col_idx], 1, -p[1, col_idx]*P[0,col_idx], -p[1, col_idx]*P[1,col_idx], -p[1, col_idx]*P[2,col_idx], -p[1, col_idx]]\n ]) \n Q = np.vstack((Q,currQ)).astype(np.float32)\n \n U, S, V = np.linalg.svd(Q, full_matrices=True)\n M = V[-1:]\n M = M.reshape((3,4)) # reshape to true projection matrix \n if np.linalg.det(M[:,:3]) < 0: \n M = -M \n '''\n Orthogonal Procrustes problem: \n Did not impose any constraints on R from M = [R|t] is actually a rotation matrix; \n Need to compute matrix R_tilde, the matrix closest to the true \"R\" in the sense of Frobenius norm \n '''\n R = M[:,:3] # rotation matrix \n U,S,V = np.linalg.svd(R)\n R_tilde = U @ V\n \n # M is not true M in this case, but alpha*M where alpha is the scale \n alpha = np.linalg.norm(R_tilde, ord='fro') / np.linalg.norm(R, ord='fro') \n M = 
np.hstack((R_tilde, alpha*M[:,-1].reshape((3,1))))\n return M\n\n def reprojectPoints(self, P, M):\n ''' \n Reprojects the 3D points P_i in the current image using the estimated projection matrix M \n and camera matrix K. Use this to show on image to double check that reprojected points p_i' fall close to \n points p_i. \n Args: \n P = referenced 3D world coordinates \n M = Projection matrix solved from estimatePoseDLT\n org_image = the original image, needed to project points onto \n Returns: \n reprojected_pts = self-explanatory \n ''' \n homo_mtx = (K @ M @ P).T\n homo_mtx[:,0] = homo_mtx[:,0] / homo_mtx[:,2]\n homo_mtx[:,1] = homo_mtx[:,1] / homo_mtx[:,2]\n\n reprojected_pts = homo_mtx[:,:2]\n # print(reprojected_pts)\n return reprojected_pts\n\ndef plotTrajectory3D(M, fig, output_filename='motion.avi'): \n R = M[:,:3].T\n t = M[:,-1]\n \n rotMat = Rotation.from_matrix(R) # Rotation object instance \n quat = rotMat.as_quat()\n quat = np.roll(quat, 1)\n transl = -R @ t\n \n # prelims \n plt.clf()\n # ax = fig.add_subplot(111, projection='3d')\n ax = plt.axes(projection='3d')\n camera = Camera(fig)\n ax.set(xlim=(-.1,.4), ylim=(-.2,.3), zlim=(-.3, 0))\n ax.set_xlabel('Z')\n ax.set_ylabel('X')\n ax.set_zlabel('Y')\n ax.scatter(-Pw_corners[:,2], Pw_corners[:,0], -Pw_corners[:,1]) # draw given corners \n # draw rectangles at corners\n r = Rectangle((0, -.22), width=.105, height=.14, color='blue', fill=False, hatch='/')\n ax.add_patch(r)\n art3d.pathpatch_2d_to_3d(r, z=0, zdir='x') \n r1 = Rectangle((.11,-.25), width=.13, height=.1, color='red', fill=False, hatch='/')\n ax.add_patch(r1)\n art3d.pathpatch_2d_to_3d(r1, z=.2, zdir='y') \n r2 = Rectangle((.11, 0), width=.13, height=.11, color='green', fill=False, hatch='/')\n ax.add_patch(r2)\n art3d.pathpatch_2d_to_3d(r2, z=-.265, zdir='z') \n\n # draw camera coordinate frame onto image \n rotMat = rotMat.as_matrix()\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,0], rotMat[0,0], -rotMat[1,0], color='red', length=.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,1], rotMat[0,1], -rotMat[1,1], color='green', length=.1)\n ax.quiver(-transl[2], transl[0], -transl[1], -rotMat[2,2], rotMat[1,2], -rotMat[1,2], color='blue', length=.1)\n # print([-transl[2], transl[0], -transl[1], -rotMat[2,0], rotMat[0,0], -rotMat[1,0]])\n camera.snap()\n \n\n\nif __name__ == \"__main__\": \n # Given info \n datadir = 'data/'\n detected_corners = np.loadtxt(datadir + 'detected_corners.txt') # pixel coords (u,v) of detected corners \n K = np.loadtxt(datadir + 'K.txt') # camera matrix \n Pw_corners = .01 * np.loadtxt('data/p_W_corners.txt', delimiter=',') # [12x3] world coords of detected corners in centimeters\n\n # Iterate through each picture \n file_list = sorted(glob.glob(datadir + 'images_undistorted/*.jpg'))\n # num_images = len(glob.glob(datadir + 'images_undistorted/*.jpg'))\n num_images = len(file_list)\n projection = DLT(K, detected_corners, Pw_corners, reproject_points=False) \n fig = plt.figure()\n for img_idx in range(0, num_images): \n image = projection.getimg(img_idx) # get current image in directory \n\n p, P = projection.currFrame(img_idx) # get normalized 2D pixel points and 3D world points in correct format \n M = projection.estimatePoseDLT(p, P, img_idx) # get projection matrix M = [R|t]\n \n reprojected_pts = projection.reprojectPoints(P, M) # reproject P_i onto image \n if projection.reproject_points: \n # show reprojected points on image\n for point in reprojected_pts: \n estimate = point.astype(np.float32) # my estimated 
points \n cv2.circle(image, tuple(estimate), radius=5, color=(0,0,255), thickness=2)\n\n for u, v in zip(detected_corners[img_idx][0::2], detected_corners[img_idx][1::2]): \n u = u.astype(np.float32)\n v = v.astype(np.float32)\n cv2.circle(image, (u,v), radius=5, color=(0,255,0), thickness=2)\n\n cv2.imshow('img', image)\n cv2.waitKey(34) # 30 FPS \n\n plotTrajectory3D(M, fig) \n fname = 'my_results' + '/' + file_list[img_idx][28:28+4] + '.png'\n plt.savefig(fname) # to create animation, save all of the figures into my_results directory, then run animate.py, which will produce a video \n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import genpy
import sensor_msgs.msg
class GetPlanningSceneRequest(genpy.Message):
_md5sum = "67ad55e9bed9c8f21dfb4b9b1ca8df7d"
_type = "arm_navigation_msgs/GetPlanningSceneRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
PlanningScene planning_scene_diff
arm_navigation_msgs/OrderedCollisionOperations operations
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of a multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See it's documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by tre vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes an attached object, detaches it from the attached link,
#but adds it back in as a regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes the current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link_name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE, all attached bodies will be removed from every link
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space (with 32 bits of precision).
# It is recommended to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
================================================================================
MSG: arm_navigation_msgs/OrderedCollisionOperations
# A set of collision operations that will be performed in the order they are specified
CollisionOperation[] collision_operations
================================================================================
MSG: arm_navigation_msgs/CollisionOperation
# A definition of a collision operation
# E.g. ("gripper",COLLISION_SET_ALL,ENABLE) will enable collisions
# between the gripper and all objects in the collision space
string object1
string object2
string COLLISION_SET_ALL="all"
string COLLISION_SET_OBJECTS="objects"
string COLLISION_SET_ATTACHED_OBJECTS="attached"
# The penetration distance to which collisions are allowed. This is 0.0 by default.
float64 penetration_distance
# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above
int32 operation
int32 DISABLE=0
int32 ENABLE=1
"""
__slots__ = ['planning_scene_diff','operations']
_slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']
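  # Minimal usage sketch (illustrative, not part of the generated file): build
  # a request by filling the planning scene diff and collision operations, e.g.
  #   req = GetPlanningSceneRequest()
  #   op = arm_navigation_msgs.msg.CollisionOperation(
  #       object1='gripper',
  #       object2=arm_navigation_msgs.msg.CollisionOperation.COLLISION_SET_ALL,
  #       penetration_distance=0.0,
  #       operation=arm_navigation_msgs.msg.CollisionOperation.DISABLE)
  #   req.operations.collision_operations.append(op)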
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments, as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene_diff,operations
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
else:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v1 = val1.position
_x = _v1
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v2 = val1.orientation
_x = _v2
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v3 = val1.header
buff.write(_struct_I.pack(_v3.seq))
_v4 = _v3.stamp
_x = _v4
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v3.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v5 = val1.transform
_v6 = _v5.translation
_x = _v6
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v7 = _v5.rotation
_x = _v7
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.pack(pattern, *val1.enabled))
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v8 = val1.shape
buff.write(_struct_b.pack(_v8.type))
length = len(_v8.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *_v8.dimensions))
length = len(_v8.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *_v8.triangles))
length = len(_v8.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v8.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v9 = val1.pose_stamped
_v10 = _v9.header
buff.write(_struct_I.pack(_v10.seq))
_v11 = _v10.stamp
_x = _v11
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v10.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v12 = _v9.pose
_v13 = _v12.position
_x = _v13
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v14 = _v12.orientation
_x = _v14
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v15 = val1.header
buff.write(_struct_I.pack(_v15.seq))
_v16 = _v15.stamp
_x = _v16
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v15.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v17 = val1.operation
buff.write(_struct_b.pack(_v17.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val2.dimensions))
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val2.triangles))
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v18 = val2.position
_x = _v18
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v19 = val2.orientation
_x = _v19
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v20 = val1.object
_v21 = _v20.header
buff.write(_struct_I.pack(_v21.seq))
_v22 = _v21.stamp
_x = _v22
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v21.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v20.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v20.padding))
_v23 = _v20.operation
buff.write(_struct_b.pack(_v23.operation))
length = len(_v20.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v20.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val3.dimensions))
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val3.triangles))
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v20.poses)
buff.write(_struct_I.pack(length))
for val3 in _v20.poses:
_v24 = val3.position
_x = _v24
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v25 = val3.orientation
_x = _v25
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v26 = val1.center
_x = _v26
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v27 = val1.extents
_x = _v27
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v28 = val1.axis
_x = _v28
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v29 = val1.position
_x = _v29
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v30 = val1.orientation
_x = _v30
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v31 = val1.header
start = end
end += 4
(_v31.seq,) = _struct_I.unpack(str[start:end])
_v32 = _v31.stamp
_x = _v32
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v31.frame_id = str[start:end].decode('utf-8')
else:
_v31.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v33 = val1.transform
_v34 = _v33.translation
_x = _v34
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v35 = _v33.rotation
_x = _v35
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
        val1.enabled = list(map(bool, val1.enabled))  # list() so the field remains a sequence under Python 3
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v36 = val1.shape
start = end
end += 1
(_v36.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v36.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v36.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v36.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v36.vertices.append(val3)
_v37 = val1.pose_stamped
_v38 = _v37.header
start = end
end += 4
(_v38.seq,) = _struct_I.unpack(str[start:end])
_v39 = _v38.stamp
_x = _v39
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v38.frame_id = str[start:end].decode('utf-8')
else:
_v38.frame_id = str[start:end]
_v40 = _v37.pose
_v41 = _v40.position
_x = _v41
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v42 = _v40.orientation
_x = _v42
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v43 = val1.header
start = end
end += 4
(_v43.seq,) = _struct_I.unpack(str[start:end])
_v44 = _v43.stamp
_x = _v44
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v43.frame_id = str[start:end].decode('utf-8')
else:
_v43.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v45 = val1.operation
start = end
end += 1
(_v45.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v46 = val2.position
_x = _v46
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v47 = val2.orientation
_x = _v47
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v48 = val1.object
_v49 = _v48.header
start = end
end += 4
(_v49.seq,) = _struct_I.unpack(str[start:end])
_v50 = _v49.stamp
_x = _v50
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v49.frame_id = str[start:end].decode('utf-8')
else:
_v49.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v48.id = str[start:end].decode('utf-8')
else:
_v48.id = str[start:end]
start = end
end += 4
(_v48.padding,) = _struct_f.unpack(str[start:end])
_v51 = _v48.operation
start = end
end += 1
(_v51.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v48.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v52 = val3.position
_x = _v52
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v53 = val3.orientation
_x = _v53
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v48.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v54 = val1.center
_x = _v54
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v55 = val1.extents
_x = _v55
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v56 = val1.axis
_x = _v56
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v57 = val1.position
_x = _v57
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v58 = val1.orientation
_x = _v58
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v59 = val1.header
buff.write(_struct_I.pack(_v59.seq))
_v60 = _v59.stamp
_x = _v60
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v59.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v61 = val1.transform
_v62 = _v61.translation
_x = _v62
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v63 = _v61.rotation
_x = _v63
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v64 = val1.shape
buff.write(_struct_b.pack(_v64.type))
length = len(_v64.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v64.dimensions.tostring())
length = len(_v64.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v64.triangles.tostring())
length = len(_v64.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v64.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v65 = val1.pose_stamped
_v66 = _v65.header
buff.write(_struct_I.pack(_v66.seq))
_v67 = _v66.stamp
_x = _v67
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v66.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v68 = _v65.pose
_v69 = _v68.position
_x = _v69
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v70 = _v68.orientation
_x = _v70
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v71 = val1.header
buff.write(_struct_I.pack(_v71.seq))
_v72 = _v71.stamp
_x = _v72
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v71.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v73 = val1.operation
buff.write(_struct_b.pack(_v73.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v74 = val2.position
_x = _v74
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v75 = val2.orientation
_x = _v75
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v76 = val1.object
_v77 = _v76.header
buff.write(_struct_I.pack(_v77.seq))
_v78 = _v77.stamp
_x = _v78
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v77.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v76.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v76.padding))
_v79 = _v76.operation
buff.write(_struct_b.pack(_v79.operation))
length = len(_v76.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v76.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v76.poses)
buff.write(_struct_I.pack(length))
for val3 in _v76.poses:
_v80 = val3.position
_x = _v80
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v81 = val3.orientation
_x = _v81
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v82 = val1.center
_x = _v82
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v83 = val1.extents
_x = _v83
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v84 = val1.axis
_x = _v84
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v85 = val1.position
_x = _v85
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v86 = val1.orientation
_x = _v86
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v87 = val1.header
start = end
end += 4
(_v87.seq,) = _struct_I.unpack(str[start:end])
_v88 = _v87.stamp
_x = _v88
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v87.frame_id = str[start:end].decode('utf-8')
else:
_v87.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v89 = val1.transform
_v90 = _v89.translation
_x = _v90
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v91 = _v89.rotation
_x = _v91
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
        val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)  # numpy.bool_: the plain numpy.bool alias was removed in numpy 1.24
        val1.enabled = list(map(bool, val1.enabled))  # list() so the field remains a sequence under Python 3
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v92 = val1.shape
start = end
end += 1
(_v92.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v92.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v92.vertices.append(val3)
_v93 = val1.pose_stamped
_v94 = _v93.header
start = end
end += 4
(_v94.seq,) = _struct_I.unpack(str[start:end])
_v95 = _v94.stamp
_x = _v95
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v94.frame_id = str[start:end].decode('utf-8')
else:
_v94.frame_id = str[start:end]
_v96 = _v93.pose
_v97 = _v96.position
_x = _v97
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v98 = _v96.orientation
_x = _v98
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v99 = val1.header
start = end
end += 4
(_v99.seq,) = _struct_I.unpack(str[start:end])
_v100 = _v99.stamp
_x = _v100
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v99.frame_id = str[start:end].decode('utf-8')
else:
_v99.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v101 = val1.operation
start = end
end += 1
(_v101.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v102 = val2.position
_x = _v102
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v103 = val2.orientation
_x = _v103
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v104 = val1.object
_v105 = _v104.header
start = end
end += 4
(_v105.seq,) = _struct_I.unpack(str[start:end])
_v106 = _v105.stamp
_x = _v106
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v105.frame_id = str[start:end].decode('utf-8')
else:
_v105.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v104.id = str[start:end].decode('utf-8')
else:
_v104.id = str[start:end]
start = end
end += 4
(_v104.padding,) = _struct_f.unpack(str[start:end])
_v107 = _v104.operation
start = end
end += 1
(_v107.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v104.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v108 = val3.position
_x = _v108
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v109 = val3.orientation
_x = _v109
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v104.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v110 = val1.center
_x = _v110
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v111 = val1.extents
_x = _v111
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v112 = val1.axis
_x = _v112
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_di = struct.Struct("<di")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
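# Illustrative sketch (not part of the generated code): the Struct objects
# above implement the ROS 1 wire convention -- every field is little-endian
# ('<'), and each variable-length string or array is preceded by a uint32
# element count. Framing and unframing a string by hand looks like:
#
#   import struct
#   data = b'base_link'
#   framed = struct.pack('<I%ss' % len(data), len(data), data)
#   (n,) = struct.unpack('<I', framed[:4])
#   assert framed[4:4 + n] == data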
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
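# On Python 3 the serialized buffer is bytes, so string fields are decoded
# with .decode('utf-8') below; on Python 2 the raw slice is kept as-is.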
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import genpy
import sensor_msgs.msg
class GetPlanningSceneResponse(genpy.Message):
_md5sum = "285525c9abe002fbafa99af84a14b4cb"
_type = "arm_navigation_msgs/GetPlanningSceneResponse"
_has_header = False #flag to mark the presence of a Header object
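  # _md5sum is derived from the message definition text; publishers and
  # subscribers compare it at connection time to detect mismatched versions.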
_full_text = """
PlanningScene planning_scene
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation. 
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See its documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by three vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link_name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE, all attached bodies attached to any link are removed
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space (with 32 bits of precision).
# It is recommended to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
"""
__slots__ = ['planning_scene']
_slot_types = ['arm_navigation_msgs/PlanningScene']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
else:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
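  # Usage sketch: fields are typically set via keyword arguments, e.g.
  # GetPlanningSceneResponse(planning_scene=arm_navigation_msgs.msg.PlanningScene());
  # positional and keyword arguments cannot be mixed (see the docstring above).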
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.position))
length = len(self.planning_scene.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))
length = len(self.planning_scene.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort))
_x = self
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
_v113 = val1.position
_x = _v113
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v114 = val1.orientation
_x = _v114
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.fixed_frame_transforms:
_v115 = val1.header
buff.write(_struct_I.pack(_v115.seq))
_v116 = _v115.stamp
_x = _v116
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v115.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v117 = val1.transform
_v118 = _v117.translation
_x = _v118
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v119 = _v117.rotation
_x = _v119
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.pack(pattern, *val1.enabled))
length = len(self.planning_scene.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v120 = val1.shape
buff.write(_struct_b.pack(_v120.type))
length = len(_v120.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *_v120.dimensions))
length = len(_v120.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *_v120.triangles))
length = len(_v120.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v120.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v121 = val1.pose_stamped
_v122 = _v121.header
buff.write(_struct_I.pack(_v122.seq))
_v123 = _v122.stamp
_x = _v123
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v122.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v124 = _v121.pose
_v125 = _v124.position
_x = _v125
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v126 = _v124.orientation
_x = _v126
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_objects:
_v127 = val1.header
buff.write(_struct_I.pack(_v127.seq))
_v128 = _v127.stamp
_x = _v128
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v127.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v129 = val1.operation
buff.write(_struct_b.pack(_v129.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val2.dimensions))
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val2.triangles))
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v130 = val2.position
_x = _v130
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v131 = val2.orientation
_x = _v131
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v132 = val1.object
_v133 = _v132.header
buff.write(_struct_I.pack(_v133.seq))
_v134 = _v133.stamp
_x = _v134
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v133.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v132.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v132.padding))
_v135 = _v132.operation
buff.write(_struct_b.pack(_v135.operation))
length = len(_v132.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v132.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val3.dimensions))
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val3.triangles))
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v132.poses)
buff.write(_struct_I.pack(length))
for val3 in _v132.poses:
_v136 = val3.position
_x = _v136
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v137 = val3.orientation
_x = _v137
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
_x = self.planning_scene.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_map.boxes:
_v138 = val1.center
_x = _v138
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v139 = val1.extents
_x = _v139
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v140 = val1.axis
_x = _v140
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
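  # Round-trip sketch (assumes an io.BytesIO buffer on Python 3):
  #   buff = BytesIO(); msg.serialize(buff)
  #   clone = GetPlanningSceneResponse().deserialize(buff.getvalue())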
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v141 = val1.position
_x = _v141
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v142 = val1.orientation
_x = _v142
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v143 = val1.header
start = end
end += 4
(_v143.seq,) = _struct_I.unpack(str[start:end])
_v144 = _v143.stamp
_x = _v144
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v143.frame_id = str[start:end].decode('utf-8')
else:
_v143.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v145 = val1.transform
_v146 = _v145.translation
_x = _v146
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v147 = _v145.rotation
_x = _v147
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
        val1.enabled = list(map(bool, val1.enabled))  # list() keeps len() working on Python 3
self.planning_scene.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v148 = val1.shape
start = end
end += 1
(_v148.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v148.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v148.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v148.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v148.vertices.append(val3)
_v149 = val1.pose_stamped
_v150 = _v149.header
start = end
end += 4
(_v150.seq,) = _struct_I.unpack(str[start:end])
_v151 = _v150.stamp
_x = _v151
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v150.frame_id = str[start:end].decode('utf-8')
else:
_v150.frame_id = str[start:end]
_v152 = _v149.pose
_v153 = _v152.position
_x = _v153
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v154 = _v152.orientation
_x = _v154
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v155 = val1.header
start = end
end += 4
(_v155.seq,) = _struct_I.unpack(str[start:end])
_v156 = _v155.stamp
_x = _v156
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v155.frame_id = str[start:end].decode('utf-8')
else:
_v155.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v157 = val1.operation
start = end
end += 1
(_v157.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v158 = val2.position
_x = _v158
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v159 = val2.orientation
_x = _v159
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v160 = val1.object
_v161 = _v160.header
start = end
end += 4
(_v161.seq,) = _struct_I.unpack(str[start:end])
_v162 = _v161.stamp
_x = _v162
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v161.frame_id = str[start:end].decode('utf-8')
else:
_v161.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v160.id = str[start:end].decode('utf-8')
else:
_v160.id = str[start:end]
start = end
end += 4
(_v160.padding,) = _struct_f.unpack(str[start:end])
_v163 = _v160.operation
start = end
end += 1
(_v163.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v160.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v164 = val3.position
_x = _v164
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v165 = val3.orientation
_x = _v165
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v160.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v166 = val1.center
_x = _v166
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v167 = val1.extents
_x = _v167
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v168 = val1.axis
_x = _v168
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
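  # Note: deserialize() walks the input with a start/end cursor -- fixed-size
  # fields advance the cursor by their struct size, while strings and arrays
  # first read a uint32 count and then advance by count (or count * itemsize).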
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.position.tostring())
length = len(self.planning_scene.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
_v169 = val1.position
_x = _v169
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v170 = val1.orientation
_x = _v170
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.fixed_frame_transforms:
_v171 = val1.header
buff.write(_struct_I.pack(_v171.seq))
_v172 = _v171.stamp
_x = _v172
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v171.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v173 = val1.transform
_v174 = _v173.translation
_x = _v174
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v175 = _v173.rotation
_x = _v175
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v176 = val1.shape
buff.write(_struct_b.pack(_v176.type))
length = len(_v176.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v176.dimensions.tostring())
length = len(_v176.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v176.triangles.tostring())
length = len(_v176.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v176.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v177 = val1.pose_stamped
_v178 = _v177.header
buff.write(_struct_I.pack(_v178.seq))
_v179 = _v178.stamp
_x = _v179
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v178.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v180 = _v177.pose
_v181 = _v180.position
_x = _v181
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v182 = _v180.orientation
_x = _v182
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_objects:
_v183 = val1.header
buff.write(_struct_I.pack(_v183.seq))
_v184 = _v183.stamp
_x = _v184
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v183.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v185 = val1.operation
buff.write(_struct_b.pack(_v185.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v186 = val2.position
_x = _v186
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v187 = val2.orientation
_x = _v187
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v188 = val1.object
_v189 = _v188.header
buff.write(_struct_I.pack(_v189.seq))
_v190 = _v189.stamp
_x = _v190
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v189.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v188.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v188.padding))
_v191 = _v188.operation
buff.write(_struct_b.pack(_v191.operation))
length = len(_v188.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v188.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v188.poses)
buff.write(_struct_I.pack(length))
for val3 in _v188.poses:
_v192 = val3.position
_x = _v192
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v193 = val3.orientation
_x = _v193
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
_x = self.planning_scene.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_map.boxes:
_v194 = val1.center
_x = _v194
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v195 = val1.extents
_x = _v195
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v196 = val1.axis
_x = _v196
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
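  # The *_numpy variants write arrays with ndarray.tostring() (an alias of
  # tobytes() on modern numpy) and read them back with numpy.frombuffer(),
  # avoiding per-element struct packing.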
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v197 = val1.position
_x = _v197
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v198 = val1.orientation
_x = _v198
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v199 = val1.header
start = end
end += 4
(_v199.seq,) = _struct_I.unpack(str[start:end])
_v200 = _v199.stamp
_x = _v200
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v199.frame_id = str[start:end].decode('utf-8')
else:
_v199.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v201 = val1.transform
_v202 = _v201.translation
_x = _v202
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v203 = _v201.rotation
_x = _v203
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
        val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
        val1.enabled = list(map(bool, val1.enabled))
self.planning_scene.allowed_collision_matrix.entries.append(val1)
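      # Allowed contact specifications: name, shape, stamped pose, the
      # links permitted to touch the region, and a max penetration depth.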
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v204 = val1.shape
start = end
end += 1
(_v204.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v204.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v204.vertices.append(val3)
_v205 = val1.pose_stamped
_v206 = _v205.header
start = end
end += 4
(_v206.seq,) = _struct_I.unpack(str[start:end])
_v207 = _v206.stamp
_x = _v207
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v206.frame_id = str[start:end].decode('utf-8')
else:
_v206.frame_id = str[start:end]
_v208 = _v205.pose
_v209 = _v208.position
_x = _v209
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v210 = _v208.orientation
_x = _v210
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
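      # Per-link padding values applied by the collision checker.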
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
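      # Collision objects: header, id, padding, operation code, and
      # parallel arrays of shapes and poses.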
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v211 = val1.header
start = end
end += 4
(_v211.seq,) = _struct_I.unpack(str[start:end])
_v212 = _v211.stamp
_x = _v212
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v211.frame_id = str[start:end].decode('utf-8')
else:
_v211.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v213 = val1.operation
start = end
end += 1
(_v213.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v214 = val2.position
_x = _v214
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v215 = val2.orientation
_x = _v215
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
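      # Attached collision objects: the attach link, an embedded
      # CollisionObject, and the set of links allowed to touch it.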
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v216 = val1.object
_v217 = _v216.header
start = end
end += 4
(_v217.seq,) = _struct_I.unpack(str[start:end])
_v218 = _v217.stamp
_x = _v218
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v217.frame_id = str[start:end].decode('utf-8')
else:
_v217.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v216.id = str[start:end].decode('utf-8')
else:
_v216.id = str[start:end]
start = end
end += 4
(_v216.padding,) = _struct_f.unpack(str[start:end])
_v219 = _v216.operation
start = end
end += 1
(_v219.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v216.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v220 = val3.position
_x = _v220
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v221 = val3.orientation
_x = _v221
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v216.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
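      # Collision map: header plus oriented bounding boxes (center,
      # extents, and axis as float32 Point32s, plus a rotation angle).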
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v222 = val1.center
_x = _v222
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v223 = val1.extents
_x = _v223
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v224 = val1.axis
_x = _v224
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
      raise genpy.DeserializationError(e)  # most likely buffer underfill
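# Module-level precompiled struct packers shared by the (de)serialization
# methods above; '<' marks little-endian, and the format codes follow
# Python's struct module (I = uint32, b = int8, d = float64, f = float32).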
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
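# Service metadata class binding the request and response message classes
# to the 'arm_navigation_msgs/GetPlanningScene' service type for rospy.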
class GetPlanningScene(object):
_type = 'arm_navigation_msgs/GetPlanningScene'
_md5sum = '0a7b07718e4e5c5d35740c730509a151'
_request_class = GetPlanningSceneRequest
_response_class = GetPlanningSceneResponse
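
# Usage sketch (an illustrative addition, not emitted by the ROS message
# generator): calling the service via rospy. It assumes a running ROS master
# and an arm_navigation environment server; the service name
# '/environment_server/get_planning_scene' is a common default and may differ
# in your launch configuration.
if __name__ == '__main__':
  import rospy
  rospy.init_node('get_planning_scene_client')
  rospy.wait_for_service('/environment_server/get_planning_scene')
  get_scene = rospy.ServiceProxy('/environment_server/get_planning_scene',
                                 GetPlanningScene)
  # An empty request asks the environment server for the full current scene.
  resp = get_scene(GetPlanningSceneRequest())
  print('%d collision objects in scene' % len(resp.planning_scene.collision_objects))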
|
normal
|
{
"blob_id": "b8e18877af990c533c642d4937354198a4676419",
"index": 5194,
"step-1": "<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n 
multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n 
buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = 
end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = 
str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n 
_v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = 
self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n 
_v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n 
buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n 
end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = 
val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n",
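The `GetPlanningScene` class at the end of the step above is the service registration that pairs the generated request/response message classes. For orientation, here is a minimal client sketch; it is not part of the genpy output, it assumes a running rospy node and an environment server advertising the service, and the service name used below is an assumption rather than something the generated code defines.

# Minimal usage sketch for the generated service classes (not genpy output).
# The service name is an assumption; substitute whatever your planning
# environment server actually advertises.
import rospy
from arm_navigation_msgs.srv import GetPlanningScene, GetPlanningSceneRequest

rospy.init_node('planning_scene_client')
rospy.wait_for_service('/environment_server/get_planning_scene')  # assumed name
get_scene = rospy.ServiceProxy('/environment_server/get_planning_scene',
                               GetPlanningScene)
req = GetPlanningSceneRequest()  # empty diff: ask for the current scene as-is
resp = get_scene(req)
print('%d collision objects in scene'
      % len(resp.planning_scene.collision_objects))

The `_struct_3I`, `_struct_4d`, and similar packers referenced throughout the serializers are module-level precompiled `struct.Struct` objects (presumably among the masked definitions above); genpy emits them once per module so the hot serialize/deserialize loops never re-parse format strings.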
"step-2": "<mask token>\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.position))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n 
_v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = 
_x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, 
_x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n <mask token>\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n 
effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n 
length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n <mask token>\n\n\n<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = 
'285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. \n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. \n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring 
link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = 
len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = 
len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end 
+= struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n 
self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = 
arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id 
= str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n _v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n 
serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = 
_v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, 
_x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, 
_x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n 
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, 
val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if 
python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += 
struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n",
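Everything in the serialize/deserialize pairs above follows the same genpy wire convention: each string field is UTF-8 encoded and written as a little-endian uint32 length prefix followed by the raw bytes (`struct.pack('<I%ss' % length, length, _x)`), each variable-length numeric array is a uint32 count followed by its packed elements, and fixed-size groups of numeric fields go through precompiled `struct.Struct` helpers such as `_struct_3I` or `_struct_4d`, which a complete generated file defines once at module level. The sketch below shows the string round trip in isolation using plain `struct`, with no ROS packages required; `write_string`, `read_string`, and the sample frame ids are illustrative helpers for this document only, not part of the generated API.

import struct
from io import BytesIO

def write_string(buff, s):
    # genpy convention: '<I%ss' -- little-endian uint32 length, then raw UTF-8 bytes
    data = s.encode('utf-8')
    buff.write(struct.pack('<I%ss' % len(data), len(data), data))

def read_string(data, offset):
    # mirrors the generated deserialize loops: read the 4-byte length, then slice
    (length,) = struct.unpack_from('<I', data, offset)
    offset += 4
    return data[offset:offset + length].decode('utf-8'), offset + length

buff = BytesIO()
write_string(buff, 'base_link')            # e.g. a header.frame_id field
write_string(buff, 'r_gripper_palm_link')  # e.g. one entry of touch_links
raw = buff.getvalue()

value, offset = read_string(raw, 0)
assert value == 'base_link'
value, offset = read_string(raw, offset)
assert value == 'r_gripper_palm_link' and offset == len(raw)

The only notable divergence between the plain and numpy variants above is the array path: serialize packs element by element with `struct.pack(pattern, *array)`, serialize_numpy writes `array.tostring()` directly, and deserialize_numpy reads arrays back with `numpy.frombuffer` instead of `struct.unpack`, trading a Python loop for a bulk copy.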
"step-3": "<mask token>\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = '67ad55e9bed9c8f21dfb4b9b1ca8df7d'\n _type = 'arm_navigation_msgs/GetPlanningSceneRequest'\n _has_header = False\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff', 'operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene',\n 'arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene_diff,operations\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneRequest, self).__init__(*args, **kwds)\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n else:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = 
val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
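# editor's note: genpy wire format -- every string is a little-endian uint32 byte count followed by the UTF-8 payload ('<I%ss'), which is why the length is recomputed after encoding\n 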
buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (struct\n .unpack(pattern, str[start:end]))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n 
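# editor's note: inverse of serialize() -- the uint32 length prefix was just consumed, and the next 'length' bytes hold the raw string, decoded to str only under Python 3\n 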
if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v29 = val1.position\n _x = _v29\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v30 = val1.orientation\n _x = _v30\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v31 = val1.header\n start = end\n end += 4\n _v31.seq, = _struct_I.unpack(str[start:end])\n _v32 = _v31.stamp\n _x = _v32\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v31.frame_id = str[start:end].decode('utf-8')\n else:\n _v31.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v33 = val1.transform\n _v34 = _v33.translation\n _x = _v34\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v35 = _v33.rotation\n _x = _v35\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v36 = val1.shape\n start = end\n end += 1\n _v36.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v36.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v36.vertices.append(val3)\n _v37 = val1.pose_stamped\n _v38 = _v37.header\n start = end\n end += 4\n _v38.seq, = _struct_I.unpack(str[start:end])\n _v39 = _v38.stamp\n _x = _v39\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v38.frame_id = str[start:end].decode('utf-8')\n else:\n _v38.frame_id = str[start:end]\n _v40 = _v37.pose\n _v41 = _v40.position\n _x = _v41\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v42 = _v40.orientation\n _x = _v42\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v43 = val1.header\n start = end\n end += 4\n _v43.seq, = _struct_I.unpack(str[start:end])\n _v44 = _v43.stamp\n _x = _v44\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v43.frame_id = str[start:end].decode('utf-8')\n else:\n _v43.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v45 = val1.operation\n start = 
end\n end += 1\n _v45.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v46 = val2.position\n _x = _v46\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v47 = val2.orientation\n _x = _v47\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v48 = val1.object\n _v49 = _v48.header\n start = end\n end += 4\n _v49.seq, = _struct_I.unpack(str[start:end])\n _v50 = _v49.stamp\n _x = _v50\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v49.frame_id = str[start:end].decode('utf-8')\n else:\n _v49.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v48.id = str[start:end].decode('utf-8')\n else:\n _v48.id = str[start:end]\n start = end\n end += 4\n _v48.padding, = _struct_f.unpack(str[start:end])\n _v51 = _v48.operation\n start = end\n end += 1\n _v51.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, 
_x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v48.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v52 = val3.position\n _x = _v52\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v53 = val3.orientation\n _x = _v53\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v48.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v54 = val1.center\n _x = _v54\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v55 = val1.extents\n _x = _v55\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v56 = val1.axis\n _x = _v56\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n 
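# editor's note: a geometry_msgs/Transform is packed as three float64 values for the translation plus four for the quaternion rotation (56 bytes via _struct_3d and _struct_4d)\n 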
_v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n 
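# editor's note: the object's padding is one little-endian float32 and the operation code that follows is a single signed byte (_struct_f, then _struct_b)\n 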
buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = 
_struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n _v87.seq, = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 
4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n _v92.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n _v94.seq, = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v99 = val1.header\n start = end\n end += 4\n _v99.seq, = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n _v101.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n _v105.seq, = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n _v104.padding, = 
_struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n _v107.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v110 = val1.center\n _x = _v110\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v111 = val1.extents\n _x = _v111\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v112 = val1.axis\n _x = _v112\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = 
str[start:end].decode('utf-8')\n                else:\n                    val1.object1 = str[start:end]\n                start = end\n                end += 4\n                length, = _struct_I.unpack(str[start:end])\n                start = end\n                end += length\n                if python3:\n                    val1.object2 = str[start:end].decode('utf-8')\n                else:\n                    val1.object2 = str[start:end]\n                _x = val1\n                start = end\n                end += 12\n                _x.penetration_distance, _x.operation = _struct_di.unpack(str\n                    [start:end])\n                self.operations.collision_operations.append(val1)\n            return self\n        except struct.error as e:\n            raise genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n    _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n    _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n    _has_header = False\n    _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of position and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See its documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by three vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with a CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with a CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space (with 32 bits of precision).\n# It is recommended to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n    __slots__ = ['planning_scene']\n    _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n    def __init__(self, *args, **kwds):\n        \"\"\"\n        Constructor. Any message fields that are implicitly/explicitly\n        set to None will be assigned a default value. The recommended\n        use is keyword arguments as this is more robust to future message\n        changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n 
multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n 
buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = 
end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = 
str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n 
_v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = 
self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n 
_v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n 
buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n 
end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = 
val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n",
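Editor's note (illustrative sketch, not part of the record above or the "step-4" field below): every generated genpy method in these steps applies one wire convention, which the sketch makes explicit -- each variable-length field is a little-endian uint32 count followed by its payload (UTF-8 bytes for strings, '<%sd' / '<%si' packed arrays for float64 and int32 fields), and nested messages simply compose these primitives, which is why the generated deserialize walks start/end cursors. The helper names here (write_string, read_string, write_f64_array, read_f64_array) are hypothetical, chosen only for this sketch.

import struct
from io import BytesIO

_LEN = struct.Struct('<I')  # uint32 length prefix, little-endian

def write_string(buff, s):
    # length prefix, then raw UTF-8 bytes -- same as struct.pack('<I%ss'...)
    data = s.encode('utf-8')
    buff.write(_LEN.pack(len(data)))
    buff.write(data)

def read_string(data, offset):
    # read the uint32 count, then slice that many bytes
    (n,) = _LEN.unpack_from(data, offset)
    offset += 4
    return data[offset:offset + n].decode('utf-8'), offset + n

def write_f64_array(buff, values):
    # length prefix, then '<%sd'-packed float64 payload
    buff.write(_LEN.pack(len(values)))
    buff.write(struct.pack('<%sd' % len(values), *values))

def read_f64_array(data, offset):
    (n,) = _LEN.unpack_from(data, offset)
    offset += 4
    end = offset + 8 * n
    return struct.unpack('<%sd' % n, data[offset:end]), end

# Round-trip check of the convention:
buff = BytesIO()
write_string(buff, 'base_link')
write_f64_array(buff, [0.0, 1.5, -0.7])
raw = buff.getvalue()
name, off = read_string(raw, 0)
values, off = read_f64_array(raw, off)
assert name == 'base_link' and values == (0.0, 1.5, -0.7)

The serialize_numpy/deserialize_numpy variants in the record differ only in the payload step: they call tostring()/numpy.frombuffer on the array instead of struct.pack/struct.unpack, while the uint32 length prefix stays identical.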
"step-4": "<mask token>\nimport sys\npython3 = True if sys.hexversion > 50331648 else False\nimport genpy\nimport struct\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = '67ad55e9bed9c8f21dfb4b9b1ca8df7d'\n _type = 'arm_navigation_msgs/GetPlanningSceneRequest'\n _has_header = False\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff', 'operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene',\n 'arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene_diff,operations\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneRequest, self).__init__(*args, **kwds)\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n else:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = 
val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (struct\n .unpack(pattern, str[start:end]))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n 
if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v29 = val1.position\n _x = _v29\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v30 = val1.orientation\n _x = _v30\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v31 = val1.header\n start = end\n end += 4\n _v31.seq, = _struct_I.unpack(str[start:end])\n _v32 = _v31.stamp\n _x = _v32\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v31.frame_id = str[start:end].decode('utf-8')\n else:\n _v31.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v33 = val1.transform\n _v34 = _v33.translation\n _x = _v34\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v35 = _v33.rotation\n _x = _v35\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v36 = val1.shape\n start = end\n end += 1\n _v36.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v36.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v36.vertices.append(val3)\n _v37 = val1.pose_stamped\n _v38 = _v37.header\n start = end\n end += 4\n _v38.seq, = _struct_I.unpack(str[start:end])\n _v39 = _v38.stamp\n _x = _v39\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v38.frame_id = str[start:end].decode('utf-8')\n else:\n _v38.frame_id = str[start:end]\n _v40 = _v37.pose\n _v41 = _v40.position\n _x = _v41\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v42 = _v40.orientation\n _x = _v42\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v43 = val1.header\n start = end\n end += 4\n _v43.seq, = _struct_I.unpack(str[start:end])\n _v44 = _v43.stamp\n _x = _v44\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v43.frame_id = str[start:end].decode('utf-8')\n else:\n _v43.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v45 = val1.operation\n start = 
end\n end += 1\n _v45.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v46 = val2.position\n _x = _v46\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v47 = val2.orientation\n _x = _v47\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v48 = val1.object\n _v49 = _v48.header\n start = end\n end += 4\n _v49.seq, = _struct_I.unpack(str[start:end])\n _v50 = _v49.stamp\n _x = _v50\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v49.frame_id = str[start:end].decode('utf-8')\n else:\n _v49.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v48.id = str[start:end].decode('utf-8')\n else:\n _v48.id = str[start:end]\n start = end\n end += 4\n _v48.padding, = _struct_f.unpack(str[start:end])\n _v51 = _v48.operation\n start = end\n end += 1\n _v51.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, 
_x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v48.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v52 = val3.position\n _x = _v52\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v53 = val3.orientation\n _x = _v53\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v48.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v54 = val1.center\n _x = _v54\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v55 = val1.extents\n _x = _v55\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v56 = val1.axis\n _x = _v56\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n 
_v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n 
buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = 
_struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n _v87.seq, = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 
4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n _v92.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n _v94.seq, = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v99 = val1.header\n start = end\n end += 4\n _v99.seq, = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n _v101.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n _v105.seq, = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n _v104.padding, = 
_struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n _v107.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v110 = val1.center\n _x = _v110\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v111 = val1.extents\n _x = _v111\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v112 = val1.axis\n _x = _v112\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = 
str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct('<b')\n_struct_d = struct.Struct('<d')\n_struct_f = struct.Struct('<f')\n_struct_di = struct.Struct('<di')\n_struct_3f = struct.Struct('<3f')\n_struct_3I = struct.Struct('<3I')\n_struct_4d = struct.Struct('<4d')\n_struct_2I = struct.Struct('<2I')\n_struct_3d = struct.Struct('<3d')\n<mask token>\nimport sys\npython3 = True if sys.hexversion > 50331648 else False\nimport genpy\nimport struct\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n 
multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n 
buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = 
end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = 
str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n 
_v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = 
self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n 
_v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n 
buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n 
end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = 
val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct('<b')\n_struct_d = struct.Struct('<d')\n_struct_f = struct.Struct('<f')\n_struct_3f = struct.Struct('<3f')\n_struct_3I = struct.Struct('<3I')\n_struct_4d = struct.Struct('<4d')\n_struct_2I = struct.Struct('<2I')\n_struct_3d = struct.Struct('<3d')\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n",
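The step blobs on either side of this boundary follow the same genpy pattern: every serialize/deserialize pair walks the message tree field by field, writing each string or primitive array as a little-endian uint32 length followed by the payload, and reading it back with a running start/end cursor (the repeated `start = end; end += 4; (length,) = _struct_I.unpack(str[start:end])` blocks). Fixed-size records such as a 3-float point or a 4-double quaternion reuse the precompiled `_struct_3f`/`_struct_4d` objects defined above. As a minimal standalone sketch of that wire format, separate from the dataset entry itself and with illustrative helper names that are assumptions rather than genpy API, the following round-trips one string and one float64 array the same way the generated code does inline:

import struct
from io import BytesIO

def write_string(buff, s):
    data = s.encode('utf-8')
    # mirrors the generated "buff.write(struct.pack('<I%ss' % length, length, _x))"
    buff.write(struct.pack('<I%ds' % len(data), len(data), data))

def write_float64_array(buff, values):
    # uint32 element count, then packed little-endian doubles
    buff.write(struct.pack('<I', len(values)))
    buff.write(struct.pack('<%dd' % len(values), *values))

def read_string(data, offset):
    # uint32 length prefix, then UTF-8 payload
    (length,) = struct.unpack_from('<I', data, offset)
    offset += 4
    return data[offset:offset + length].decode('utf-8'), offset + length

def read_float64_array(data, offset):
    (length,) = struct.unpack_from('<I', data, offset)
    offset += 4
    values = struct.unpack_from('<%dd' % length, data, offset)
    return list(values), offset + 8 * length

buff = BytesIO()
write_string(buff, 'base_link')
write_float64_array(buff, [0.0, 1.5, -0.5])
raw = buff.getvalue()

name, pos = read_string(raw, 0)
joints, pos = read_float64_array(raw, pos)
assert name == 'base_link' and joints == [0.0, 1.5, -0.5]

The serialize_numpy/deserialize_numpy variants above differ from the plain versions only in swapping `struct.unpack` for `numpy.frombuffer` (and using `tostring()` on the write side) for array fields; the length-prefixed framing is identical.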
"step-5": "\"\"\"autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit.\"\"\"\nimport sys\npython3 = True if sys.hexversion > 0x03000000 else False\nimport genpy\nimport struct\n\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = \"67ad55e9bed9c8f21dfb4b9b1ca8df7d\"\n _type = \"arm_navigation_msgs/GetPlanningSceneRequest\"\n _has_header = False #flag to mark the presence of a Header object\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff','operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene_diff,operations\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneRequest, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n else:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))\n length = 
len(self.planning_scene_diff.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n 
buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n 
start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v29 = val1.position\n _x = _v29\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v30 = val1.orientation\n _x = _v30\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v31 = val1.header\n start = end\n end += 4\n (_v31.seq,) = _struct_I.unpack(str[start:end])\n _v32 = _v31.stamp\n _x = _v32\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v31.frame_id = str[start:end].decode('utf-8')\n else:\n _v31.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v33 = val1.transform\n _v34 = _v33.translation\n _x = _v34\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v35 = _v33.rotation\n _x = _v35\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v36 
= val1.shape\n start = end\n end += 1\n (_v36.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v36.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v36.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v36.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v36.vertices.append(val3)\n _v37 = val1.pose_stamped\n _v38 = _v37.header\n start = end\n end += 4\n (_v38.seq,) = _struct_I.unpack(str[start:end])\n _v39 = _v38.stamp\n _x = _v39\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v38.frame_id = str[start:end].decode('utf-8')\n else:\n _v38.frame_id = str[start:end]\n _v40 = _v37.pose\n _v41 = _v40.position\n _x = _v41\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v42 = _v40.orientation\n _x = _v42\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v43 = val1.header\n start = end\n end += 4\n (_v43.seq,) = _struct_I.unpack(str[start:end])\n _v44 = _v43.stamp\n _x = _v44\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v43.frame_id = str[start:end].decode('utf-8')\n else:\n _v43.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v45 = val1.operation\n start = end\n end += 1\n (_v45.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v46 = val2.position\n _x = _v46\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v47 = val2.orientation\n _x = _v47\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v48 = val1.object\n _v49 = _v48.header\n start = end\n end += 4\n (_v49.seq,) = _struct_I.unpack(str[start:end])\n _v50 = _v49.stamp\n _x = _v50\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v49.frame_id = str[start:end].decode('utf-8')\n else:\n _v49.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v48.id = str[start:end].decode('utf-8')\n else:\n _v48.id = str[start:end]\n start = end\n end += 4\n (_v48.padding,) = _struct_f.unpack(str[start:end])\n _v51 = _v48.operation\n start = end\n end += 1\n (_v51.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v48.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n 
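# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# A geometry_msgs/Pose is a fixed 56-byte record on the wire: three float64s
# for the position (24 bytes, _struct_3d) then four float64s for the
# quaternion (32 bytes, _struct_4d) -- exactly the `end += 24` / `end += 32`
# strides above. Plain tuples stand in for the message classes here.
import struct

_POSE = struct.Struct('<3d4d')  # 56 bytes: x, y, z, qx, qy, qz, qw

def _read_pose(data, offset):
    x, y, z, qx, qy, qz, qw = _POSE.unpack_from(data, offset)
    return ((x, y, z), (qx, qy, qz, qw)), offset + _POSE.size
# ------------------------------------------------------------------------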
val3.vertices.append(val4)\n _v48.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v48.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v52 = val3.position\n _x = _v52\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v53 = val3.orientation\n _x = _v53\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v48.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v54 = val1.center\n _x = _v54\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v55 = val1.extents\n _x = _v55\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v56 = val1.axis\n _x = _v56\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == 
unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n 
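# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# serialize_numpy replaces struct.pack(pattern, *values) with
# ndarray.tostring(): one buffer copy instead of a per-element pack. On
# NumPy >= 1.19 tostring() is a deprecated alias of tobytes(); assuming a
# current NumPy, the equivalent fast path looks like this. The explicit
# '<f8' dtype pins the little-endian wire layout regardless of host order.
import struct
import numpy

def _write_float64_array(buff, values):
    arr = numpy.ascontiguousarray(values, dtype=numpy.dtype('<f8'))
    buff.write(struct.pack('<I', arr.size))  # uint32 count prefix
    buff.write(arr.tobytes())                # raw little-endian payload
# ------------------------------------------------------------------------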
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n 
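# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# String fields re-measure `length` *after* encoding to UTF-8, so multi-byte
# characters are counted in bytes rather than code points -- that is why the
# generated code assigns `length = len(_x)` twice around each encode().
# Minimal standalone version of the same writer (name is illustrative):
import struct

def _write_string(buff, s):
    data = s if isinstance(s, bytes) else s.encode('utf-8')
    buff.write(struct.pack('<I%ss' % len(data), len(data), data))
# ------------------------------------------------------------------------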
buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n 
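# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# An arm_navigation_msgs/Shape serializes as one byte for `type`, then three
# length-prefixed arrays: float64 dimensions, int32 triangle indices, and
# 24-byte Point vertices -- the sequence repeated above for every shape.
# A struct-based sketch of that writer, with plain Python containers
# standing in for the message classes:
import struct

def _write_shape(buff, shape_type, dimensions, triangles, vertices):
    buff.write(struct.pack('<b', shape_type))
    buff.write(struct.pack('<I%sd' % len(dimensions), len(dimensions), *dimensions))
    buff.write(struct.pack('<I%si' % len(triangles), len(triangles), *triangles))
    buff.write(struct.pack('<I', len(vertices)))
    for x, y, z in vertices:
        buff.write(struct.pack('<3d', x, y, z))
# ------------------------------------------------------------------------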
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
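# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# Each collision-map box (arm_navigation_msgs/OrientedBoundingBox) is a fixed
# 40-byte record: three float32 triples (center, extents, axis -- Point32s)
# plus a float32 angle, matching the _struct_3f / _struct_f packs above.
import struct

_OBB = struct.Struct('<3f3f3ff')  # center, extents, axis, angle

def _write_obb(buff, center, extents, axis, angle):
    buff.write(_OBB.pack(*center, *extents, *axis, angle))
# ------------------------------------------------------------------------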
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n (_v87.seq,) = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
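# --- Illustrative sketch (editor's addition, not generated by genpy) ---
# deserialize_numpy swaps struct.unpack(pattern, ...) for numpy.frombuffer,
# which returns an ndarray view over the input bytes instead of a tuple of
# per-element Python objects -- the only behavioral difference from
# deserialize() is the array type of the numeric fields. Standalone
# equivalent, assuming the little-endian wire format (hence '<f8'):
import struct
import numpy

def _read_float64_array_np(data, offset):
    (count,) = struct.unpack_from('<I', data, offset)
    offset += 4
    arr = numpy.frombuffer(data, dtype=numpy.dtype('<f8'), count=count, offset=offset)
    return arr, offset + arr.nbytes
# ------------------------------------------------------------------------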
self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n (_v92.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n (_v94.seq,) = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
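# --- Compatibility note (editor's addition, not generated by genpy) ---
# Two caveats in the AllowedCollisionEntry decode above if this generated
# file is run on a modern stack: the `numpy.bool` alias was removed in
# NumPy 1.24 (numpy.bool_ or plain bool is the current spelling), and on
# Python 3 `map()` is lazy, so `val1.enabled = map(bool, val1.enabled)`
# leaves an iterator rather than an indexable sequence -- the same applies
# to the struct-based deserialize() path. A sketch of the decode written
# for current NumPy:
import struct
import numpy

def _read_bool_array(data, offset):
    (count,) = struct.unpack_from('<I', data, offset)
    offset += 4
    arr = numpy.frombuffer(data, dtype=numpy.bool_, count=count, offset=offset)
    return [bool(v) for v in arr], offset + count  # one byte per element
# ------------------------------------------------------------------------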
self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v99 = val1.header\n start = end\n end += 4\n (_v99.seq,) = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n (_v101.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n (_v105.seq,) = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n (_v104.padding,) = 
_struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n (_v107.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v110 = val1.center\n _x = _v110\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v111 = val1.extents\n _x = _v111\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v112 = val1.axis\n _x = _v112\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 
= str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct(\"<b\")\n_struct_d = struct.Struct(\"<d\")\n_struct_f = struct.Struct(\"<f\")\n_struct_di = struct.Struct(\"<di\")\n_struct_3f = struct.Struct(\"<3f\")\n_struct_3I = struct.Struct(\"<3I\")\n_struct_4d = struct.Struct(\"<4d\")\n_struct_2I = struct.Struct(\"<2I\")\n_struct_3d = struct.Struct(\"<3d\")\n\"\"\"autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. Do not edit.\"\"\"\nimport sys\npython3 = True if sys.hexversion > 0x03000000 else False\nimport genpy\nimport struct\n\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = \"285525c9abe002fbafa99af84a14b4cb\"\n _type = \"arm_navigation_msgs/GetPlanningSceneResponse\"\n _has_header = False #flag to mark the presence of a Header object\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. 
\n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = 
len(self.planning_scene.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v120 = val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n 
buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, 
_x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])\n _x = self\n start = end\n end += 8\n 
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n (_v143.seq,) = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = 
str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n (_v148.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n (_v150.seq,) = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = 
end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n (_v155.seq,) = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n (_v157.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n (_v161.seq,) = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n (_v160.padding,) = 
_struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n (_v163.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, 
_x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.position.tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
buff.write(struct.pack('<I%ss'%length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 
= str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n (_v199.seq,) = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n (_v204.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n (_v206.seq,) = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n (_v211.seq,) = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = 
str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n (_v213.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n (_v217.seq,) = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n (_v216.padding,) = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n (_v219.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += 
struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct(\"<b\")\n_struct_d = struct.Struct(\"<d\")\n_struct_f = struct.Struct(\"<f\")\n_struct_3f = struct.Struct(\"<3f\")\n_struct_3I = struct.Struct(\"<3I\")\n_struct_4d = struct.Struct(\"<4d\")\n_struct_2I = struct.Struct(\"<2I\")\n_struct_3d = struct.Struct(\"<3d\")\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n",
"step-ids": [
10,
14,
18,
20,
21
]
}
|
[
10,
14,
18,
20,
21
] |
from typing import List


class NURBS:
    """Evaluates a Non-Uniform Rational B-Spline curve whose 2D control
    points are stored as complex numbers (real part = x, imaginary part = y)."""

    def __init__(self, degree: int) -> None:
        self._degree = degree
        self._points = []  # type: List[complex]
        self._weights = []  # type: List[float]
        self._knots = []  # type: List[float]

    def addPoint(self, p: complex) -> None:
        self._points.append(p)

    def addKnot(self, knot: float) -> None:
        self._knots.append(knot)

    def pointCount(self) -> int:
        return len(self._points)

    def calculate(self, segments: int) -> List[complex]:
        """Sample the curve at `segments` evenly spaced parameter values from
        the first to the last knot. Requires segments >= 2 and a filled knot
        vector (len(knots) == pointCount() + degree + 1)."""
        # Pad missing weights with 1.0, which reduces the curve to a plain B-spline.
        while len(self._weights) < len(self._points):
            self._weights.append(1.0)

        ret = []
        for n in range(0, segments):
            u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (segments - 1)
            # Weighted basis value w_i * N_{i,degree}(u) for every control point.
            nku = []
            for m in range(0, len(self._points)):
                nku.append(self._weights[m] * self._N(m, self._degree, u))

            # Rational combination:
            # point = sum_i(points[i] * w_i * N_i(u)) / sum_j(w_j * N_j(u)).
            point = complex(0, 0)
            denom = sum(nku)
            for m in range(0, len(self._points)):
                if nku[m] != 0.0 and denom != 0.0:
                    r_iku = nku[m] / denom
                    if r_iku != 0.0:
                        point += self._points[m] * r_iku

            ret.append(point)
        return ret

    def _N(self, i: int, n: int, u: float) -> float:
        """Basis function N_{i,n}(u) via the Cox-de Boor recursion."""
        if n == 0:
            # Degree-0 basis: indicator of the knot span [knots[i], knots[i+1]].
            # Using <= on both ends makes spans overlap at shared knot values;
            # the rational normalisation in calculate() absorbs this.
            if self._knots[i] <= u <= self._knots[i + 1]:
                return 1
            return 0
        else:
            Nin1u = self._N(i, n - 1, u)
            Ni1n1u = self._N(i + 1, n - 1, u)
            if Nin1u == 0.0:
                a = 0.0
            else:
                a = self._F(i, n, u) * Nin1u
            if Ni1n1u == 0.0:
                b = 0.0
            else:
                b = self._G(i, n, u) * Ni1n1u
            return a + b

    def _F(self, i: int, n: int, u: float) -> float:
        # Left blending factor (u - knots[i]) / (knots[i+n] - knots[i]);
        # the 0/0 case arising from repeated knots is defined as 0.
        denom = self._knots[i + n] - self._knots[i]
        if denom == 0.0:
            return 0.0
        return (u - self._knots[i]) / denom

    def _G(self, i: int, n: int, u: float) -> float:
        # Right blending factor. Note: the textbook Cox-de Boor recursion divides
        # by knots[i+n+1] - knots[i+1]; this implementation divides by
        # knots[i+n+1] - knots[i] and relies on the normalisation in calculate().
        denom = self._knots[i + n + 1] - self._knots[i]
        if denom == 0:
            return 0.0
        return (self._knots[i + n + 1] - u) / denom
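# Minimal usage sketch (illustrative addition, not part of the original class):
# a quadratic curve through three control points with a clamped knot vector.
# The point and knot values below are assumed purely for demonstration.
if __name__ == "__main__":
    curve = NURBS(degree=2)
    for p in (complex(0, 0), complex(1, 2), complex(2, 0)):
        curve.addPoint(p)
    for k in (0.0, 0.0, 0.0, 1.0, 1.0, 1.0):  # pointCount() + degree + 1 knots
        curve.addKnot(k)
    print(curve.calculate(segments=5))  # 5 samples, from 0j at u=0 to (2+0j) at u=1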
|
normal
|
{
"blob_id": "40b3cacf55f6c5056c3541d70d8b2c0e2cc7d01b",
"index": 2564,
"step-1": "<mask token>\n\n\nclass NURBS:\n <mask token>\n <mask token>\n\n def addKnot(self, knot: float) ->None:\n self._knots.append(knot)\n\n def pointCount(self) ->int:\n return len(self._points)\n <mask token>\n\n def _N(self, i: int, n: int, u: float) ->float:\n if n == 0:\n if self._knots[i] <= u <= self._knots[i + 1]:\n return 1\n return 0\n else:\n Nin1u = self._N(i, n - 1, u)\n Ni1n1u = self._N(i + 1, n - 1, u)\n if Nin1u == 0.0:\n a = 0.0\n else:\n a = self._F(i, n, u) * Nin1u\n if Ni1n1u == 0.0:\n b = 0.0\n else:\n b = self._G(i, n, u) * Ni1n1u\n return a + b\n\n def _F(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n] - self._knots[i]\n if denom == 0.0:\n return 0.0\n return (u - self._knots[i]) / denom\n\n def _G(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n + 1] - self._knots[i]\n if denom == 0:\n return 0.0\n return (self._knots[i + n + 1] - u) / denom\n",
"step-2": "<mask token>\n\n\nclass NURBS:\n <mask token>\n <mask token>\n\n def addKnot(self, knot: float) ->None:\n self._knots.append(knot)\n\n def pointCount(self) ->int:\n return len(self._points)\n\n def calculate(self, segments: int) ->List[complex]:\n while len(self._weights) < len(self._points):\n self._weights.append(1.0)\n ret = []\n for n in range(0, segments):\n u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (\n segments - 1)\n nku = []\n for m in range(0, len(self._points)):\n nku.append(self._weights[m] * self._N(m, self._degree, u))\n point = complex(0, 0)\n denom = sum(nku)\n for m in range(0, len(self._points)):\n if nku[m] != 0.0 and denom != 0.0:\n r_iku = nku[m] / denom\n if r_iku != 0.0:\n point += self._points[m] * r_iku\n ret.append(point)\n return ret\n\n def _N(self, i: int, n: int, u: float) ->float:\n if n == 0:\n if self._knots[i] <= u <= self._knots[i + 1]:\n return 1\n return 0\n else:\n Nin1u = self._N(i, n - 1, u)\n Ni1n1u = self._N(i + 1, n - 1, u)\n if Nin1u == 0.0:\n a = 0.0\n else:\n a = self._F(i, n, u) * Nin1u\n if Ni1n1u == 0.0:\n b = 0.0\n else:\n b = self._G(i, n, u) * Ni1n1u\n return a + b\n\n def _F(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n] - self._knots[i]\n if denom == 0.0:\n return 0.0\n return (u - self._knots[i]) / denom\n\n def _G(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n + 1] - self._knots[i]\n if denom == 0:\n return 0.0\n return (self._knots[i + n + 1] - u) / denom\n",
"step-3": "<mask token>\n\n\nclass NURBS:\n\n def __init__(self, degree: int) ->None:\n self._degree = degree\n self._points = []\n self._weights = []\n self._knots = []\n <mask token>\n\n def addKnot(self, knot: float) ->None:\n self._knots.append(knot)\n\n def pointCount(self) ->int:\n return len(self._points)\n\n def calculate(self, segments: int) ->List[complex]:\n while len(self._weights) < len(self._points):\n self._weights.append(1.0)\n ret = []\n for n in range(0, segments):\n u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (\n segments - 1)\n nku = []\n for m in range(0, len(self._points)):\n nku.append(self._weights[m] * self._N(m, self._degree, u))\n point = complex(0, 0)\n denom = sum(nku)\n for m in range(0, len(self._points)):\n if nku[m] != 0.0 and denom != 0.0:\n r_iku = nku[m] / denom\n if r_iku != 0.0:\n point += self._points[m] * r_iku\n ret.append(point)\n return ret\n\n def _N(self, i: int, n: int, u: float) ->float:\n if n == 0:\n if self._knots[i] <= u <= self._knots[i + 1]:\n return 1\n return 0\n else:\n Nin1u = self._N(i, n - 1, u)\n Ni1n1u = self._N(i + 1, n - 1, u)\n if Nin1u == 0.0:\n a = 0.0\n else:\n a = self._F(i, n, u) * Nin1u\n if Ni1n1u == 0.0:\n b = 0.0\n else:\n b = self._G(i, n, u) * Ni1n1u\n return a + b\n\n def _F(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n] - self._knots[i]\n if denom == 0.0:\n return 0.0\n return (u - self._knots[i]) / denom\n\n def _G(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n + 1] - self._knots[i]\n if denom == 0:\n return 0.0\n return (self._knots[i + n + 1] - u) / denom\n",
"step-4": "<mask token>\n\n\nclass NURBS:\n\n def __init__(self, degree: int) ->None:\n self._degree = degree\n self._points = []\n self._weights = []\n self._knots = []\n\n def addPoint(self, p: complex) ->None:\n self._points.append(p)\n\n def addKnot(self, knot: float) ->None:\n self._knots.append(knot)\n\n def pointCount(self) ->int:\n return len(self._points)\n\n def calculate(self, segments: int) ->List[complex]:\n while len(self._weights) < len(self._points):\n self._weights.append(1.0)\n ret = []\n for n in range(0, segments):\n u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (\n segments - 1)\n nku = []\n for m in range(0, len(self._points)):\n nku.append(self._weights[m] * self._N(m, self._degree, u))\n point = complex(0, 0)\n denom = sum(nku)\n for m in range(0, len(self._points)):\n if nku[m] != 0.0 and denom != 0.0:\n r_iku = nku[m] / denom\n if r_iku != 0.0:\n point += self._points[m] * r_iku\n ret.append(point)\n return ret\n\n def _N(self, i: int, n: int, u: float) ->float:\n if n == 0:\n if self._knots[i] <= u <= self._knots[i + 1]:\n return 1\n return 0\n else:\n Nin1u = self._N(i, n - 1, u)\n Ni1n1u = self._N(i + 1, n - 1, u)\n if Nin1u == 0.0:\n a = 0.0\n else:\n a = self._F(i, n, u) * Nin1u\n if Ni1n1u == 0.0:\n b = 0.0\n else:\n b = self._G(i, n, u) * Ni1n1u\n return a + b\n\n def _F(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n] - self._knots[i]\n if denom == 0.0:\n return 0.0\n return (u - self._knots[i]) / denom\n\n def _G(self, i: int, n: int, u: float) ->float:\n denom = self._knots[i + n + 1] - self._knots[i]\n if denom == 0:\n return 0.0\n return (self._knots[i + n + 1] - u) / denom\n",
"step-5": "from typing import List\n\n\nclass NURBS:\n def __init__(self, degree: int) -> None:\n self._degree = degree\n self._points = [] # type: List[complex]\n self._weights = [] # type: List[float]\n self._knots = [] # type: List[float]\n\n def addPoint(self, p: complex) -> None:\n self._points.append(p)\n\n def addKnot(self, knot: float) -> None:\n self._knots.append(knot)\n\n def pointCount(self) -> int:\n return len(self._points)\n\n def calculate(self, segments: int) -> List[complex]:\n while len(self._weights) < len(self._points):\n self._weights.append(1.0)\n\n ret = []\n for n in range(0, segments):\n u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (segments - 1)\n nku = []\n for m in range(0, len(self._points)):\n nku.append(self._weights[m] * self._N(m, self._degree, u))\n\n point = complex(0, 0)\n denom = sum(nku)\n for m in range(0, len(self._points)):\n if nku[m] != 0.0 and denom != 0.0:\n r_iku = nku[m] / denom\n if r_iku != 0.0:\n point += self._points[m] * r_iku\n\n ret.append(point)\n return ret\n\n def _N(self, i: int, n: int, u: float) -> float:\n if n == 0:\n if self._knots[i] <= u <= self._knots[i+1]:\n return 1\n return 0\n else:\n Nin1u = self._N(i, n - 1, u)\n Ni1n1u = self._N(i + 1, n - 1, u)\n if Nin1u == 0.0:\n a = 0.0\n else:\n a = self._F(i, n, u) * Nin1u\n if Ni1n1u == 0.0:\n b = 0.0\n else:\n b = self._G(i, n, u) * Ni1n1u\n return a + b\n\n def _F(self, i: int, n: int, u: float) -> float:\n denom = self._knots[i + n] - self._knots[i]\n if denom == 0.0:\n return 0.0\n return (u - self._knots[i]) / denom\n\n def _G(self, i: int, n: int, u: float) -> float:\n denom = self._knots[i + n + 1] - self._knots[i]\n if denom == 0:\n return 0.0\n return (self._knots[i + n + 1] - u) / denom\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
# -*- coding=utf-8 -*-
from mako.template import Template
from xblock.fragment import Fragment
from .lookup import TemplateLookup # xblock_ifmo.lookup
from .utils import deep_update
class FragmentMakoChain(Fragment):
"""
Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в
друга.
Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона
ifmo_xblock_base и определять блок block_body.
Порядок оборачивания не определён.
"""
base = None
context = {}
_content = None
lookup_dirs = None
def __init__(self, content=None, base=None, lookup_dirs=None):
"""
Класс, позволяющий последовательно оборачивать экземпляры Fragment друг
в друга.
:param content: Содержимое фрагмента
:param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;
должен быть экземпляром FragmentMakoChain или None
:param lookup_dirs: Директории поиска шаблонов
:return:
"""
assert isinstance(base, FragmentMakoChain) or base is None
super(FragmentMakoChain, self).__init__(content=content)
self.base = base
self.lookup_dirs = lookup_dirs
def body_html(self):
template = self.build_chain()
return template.render(**self.context.get('render_context', {}))
def add_context(self, new_context):
deep_update(self.context, new_context)
def build_chain(self):
"""
Строит цепочку шаблонов.
В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,
поэтому порядок оборачивания не определён (точнее, его вычисляет
метод super()). Поскольку при рендере шаблона используется исключительно
lookup от шаблона, над которым он вызван, а не собственный Lookup для
каждого из шаблона в коллекции, необходимо добавить в коллекцию все
пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,
необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.
:param lookup: экземпляр TemplateLookup, в который будут записываться
новые пути и шаблоны, использующиеся как родительские
:return: tuple(template, lookup, base_template_id)
- template -- шаблон, который должен будет стать родителем
- lookup -- изменённый lookup
"""
def _build_chain(self, lookup=None):
old_base_name = "ifmo_xblock_base"
new_base_name = None
if self.base is not None:
import uuid
new_base_name = "{name}_{rnd}".format(name=old_base_name, rnd=str(uuid.uuid4()))
if hasattr(self.base, 'build_chain'):
base_template, base_lookup = _build_chain(self.base, lookup)
lookup.put_template(new_base_name, base_template)
else:
lookup.put_string(new_base_name, self.base.body_html())
lookup.append_dirs(self.base.lookup_dirs)
return Template(
text=self._content.replace(old_base_name, new_base_name) if new_base_name else self._content,
lookup=lookup
), lookup
lookup = TemplateLookup(directories=self.lookup_dirs)
template, _ = _build_chain(self, lookup)
return template
@property
def resources(self):
seen = set()
parent_res = self.base.resources if self.base else []
        # set.add() returns None, so "not seen.add(x)" is always truthy and is
        # used only for its side effect: de-duplicate while preserving order.
        return [x for x in parent_res + self._resources if x not in seen and not seen.add(x)]
@property
def content(self):
return self.body_html()
@content.setter
def content(self, value):
self._content = value
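

# --- Illustrative sketch (not part of the original module) ---
# FragmentMakoChain relies on mako template inheritance resolved through a
# shared lookup: a parent template is registered under a (unique) name and
# the child inherits from it by that name. A minimal, hypothetical
# demonstration of just that mechanism, using mako directly:
#
#   from mako.lookup import TemplateLookup as MakoLookup
#   from mako.template import Template as MakoTemplate
#
#   lookup = MakoLookup()
#   lookup.put_string('ifmo_xblock_base',
#                     '<html><%block name="block_body"/></html>')
#   child = MakoTemplate(
#       text='<%inherit file="ifmo_xblock_base"/>'
#            '<%block name="block_body">hello</%block>',
#       lookup=lookup,
#   )
#   print(child.render())  # -> <html>hello</html>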
|
normal
|
{
"blob_id": "9d904225afd4f4d0cf338ae16f031f8ab41639ad",
"index": 234,
"step-1": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n <mask token>\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-2": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-3": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-4": "from mako.template import Template\nfrom xblock.fragment import Fragment\nfrom .lookup import TemplateLookup\nfrom .utils import deep_update\n\n\nclass FragmentMakoChain(Fragment):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в\n друга.\n\n Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона\n ifmo_xblock_base и определять блок block_body.\n\n Порядок оборачивания не определён.\n \"\"\"\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-5": "# -*- coding=utf-8 -*-\n\nfrom mako.template import Template\nfrom xblock.fragment import Fragment\n\nfrom .lookup import TemplateLookup # xblock_ifmo.lookup\nfrom .utils import deep_update\n\n\nclass FragmentMakoChain(Fragment):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в\n друга.\n\n Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона\n ifmo_xblock_base и определять блок block_body.\n\n Порядок оборачивания не определён.\n \"\"\"\n\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n\n old_base_name = \"ifmo_xblock_base\"\n new_base_name = None\n\n if self.base is not None:\n\n import uuid\n new_base_name = \"{name}_{rnd}\".format(name=old_base_name, rnd=str(uuid.uuid4()))\n\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup)\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n\n lookup.append_dirs(self.base.lookup_dirs)\n\n return Template(\n text=self._content.replace(old_base_name, new_base_name) if new_base_name else self._content,\n lookup=lookup\n ), lookup\n\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-ids": [
7,
8,
9,
11,
12
]
}
|
[
7,
8,
9,
11,
12
] |
from typing import List
class Solution:
def grayCode(self, n: int) ->List[int]:
res = [0] * 2 ** n
exp = 0
l = r = 1
for i in range(1, 2 ** n):
res[i] += res[r - i] + 2 ** exp
if i == r:
exp += 1
l = r + 1
r = l + 2 ** exp - 1
return res
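

# Illustrative usage (assumed, not part of the original file):
if __name__ == '__main__':
    # n = 2 yields the 2-bit reflected Gray code sequence.
    print(Solution().grayCode(2))  # -> [0, 1, 3, 2]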
|
normal
|
{
"blob_id": "dc600763b12edda05820721098e7e5bc80f74c89",
"index": 4798,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def grayCode(self, n: int) ->List[int]:\n res = [0] * 2 ** n\n exp = 0\n l = r = 1\n for i in range(1, 2 ** n):\n res[i] += res[r - i] + 2 ** exp\n if i == r:\n exp += 1\n l = r + 1\n r = l + 2 ** exp - 1\n return res\n",
"step-4": "from typing import List\n\n\nclass Solution:\n\n def grayCode(self, n: int) ->List[int]:\n res = [0] * 2 ** n\n exp = 0\n l = r = 1\n for i in range(1, 2 ** n):\n res[i] += res[r - i] + 2 ** exp\n if i == r:\n exp += 1\n l = r + 1\n r = l + 2 ** exp - 1\n return res\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
class Problem1(object):
def sum_below(self, threshold):
current_number = 1
total = 0
while current_number < threshold:
if (current_number % 3 == 0) or (current_number % 5 == 0):
total += current_number
current_number += 1
return total
if __name__ == '__main__':
problem1 = Problem1()
    print(problem1.sum_below(1000))  # == 233168
|
normal
|
{
"blob_id": "918653cdeea8d91921f8b96779fcd3ebce491948",
"index": 1217,
"step-1": "#!/usr/bin/env python\nclass Problem1(object):\n def sum_below(self, threshold):\n current_number = 1\n total = 0\n while current_number < threshold:\n if (current_number % 3 == 0) or (current_number % 5 == 0):\n total += current_number\n current_number += 1\n return total\n\n\nif __name__ == '__main__':\n problem1 = Problem1()\n print problem1.sum_below(1000) # == 233168",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from nose.tools import assert_equal
def rec_coin(target, coins):
'''
INPUT: Target change amount and list of coin values
OUTPUT: Minimum coins needed to make change
Note, this solution is not optimized.
'''
# Default to target value
min_coins = target
# Check to see if we have a single coin match (BASE CASE)
if target in coins:
return 1
else:
# for every coin value that is <= than target
for i in [c for c in coins if c <= target]:
# Recursive Call (add a count coin and subtract from the target)
num_coins = 1 + rec_coin(target-i, coins)
# Reset Minimum if we have a new minimum
if num_coins < min_coins:
min_coins = num_coins
return min_coins
# consider using decorators to encapsulate memoization
def rec_coin_dynam(target, coins, known_results):
'''
INPUT: This function takes in a target amount and a list of possible coins to use.
It also takes a third parameter, known_results, indicating previously calculated results.
    The known_results parameter should be started with [0] * (target+1)
OUTPUT: Minimum number of coins needed to make the target.
'''
# Default output to target
min_coins = target
# Base Case
if target in coins:
known_results[target] = 1
return 1
# Return a known result if it happens to be greater than 0
elif known_results[target] > 0:
return known_results[target]
else:
# for every coin value that is <= than target
for i in [c for c in coins if c <= target]:
# Recursive call, note how we include the known results!
num_coins = 1 + rec_coin_dynam(target-i, coins, known_results)
# Reset Minimum if we have a new minimum
if num_coins < min_coins:
min_coins = num_coins
# Reset the known result
known_results[target] = min_coins
return min_coins
def bottom_up_solution(n, coins):
    # initialize the array
arr = [0] + [n]*(n)
for i in range(1, len(arr)):
min_coins = n
for coin in [c for c in coins if c <= i]:
min_coins = min(arr[i-coin] + 1, min_coins)
arr[i] = min_coins
return arr[n]
class TestCoins(object):
def check(self, solution):
coins = [1, 5, 10, 25]
assert_equal(solution(45, coins, [0]*(45+1)), 3)
assert_equal(solution(23, coins, [0]*(23+1)), 5)
assert_equal(solution(74, coins, [0]*(74+1)), 8)
print('Passed all tests.')
# Run Test
# test = TestCoins()
# test.check(rec_coin_dynam)
# print(bottom_up_solution(6, [1, 2, 5]))
# dynamic solution
target = 23
coins = [1, 2, 5, 10, 20]
known_results = [0]*(target+1)
print(rec_coin_dynam(target, coins, known_results))
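
# Illustrative cross-check (assumed, not part of the original file): the
# bottom-up table should agree with the memoized recursion above.
print(bottom_up_solution(target, coins))  # -> 3 (23 = 20 + 2 + 1)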
|
normal
|
{
"blob_id": "f8c30f8ccd1b901fd750a2c9e14cab78e1d12a14",
"index": 4039,
"step-1": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\n<mask token>\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\n<mask token>\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-3": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0] * (target + 1)\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-4": "from nose.tools import assert_equal\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0] * (target + 1)\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-5": "from nose.tools import assert_equal\n\n\ndef rec_coin(target, coins):\n '''\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n '''\n\n # Default to target value\n min_coins = target\n\n # Check to see if we have a single coin match (BASE CASE)\n if target in coins:\n return 1\n\n else:\n\n # for every coin value that is <= than target\n for i in [c for c in coins if c <= target]:\n\n # Recursive Call (add a count coin and subtract from the target)\n num_coins = 1 + rec_coin(target-i, coins)\n\n # Reset Minimum if we have a new minimum\n if num_coins < min_coins:\n\n min_coins = num_coins\n\n return min_coins\n\n\n# consider using decorators to encapsulate memoization\n\ndef rec_coin_dynam(target, coins, known_results):\n '''\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n '''\n\n # Default output to target\n min_coins = target\n\n # Base Case\n if target in coins:\n known_results[target] = 1\n return 1\n\n # Return a known result if it happens to be greater than 0\n elif known_results[target] > 0:\n return known_results[target]\n\n else:\n # for every coin value that is <= than target\n for i in [c for c in coins if c <= target]:\n\n # Recursive call, note how we include the known results!\n num_coins = 1 + rec_coin_dynam(target-i, coins, known_results)\n\n # Reset Minimum if we have a new minimum\n if num_coins < min_coins:\n min_coins = num_coins\n\n # Reset the known result\n known_results[target] = min_coins\n\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n\n # intialize the array\n arr = [0] + [n]*(n)\n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i-coin] + 1, min_coins)\n\n arr[i] = min_coins\n\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0]*(45+1)), 3)\n assert_equal(solution(23, coins, [0]*(23+1)), 5)\n assert_equal(solution(74, coins, [0]*(74+1)), 8)\n\n print('Passed all tests.')\n\n\n# Run Test\n# test = TestCoins()\n# test.check(rec_coin_dynam)\n\n# print(bottom_up_solution(6, [1, 2, 5]))\n\n\n# dynamic solution\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0]*(target+1)\n\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
# -*- coding: utf-8 -*-
import numpy as np
def gauss_seidel(relax, est, stop):
"""
Método iterativo de Gauss-Seidel para o sistema linear do trabalho.
Onde relax é o fator de relaxação, est é o valor inicial, stop é o
critério de parada, n é a quantidade de linhas do sistema e k é o
número de iterações.
"""
k = 0
dif = 10000
n = len(est)
diff = np.zeros(n)
while dif > stop:
k += 1
est[0] = ((1 - relax) * est[0]) + relax * (1.50 - est[1])
for i in range(1, int(n/2)):
est[i] = ((1 - relax) * est[i]) + relax * \
((1.0 - est[i-1] - est[i+1] - est[i+25])/4)
for j in range(int(n/2), n-1):
est[j] = ((1 - relax) * est[j]) + relax * \
((2.0 - est[j-25] - est[j-1] - est[j+1])/5)
est[n-1] = ((1 - relax) * est[n-1]) + relax * (3.00 - est[n-2])
dif = max(abs(np.subtract(est, diff)))
diff = np.copy(est)
return [est, k]
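

# Illustrative run (assumed, not part of the original file). The i+25 / j-25
# offsets hard-code a 50-unknown system, so the initial estimate must have
# length 50. With relax = 1.0 this is plain Gauss-Seidel; the matrix is
# irreducibly diagonally dominant, so the iteration converges.
if __name__ == '__main__':
    solution, iterations = gauss_seidel(relax=1.0, est=np.zeros(50), stop=1e-8)
    print('converged in', iterations, 'iterations')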
|
normal
|
{
"blob_id": "51540a80c7b29dc0bbb6342ee45008108d54b6f2",
"index": 714,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n while dif > stop:\n k += 1\n est[0] = (1 - relax) * est[0] + relax * (1.5 - est[1])\n for i in range(1, int(n / 2)):\n est[i] = (1 - relax) * est[i] + relax * ((1.0 - est[i - 1] -\n est[i + 1] - est[i + 25]) / 4)\n for j in range(int(n / 2), n - 1):\n est[j] = (1 - relax) * est[j] + relax * ((2.0 - est[j - 25] -\n est[j - 1] - est[j + 1]) / 5)\n est[n - 1] = (1 - relax) * est[n - 1] + relax * (3.0 - est[n - 2])\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n return [est, k]\n",
"step-3": "import numpy as np\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n while dif > stop:\n k += 1\n est[0] = (1 - relax) * est[0] + relax * (1.5 - est[1])\n for i in range(1, int(n / 2)):\n est[i] = (1 - relax) * est[i] + relax * ((1.0 - est[i - 1] -\n est[i + 1] - est[i + 25]) / 4)\n for j in range(int(n / 2), n - 1):\n est[j] = (1 - relax) * est[j] + relax * ((2.0 - est[j - 25] -\n est[j - 1] - est[j + 1]) / 5)\n est[n - 1] = (1 - relax) * est[n - 1] + relax * (3.0 - est[n - 2])\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n return [est, k]\n",
"step-4": "# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n\n while dif > stop:\n k += 1\n\n est[0] = ((1 - relax) * est[0]) + relax * (1.50 - est[1])\n\n for i in range(1, int(n/2)):\n est[i] = ((1 - relax) * est[i]) + relax * \\\n ((1.0 - est[i-1] - est[i+1] - est[i+25])/4)\n\n for j in range(int(n/2), n-1):\n est[j] = ((1 - relax) * est[j]) + relax * \\\n ((2.0 - est[j-25] - est[j-1] - est[j+1])/5)\n\n est[n-1] = ((1 - relax) * est[n-1]) + relax * (3.00 - est[n-2])\n\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n\n return [est, k]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
You have a map that marks the locations of treasure islands. Some of the map area has jagged rocks and dangerous reefs.
Other areas are safe to sail in. There are other explorers trying to find the treasure.
So you must figure out a shortest route to one of the treasure islands.
Assume the map area is a two dimensional grid, represented by a matrix of characters.
You must start from one of the starting points (marked as S) on the map and can move one block up, down,
left or right at a time. A treasure island is marked as X. Any block with dangerous rocks or reefs is marked as
D. You must not enter dangerous blocks. You cannot leave the map area. Other areas, marked O, are safe to sail in.
Output the minimum number of steps to get to any of the treasure islands.
"""
import math
def find_treasure_util(grid, i, j):
rows, columns = len(grid), len(grid[0])
queue = [((i, j), 0)]
directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]
visited = [[-1 for _ in range(columns)] for _ in range(rows)]
while queue:
        (x, y), step = queue.pop(0)  # FIFO pop -> breadth-first search, so the first X reached is the nearest
visited[x][y] = step
for direction in directions:
curr_x = x + direction[0]
curr_y = y + direction[1]
if 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][curr_y] == 'X':
return step + 1
elif 0 <= curr_x < rows and 0 <= curr_y < columns \
and grid[curr_x][curr_y] != 'D' \
and visited[curr_x][curr_y] == -1:
queue.append(((curr_x, curr_y), step + 1))
return -1
def find_treasure(grid):
if not len(grid) or not len(grid[0]):
return -1
minimum_steps = math.inf
for i in range(len(grid)):
for j in range(len(grid[i])):
if grid[i][j] == 'S':
minimum_steps = min(minimum_steps, find_treasure_util(grid, i, j))
return minimum_steps
if __name__ == '__main__':
grid = [['S', 'O', 'O', 'S', 'S'],
['D', 'O', 'D', 'O', 'D'],
['O', 'O', 'O', 'O', 'X'],
['X', 'D', 'D', 'O', 'O'],
['X', 'D', 'D', 'D', 'O']]
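    # With BFS the nearest start is (0, 3): (0,3) -> (1,3) -> (2,3), whose
    # neighbor (2,4) is X, so the expected output below is 3.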
print(find_treasure(grid))
|
normal
|
{
"blob_id": "e6851e86fa86ab2096f059218b2b8a2994642807",
"index": 3717,
"step-1": "<mask token>\n\n\ndef find_treasure(grid):\n if not len(grid) or not len(grid[0]):\n return -1\n minimum_steps = math.inf\n for i in range(len(grid)):\n for j in range(len(grid[i])):\n if grid[i][j] == 'S':\n minimum_steps = min(minimum_steps, find_treasure_util(grid,\n i, j))\n return minimum_steps\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_treasure_util(grid, i, j):\n rows, columns = len(grid), len(grid[0])\n queue = [((i, j), 0)]\n directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]\n visited = [[(-1) for _ in range(columns)] for _ in range(rows)]\n while queue:\n (x, y), step = queue.pop()\n visited[x][y] = step\n for direction in directions:\n curr_x = x + direction[0]\n curr_y = y + direction[1]\n if 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] == 'X':\n return step + 1\n elif 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] != 'D' and visited[curr_x][curr_y] == -1:\n queue.append(((curr_x, curr_y), step + 1))\n return -1\n\n\ndef find_treasure(grid):\n if not len(grid) or not len(grid[0]):\n return -1\n minimum_steps = math.inf\n for i in range(len(grid)):\n for j in range(len(grid[i])):\n if grid[i][j] == 'S':\n minimum_steps = min(minimum_steps, find_treasure_util(grid,\n i, j))\n return minimum_steps\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_treasure_util(grid, i, j):\n rows, columns = len(grid), len(grid[0])\n queue = [((i, j), 0)]\n directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]\n visited = [[(-1) for _ in range(columns)] for _ in range(rows)]\n while queue:\n (x, y), step = queue.pop()\n visited[x][y] = step\n for direction in directions:\n curr_x = x + direction[0]\n curr_y = y + direction[1]\n if 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] == 'X':\n return step + 1\n elif 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] != 'D' and visited[curr_x][curr_y] == -1:\n queue.append(((curr_x, curr_y), step + 1))\n return -1\n\n\ndef find_treasure(grid):\n if not len(grid) or not len(grid[0]):\n return -1\n minimum_steps = math.inf\n for i in range(len(grid)):\n for j in range(len(grid[i])):\n if grid[i][j] == 'S':\n minimum_steps = min(minimum_steps, find_treasure_util(grid,\n i, j))\n return minimum_steps\n\n\nif __name__ == '__main__':\n grid = [['S', 'O', 'O', 'S', 'S'], ['D', 'O', 'D', 'O', 'D'], ['O', 'O',\n 'O', 'O', 'X'], ['X', 'D', 'D', 'O', 'O'], ['X', 'D', 'D', 'D', 'O']]\n print(find_treasure(grid))\n",
"step-4": "<mask token>\nimport math\n\n\ndef find_treasure_util(grid, i, j):\n rows, columns = len(grid), len(grid[0])\n queue = [((i, j), 0)]\n directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]\n visited = [[(-1) for _ in range(columns)] for _ in range(rows)]\n while queue:\n (x, y), step = queue.pop()\n visited[x][y] = step\n for direction in directions:\n curr_x = x + direction[0]\n curr_y = y + direction[1]\n if 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] == 'X':\n return step + 1\n elif 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][\n curr_y] != 'D' and visited[curr_x][curr_y] == -1:\n queue.append(((curr_x, curr_y), step + 1))\n return -1\n\n\ndef find_treasure(grid):\n if not len(grid) or not len(grid[0]):\n return -1\n minimum_steps = math.inf\n for i in range(len(grid)):\n for j in range(len(grid[i])):\n if grid[i][j] == 'S':\n minimum_steps = min(minimum_steps, find_treasure_util(grid,\n i, j))\n return minimum_steps\n\n\nif __name__ == '__main__':\n grid = [['S', 'O', 'O', 'S', 'S'], ['D', 'O', 'D', 'O', 'D'], ['O', 'O',\n 'O', 'O', 'X'], ['X', 'D', 'D', 'O', 'O'], ['X', 'D', 'D', 'D', 'O']]\n print(find_treasure(grid))\n",
"step-5": "\"\"\"\nYou have a map that marks the locations of treasure islands. Some of the map area has jagged rocks and dangerous reefs.\nOther areas are safe to sail in. There are other explorers trying to find the treasure.\nSo you must figure out a shortest route to one of the treasure islands.\n\nAssume the map area is a two dimensional grid, represented by a matrix of characters.\nYou must start from one of the starting point (marked as S) of the map and can move one block up, down,\nleft or right at a time. The treasure island is marked as X. Any block with dangerous rocks or reefs will be marked as\nD. You must not enter dangerous blocks. You cannot leave the map area. Other areas O are safe to sail in.\nOutput the minimum number of steps to get to any of the treasure islands.\n\"\"\"\n\nimport math\n\n\ndef find_treasure_util(grid, i, j):\n rows, columns = len(grid), len(grid[0])\n queue = [((i, j), 0)]\n directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]\n visited = [[-1 for _ in range(columns)] for _ in range(rows)]\n while queue:\n (x, y), step = queue.pop()\n visited[x][y] = step\n for direction in directions:\n curr_x = x + direction[0]\n curr_y = y + direction[1]\n if 0 <= curr_x < rows and 0 <= curr_y < columns and grid[curr_x][curr_y] == 'X':\n return step + 1\n elif 0 <= curr_x < rows and 0 <= curr_y < columns \\\n and grid[curr_x][curr_y] != 'D' \\\n and visited[curr_x][curr_y] == -1:\n queue.append(((curr_x, curr_y), step + 1))\n return -1\n\n\ndef find_treasure(grid):\n if not len(grid) or not len(grid[0]):\n return -1\n minimum_steps = math.inf\n for i in range(len(grid)):\n for j in range(len(grid[i])):\n if grid[i][j] == 'S':\n minimum_steps = min(minimum_steps, find_treasure_util(grid, i, j))\n return minimum_steps\n\n\nif __name__ == '__main__':\n grid = [['S', 'O', 'O', 'S', 'S'],\n ['D', 'O', 'D', 'O', 'D'],\n ['O', 'O', 'O', 'O', 'X'],\n ['X', 'D', 'D', 'O', 'O'],\n ['X', 'D', 'D', 'D', 'O']]\n print(find_treasure(grid))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python3
import io
import json
import fire
from collections import OrderedDict
def main(input, output):
vocab = OrderedDict({'</s>': 0, '<unk>': 1})
for line in io.open(input, 'r', encoding='utf-8'):
word, count = line.strip().split()
vocab[word] = len(vocab)
with io.open(output, 'w', encoding='utf-8') as out:
json.dump(vocab, out, indent=2, ensure_ascii=False)
if __name__ == '__main__':
fire.Fire(main)
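
# Example invocation (file names are assumptions for illustration):
#   python make_vocab.py counts.txt vocab.json
# Each line of the input is "word count"; the output JSON maps tokens to
# contiguous ids, with </s> = 0 and <unk> = 1 reserved.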
|
normal
|
{
"blob_id": "e3665141397d52877242463d548c059272d13536",
"index": 863,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"step-4": "import io\nimport json\nimport fire\nfrom collections import OrderedDict\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"step-5": "#!/usr/bin/env python3\n\nimport io\nimport json\nimport fire\nfrom collections import OrderedDict\n\n\ndef main(input, output):\n vocab = OrderedDict({'</s>': 0, '<unk>': 1})\n for line in io.open(input, 'r', encoding='utf-8'):\n word, count = line.strip().split()\n vocab[word] = len(vocab)\n with io.open(output, 'w', encoding='utf-8') as out:\n json.dump(vocab, out, indent=2, ensure_ascii=False)\n\n\nif __name__ == '__main__':\n fire.Fire(main)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from app_auth.recaptcha.services.recaptcha_service import validate_recaptcha
from django.shortcuts import render, redirect
from django.contrib import auth
from django.views import View
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from rest_framework.response import Response
from .common.bearer_authentication import CustomBearerAuthentication
from .models import User
from .forms import UserCreationForm
from .serializers import UserSerializer
from .user_backend import UserBackend
from .common.token_utils import get_or_set_token
from app.common.meta_config import get_meta
class Auth(View):
auth_class = UserBackend()
# Create your views here.
def authenticate(self, request, username, password):
user = self.auth_class.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
return True
return False
class Login(Auth):
def post(self, request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
        # NOTE: both branches below redirect to '/', so no error detail is
        # surfaced here; LoginView is the variant that reports errors.
if self.authenticate(request, username, password):
get_or_set_token(username)
return redirect('/')
return redirect('/')
def logout(request):
auth.logout(request)
return redirect('/')
class Signup(Auth):
form_class = UserCreationForm
def post(self, request):
form = self.form_class(request.POST)
if form.is_valid():
user = form.save(commit=False) # not saved permanently in db yet
# clean normalised data.
email = form.cleaned_data['email']
password = form.cleaned_data['password1']
# password setting.
user.set_password(password)
# register user.
user.save()
if self.authenticate(request, email, password):
return redirect('/')
return redirect('/')
class UserViewSet(APIView):
authentication_classes = [CustomBearerAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, format=None):
queryset = User.objects.all().order_by('-created_at')
serializer = UserSerializer(queryset, many=True)
content = {
'users': {
'data': serializer.data,
'page': 1,
'count': len(serializer.data)
},
'auth': str(request.auth),
}
return Response(content)
class LoginView(Auth):
template_name = 'app/login.html'
def get(self, request):
return render(
request,
self.template_name,
{ 'meta': get_meta('LoginView') }
)
def post(self, request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
recaptcha = request.POST.get('g-recaptcha-response')
valid = validate_recaptcha(recaptcha)
        if not valid:
            return redirect('/errors/unverified')
# just so we can send back errors
if self.authenticate(request, username, password):
get_or_set_token(username)
return redirect('/')
return render(
request,
self.template_name,
{
'errors': {
'authentication': 'Username or password is incorrect.'
},
'meta': get_meta('LoginView')
}
)
class SignupView(Auth):
template_name = 'app/signup.html'
form_class = UserCreationForm
def get(self, request):
return render(request, self.template_name, { 'meta': get_meta('SignupView') })
def post(self, request):
form = self.form_class(request.POST)
recaptcha = request.POST.get('g-recaptcha-response')
valid = validate_recaptcha(recaptcha)
        if not valid:
            return redirect('/errors/unverified')
if form.is_valid():
user = form.save(commit=False) # not saved permanently in db yet
# clean normalised data.
email = form.cleaned_data['email']
password = form.cleaned_data['password1']
# password setting.
user.set_password(password)
# register user.
user.save()
if self.authenticate(request, email, password):
return redirect('/')
else:
return render(
request,
self.template_name,
{
'errors': {
'authentication': 'Username or password is incorrect.'
},
'meta': get_meta('SignupView')
}
)
return render(
request,
self.template_name,
{
'errors': form.errors.get_json_data(),
'meta': get_meta('SignupView')
}
)
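

# Example request against UserViewSet (URL routing here is an assumption):
#   curl -H "Authorization: Bearer <token>" https://example.com/api/users/
# CustomBearerAuthentication is expected to resolve the bearer token to a
# user; authenticated requests get the serialized user list plus paging
# metadata, unauthenticated ones a 401.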
|
normal
|
{
"blob_id": "b2eb2d006d6285947cc5392e290af50f25a9f566",
"index": 4724,
"step-1": "<mask token>\n\n\nclass Signup(Auth):\n <mask token>\n <mask token>\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-2": "<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-3": "<mask token>\n\n\nclass Login(Auth):\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return redirect('/')\n\n\n<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-4": "<mask token>\n\n\nclass Auth(View):\n <mask token>\n\n def authenticate(self, request, username, password):\n user = self.auth_class.authenticate(username=username, password=\n password)\n if user is not None:\n if user.is_active:\n auth.login(request, user)\n return True\n return False\n\n\nclass Login(Auth):\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return redirect('/')\n\n\n<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-5": "from app_auth.recaptcha.services.recaptcha_service import validate_recaptcha\nfrom django.shortcuts import render, redirect\nfrom django.contrib import auth\nfrom django.views import View\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\n\nfrom .common.bearer_authentication import CustomBearerAuthentication\nfrom .models import User\nfrom .forms import UserCreationForm\nfrom .serializers import UserSerializer\nfrom .user_backend import UserBackend\nfrom .common.token_utils import get_or_set_token\nfrom app.common.meta_config import get_meta\n\n\nclass Auth(View):\n auth_class = UserBackend()\n\n # Create your views here.\n def authenticate(self, request, username, password):\n user = self.auth_class.authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n auth.login(request, user)\n return True\n return False\n\n\nclass Login(Auth):\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n\n # just so we can send back errors\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n\n return redirect('/')\n\n\ndef logout(request):\n auth.logout(request)\n return redirect('/')\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False) # not saved permanently in db yet\n\n # clean normalised data.\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n\n # password setting.\n user.set_password(password)\n\n # register user.\n user.save()\n\n if self.authenticate(request, email, password):\n return redirect('/')\n\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {\n 'users': {\n 'data': serializer.data,\n 'page': 1,\n 'count': len(serializer.data)\n },\n 'auth': str(request.auth),\n }\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(\n request, \n self.template_name, \n { 'meta': get_meta('LoginView') }\n )\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if (not valid): return redirect('/errors/unverified')\n # just so we can send back errors\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n \n return render(\n request,\n self.template_name,\n {\n 'errors': {\n 'authentication': 'Username or password is incorrect.'\n },\n 'meta': get_meta('LoginView')\n }\n )\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, { 'meta': get_meta('SignupView') })\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if (not valid): return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False) 
# not saved permanently in db yet\n\n # clean normalised data.\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n\n # password setting.\n user.set_password(password)\n\n # register user.\n user.save()\n\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(\n request, \n self.template_name,\n {\n 'errors': {\n 'authentication': 'Username or password is incorrect.'\n },\n 'meta': get_meta('SignupView')\n }\n )\n\n return render(\n request, \n self.template_name,\n {\n 'errors': form.errors.get_json_data(),\n 'meta': get_meta('SignupView')\n }\n )",
"step-ids": [
12,
14,
16,
18,
22
]
}
|
[
12,
14,
16,
18,
22
] |
import argparse, os, joblib, json, torch
import pandas as pd
from utils import regression, dataset, lstm
PREDICT_X_SKIP_COLS = ["date", "weight", "ts_id", "resp", "resp_1", "resp_2", "resp_3", "resp_4"]
X_COLS = ["resp_1", "resp_2", "resp_3", "resp_4"]
Y_OUTPUT_COLS = ["date", "ts_id"]
Y_COL = ["resp"]
METRICS_INFO = ["mse", "r2", "mape"]
DROPOUT = 0.25
HIDDEN_SIZE = 20
def get_prediction_data(data, model_path):
x = data.drop(PREDICT_X_SKIP_COLS, axis=1)
y = data[X_COLS]
model = joblib.load(model_path)
(y_pred, metrics) = regression.evaluate(model, x, y, METRICS_INFO)
y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)
return (y_pred, metrics)
def prepare_data(data_folder, model_path):
(train, test, na_value) = dataset.read_data(data_folder)
x_train = train[X_COLS]
y_train = train[Y_COL]
x_test = test[X_COLS]
y_test = test[Y_COL]
out_train = train[Y_OUTPUT_COLS]
out_test = test[Y_OUTPUT_COLS]
(x_pred_train , metrics_train) = get_prediction_data(train, model_path)
(x_pred_test, metrics_test) = get_prediction_data(test, model_path)
train = { "x": x_train, "y": y_train, "x_pred": x_pred_train, "out": out_train}
test = { "x": x_test, "y": y_test, "x_pred": x_pred_test, "out": out_test}
metrics = {
"reg_train_pred": metrics_train,
"reg_test_pred": metrics_test
}
return (train, test, metrics, na_value)
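# Design note: `x_pred` carries the regression model's predictions for the
# resp_1..resp_4 columns, so train_evaluate() below can fit and score the
# LSTM both on the true response features and on the regression-predicted
# ones (the "reg_*" entries in the metrics dict).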
def postprocess_data(out_data, y_pred):
y_output = out_data.copy()
y_output[Y_COL] = y_pred
return y_output
def train_evaluate(data_folder, output_folder, model_path):
model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)
print("Preparing data...")
(train, test, metrics, na_value) = prepare_data(data_folder, model_path)
print("Training...")
model = lstm.train(model, train["x"], train["y"])
model = lstm.train(model, train["x_pred"], train["y"])
print("Evaluating...")
(y_pred, metrics_lstm) = lstm.evaluate(model, test["x"],
test["y"], METRICS_INFO)
(y_pred_reg, metrics_reg_lstm) = lstm.evaluate(model,
test["x_pred"], test["y"], METRICS_INFO)
metrics["lstm_pred"] = metrics_lstm
metrics["reg_lstm_pred"] = metrics_reg_lstm
print("Postprocessing data...")
y_output = postprocess_data(test["out"], y_pred)
y_output_reg = postprocess_data(test["out"], y_pred_reg)
output_path = os.path.join(output_folder, "pred.csv")
y_output.to_csv(output_path, index=False)
output_path = os.path.join(output_folder, "pred_reg.csv")
y_output_reg.to_csv(output_path, index=False)
result = { "metrics": metrics, "na_value": na_value }
result_path = os.path.join(output_folder, "result.json")
json_config = json.dumps(result, indent=4)
with open(result_path, "w") as result_file:
result_file.write(json_config)
model_path = os.path.join(output_folder, "lstm.mdl")
torch.save(model, model_path)
print("Output files (model, result, prediction) saved to {}".format(
output_folder))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--data_path", type=str, help="specifies the data folder path",
required=True)
parser.add_argument(
"--output_path", type=str, help="specifies the output folder path",
required=True)
parser.add_argument(
"--regression_model_path", type=str, required = True,
help="specifies the regression model path")
return vars(parser.parse_args())
def main():
args = parse_args()
print("Args: {}".format(args))
data_path = os.path.abspath(args["data_path"])
output_path = os.path.abspath(args["output_path"])
model_path = os.path.abspath(args["regression_model_path"])
train_evaluate(data_path, output_path, model_path)
if __name__ == "__main__":
    main()
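# Usage sketch (assumptions: the script is saved as train_lstm.py, a made-up
# name, and the local `utils` package is importable; the paths below are
# hypothetical):
#
#   python train_lstm.py \
#       --data_path ./data \
#       --output_path ./output \
#       --regression_model_path ./models/regression.joblib
#
# A run writes pred.csv, pred_reg.csv, result.json and lstm.mdl into
# --output_path, as implemented in train_evaluate() above.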
|
normal
|
{
"blob_id": "4bdff51a4e277889f4d54d4ace7a0f5384e74f1e",
"index": 9017,
"step-1": "<mask token>\n\n\ndef get_prediction_data(data, model_path):\n x = data.drop(PREDICT_X_SKIP_COLS, axis=1)\n y = data[X_COLS]\n model = joblib.load(model_path)\n y_pred, metrics = regression.evaluate(model, x, y, METRICS_INFO)\n y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)\n return y_pred, metrics\n\n\ndef prepare_data(data_folder, model_path):\n train, test, na_value = dataset.read_data(data_folder)\n x_train = train[X_COLS]\n y_train = train[Y_COL]\n x_test = test[X_COLS]\n y_test = test[Y_COL]\n out_train = train[Y_OUTPUT_COLS]\n out_test = test[Y_OUTPUT_COLS]\n x_pred_train, metrics_train = get_prediction_data(train, model_path)\n x_pred_test, metrics_test = get_prediction_data(test, model_path)\n train = {'x': x_train, 'y': y_train, 'x_pred': x_pred_train, 'out':\n out_train}\n test = {'x': x_test, 'y': y_test, 'x_pred': x_pred_test, 'out': out_test}\n metrics = {'reg_train_pred': metrics_train, 'reg_test_pred': metrics_test}\n return train, test, metrics, na_value\n\n\ndef postprocess_data(out_data, y_pred):\n y_output = out_data.copy()\n y_output[Y_COL] = y_pred\n return y_output\n\n\ndef train_evaluate(data_folder, output_folder, model_path):\n model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)\n print('Preparing data...')\n train, test, metrics, na_value = prepare_data(data_folder, model_path)\n print('Training...')\n model = lstm.train(model, train['x'], train['y'])\n model = lstm.train(model, train['x_pred'], train['y'])\n print('Evaluating...')\n y_pred, metrics_lstm = lstm.evaluate(model, test['x'], test['y'],\n METRICS_INFO)\n y_pred_reg, metrics_reg_lstm = lstm.evaluate(model, test['x_pred'],\n test['y'], METRICS_INFO)\n metrics['lstm_pred'] = metrics_lstm\n metrics['reg_lstm_pred'] = metrics_reg_lstm\n print('Postprocessing data...')\n y_output = postprocess_data(test['out'], y_pred)\n y_output_reg = postprocess_data(test['out'], y_pred_reg)\n output_path = os.path.join(output_folder, 'pred.csv')\n y_output.to_csv(output_path, index=False)\n output_path = os.path.join(output_folder, 'pred_reg.csv')\n y_output_reg.to_csv(output_path, index=False)\n result = {'metrics': metrics, 'na_value': na_value}\n result_path = os.path.join(output_folder, 'result.json')\n json_config = json.dumps(result, indent=4)\n with open(result_path, 'w') as result_file:\n result_file.write(json_config)\n model_path = os.path.join(output_folder, 'lstm.mdl')\n torch.save(model, model_path)\n print('Output files (model, result, prediction) saved to {}'.format(\n output_folder))\n\n\ndef parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('--data_path', type=str, help=\n 'specifies the data folder path', required=True)\n parser.add_argument('--output_path', type=str, help=\n 'specifies the output folder path', required=True)\n parser.add_argument('--regression_model_path', type=str, required=True,\n help='specifies the regression model path')\n return vars(parser.parse_args())\n\n\ndef main():\n args = parse_args()\n print('Args: {}'.format(args))\n data_path = os.path.abspath(args['data_path'])\n output_path = os.path.abspath(args['output_path'])\n model_path = os.path.abspath(args['regression_model_path'])\n train_evaluate(data_path, output_path, model_path)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_prediction_data(data, model_path):\n x = data.drop(PREDICT_X_SKIP_COLS, axis=1)\n y = data[X_COLS]\n model = joblib.load(model_path)\n y_pred, metrics = regression.evaluate(model, x, y, METRICS_INFO)\n y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)\n return y_pred, metrics\n\n\ndef prepare_data(data_folder, model_path):\n train, test, na_value = dataset.read_data(data_folder)\n x_train = train[X_COLS]\n y_train = train[Y_COL]\n x_test = test[X_COLS]\n y_test = test[Y_COL]\n out_train = train[Y_OUTPUT_COLS]\n out_test = test[Y_OUTPUT_COLS]\n x_pred_train, metrics_train = get_prediction_data(train, model_path)\n x_pred_test, metrics_test = get_prediction_data(test, model_path)\n train = {'x': x_train, 'y': y_train, 'x_pred': x_pred_train, 'out':\n out_train}\n test = {'x': x_test, 'y': y_test, 'x_pred': x_pred_test, 'out': out_test}\n metrics = {'reg_train_pred': metrics_train, 'reg_test_pred': metrics_test}\n return train, test, metrics, na_value\n\n\ndef postprocess_data(out_data, y_pred):\n y_output = out_data.copy()\n y_output[Y_COL] = y_pred\n return y_output\n\n\ndef train_evaluate(data_folder, output_folder, model_path):\n model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)\n print('Preparing data...')\n train, test, metrics, na_value = prepare_data(data_folder, model_path)\n print('Training...')\n model = lstm.train(model, train['x'], train['y'])\n model = lstm.train(model, train['x_pred'], train['y'])\n print('Evaluating...')\n y_pred, metrics_lstm = lstm.evaluate(model, test['x'], test['y'],\n METRICS_INFO)\n y_pred_reg, metrics_reg_lstm = lstm.evaluate(model, test['x_pred'],\n test['y'], METRICS_INFO)\n metrics['lstm_pred'] = metrics_lstm\n metrics['reg_lstm_pred'] = metrics_reg_lstm\n print('Postprocessing data...')\n y_output = postprocess_data(test['out'], y_pred)\n y_output_reg = postprocess_data(test['out'], y_pred_reg)\n output_path = os.path.join(output_folder, 'pred.csv')\n y_output.to_csv(output_path, index=False)\n output_path = os.path.join(output_folder, 'pred_reg.csv')\n y_output_reg.to_csv(output_path, index=False)\n result = {'metrics': metrics, 'na_value': na_value}\n result_path = os.path.join(output_folder, 'result.json')\n json_config = json.dumps(result, indent=4)\n with open(result_path, 'w') as result_file:\n result_file.write(json_config)\n model_path = os.path.join(output_folder, 'lstm.mdl')\n torch.save(model, model_path)\n print('Output files (model, result, prediction) saved to {}'.format(\n output_folder))\n\n\ndef parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('--data_path', type=str, help=\n 'specifies the data folder path', required=True)\n parser.add_argument('--output_path', type=str, help=\n 'specifies the output folder path', required=True)\n parser.add_argument('--regression_model_path', type=str, required=True,\n help='specifies the regression model path')\n return vars(parser.parse_args())\n\n\ndef main():\n args = parse_args()\n print('Args: {}'.format(args))\n data_path = os.path.abspath(args['data_path'])\n output_path = os.path.abspath(args['output_path'])\n model_path = os.path.abspath(args['regression_model_path'])\n train_evaluate(data_path, output_path, model_path)\n\n\nmain()\n",
"step-3": "<mask token>\nPREDICT_X_SKIP_COLS = ['date', 'weight', 'ts_id', 'resp', 'resp_1',\n 'resp_2', 'resp_3', 'resp_4']\nX_COLS = ['resp_1', 'resp_2', 'resp_3', 'resp_4']\nY_OUTPUT_COLS = ['date', 'ts_id']\nY_COL = ['resp']\nMETRICS_INFO = ['mse', 'r2', 'mape']\nDROPOUT = 0.25\nHIDDEN_SIZE = 20\n\n\ndef get_prediction_data(data, model_path):\n x = data.drop(PREDICT_X_SKIP_COLS, axis=1)\n y = data[X_COLS]\n model = joblib.load(model_path)\n y_pred, metrics = regression.evaluate(model, x, y, METRICS_INFO)\n y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)\n return y_pred, metrics\n\n\ndef prepare_data(data_folder, model_path):\n train, test, na_value = dataset.read_data(data_folder)\n x_train = train[X_COLS]\n y_train = train[Y_COL]\n x_test = test[X_COLS]\n y_test = test[Y_COL]\n out_train = train[Y_OUTPUT_COLS]\n out_test = test[Y_OUTPUT_COLS]\n x_pred_train, metrics_train = get_prediction_data(train, model_path)\n x_pred_test, metrics_test = get_prediction_data(test, model_path)\n train = {'x': x_train, 'y': y_train, 'x_pred': x_pred_train, 'out':\n out_train}\n test = {'x': x_test, 'y': y_test, 'x_pred': x_pred_test, 'out': out_test}\n metrics = {'reg_train_pred': metrics_train, 'reg_test_pred': metrics_test}\n return train, test, metrics, na_value\n\n\ndef postprocess_data(out_data, y_pred):\n y_output = out_data.copy()\n y_output[Y_COL] = y_pred\n return y_output\n\n\ndef train_evaluate(data_folder, output_folder, model_path):\n model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)\n print('Preparing data...')\n train, test, metrics, na_value = prepare_data(data_folder, model_path)\n print('Training...')\n model = lstm.train(model, train['x'], train['y'])\n model = lstm.train(model, train['x_pred'], train['y'])\n print('Evaluating...')\n y_pred, metrics_lstm = lstm.evaluate(model, test['x'], test['y'],\n METRICS_INFO)\n y_pred_reg, metrics_reg_lstm = lstm.evaluate(model, test['x_pred'],\n test['y'], METRICS_INFO)\n metrics['lstm_pred'] = metrics_lstm\n metrics['reg_lstm_pred'] = metrics_reg_lstm\n print('Postprocessing data...')\n y_output = postprocess_data(test['out'], y_pred)\n y_output_reg = postprocess_data(test['out'], y_pred_reg)\n output_path = os.path.join(output_folder, 'pred.csv')\n y_output.to_csv(output_path, index=False)\n output_path = os.path.join(output_folder, 'pred_reg.csv')\n y_output_reg.to_csv(output_path, index=False)\n result = {'metrics': metrics, 'na_value': na_value}\n result_path = os.path.join(output_folder, 'result.json')\n json_config = json.dumps(result, indent=4)\n with open(result_path, 'w') as result_file:\n result_file.write(json_config)\n model_path = os.path.join(output_folder, 'lstm.mdl')\n torch.save(model, model_path)\n print('Output files (model, result, prediction) saved to {}'.format(\n output_folder))\n\n\ndef parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('--data_path', type=str, help=\n 'specifies the data folder path', required=True)\n parser.add_argument('--output_path', type=str, help=\n 'specifies the output folder path', required=True)\n parser.add_argument('--regression_model_path', type=str, required=True,\n help='specifies the regression model path')\n return vars(parser.parse_args())\n\n\ndef main():\n args = parse_args()\n print('Args: {}'.format(args))\n data_path = os.path.abspath(args['data_path'])\n output_path = os.path.abspath(args['output_path'])\n model_path = os.path.abspath(args['regression_model_path'])\n train_evaluate(data_path, output_path, model_path)\n\n\nmain()\n",
"step-4": "import argparse, os, joblib, json, torch\nimport pandas as pd\nfrom utils import regression, dataset, lstm\nPREDICT_X_SKIP_COLS = ['date', 'weight', 'ts_id', 'resp', 'resp_1',\n 'resp_2', 'resp_3', 'resp_4']\nX_COLS = ['resp_1', 'resp_2', 'resp_3', 'resp_4']\nY_OUTPUT_COLS = ['date', 'ts_id']\nY_COL = ['resp']\nMETRICS_INFO = ['mse', 'r2', 'mape']\nDROPOUT = 0.25\nHIDDEN_SIZE = 20\n\n\ndef get_prediction_data(data, model_path):\n x = data.drop(PREDICT_X_SKIP_COLS, axis=1)\n y = data[X_COLS]\n model = joblib.load(model_path)\n y_pred, metrics = regression.evaluate(model, x, y, METRICS_INFO)\n y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)\n return y_pred, metrics\n\n\ndef prepare_data(data_folder, model_path):\n train, test, na_value = dataset.read_data(data_folder)\n x_train = train[X_COLS]\n y_train = train[Y_COL]\n x_test = test[X_COLS]\n y_test = test[Y_COL]\n out_train = train[Y_OUTPUT_COLS]\n out_test = test[Y_OUTPUT_COLS]\n x_pred_train, metrics_train = get_prediction_data(train, model_path)\n x_pred_test, metrics_test = get_prediction_data(test, model_path)\n train = {'x': x_train, 'y': y_train, 'x_pred': x_pred_train, 'out':\n out_train}\n test = {'x': x_test, 'y': y_test, 'x_pred': x_pred_test, 'out': out_test}\n metrics = {'reg_train_pred': metrics_train, 'reg_test_pred': metrics_test}\n return train, test, metrics, na_value\n\n\ndef postprocess_data(out_data, y_pred):\n y_output = out_data.copy()\n y_output[Y_COL] = y_pred\n return y_output\n\n\ndef train_evaluate(data_folder, output_folder, model_path):\n model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)\n print('Preparing data...')\n train, test, metrics, na_value = prepare_data(data_folder, model_path)\n print('Training...')\n model = lstm.train(model, train['x'], train['y'])\n model = lstm.train(model, train['x_pred'], train['y'])\n print('Evaluating...')\n y_pred, metrics_lstm = lstm.evaluate(model, test['x'], test['y'],\n METRICS_INFO)\n y_pred_reg, metrics_reg_lstm = lstm.evaluate(model, test['x_pred'],\n test['y'], METRICS_INFO)\n metrics['lstm_pred'] = metrics_lstm\n metrics['reg_lstm_pred'] = metrics_reg_lstm\n print('Postprocessing data...')\n y_output = postprocess_data(test['out'], y_pred)\n y_output_reg = postprocess_data(test['out'], y_pred_reg)\n output_path = os.path.join(output_folder, 'pred.csv')\n y_output.to_csv(output_path, index=False)\n output_path = os.path.join(output_folder, 'pred_reg.csv')\n y_output_reg.to_csv(output_path, index=False)\n result = {'metrics': metrics, 'na_value': na_value}\n result_path = os.path.join(output_folder, 'result.json')\n json_config = json.dumps(result, indent=4)\n with open(result_path, 'w') as result_file:\n result_file.write(json_config)\n model_path = os.path.join(output_folder, 'lstm.mdl')\n torch.save(model, model_path)\n print('Output files (model, result, prediction) saved to {}'.format(\n output_folder))\n\n\ndef parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('--data_path', type=str, help=\n 'specifies the data folder path', required=True)\n parser.add_argument('--output_path', type=str, help=\n 'specifies the output folder path', required=True)\n parser.add_argument('--regression_model_path', type=str, required=True,\n help='specifies the regression model path')\n return vars(parser.parse_args())\n\n\ndef main():\n args = parse_args()\n print('Args: {}'.format(args))\n data_path = os.path.abspath(args['data_path'])\n output_path = os.path.abspath(args['output_path'])\n model_path = 
os.path.abspath(args['regression_model_path'])\n train_evaluate(data_path, output_path, model_path)\n\n\nmain()\n",
"step-5": "import argparse, os, joblib, json, torch\nimport pandas as pd\nfrom utils import regression, dataset, lstm\n\nPREDICT_X_SKIP_COLS = [\"date\", \"weight\", \"ts_id\", \"resp\", \"resp_1\", \"resp_2\", \"resp_3\", \"resp_4\"]\nX_COLS = [\"resp_1\", \"resp_2\", \"resp_3\", \"resp_4\"]\nY_OUTPUT_COLS = [\"date\", \"ts_id\"]\nY_COL = [\"resp\"]\nMETRICS_INFO = [\"mse\", \"r2\", \"mape\"]\nDROPOUT = 0.25\nHIDDEN_SIZE = 20\n\ndef get_prediction_data(data, model_path):\n\tx = data.drop(PREDICT_X_SKIP_COLS, axis=1)\n\ty = data[X_COLS]\n\tmodel = joblib.load(model_path)\n\t(y_pred, metrics) = regression.evaluate(model, x, y, METRICS_INFO)\n\ty_pred = pd.DataFrame(data=y_pred, columns=X_COLS)\n\treturn (y_pred, metrics)\n\ndef prepare_data(data_folder, model_path):\n\t(train, test, na_value) = dataset.read_data(data_folder)\n\tx_train = train[X_COLS]\n\ty_train = train[Y_COL]\n\tx_test = test[X_COLS]\n\ty_test = test[Y_COL]\n\tout_train = train[Y_OUTPUT_COLS]\n\tout_test = test[Y_OUTPUT_COLS]\n\t(x_pred_train , metrics_train) = get_prediction_data(train, model_path)\n\t(x_pred_test, metrics_test) = get_prediction_data(test, model_path)\n\ttrain = { \"x\": x_train, \"y\": y_train, \"x_pred\": x_pred_train, \"out\": out_train}\n\ttest = { \"x\": x_test, \"y\": y_test, \"x_pred\": x_pred_test, \"out\": out_test}\n\tmetrics = {\n\t\t\"reg_train_pred\": metrics_train,\n\t\t\"reg_test_pred\": metrics_test\n\t}\n\treturn (train, test, metrics, na_value)\n\ndef postprocess_data(out_data, y_pred):\n\ty_output = out_data.copy()\n\ty_output[Y_COL] = y_pred\n\treturn y_output\n\ndef train_evaluate(data_folder, output_folder, model_path):\n\tmodel = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)\n\n\tprint(\"Preparing data...\")\n\t(train, test, metrics, na_value) = prepare_data(data_folder, model_path)\n\n\tprint(\"Training...\")\n\tmodel = lstm.train(model, train[\"x\"], train[\"y\"])\n\tmodel = lstm.train(model, train[\"x_pred\"], train[\"y\"])\n\n\tprint(\"Evaluating...\")\n\t(y_pred, metrics_lstm) = lstm.evaluate(model, test[\"x\"],\n\t\ttest[\"y\"], METRICS_INFO)\n\t(y_pred_reg, metrics_reg_lstm) = lstm.evaluate(model,\n\t\ttest[\"x_pred\"], test[\"y\"], METRICS_INFO)\n\tmetrics[\"lstm_pred\"] = metrics_lstm\n\tmetrics[\"reg_lstm_pred\"] = metrics_reg_lstm\n\n\tprint(\"Postprocessing data...\")\n\ty_output = postprocess_data(test[\"out\"], y_pred)\n\ty_output_reg = postprocess_data(test[\"out\"], y_pred_reg)\n\n\toutput_path = os.path.join(output_folder, \"pred.csv\")\n\ty_output.to_csv(output_path, index=False)\n\n\toutput_path = os.path.join(output_folder, \"pred_reg.csv\")\n\ty_output_reg.to_csv(output_path, index=False)\n\n\tresult = { \"metrics\": metrics, \"na_value\": na_value }\n\tresult_path = os.path.join(output_folder, \"result.json\")\n\tjson_config = json.dumps(result, indent=4)\n\twith open(result_path, \"w\") as result_file:\n\t\tresult_file.write(json_config)\n\n\tmodel_path = os.path.join(output_folder, \"lstm.mdl\")\n\ttorch.save(model, model_path)\n\tprint(\"Output files (model, result, prediction) saved to {}\".format(\n\t\toutput_folder))\n\ndef parse_args():\n\tparser = argparse.ArgumentParser()\n\tparser.add_argument(\n\t\t\"--data_path\", type=str, help=\"specifies the data folder path\",\n\t\trequired=True)\n\tparser.add_argument(\n\t\t\"--output_path\", type=str, help=\"specifies the output folder path\",\n\t\trequired=True)\n\tparser.add_argument(\n\t\t\"--regression_model_path\", type=str, required = True,\n\t\thelp=\"specifies the regression model path\")\n\treturn 
vars(parser.parse_args())\n\ndef main():\n\targs = parse_args()\n\tprint(\"Args: {}\".format(args))\n\tdata_path = os.path.abspath(args[\"data_path\"])\n\toutput_path = os.path.abspath(args[\"output_path\"])\n\tmodel_path = os.path.abspath(args[\"regression_model_path\"])\n\ttrain_evaluate(data_path, output_path, model_path)\n\nmain()\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
import datetime
import os
import uuid
from abc import ABC, abstractmethod
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.contenttypes.fields import (GenericForeignKey,
GenericRelation)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from bluebird.templatetags.template_extra_filters import (plur_form,
proper_last_name)
from bluebird.tasks import calc_create_gen_async
from django_q.tasks import async_task
from .snippets import str_add_app, KLASS_TYPES, DOC_TYPE
NORM_TYPE = [
(0, '1 м2 общей площади'),
(1, '1 место'),
(2, '1 человек'),
]
POST_TYPE = [
(0, 'Клиент-менеджер'),
(1, 'Старший менеджер по работе с ЮЛ'),
(2, 'Менеджер'),
]
class Adress(models.Model):
state = models.CharField(verbose_name="Область", max_length=255)
city = models.CharField(verbose_name="Город", max_length=255)
street = models.CharField(verbose_name="Улица", max_length=255)
block = models.CharField(verbose_name="Номер дома", max_length=10)
class ContragentClass(models.Model):
name = models.CharField('Наименование', max_length=255)
class Contragent(models.Model):
"""
    Contragent (counterparty) model.
"""
# klass = models.ForeignKey(ContragentClass, on_delete=models.CASCADE)
klass = models.IntegerField(choices=KLASS_TYPES, default=0)
excell_name = models.CharField('Наименование контрагента (из Excell)',
max_length=255)
dadata_name = models.CharField('Наименование контрагента (из Dadata)',
max_length=255, blank=True, null=True)
    debt = models.FloatField('Сумма задолженности', default=0.00)
debt_period = models.IntegerField('Количество неоплаченных периодов, мес.',
blank=True, null=True)
inn = models.BigIntegerField('ИНН контрагента', blank=True, null=True)
ogrn = models.BigIntegerField('ОГРН контрагента', blank=True, null=True)
kpp = models.BigIntegerField('КПП контрагента', blank=True, null=True)
rs = models.CharField('Р/с', max_length=255, blank=True, null=True)
ks = models.CharField('К/с', max_length=255, blank=True, null=True)
bank = models.CharField('Наименование банка', max_length=255, blank=True,
null=True)
bik = models.CharField('БИК', max_length=255, blank=True, null=True)
opf = models.CharField('ОПФ', max_length=255, blank=True, null=True)
director_status = models.CharField('Директор (физ. лицо либо юр. лицо)',
max_length=255, blank=True, null=True)
director_name = models.CharField('Имя либо иное наименование директора',
max_length=255, blank=True, null=True)
creation_date = models.DateField('Дата создания контрагента (юл)',
blank=True, null=True)
is_func = models.BooleanField('Признак активности контрагента',
default=True)
okved = models.CharField('ОКВЭД',
max_length=255, blank=True, null=True)
# TODO REWORK THIS AREA
    physical_address = models.CharField('Физический адрес',
                                        max_length=255)
    legal_address = models.CharField('Юридический адрес',
                                     max_length=255, blank=True, null=True)
# END OF REWORK
norm_value = models.ForeignKey('NormativeCategory',
related_name='normatives',
on_delete=models.CASCADE,
blank=True, null=True)
stat_value = models.FloatField('Показатель', blank=True, null=True)
contract_accept_date = models.DateField(
'Дата начала оказания услуг',
default=datetime.date.fromisoformat('2018-07-01'),
blank=True, null=True
)
current_date = models.DateField('Конечная дата оказания услуг',
default=datetime.date.today, blank=True,
null=True)
number_contract = models.OneToOneField('ContractNumberClass',
on_delete=models.CASCADE,
max_length=255,
blank=True, null=True)
current_contract_date = models.DateField('Дата заключения договора',
blank=True, null=True)
signed_user = models.ForeignKey('SignUser', blank=True, null=True,
on_delete=models.CASCADE,
related_name='signed')
platform = models.IntegerField('№ площадки',
blank=True, null=True)
judge_link = models.CharField(verbose_name="", max_length=255,
blank=True, null=True)
fss_link = models.CharField(verbose_name="", max_length=255,
blank=True, null=True)
personal_number = models.CharField(verbose_name="Лицевой счет",
max_length=255, blank=True, null=True)
passport_number = models.CharField(verbose_name="Номер паспорта",
max_length=15, blank=True, null=True)
    passport_date = models.DateField(verbose_name="Дата выдачи паспорта",
                                     blank=True, null=True)
    passport_origin = models.CharField(verbose_name="Кем выдан паспорт",
                                       max_length=15, blank=True, null=True)
snils = models.CharField(verbose_name="СНИЛС",
max_length=15, blank=True, null=True)
def create_package_and_folder(self):
self.check_and_create_parent_folder()
if not os.path.isdir(self.get_str_as_path()):
os.mkdir(self.get_str_as_path(), mode=0o777)
def check_and_create_parent_folder(self):
if not os.path.isdir(os.path.join(settings.MEDIA_ROOT,
KLASS_TYPES[self.klass][1])):
os.mkdir(os.path.join(settings.MEDIA_ROOT,
KLASS_TYPES[self.klass][1]), mode=0o777)
def get_str_as_path(self):
return os.path.join(os.path.join(settings.MEDIA_ROOT,
KLASS_TYPES[self.klass][1]),
f'{self.pk} {self.excell_name}')
@property
def current_user(self):
package = self.get_active_package()
if package:
res = [user for user in package.package_users.all(
) if package.package_state.is_permitted(user)]
return res
return None
@current_user.setter
def current_user(self, user):
package = self.get_active_package()
if package and not package.is_user_in_package(user, True):
package.package_users.add(user)
package.save()
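    # Note: `current_user` is derived from the active package: the getter
    # returns the package users permitted by the current package state, and
    # the setter adds a user to the active package if not already present.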
@property
def active_package(self):
return self.get_active_package()
def get_all_packages(self):
return DocumentsPackage.objects.filter(contragent=self.pk) or None
def get_active_package(self):
res = DocumentsPackage.get_active_package(self)
return res
def reset_debt(self):
self.debt = 0
self.debt_period = 0
self.save()
def __str__(self):
return f'{self.excell_name}'
class Meta:
verbose_name_plural = "Контрагенты"
class SignUser(models.Model):
    name = models.CharField('ФИО ответственного лица', max_length=255)
document = models.IntegerField('Документ основания', choices=DOC_TYPE,
default=0)
position = models.IntegerField('Должность', choices=POST_TYPE,
default=0)
doc_number = models.CharField('Номер документа', max_length=255)
doc_date = models.DateField('Дата начала действия документа')
    address = models.CharField('Адрес', max_length=255)
city = models.ForeignKey('CityModel', on_delete=models.CASCADE,
blank=True, null=True)
tel_number = models.CharField('Телефон', max_length=255, default='')
sign = models.ImageField('Подпись', upload_to='signs/',
blank=True, null=True)
def __str__(self):
# return self.name
return f"{proper_last_name(self.name)}, {POST_TYPE[self.position][1]}"
    def save(self, *args, **kwargs):
        # On updates, drop the previously stored signature image if it was
        # replaced; on the first save there is no DB row to compare against
        # (the unconditional .get() raised DoesNotExist for new objects).
        if self.pk:
            instance = SignUser.objects.get(pk=self.pk)
            if self.sign != instance.sign and instance.sign:
                # Locate the file by filesystem path, not by URL.
                if os.path.exists(instance.sign.path):
                    os.remove(instance.sign.path)
        super().save(*args, **kwargs)
class Meta:
verbose_name_plural = "Отвественные лица с правом подписи"
class Commentary(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL,
on_delete=models.CASCADE, blank=True, null=True)
commentary_text = models.TextField('Комментарий', blank=True, null=True)
creation_date = models.DateTimeField('Дата создания', auto_now_add=True)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class AbstractFileModel(models.Model):
file_name = models.CharField('Название файла', max_length=255,
null=True, blank=True)
file_path = models.CharField('Путь', max_length=255, blank=True, null=True)
creation_date = models.DateField('Дата создания файла',
blank=True, null=True)
    # Attaching an arbitrary number of files via a generic relation
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
file_type = models.ForeignKey('DocumentTypeModel',
on_delete=models.CASCADE)
def delete(self, using=None, keep_parents=False):
if os.path.exists(str_add_app(self.file_path)):
os.remove(str_add_app(self.file_path))
return super().delete(using=using, keep_parents=keep_parents)
class Meta:
abstract = True
class SingleFile(AbstractFileModel):
def __str__(self):
return str(self.file_type)
class Meta:
verbose_name_plural = "Единичные файлы"
class PackFile(AbstractFileModel):
unique_number = models.ForeignKey('SyncUniqueNumber',
on_delete=models.CASCADE,
null=True, blank=True)
class Meta:
abstract = False
verbose_name_plural = "Фаилы набора"
def initialize_folder(self, path: str):
if self.file_type:
tmp_str_path = plur_form(self.file_type.doc_type)
if not os.path.isdir(f'{path}/{tmp_str_path}/'):
os.makedirs(f'{path}/{tmp_str_path}/')
else:
raise AttributeError()
def get_files_path(self, package: 'DocumentsPackage'):
tmp_path = package.get_save_path()
self.initialize_folder(tmp_path)
return os.path.join(tmp_path, f'{plur_form(self.file_type.doc_type)}/')
def other_files_directory_path(instance, filename):
p = instance.content_object.get_save_path()
return '{0}/прочие/{1}'.format(p, filename)
class OtherFile(AbstractFileModel):
file_obj = models.FileField('Произвольные файлы',
upload_to=other_files_directory_path,
max_length=500)
commentary = GenericRelation(Commentary, related_query_name='file')
class Meta:
verbose_name_plural = "Прочие файлы"
class ActExam(models.Model):
FOLDER = 'Акт осмотра/'
file_path = models.CharField('Путь', max_length=255, blank=True, null=True)
file_name = models.CharField('Название файла', max_length=255,
null=True, blank=True)
@classmethod
def initialize_folder(cls, path: str):
tmp_path = f'{path}/{cls.FOLDER}'
if not os.path.isdir(tmp_path):
os.makedirs(tmp_path)
@classmethod
def get_files_path(cls, package: 'DocumentsPackage'):
tmp_path = package.get_save_path()
ActExam.initialize_folder(tmp_path)
return os.path.join(tmp_path, cls.FOLDER)
def clear_file(self):
if os.path.exists(str_add_app(self.file_path)):
os.remove(str_add_app(self.file_path))
self.file_path = None
self.file_name = None
self.save()
def delete(self, using=None, keep_parents=False):
self.clear_file()
return super().delete(using=using, keep_parents=keep_parents)
class DocumentsPackage(models.Model):
""" Модель пакета документов.
contragent - ID контрагента
name_uuid - Уникальный ID пакета (каждый раз новый)
is_active - Является ли пакет активным. Если True, то пакет в работе. Если
False, то пакет закрыт.
is_automatic - Создан ли пакет автоматически или пользователь может
редактировать наборы файлов и некоторые характеристики. Если
True, то нельзя подгружать свои договора и редактировать
debt_plan. Если False, то редактирование возможно.
creation_date - Дата создания пакета.
debt_plan - Сумма долга. Если is_automatic == True, то значение не
редактируется. Если is_automatic == False, то значение
необходимо заполнить.
debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.
tax_count - Госпошлина. Можно заполнять в любом случае.
package_users - Все пользователи пакета, работавшие с ним.
package_state - Состояние пакета.
package_state_date - Дата изменения состояния пакета.
single_files - Пакет одиночных документов.
pack_files - Пакет наборов файлов.
other_files - Произвольные файлы.
commentary - Комментарии.
"""
contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,
related_name='contragents',
related_query_name='contragent',
null=True, blank=True)
name_uuid = models.CharField('Идентификатор пакета', max_length=255,
default=uuid.uuid4, null=True, blank=True,
editable=False)
is_active = models.BooleanField('Активный пакет', default=True)
is_automatic = models.BooleanField('Создан автоматически', default=True)
creation_date = models.DateField('Дата создания пакета', auto_now_add=True)
    debt_plan = models.FloatField('Сумма задолженности (плановая)',
default=0.00)
    debt_fact = models.FloatField('Сумма задолженности (фактическая)',
default=0.00)
tax_count = models.FloatField('Госпошлина', default=0.00)
package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,
related_name='packages')
package_state = models.ForeignKey('State', on_delete=models.CASCADE,
null=True, blank=True)
package_state_date = models.DateField('Дата последнего действия',
null=True, blank=True)
single_files = GenericRelation(SingleFile)
pack_files = GenericRelation(PackFile)
other_files = GenericRelation(OtherFile)
commentary = GenericRelation(Commentary, related_query_name='package')
act = models.ForeignKey(ActExam, on_delete=models.CASCADE,
null=True, blank=True)
def __str__(self):
return f'Пакет {self.name_uuid}'
def get_save_path(self):
if self.contragent:
return os.path.join(self.contragent.get_str_as_path(),
str(self.name_uuid))
else:
return f'{self.name_uuid}'
@classmethod
def get_active_package(cls, contragent: Contragent):
try:
res = cls.objects.get(contragent__id=contragent.pk, is_active=True)
return res
except ObjectDoesNotExist:
return None
def initialize_sub_folders(self):
os.makedirs(str(self.get_save_path()), exist_ok=True)
def is_user_in_package(self, user, use_department=False):
users = self.package_users.all()
if use_department:
depts = [tmp_user.department for tmp_user in users]
return (user.department in depts) or (user in users)
return user in users
def set_inactive(self):
self.is_active = False
self.save()
def change_state_to(self, new_state, is_backward):
self.package_state = new_state
self.package_state_date = datetime.date.today()
if not is_backward:
async_task(calc_create_gen_async, self.contragent, self, False,
group=self.name_uuid)
# TODO Journal log here!
self.save()
class Meta:
verbose_name_plural = "Пакеты документов"
class DocumentStateEntity(models.Model):
documents = models.ManyToManyField('DocumentTypeModel',
related_name='document_type')
states = models.ForeignKey('State', related_name='states',
on_delete=models.CASCADE,
blank=True, null=True)
template = models.ForeignKey('DocumentFileTemplate',
on_delete=models.CASCADE,
blank=True, null=True)
class DocumentFileTemplate(models.Model):
contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)
is_package = models.BooleanField('Набор файлов', default=False)
def __str__(self):
return KLASS_TYPES[self.contagent_type][1]
class Meta:
verbose_name_plural = "Шаблоны файлов"
# class SingleFilesTemplate(models.Model):
# contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)
# def __str__(self):
# return KLASS_TYPES[self.contagent_type][1]
# class Meta:
# verbose_name_plural = "Шаблоны единичных файлов"
# class PackFilesTemplate(models.Model):
# contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)
# documents = models.ManyToManyField('DocumentTypeModel',
# related_name='document_type_pack')
# def __str__(self):
# return KLASS_TYPES[self.contagent_type][1]
# class Meta:
# verbose_name_plural = "Шаблоны наборов файлов"
class NormativeCategory(models.Model):
""" Класс Категории норматива """
name = models.CharField('Вид объекта',
max_length=255)
norm_type = models.IntegerField('Показатель расчета', default=0,
choices=NORM_TYPE, blank=True, null=True)
normative = models.ManyToManyField('Normative', related_name='normatives',
verbose_name='Нормативы')
def __str__(self):
return self.name
@property
def print_norm_type(self):
return NORM_TYPE[self.norm_type][1]
class Meta:
verbose_name_plural = "Категории нормативов"
class Normative(models.Model):
""" Класс норматива """
since_date = models.DateField('Дата начала действия норматива',
null=True, blank=True)
up_to_date = models.DateField('Дата окончания действия норматива',
null=True, blank=True)
value = models.FloatField('Значение норматива (год.)',
null=True, blank=True)
def __str__(self):
return (f'Норматив: {self.value}/год.,'
+ f' действующий с {self.since_date.strftime("%d.%m.%Y")}'
+ f' по {self.up_to_date.strftime("%d.%m.%Y")}')
class Meta:
verbose_name_plural = "Нормативы"
class Contract(models.Model):
""" Класс контракта. Нужен что бы получать уникальный номер контракта.
Сохраняет дату когда был создан, для корректной генерации строкового
представления.
"""
date_field = models.DateField(auto_now_add=True)
def __str__(self):
return f'{self.pk:06}-{(self.date_field).year}/ТКО/01'
class Meta:
verbose_name_plural = "Сгенерированые номера договоров"
class ContractNumberClass(models.Model):
""" Модель класса прокси для соединения класса документа и контрагента.
Принимает на вход необязательные параметры:
new - определяем, надо генерировать новый номер или есть
старый. Булево значение. True = генерируем;
exist_number - существующий номер договора. Строка;
У класса есть такие поля как:
is_generated - хранит булево значение. Определяет был ли сгенерирован
номер или взят из внешних источников;
contract_obj - объект модели самого номера контракта;
contract_exist_number - существующий номер контракта. Пустая строка,
если мы сгенерировали новый номер;
contract_number - возвращает строковое представление номера, независимо
от того, сгенерирован код или получен из внешнего
источника.
"""
is_generated = models.BooleanField(default=False)
contract_obj = models.OneToOneField(Contract,
on_delete=models.CASCADE,
null=True, blank=True)
contract_exist_number = models.CharField(default='',
max_length=255,
null=True, blank=True)
@classmethod
def create(cls, new: bool = False, exist_number: str = ''):
contract_num_obj = cls(is_generated=new)
if new:
contract_num_obj.contract_obj = Contract.objects.create()
else:
contract_num_obj.contract_exist_number = exist_number
contract_num_obj.save()
return contract_num_obj
@property
def contract_number(self):
if self.is_generated:
return str(self.contract_obj)
else:
return self.contract_exist_number
def __str__(self):
return self.contract_number
class Meta:
verbose_name_plural = "Номера договоров"
class SyncUniqueNumber(models.Model):
def __str__(self):
return f'{self.pk:08}/01'
class Meta:
verbose_name_plural = "Номера документов"
class CityModel(models.Model):
name = models.CharField('Город', max_length=255, null=True, blank=True)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = "Города"
class TemplateModel(models.Model):
template_path = models.CharField('Путь до шаблона', max_length=255)
city = models.ForeignKey(CityModel, on_delete=models.CASCADE)
contragent_type = models.IntegerField('Тип контрагента',
choices=KLASS_TYPES, default=0)
document_type = models.ForeignKey('DocumentTypeModel',
verbose_name='Тип документа',
on_delete=models.CASCADE)
    def __str__(self):
        # Implicit concatenation instead of a backslash continuation inside
        # the literal, which embedded the next line's indentation in the string.
        return (f'{str(self.document_type)}|'
                f'{KLASS_TYPES[self.contragent_type][1]}|{self.city}')
class Meta:
verbose_name_plural = "Шаблоны документов"
class DocumentTypeModel(models.Model):
doc_type = models.CharField('Тип документа', max_length=255,
null=True, blank=True)
is_pack = models.BooleanField('Пакет документов', default=False)
def __str__(self):
return self.doc_type
class Meta:
verbose_name_plural = "Типы документов"
#########
# State #
#########
class State(models.Model):
name_state = models.CharField('Состояние', max_length=255)
departments = models.ManyToManyField('yellowbird.Department',
verbose_name='Отделы',
related_name='available_states')
is_initial_state = models.BooleanField('Начальное состояние',
default=False)
is_final_state = models.BooleanField('Конечное состояние', default=False)
def get_linked_events(self):
return Event.objects.filter(from_state=self.id)
def _is_dept_permitted(self, department):
return department in self.departments.all()
def is_permitted(self, user):
return (user.is_superuser or user.is_staff
or self._is_dept_permitted(user.department))
def __str__(self):
return self.name_state
class Meta:
verbose_name_plural = 'Состояния'
class Event(models.Model):
name_event = models.CharField('Событие', max_length=255)
from_state = models.ForeignKey(State, on_delete=models.CASCADE,
verbose_name='Исходное состояние',
blank=True, null=True,
related_name='begin_states')
to_state = models.ForeignKey(State, on_delete=models.CASCADE,
verbose_name='Конечное состояние',
blank=True, null=True,
related_name='end_states')
is_move_backward = models.BooleanField('Двигаемся обратно назад',
default=False)
def __str__(self):
return self.name_event
class Meta:
verbose_name_plural = 'События'
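# A short sketch of walking the State/Event workflow defined above
# (`package` and `user` are hypothetical):
#
#   state = package.package_state
#   events = state.get_linked_events()        # events leaving this state
#   if state.is_permitted(user) and events:   # superuser/staff/department check
#       event = events.first()                # pick the desired transition
#       package.change_state_to(event.to_state, event.is_move_backward)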
##############
# Strategies #
##############
class ListStrategy(ABC):
@abstractmethod
def execute_list_strategy(self, user):
raise NotImplementedError
@abstractmethod
def execute_single_strategy(self, pk, user):
raise NotImplementedError
class OnlyEmptyRecords(ListStrategy):
def execute_list_strategy(self, user):
contragents = Contragent.objects.all()
return [c for c in contragents if not c.active_package]
def execute_single_strategy(self, pk, user):
try:
res = Contragent.objects.get(pk=pk)
return res if (not res.active_package) else None
except Contragent.DoesNotExist:
return None
class OnlyMyRecordsStrategy(ListStrategy):
    def execute_list_strategy(self, user):
        # `current_user` is a Python property, not a model field, so it cannot
        # be used in an ORM lookup (the original `current_user__contain` would
        # raise FieldError); filter in Python like the other strategies do.
        contragents = Contragent.objects.all()
        return [c for c in contragents
                if c.current_user and user in c.current_user]
    def execute_single_strategy(self, pk, user):
        try:
            res = Contragent.objects.get(pk=pk)
            if res.current_user and user in res.current_user:
                return res
            return None
        except Contragent.DoesNotExist:
            return None
class AllRecords(ListStrategy):
def execute_list_strategy(self, user):
contragents = Contragent.objects.all()
return contragents
def execute_single_strategy(self, pk, user):
try:
return Contragent.objects.get(pk=pk)
except Contragent.DoesNotExist:
return None
class AllInDepartmentRecords(ListStrategy):
def execute_list_strategy(self, user):
res = list()
contragents = Contragent.objects.all()
for c in contragents:
tmp_pack = c.get_active_package()
if tmp_pack:
tmp_state = tmp_pack.package_state
if tmp_state:
                    if tmp_state.is_permitted(user):
res.append(c)
else:
res.append(c)
else:
res.append(c)
return res
def execute_single_strategy(self, pk, user):
try:
contragent = Contragent.objects.get(pk=pk)
tmp_pack = contragent.get_active_package()
if tmp_pack:
tmp_list = [c.department == user.department
for c in contragent.current_user]
if any(tmp_list):
return contragent
return None
return contragent
except Contragent.DoesNotExist:
return None
class MyAndEmptyRecordsStrategy(ListStrategy):
def execute_list_strategy(self, user):
res = list()
contragents = Contragent.objects.all()
for c in contragents:
tmp_pack = c.get_active_package()
if tmp_pack:
tmp_state = tmp_pack.package_state
if tmp_state:
if tmp_state.is_permitted(user) and (
user in c.current_user):
res.append(c)
else:
res.append(c)
else:
res.append(c)
return res
    def execute_single_strategy(self, pk, user):
        try:
            contragent = Contragent.objects.get(pk=pk)
            tmp_pack = contragent.get_active_package()
            if tmp_pack:
                tmp_state = tmp_pack.package_state
                if tmp_state:
                    if tmp_state.is_permitted(user) and (
                            user in contragent.current_user):
                        return contragent
                    # A state exists but the record is not the user's: hide it.
                    return None
            return contragent
        except Contragent.DoesNotExist:
            return None
STRATEGIES_LIST = ['Мои записи и пустые', 'Все по отделу', 'Все',
'Только мои записи', 'Только пустые записи']
STRATEGIES_TUPLES = list(enumerate(STRATEGIES_LIST))
STRATEGIES_FUNCTIONS = [MyAndEmptyRecordsStrategy, AllInDepartmentRecords,
AllRecords, OnlyMyRecordsStrategy, OnlyEmptyRecords]
STRATEGIES = dict(zip(STRATEGIES_LIST, STRATEGIES_FUNCTIONS))
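# Dispatch sketch: resolve a strategy class by its human-readable name from
# STRATEGIES and run it (pk=42 is a made-up example):
#
#   strategy = STRATEGIES['Только мои записи']()   # -> OnlyMyRecordsStrategy
#   visible = strategy.execute_list_strategy(user)
#   record = strategy.execute_single_strategy(pk=42, user=user)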
ZIP_FILES_ACTIONS = {
0: "Скачать весь пакет",
1: "Скачать основные файлы",
2: "Скачать акты",
3: "Скачать счета",
4: "Скачать счета фактуры",
5: "Скачать прочие файлы",
}
|
normal
|
{
"blob_id": "9da995184641525cd763ecdb0bca4f28159ae740",
"index": 7617,
"step-1": "<mask token>\n\n\nclass ActExam(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def get_files_path(cls, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n ActExam.initialize_folder(tmp_path)\n return os.path.join(tmp_path, cls.FOLDER)\n\n def clear_file(self):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n self.file_path = None\n self.file_name = None\n self.save()\n\n def delete(self, using=None, keep_parents=False):\n self.clear_file()\n return super().delete(using=using, keep_parents=keep_parents)\n\n\nclass DocumentsPackage(models.Model):\n \"\"\" Модель пакета документов.\n contragent - ID контрагента\n name_uuid - Уникальный ID пакета (каждый раз новый)\n is_active - Является ли пакет активным. Если True, то пакет в работе. Если\n False, то пакет закрыт.\n is_automatic - Создан ли пакет автоматически или пользователь может\n редактировать наборы файлов и некоторые характеристики. Если\n True, то нельзя подгружать свои договора и редактировать\n debt_plan. Если False, то редактирование возможно.\n creation_date - Дата создания пакета.\n debt_plan - Сумма долга. Если is_automatic == True, то значение не\n редактируется. Если is_automatic == False, то значение\n необходимо заполнить.\n debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.\n tax_count - Госпошлина. Можно заполнять в любом случае.\n package_users - Все пользователи пакета, работавшие с ним.\n package_state - Состояние пакета.\n package_state_date - Дата изменения состояния пакета.\n single_files - Пакет одиночных документов. \n pack_files - Пакет наборов файлов.\n other_files - Произвольные файлы.\n commentary - Комментарии.\n \"\"\"\n contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,\n related_name='contragents', related_query_name='contragent', null=\n True, blank=True)\n name_uuid = models.CharField('Идентификатор пакета', max_length=255,\n default=uuid.uuid4, null=True, blank=True, editable=False)\n is_active = models.BooleanField('Активный пакет', default=True)\n is_automatic = models.BooleanField('Создан автоматически', default=True)\n creation_date = models.DateField('Дата создания пакета', auto_now_add=True)\n debt_plan = models.FloatField('Сумма задолжности (плановая)', default=0.0)\n debt_fact = models.FloatField('Сумма задолжности (фактическая)',\n default=0.0)\n tax_count = models.FloatField('Госпошлина', default=0.0)\n package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,\n related_name='packages')\n package_state = models.ForeignKey('State', on_delete=models.CASCADE,\n null=True, blank=True)\n package_state_date = models.DateField('Дата последнего действия', null=\n True, blank=True)\n single_files = GenericRelation(SingleFile)\n pack_files = GenericRelation(PackFile)\n other_files = GenericRelation(OtherFile)\n commentary = GenericRelation(Commentary, related_query_name='package')\n act = models.ForeignKey(ActExam, on_delete=models.CASCADE, null=True,\n blank=True)\n\n def __str__(self):\n return f'Пакет {self.name_uuid}'\n\n def get_save_path(self):\n if self.contragent:\n return os.path.join(self.contragent.get_str_as_path(), str(self\n .name_uuid))\n else:\n return f'{self.name_uuid}'\n\n @classmethod\n def get_active_package(cls, contragent: Contragent):\n try:\n res = cls.objects.get(contragent__id=contragent.pk, is_active=True)\n return res\n except ObjectDoesNotExist:\n return None\n\n def initialize_sub_folders(self):\n 
os.makedirs(str(self.get_save_path()), exist_ok=True)\n\n def is_user_in_package(self, user, use_department=False):\n users = self.package_users.all()\n if use_department:\n depts = [tmp_user.department for tmp_user in users]\n return user.department in depts or user in users\n return user in users\n\n def set_inactive(self):\n self.is_active = False\n self.save()\n\n def change_state_to(self, new_state, is_backward):\n self.package_state = new_state\n self.package_state_date = datetime.date.today()\n if not is_backward:\n async_task(calc_create_gen_async, self.contragent, self, False,\n group=self.name_uuid)\n self.save()\n\n\n class Meta:\n verbose_name_plural = 'Пакеты документов'\n\n\nclass DocumentStateEntity(models.Model):\n documents = models.ManyToManyField('DocumentTypeModel', related_name=\n 'document_type')\n states = models.ForeignKey('State', related_name='states', on_delete=\n models.CASCADE, blank=True, null=True)\n template = models.ForeignKey('DocumentFileTemplate', on_delete=models.\n CASCADE, blank=True, null=True)\n\n\nclass DocumentFileTemplate(models.Model):\n contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n is_package = models.BooleanField('Набор файлов', default=False)\n\n def __str__(self):\n return KLASS_TYPES[self.contagent_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны файлов'\n\n\nclass NormativeCategory(models.Model):\n \"\"\" Класс Категории норматива \"\"\"\n name = models.CharField('Вид объекта', max_length=255)\n norm_type = models.IntegerField('Показатель расчета', default=0,\n choices=NORM_TYPE, blank=True, null=True)\n normative = models.ManyToManyField('Normative', related_name=\n 'normatives', verbose_name='Нормативы')\n\n def __str__(self):\n return self.name\n\n @property\n def print_norm_type(self):\n return NORM_TYPE[self.norm_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Категории нормативов'\n\n\nclass Normative(models.Model):\n \"\"\" Класс норматива \"\"\"\n since_date = models.DateField('Дата начала действия норматива', null=\n True, blank=True)\n up_to_date = models.DateField('Дата окончания действия норматива', null\n =True, blank=True)\n value = models.FloatField('Значение норматива (год.)', null=True, blank\n =True)\n\n def __str__(self):\n return (f'Норматив: {self.value}/год.,' +\n f\" действующий с {self.since_date.strftime('%d.%m.%Y')}\" +\n f\" по {self.up_to_date.strftime('%d.%m.%Y')}\")\n\n\n class Meta:\n verbose_name_plural = 'Нормативы'\n\n\nclass Contract(models.Model):\n \"\"\" Класс контракта. Нужен что бы получать уникальный номер контракта.\n Сохраняет дату когда был создан, для корректной генерации строкового\n представления.\n \"\"\"\n date_field = models.DateField(auto_now_add=True)\n\n def __str__(self):\n return f'{self.pk:06}-{self.date_field.year}/ТКО/01'\n\n\n class Meta:\n verbose_name_plural = 'Сгенерированые номера договоров'\n\n\nclass ContractNumberClass(models.Model):\n \"\"\" Модель класса прокси для соединения класса документа и контрагента.\n\n Принимает на вход необязательные параметры:\n new - определяем, надо генерировать новый номер или есть\n старый. Булево значение. True = генерируем;\n exist_number - существующий номер договора. Строка;\n\n У класса есть такие поля как:\n is_generated - хранит булево значение. Определяет был ли сгенерирован\n номер или взят из внешних источников;\n contract_obj - объект модели самого номера контракта;\n contract_exist_number - существующий номер контракта. 
Пустая строка,\n если мы сгенерировали новый номер;\n contract_number - возвращает строковое представление номера, независимо\n от того, сгенерирован код или получен из внешнего\n источника.\n \"\"\"\n is_generated = models.BooleanField(default=False)\n contract_obj = models.OneToOneField(Contract, on_delete=models.CASCADE,\n null=True, blank=True)\n contract_exist_number = models.CharField(default='', max_length=255,\n null=True, blank=True)\n\n @classmethod\n def create(cls, new: bool=False, exist_number: str=''):\n contract_num_obj = cls(is_generated=new)\n if new:\n contract_num_obj.contract_obj = Contract.objects.create()\n else:\n contract_num_obj.contract_exist_number = exist_number\n contract_num_obj.save()\n return contract_num_obj\n\n @property\n def contract_number(self):\n if self.is_generated:\n return str(self.contract_obj)\n else:\n return self.contract_exist_number\n\n def __str__(self):\n return self.contract_number\n\n\n class Meta:\n verbose_name_plural = 'Номера договоров'\n\n\nclass SyncUniqueNumber(models.Model):\n\n def __str__(self):\n return f'{self.pk:08}/01'\n\n\n class Meta:\n verbose_name_plural = 'Номера документов'\n\n\nclass CityModel(models.Model):\n name = models.CharField('Город', max_length=255, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name_plural = 'Города'\n\n\nclass TemplateModel(models.Model):\n template_path = models.CharField('Путь до шаблона', max_length=255)\n city = models.ForeignKey(CityModel, on_delete=models.CASCADE)\n contragent_type = models.IntegerField('Тип контрагента', choices=\n KLASS_TYPES, default=0)\n document_type = models.ForeignKey('DocumentTypeModel', verbose_name=\n 'Тип документа', on_delete=models.CASCADE)\n\n def __str__(self):\n return (\n f'{str(self.document_type)}| {KLASS_TYPES[self.contragent_type][1]}|{self.city}'\n )\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны документов'\n\n\nclass DocumentTypeModel(models.Model):\n doc_type = models.CharField('Тип документа', max_length=255, null=True,\n blank=True)\n is_pack = models.BooleanField('Пакет документов', default=False)\n\n def __str__(self):\n return self.doc_type\n\n\n class Meta:\n verbose_name_plural = 'Типы документов'\n\n\nclass State(models.Model):\n name_state = models.CharField('Состояние', max_length=255)\n departments = models.ManyToManyField('yellowbird.Department',\n verbose_name='Отделы', related_name='available_states')\n is_initial_state = models.BooleanField('Начальное состояние', default=False\n )\n is_final_state = models.BooleanField('Конечное состояние', default=False)\n\n def get_linked_events(self):\n return Event.objects.filter(from_state=self.id)\n\n def _is_dept_permitted(self, department):\n return department in self.departments.all()\n\n def is_permitted(self, user):\n return user.is_superuser or user.is_staff or self._is_dept_permitted(\n user.department)\n\n def __str__(self):\n return self.name_state\n\n\n class Meta:\n verbose_name_plural = 'Состояния'\n\n\nclass Event(models.Model):\n name_event = models.CharField('Событие', max_length=255)\n from_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Исходное состояние', blank=True, null=True,\n related_name='begin_states')\n to_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Конечное состояние', blank=True, null=True,\n related_name='end_states')\n is_move_backward = models.BooleanField('Двигаемся обратно назад',\n default=False)\n\n def __str__(self):\n return self.name_event\n\n\n 
class Meta:\n verbose_name_plural = 'События'\n\n\nclass ListStrategy(ABC):\n\n @abstractmethod\n def execute_list_strategy(self, user):\n raise NotImplementedError\n\n @abstractmethod\n def execute_single_strategy(self, pk, user):\n raise NotImplementedError\n\n\nclass OnlyEmptyRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return [c for c in contragents if not c.active_package]\n\n def execute_single_strategy(self, pk, user):\n try:\n res = Contragent.objects.get(pk=pk)\n return res if not res.active_package else None\n except Contragent.DoesNotExist:\n return None\n\n\nclass OnlyMyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.filter(current_user=user)\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk, current_user=user)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllInDepartmentRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_list = [(c.department == user.department) for c in\n contragent.current_user.all()]\n if any(tmp_list):\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nclass MyAndEmptyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and user in c.current_user.all():\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user\n ) and user in contragent.current_user.all():\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SignUser(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return f'{proper_last_name(self.name)}, {POST_TYPE[self.position][1]}'\n <mask token>\n\n\n class Meta:\n verbose_name_plural = 'Отвественные лица с правом подписи'\n\n\nclass Commentary(models.Model):\n user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, blank=True, null=True)\n commentary_text = models.TextField('Комментарий', blank=True, null=True)\n creation_date = models.DateTimeField('Дата создания', auto_now_add=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n\n\nclass AbstractFileModel(models.Model):\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n creation_date = models.DateField('Дата создания файла', blank=True,\n null=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n file_type = models.ForeignKey('DocumentTypeModel', on_delete=models.CASCADE\n )\n\n def delete(self, using=None, keep_parents=False):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n return super().delete(using=using, keep_parents=keep_parents)\n\n\n class Meta:\n abstract = True\n\n\nclass SingleFile(AbstractFileModel):\n\n def __str__(self):\n return str(self.file_type)\n\n\n class Meta:\n verbose_name_plural = 'Единичные файлы'\n\n\nclass PackFile(AbstractFileModel):\n unique_number = models.ForeignKey('SyncUniqueNumber', on_delete=models.\n CASCADE, null=True, blank=True)\n\n\n class Meta:\n abstract = False\n verbose_name_plural = 'Фаилы набора'\n\n def initialize_folder(self, path: str):\n if self.file_type:\n tmp_str_path = plur_form(self.file_type.doc_type)\n if not os.path.isdir(f'{path}/{tmp_str_path}/'):\n os.makedirs(f'{path}/{tmp_str_path}/')\n else:\n raise AttributeError()\n\n def get_files_path(self, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n self.initialize_folder(tmp_path)\n return os.path.join(tmp_path, f'{plur_form(self.file_type.doc_type)}/')\n\n\n<mask token>\n\n\nclass OtherFile(AbstractFileModel):\n file_obj = models.FileField('Произвольные файлы', upload_to=\n other_files_directory_path, max_length=500)\n commentary = GenericRelation(Commentary, related_query_name='file')\n\n\n class Meta:\n verbose_name_plural = 'Прочие файлы'\n\n\nclass ActExam(models.Model):\n FOLDER = 'Акт осмотра/'\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n\n @classmethod\n def initialize_folder(cls, path: str):\n tmp_path = f'{path}/{cls.FOLDER}'\n if not os.path.isdir(tmp_path):\n os.makedirs(tmp_path)\n\n @classmethod\n def get_files_path(cls, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n ActExam.initialize_folder(tmp_path)\n return os.path.join(tmp_path, cls.FOLDER)\n\n def clear_file(self):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n self.file_path = None\n self.file_name = None\n self.save()\n\n def 
delete(self, using=None, keep_parents=False):\n self.clear_file()\n return super().delete(using=using, keep_parents=keep_parents)\n\n\nclass DocumentsPackage(models.Model):\n \"\"\" Модель пакета документов.\n contragent - ID контрагента\n name_uuid - Уникальный ID пакета (каждый раз новый)\n is_active - Является ли пакет активным. Если True, то пакет в работе. Если\n False, то пакет закрыт.\n is_automatic - Создан ли пакет автоматически или пользователь может\n редактировать наборы файлов и некоторые характеристики. Если\n True, то нельзя подгружать свои договора и редактировать\n debt_plan. Если False, то редактирование возможно.\n creation_date - Дата создания пакета.\n debt_plan - Сумма долга. Если is_automatic == True, то значение не\n редактируется. Если is_automatic == False, то значение\n необходимо заполнить.\n debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.\n tax_count - Госпошлина. Можно заполнять в любом случае.\n package_users - Все пользователи пакета, работавшие с ним.\n package_state - Состояние пакета.\n package_state_date - Дата изменения состояния пакета.\n single_files - Пакет одиночных документов. \n pack_files - Пакет наборов файлов.\n other_files - Произвольные файлы.\n commentary - Комментарии.\n \"\"\"\n contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,\n related_name='contragents', related_query_name='contragent', null=\n True, blank=True)\n name_uuid = models.CharField('Идентификатор пакета', max_length=255,\n default=uuid.uuid4, null=True, blank=True, editable=False)\n is_active = models.BooleanField('Активный пакет', default=True)\n is_automatic = models.BooleanField('Создан автоматически', default=True)\n creation_date = models.DateField('Дата создания пакета', auto_now_add=True)\n debt_plan = models.FloatField('Сумма задолжности (плановая)', default=0.0)\n debt_fact = models.FloatField('Сумма задолжности (фактическая)',\n default=0.0)\n tax_count = models.FloatField('Госпошлина', default=0.0)\n package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,\n related_name='packages')\n package_state = models.ForeignKey('State', on_delete=models.CASCADE,\n null=True, blank=True)\n package_state_date = models.DateField('Дата последнего действия', null=\n True, blank=True)\n single_files = GenericRelation(SingleFile)\n pack_files = GenericRelation(PackFile)\n other_files = GenericRelation(OtherFile)\n commentary = GenericRelation(Commentary, related_query_name='package')\n act = models.ForeignKey(ActExam, on_delete=models.CASCADE, null=True,\n blank=True)\n\n def __str__(self):\n return f'Пакет {self.name_uuid}'\n\n def get_save_path(self):\n if self.contragent:\n return os.path.join(self.contragent.get_str_as_path(), str(self\n .name_uuid))\n else:\n return f'{self.name_uuid}'\n\n @classmethod\n def get_active_package(cls, contragent: Contragent):\n try:\n res = cls.objects.get(contragent__id=contragent.pk, is_active=True)\n return res\n except ObjectDoesNotExist:\n return None\n\n def initialize_sub_folders(self):\n os.makedirs(str(self.get_save_path()), exist_ok=True)\n\n def is_user_in_package(self, user, use_department=False):\n users = self.package_users.all()\n if use_department:\n depts = [tmp_user.department for tmp_user in users]\n return user.department in depts or user in users\n return user in users\n\n def set_inactive(self):\n self.is_active = False\n self.save()\n\n def change_state_to(self, new_state, is_backward):\n self.package_state = new_state\n self.package_state_date = datetime.date.today()\n 
if not is_backward:\n async_task(calc_create_gen_async, self.contragent, self, False,\n group=self.name_uuid)\n self.save()\n\n\n class Meta:\n verbose_name_plural = 'Пакеты документов'\n\n\nclass DocumentStateEntity(models.Model):\n documents = models.ManyToManyField('DocumentTypeModel', related_name=\n 'document_type')\n states = models.ForeignKey('State', related_name='states', on_delete=\n models.CASCADE, blank=True, null=True)\n template = models.ForeignKey('DocumentFileTemplate', on_delete=models.\n CASCADE, blank=True, null=True)\n\n\nclass DocumentFileTemplate(models.Model):\n contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n is_package = models.BooleanField('Набор файлов', default=False)\n\n def __str__(self):\n return KLASS_TYPES[self.contagent_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны файлов'\n\n\nclass NormativeCategory(models.Model):\n \"\"\" Класс Категории норматива \"\"\"\n name = models.CharField('Вид объекта', max_length=255)\n norm_type = models.IntegerField('Показатель расчета', default=0,\n choices=NORM_TYPE, blank=True, null=True)\n normative = models.ManyToManyField('Normative', related_name=\n 'normatives', verbose_name='Нормативы')\n\n def __str__(self):\n return self.name\n\n @property\n def print_norm_type(self):\n return NORM_TYPE[self.norm_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Категории нормативов'\n\n\nclass Normative(models.Model):\n \"\"\" Класс норматива \"\"\"\n since_date = models.DateField('Дата начала действия норматива', null=\n True, blank=True)\n up_to_date = models.DateField('Дата окончания действия норматива', null\n =True, blank=True)\n value = models.FloatField('Значение норматива (год.)', null=True, blank\n =True)\n\n def __str__(self):\n return (f'Норматив: {self.value}/год.,' +\n f\" действующий с {self.since_date.strftime('%d.%m.%Y')}\" +\n f\" по {self.up_to_date.strftime('%d.%m.%Y')}\")\n\n\n class Meta:\n verbose_name_plural = 'Нормативы'\n\n\nclass Contract(models.Model):\n \"\"\" Класс контракта. Нужен что бы получать уникальный номер контракта.\n Сохраняет дату когда был создан, для корректной генерации строкового\n представления.\n \"\"\"\n date_field = models.DateField(auto_now_add=True)\n\n def __str__(self):\n return f'{self.pk:06}-{self.date_field.year}/ТКО/01'\n\n\n class Meta:\n verbose_name_plural = 'Сгенерированые номера договоров'\n\n\nclass ContractNumberClass(models.Model):\n \"\"\" Модель класса прокси для соединения класса документа и контрагента.\n\n Принимает на вход необязательные параметры:\n new - определяем, надо генерировать новый номер или есть\n старый. Булево значение. True = генерируем;\n exist_number - существующий номер договора. Строка;\n\n У класса есть такие поля как:\n is_generated - хранит булево значение. Определяет был ли сгенерирован\n номер или взят из внешних источников;\n contract_obj - объект модели самого номера контракта;\n contract_exist_number - существующий номер контракта. 
Пустая строка,\n если мы сгенерировали новый номер;\n contract_number - возвращает строковое представление номера, независимо\n от того, сгенерирован код или получен из внешнего\n источника.\n \"\"\"\n is_generated = models.BooleanField(default=False)\n contract_obj = models.OneToOneField(Contract, on_delete=models.CASCADE,\n null=True, blank=True)\n contract_exist_number = models.CharField(default='', max_length=255,\n null=True, blank=True)\n\n @classmethod\n def create(cls, new: bool=False, exist_number: str=''):\n contract_num_obj = cls(is_generated=new)\n if new:\n contract_num_obj.contract_obj = Contract.objects.create()\n else:\n contract_num_obj.contract_exist_number = exist_number\n contract_num_obj.save()\n return contract_num_obj\n\n @property\n def contract_number(self):\n if self.is_generated:\n return str(self.contract_obj)\n else:\n return self.contract_exist_number\n\n def __str__(self):\n return self.contract_number\n\n\n class Meta:\n verbose_name_plural = 'Номера договоров'\n\n\nclass SyncUniqueNumber(models.Model):\n\n def __str__(self):\n return f'{self.pk:08}/01'\n\n\n class Meta:\n verbose_name_plural = 'Номера документов'\n\n\nclass CityModel(models.Model):\n name = models.CharField('Город', max_length=255, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name_plural = 'Города'\n\n\nclass TemplateModel(models.Model):\n template_path = models.CharField('Путь до шаблона', max_length=255)\n city = models.ForeignKey(CityModel, on_delete=models.CASCADE)\n contragent_type = models.IntegerField('Тип контрагента', choices=\n KLASS_TYPES, default=0)\n document_type = models.ForeignKey('DocumentTypeModel', verbose_name=\n 'Тип документа', on_delete=models.CASCADE)\n\n def __str__(self):\n return (\n f'{str(self.document_type)}| {KLASS_TYPES[self.contragent_type][1]}|{self.city}'\n )\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны документов'\n\n\nclass DocumentTypeModel(models.Model):\n doc_type = models.CharField('Тип документа', max_length=255, null=True,\n blank=True)\n is_pack = models.BooleanField('Пакет документов', default=False)\n\n def __str__(self):\n return self.doc_type\n\n\n class Meta:\n verbose_name_plural = 'Типы документов'\n\n\nclass State(models.Model):\n name_state = models.CharField('Состояние', max_length=255)\n departments = models.ManyToManyField('yellowbird.Department',\n verbose_name='Отделы', related_name='available_states')\n is_initial_state = models.BooleanField('Начальное состояние', default=False\n )\n is_final_state = models.BooleanField('Конечное состояние', default=False)\n\n def get_linked_events(self):\n return Event.objects.filter(from_state=self.id)\n\n def _is_dept_permitted(self, department):\n return department in self.departments.all()\n\n def is_permitted(self, user):\n return user.is_superuser or user.is_staff or self._is_dept_permitted(\n user.department)\n\n def __str__(self):\n return self.name_state\n\n\n class Meta:\n verbose_name_plural = 'Состояния'\n\n\nclass Event(models.Model):\n name_event = models.CharField('Событие', max_length=255)\n from_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Исходное состояние', blank=True, null=True,\n related_name='begin_states')\n to_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Конечное состояние', blank=True, null=True,\n related_name='end_states')\n is_move_backward = models.BooleanField('Двигаемся обратно назад',\n default=False)\n\n def __str__(self):\n return self.name_event\n\n\n 
class Meta:\n verbose_name_plural = 'События'\n\n\nclass ListStrategy(ABC):\n\n @abstractmethod\n def execute_list_strategy(self, user):\n raise NotImplementedError\n\n @abstractmethod\n def execute_single_strategy(self, pk, user):\n raise NotImplementedError\n\n\nclass OnlyEmptyRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return [c for c in contragents if not c.active_package]\n\n def execute_single_strategy(self, pk, user):\n try:\n res = Contragent.objects.get(pk=pk)\n return res if not res.active_package else None\n except Contragent.DoesNotExist:\n return None\n\n\nclass OnlyMyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.filter(current_user=user)\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk, current_user=user)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllInDepartmentRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_list = [(c.department == user.department) for c in\n contragent.current_user.all()]\n if any(tmp_list):\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nclass MyAndEmptyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and user in c.current_user.all():\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user\n ) and user in contragent.current_user.all():\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SignUser(models.Model):\n name = models.CharField('ФИО отвественного лица', max_length=255)\n document = models.IntegerField('Документ основания', choices=DOC_TYPE,\n default=0)\n position = models.IntegerField('Должность', choices=POST_TYPE, default=0)\n doc_number = models.CharField('Номер документа', max_length=255)\n doc_date = models.DateField('Дата начала действия документа')\n address = models.CharField('Адресс', max_length=255)\n city = models.ForeignKey('CityModel', on_delete=models.CASCADE, blank=\n True, null=True)\n tel_number = models.CharField('Телефон', max_length=255, default='')\n sign = models.ImageField('Подпись', upload_to='signs/', blank=True,\n null=True)\n\n def __str__(self):\n return f'{proper_last_name(self.name)}, {POST_TYPE[self.position][1]}'\n\n def save(self, *args, **kwargs):\n instance = SignUser.objects.get(id=self.id)\n if self.sign != instance.sign and instance.sign:\n if os.path.exists(instance.sign.url):\n os.remove(instance.sign.url)\n super().save(*args, **kwargs)\n\n\n class Meta:\n verbose_name_plural = 'Отвественные лица с правом подписи'\n\n\nclass Commentary(models.Model):\n user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, blank=True, null=True)\n commentary_text = models.TextField('Комментарий', blank=True, null=True)\n creation_date = models.DateTimeField('Дата создания', auto_now_add=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n\n\nclass AbstractFileModel(models.Model):\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n creation_date = models.DateField('Дата создания файла', blank=True,\n null=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n file_type = models.ForeignKey('DocumentTypeModel', on_delete=models.CASCADE\n )\n\n def delete(self, using=None, keep_parents=False):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n return super().delete(using=using, keep_parents=keep_parents)\n\n\n class Meta:\n abstract = True\n\n\nclass SingleFile(AbstractFileModel):\n\n def __str__(self):\n return str(self.file_type)\n\n\n class Meta:\n verbose_name_plural = 'Единичные файлы'\n\n\nclass PackFile(AbstractFileModel):\n unique_number = models.ForeignKey('SyncUniqueNumber', on_delete=models.\n CASCADE, null=True, blank=True)\n\n\n class Meta:\n abstract = False\n verbose_name_plural = 'Фаилы набора'\n\n def initialize_folder(self, path: str):\n if self.file_type:\n tmp_str_path = plur_form(self.file_type.doc_type)\n if not os.path.isdir(f'{path}/{tmp_str_path}/'):\n os.makedirs(f'{path}/{tmp_str_path}/')\n else:\n raise AttributeError()\n\n def get_files_path(self, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n self.initialize_folder(tmp_path)\n return os.path.join(tmp_path, f'{plur_form(self.file_type.doc_type)}/')\n\n\n<mask token>\n\n\nclass OtherFile(AbstractFileModel):\n file_obj = models.FileField('Произвольные файлы', upload_to=\n other_files_directory_path, max_length=500)\n commentary = GenericRelation(Commentary, related_query_name='file')\n\n\n class Meta:\n verbose_name_plural = 'Прочие файлы'\n\n\nclass 
ActExam(models.Model):\n FOLDER = 'Акт осмотра/'\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n\n @classmethod\n def initialize_folder(cls, path: str):\n tmp_path = f'{path}/{cls.FOLDER}'\n if not os.path.isdir(tmp_path):\n os.makedirs(tmp_path)\n\n @classmethod\n def get_files_path(cls, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n ActExam.initialize_folder(tmp_path)\n return os.path.join(tmp_path, cls.FOLDER)\n\n def clear_file(self):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n self.file_path = None\n self.file_name = None\n self.save()\n\n def delete(self, using=None, keep_parents=False):\n self.clear_file()\n return super().delete(using=using, keep_parents=keep_parents)\n\n\nclass DocumentsPackage(models.Model):\n \"\"\" Модель пакета документов.\n contragent - ID контрагента\n name_uuid - Уникальный ID пакета (каждый раз новый)\n is_active - Является ли пакет активным. Если True, то пакет в работе. Если\n False, то пакет закрыт.\n is_automatic - Создан ли пакет автоматически или пользователь может\n редактировать наборы файлов и некоторые характеристики. Если\n True, то нельзя подгружать свои договора и редактировать\n debt_plan. Если False, то редактирование возможно.\n creation_date - Дата создания пакета.\n debt_plan - Сумма долга. Если is_automatic == True, то значение не\n редактируется. Если is_automatic == False, то значение\n необходимо заполнить.\n debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.\n tax_count - Госпошлина. Можно заполнять в любом случае.\n package_users - Все пользователи пакета, работавшие с ним.\n package_state - Состояние пакета.\n package_state_date - Дата изменения состояния пакета.\n single_files - Пакет одиночных документов. 
\n pack_files - Пакет наборов файлов.\n other_files - Произвольные файлы.\n commentary - Комментарии.\n \"\"\"\n contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,\n related_name='contragents', related_query_name='contragent', null=\n True, blank=True)\n name_uuid = models.CharField('Идентификатор пакета', max_length=255,\n default=uuid.uuid4, null=True, blank=True, editable=False)\n is_active = models.BooleanField('Активный пакет', default=True)\n is_automatic = models.BooleanField('Создан автоматически', default=True)\n creation_date = models.DateField('Дата создания пакета', auto_now_add=True)\n debt_plan = models.FloatField('Сумма задолжности (плановая)', default=0.0)\n debt_fact = models.FloatField('Сумма задолжности (фактическая)',\n default=0.0)\n tax_count = models.FloatField('Госпошлина', default=0.0)\n package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,\n related_name='packages')\n package_state = models.ForeignKey('State', on_delete=models.CASCADE,\n null=True, blank=True)\n package_state_date = models.DateField('Дата последнего действия', null=\n True, blank=True)\n single_files = GenericRelation(SingleFile)\n pack_files = GenericRelation(PackFile)\n other_files = GenericRelation(OtherFile)\n commentary = GenericRelation(Commentary, related_query_name='package')\n act = models.ForeignKey(ActExam, on_delete=models.CASCADE, null=True,\n blank=True)\n\n def __str__(self):\n return f'Пакет {self.name_uuid}'\n\n def get_save_path(self):\n if self.contragent:\n return os.path.join(self.contragent.get_str_as_path(), str(self\n .name_uuid))\n else:\n return f'{self.name_uuid}'\n\n @classmethod\n def get_active_package(cls, contragent: Contragent):\n try:\n res = cls.objects.get(contragent__id=contragent.pk, is_active=True)\n return res\n except ObjectDoesNotExist:\n return None\n\n def initialize_sub_folders(self):\n os.makedirs(str(self.get_save_path()), exist_ok=True)\n\n def is_user_in_package(self, user, use_department=False):\n users = self.package_users.all()\n if use_department:\n depts = [tmp_user.department for tmp_user in users]\n return user.department in depts or user in users\n return user in users\n\n def set_inactive(self):\n self.is_active = False\n self.save()\n\n def change_state_to(self, new_state, is_backward):\n self.package_state = new_state\n self.package_state_date = datetime.date.today()\n if not is_backward:\n async_task(calc_create_gen_async, self.contragent, self, False,\n group=self.name_uuid)\n self.save()\n\n\n class Meta:\n verbose_name_plural = 'Пакеты документов'\n\n\nclass DocumentStateEntity(models.Model):\n documents = models.ManyToManyField('DocumentTypeModel', related_name=\n 'document_type')\n states = models.ForeignKey('State', related_name='states', on_delete=\n models.CASCADE, blank=True, null=True)\n template = models.ForeignKey('DocumentFileTemplate', on_delete=models.\n CASCADE, blank=True, null=True)\n\n\nclass DocumentFileTemplate(models.Model):\n contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n is_package = models.BooleanField('Набор файлов', default=False)\n\n def __str__(self):\n return KLASS_TYPES[self.contagent_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны файлов'\n\n\nclass NormativeCategory(models.Model):\n \"\"\" Класс Категории норматива \"\"\"\n name = models.CharField('Вид объекта', max_length=255)\n norm_type = models.IntegerField('Показатель расчета', default=0,\n choices=NORM_TYPE, blank=True, null=True)\n normative = models.ManyToManyField('Normative', 
related_name=\n 'normatives', verbose_name='Нормативы')\n\n def __str__(self):\n return self.name\n\n @property\n def print_norm_type(self):\n return NORM_TYPE[self.norm_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Категории нормативов'\n\n\nclass Normative(models.Model):\n \"\"\" Класс норматива \"\"\"\n since_date = models.DateField('Дата начала действия норматива', null=\n True, blank=True)\n up_to_date = models.DateField('Дата окончания действия норматива', null\n =True, blank=True)\n value = models.FloatField('Значение норматива (год.)', null=True, blank\n =True)\n\n def __str__(self):\n return (f'Норматив: {self.value}/год.,' +\n f\" действующий с {self.since_date.strftime('%d.%m.%Y')}\" +\n f\" по {self.up_to_date.strftime('%d.%m.%Y')}\")\n\n\n class Meta:\n verbose_name_plural = 'Нормативы'\n\n\nclass Contract(models.Model):\n \"\"\" Класс контракта. Нужен что бы получать уникальный номер контракта.\n Сохраняет дату когда был создан, для корректной генерации строкового\n представления.\n \"\"\"\n date_field = models.DateField(auto_now_add=True)\n\n def __str__(self):\n return f'{self.pk:06}-{self.date_field.year}/ТКО/01'\n\n\n class Meta:\n verbose_name_plural = 'Сгенерированые номера договоров'\n\n\nclass ContractNumberClass(models.Model):\n \"\"\" Модель класса прокси для соединения класса документа и контрагента.\n\n Принимает на вход необязательные параметры:\n new - определяем, надо генерировать новый номер или есть\n старый. Булево значение. True = генерируем;\n exist_number - существующий номер договора. Строка;\n\n У класса есть такие поля как:\n is_generated - хранит булево значение. Определяет был ли сгенерирован\n номер или взят из внешних источников;\n contract_obj - объект модели самого номера контракта;\n contract_exist_number - существующий номер контракта. 
Пустая строка,\n если мы сгенерировали новый номер;\n contract_number - возвращает строковое представление номера, независимо\n от того, сгенерирован код или получен из внешнего\n источника.\n \"\"\"\n is_generated = models.BooleanField(default=False)\n contract_obj = models.OneToOneField(Contract, on_delete=models.CASCADE,\n null=True, blank=True)\n contract_exist_number = models.CharField(default='', max_length=255,\n null=True, blank=True)\n\n @classmethod\n def create(cls, new: bool=False, exist_number: str=''):\n contract_num_obj = cls(is_generated=new)\n if new:\n contract_num_obj.contract_obj = Contract.objects.create()\n else:\n contract_num_obj.contract_exist_number = exist_number\n contract_num_obj.save()\n return contract_num_obj\n\n @property\n def contract_number(self):\n if self.is_generated:\n return str(self.contract_obj)\n else:\n return self.contract_exist_number\n\n def __str__(self):\n return self.contract_number\n\n\n class Meta:\n verbose_name_plural = 'Номера договоров'\n\n\nclass SyncUniqueNumber(models.Model):\n\n def __str__(self):\n return f'{self.pk:08}/01'\n\n\n class Meta:\n verbose_name_plural = 'Номера документов'\n\n\nclass CityModel(models.Model):\n name = models.CharField('Город', max_length=255, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name_plural = 'Города'\n\n\nclass TemplateModel(models.Model):\n template_path = models.CharField('Путь до шаблона', max_length=255)\n city = models.ForeignKey(CityModel, on_delete=models.CASCADE)\n contragent_type = models.IntegerField('Тип контрагента', choices=\n KLASS_TYPES, default=0)\n document_type = models.ForeignKey('DocumentTypeModel', verbose_name=\n 'Тип документа', on_delete=models.CASCADE)\n\n def __str__(self):\n return (\n f'{str(self.document_type)}| {KLASS_TYPES[self.contragent_type][1]}|{self.city}'\n )\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны документов'\n\n\nclass DocumentTypeModel(models.Model):\n doc_type = models.CharField('Тип документа', max_length=255, null=True,\n blank=True)\n is_pack = models.BooleanField('Пакет документов', default=False)\n\n def __str__(self):\n return self.doc_type\n\n\n class Meta:\n verbose_name_plural = 'Типы документов'\n\n\nclass State(models.Model):\n name_state = models.CharField('Состояние', max_length=255)\n departments = models.ManyToManyField('yellowbird.Department',\n verbose_name='Отделы', related_name='available_states')\n is_initial_state = models.BooleanField('Начальное состояние', default=False\n )\n is_final_state = models.BooleanField('Конечное состояние', default=False)\n\n def get_linked_events(self):\n return Event.objects.filter(from_state=self.id)\n\n def _is_dept_permitted(self, department):\n return department in self.departments.all()\n\n def is_permitted(self, user):\n return user.is_superuser or user.is_staff or self._is_dept_permitted(\n user.department)\n\n def __str__(self):\n return self.name_state\n\n\n class Meta:\n verbose_name_plural = 'Состояния'\n\n\nclass Event(models.Model):\n name_event = models.CharField('Событие', max_length=255)\n from_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Исходное состояние', blank=True, null=True,\n related_name='begin_states')\n to_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Конечное состояние', blank=True, null=True,\n related_name='end_states')\n is_move_backward = models.BooleanField('Двигаемся обратно назад',\n default=False)\n\n def __str__(self):\n return self.name_event\n\n\n 
class Meta:\n verbose_name_plural = 'События'\n\n\nclass ListStrategy(ABC):\n\n @abstractmethod\n def execute_list_strategy(self, user):\n raise NotImplementedError\n\n @abstractmethod\n def execute_single_strategy(self, pk, user):\n raise NotImplementedError\n\n\nclass OnlyEmptyRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return [c for c in contragents if not c.active_package]\n\n def execute_single_strategy(self, pk, user):\n try:\n res = Contragent.objects.get(pk=pk)\n return res if not res.active_package else None\n except Contragent.DoesNotExist:\n return None\n\n\nclass OnlyMyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.filter(current_user=user)\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk, current_user=user)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllInDepartmentRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_list = [(c.department == user.department) for c in\n contragent.current_user.all()]\n if any(tmp_list):\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nclass MyAndEmptyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and user in c.current_user.all():\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user\n ) and user in contragent.current_user.all():\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Contragent(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def check_and_create_parent_folder(self):\n if not os.path.isdir(os.path.join(settings.MEDIA_ROOT, KLASS_TYPES[\n self.klass][1])):\n os.mkdir(os.path.join(settings.MEDIA_ROOT, KLASS_TYPES[self.\n klass][1]), mode=511)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_active_package(self):\n res = DocumentsPackage.get_active_package(self)\n return res\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name_plural = 'Контрагенты'\n\n\nclass SignUser(models.Model):\n name = models.CharField('ФИО отвественного лица', max_length=255)\n document = models.IntegerField('Документ основания', choices=DOC_TYPE,\n default=0)\n position = models.IntegerField('Должность', choices=POST_TYPE, default=0)\n doc_number = models.CharField('Номер документа', max_length=255)\n doc_date = models.DateField('Дата начала действия документа')\n address = models.CharField('Адресс', max_length=255)\n city = models.ForeignKey('CityModel', on_delete=models.CASCADE, blank=\n True, null=True)\n tel_number = models.CharField('Телефон', max_length=255, default='')\n sign = models.ImageField('Подпись', upload_to='signs/', blank=True,\n null=True)\n\n def __str__(self):\n return f'{proper_last_name(self.name)}, {POST_TYPE[self.position][1]}'\n\n def save(self, *args, **kwargs):\n instance = SignUser.objects.get(id=self.id)\n if self.sign != instance.sign and instance.sign:\n if os.path.exists(instance.sign.url):\n os.remove(instance.sign.url)\n super().save(*args, **kwargs)\n\n\n class Meta:\n verbose_name_plural = 'Отвественные лица с правом подписи'\n\n\nclass Commentary(models.Model):\n user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, blank=True, null=True)\n commentary_text = models.TextField('Комментарий', blank=True, null=True)\n creation_date = models.DateTimeField('Дата создания', auto_now_add=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n\n\nclass AbstractFileModel(models.Model):\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n creation_date = models.DateField('Дата создания файла', blank=True,\n null=True)\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n file_type = models.ForeignKey('DocumentTypeModel', on_delete=models.CASCADE\n )\n\n def delete(self, using=None, keep_parents=False):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n return super().delete(using=using, keep_parents=keep_parents)\n\n\n class Meta:\n abstract = True\n\n\nclass SingleFile(AbstractFileModel):\n\n def 
__str__(self):\n return str(self.file_type)\n\n\n class Meta:\n verbose_name_plural = 'Единичные файлы'\n\n\nclass PackFile(AbstractFileModel):\n unique_number = models.ForeignKey('SyncUniqueNumber', on_delete=models.\n CASCADE, null=True, blank=True)\n\n\n class Meta:\n abstract = False\n verbose_name_plural = 'Фаилы набора'\n\n def initialize_folder(self, path: str):\n if self.file_type:\n tmp_str_path = plur_form(self.file_type.doc_type)\n if not os.path.isdir(f'{path}/{tmp_str_path}/'):\n os.makedirs(f'{path}/{tmp_str_path}/')\n else:\n raise AttributeError()\n\n def get_files_path(self, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n self.initialize_folder(tmp_path)\n return os.path.join(tmp_path, f'{plur_form(self.file_type.doc_type)}/')\n\n\n<mask token>\n\n\nclass OtherFile(AbstractFileModel):\n file_obj = models.FileField('Произвольные файлы', upload_to=\n other_files_directory_path, max_length=500)\n commentary = GenericRelation(Commentary, related_query_name='file')\n\n\n class Meta:\n verbose_name_plural = 'Прочие файлы'\n\n\nclass ActExam(models.Model):\n FOLDER = 'Акт осмотра/'\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n file_name = models.CharField('Название файла', max_length=255, null=\n True, blank=True)\n\n @classmethod\n def initialize_folder(cls, path: str):\n tmp_path = f'{path}/{cls.FOLDER}'\n if not os.path.isdir(tmp_path):\n os.makedirs(tmp_path)\n\n @classmethod\n def get_files_path(cls, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n ActExam.initialize_folder(tmp_path)\n return os.path.join(tmp_path, cls.FOLDER)\n\n def clear_file(self):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n self.file_path = None\n self.file_name = None\n self.save()\n\n def delete(self, using=None, keep_parents=False):\n self.clear_file()\n return super().delete(using=using, keep_parents=keep_parents)\n\n\nclass DocumentsPackage(models.Model):\n \"\"\" Модель пакета документов.\n contragent - ID контрагента\n name_uuid - Уникальный ID пакета (каждый раз новый)\n is_active - Является ли пакет активным. Если True, то пакет в работе. Если\n False, то пакет закрыт.\n is_automatic - Создан ли пакет автоматически или пользователь может\n редактировать наборы файлов и некоторые характеристики. Если\n True, то нельзя подгружать свои договора и редактировать\n debt_plan. Если False, то редактирование возможно.\n creation_date - Дата создания пакета.\n debt_plan - Сумма долга. Если is_automatic == True, то значение не\n редактируется. Если is_automatic == False, то значение\n необходимо заполнить.\n debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.\n tax_count - Госпошлина. Можно заполнять в любом случае.\n package_users - Все пользователи пакета, работавшие с ним.\n package_state - Состояние пакета.\n package_state_date - Дата изменения состояния пакета.\n single_files - Пакет одиночных документов. 
\n pack_files - Пакет наборов файлов.\n other_files - Произвольные файлы.\n commentary - Комментарии.\n \"\"\"\n contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,\n related_name='contragents', related_query_name='contragent', null=\n True, blank=True)\n name_uuid = models.CharField('Идентификатор пакета', max_length=255,\n default=uuid.uuid4, null=True, blank=True, editable=False)\n is_active = models.BooleanField('Активный пакет', default=True)\n is_automatic = models.BooleanField('Создан автоматически', default=True)\n creation_date = models.DateField('Дата создания пакета', auto_now_add=True)\n debt_plan = models.FloatField('Сумма задолжности (плановая)', default=0.0)\n debt_fact = models.FloatField('Сумма задолжности (фактическая)',\n default=0.0)\n tax_count = models.FloatField('Госпошлина', default=0.0)\n package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,\n related_name='packages')\n package_state = models.ForeignKey('State', on_delete=models.CASCADE,\n null=True, blank=True)\n package_state_date = models.DateField('Дата последнего действия', null=\n True, blank=True)\n single_files = GenericRelation(SingleFile)\n pack_files = GenericRelation(PackFile)\n other_files = GenericRelation(OtherFile)\n commentary = GenericRelation(Commentary, related_query_name='package')\n act = models.ForeignKey(ActExam, on_delete=models.CASCADE, null=True,\n blank=True)\n\n def __str__(self):\n return f'Пакет {self.name_uuid}'\n\n def get_save_path(self):\n if self.contragent:\n return os.path.join(self.contragent.get_str_as_path(), str(self\n .name_uuid))\n else:\n return f'{self.name_uuid}'\n\n @classmethod\n def get_active_package(cls, contragent: Contragent):\n try:\n res = cls.objects.get(contragent__id=contragent.pk, is_active=True)\n return res\n except ObjectDoesNotExist:\n return None\n\n def initialize_sub_folders(self):\n os.makedirs(str(self.get_save_path()), exist_ok=True)\n\n def is_user_in_package(self, user, use_department=False):\n users = self.package_users.all()\n if use_department:\n depts = [tmp_user.department for tmp_user in users]\n return user.department in depts or user in users\n return user in users\n\n def set_inactive(self):\n self.is_active = False\n self.save()\n\n def change_state_to(self, new_state, is_backward):\n self.package_state = new_state\n self.package_state_date = datetime.date.today()\n if not is_backward:\n async_task(calc_create_gen_async, self.contragent, self, False,\n group=self.name_uuid)\n self.save()\n\n\n class Meta:\n verbose_name_plural = 'Пакеты документов'\n\n\nclass DocumentStateEntity(models.Model):\n documents = models.ManyToManyField('DocumentTypeModel', related_name=\n 'document_type')\n states = models.ForeignKey('State', related_name='states', on_delete=\n models.CASCADE, blank=True, null=True)\n template = models.ForeignKey('DocumentFileTemplate', on_delete=models.\n CASCADE, blank=True, null=True)\n\n\nclass DocumentFileTemplate(models.Model):\n contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n is_package = models.BooleanField('Набор файлов', default=False)\n\n def __str__(self):\n return KLASS_TYPES[self.contagent_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны файлов'\n\n\nclass NormativeCategory(models.Model):\n \"\"\" Класс Категории норматива \"\"\"\n name = models.CharField('Вид объекта', max_length=255)\n norm_type = models.IntegerField('Показатель расчета', default=0,\n choices=NORM_TYPE, blank=True, null=True)\n normative = models.ManyToManyField('Normative', 
related_name=\n 'normatives', verbose_name='Нормативы')\n\n def __str__(self):\n return self.name\n\n @property\n def print_norm_type(self):\n return NORM_TYPE[self.norm_type][1]\n\n\n class Meta:\n verbose_name_plural = 'Категории нормативов'\n\n\nclass Normative(models.Model):\n \"\"\" Класс норматива \"\"\"\n since_date = models.DateField('Дата начала действия норматива', null=\n True, blank=True)\n up_to_date = models.DateField('Дата окончания действия норматива', null\n =True, blank=True)\n value = models.FloatField('Значение норматива (год.)', null=True, blank\n =True)\n\n def __str__(self):\n return (f'Норматив: {self.value}/год.,' +\n f\" действующий с {self.since_date.strftime('%d.%m.%Y')}\" +\n f\" по {self.up_to_date.strftime('%d.%m.%Y')}\")\n\n\n class Meta:\n verbose_name_plural = 'Нормативы'\n\n\nclass Contract(models.Model):\n \"\"\" Класс контракта. Нужен что бы получать уникальный номер контракта.\n Сохраняет дату когда был создан, для корректной генерации строкового\n представления.\n \"\"\"\n date_field = models.DateField(auto_now_add=True)\n\n def __str__(self):\n return f'{self.pk:06}-{self.date_field.year}/ТКО/01'\n\n\n class Meta:\n verbose_name_plural = 'Сгенерированые номера договоров'\n\n\nclass ContractNumberClass(models.Model):\n \"\"\" Модель класса прокси для соединения класса документа и контрагента.\n\n Принимает на вход необязательные параметры:\n new - определяем, надо генерировать новый номер или есть\n старый. Булево значение. True = генерируем;\n exist_number - существующий номер договора. Строка;\n\n У класса есть такие поля как:\n is_generated - хранит булево значение. Определяет был ли сгенерирован\n номер или взят из внешних источников;\n contract_obj - объект модели самого номера контракта;\n contract_exist_number - существующий номер контракта. 
Пустая строка,\n если мы сгенерировали новый номер;\n contract_number - возвращает строковое представление номера, независимо\n от того, сгенерирован код или получен из внешнего\n источника.\n \"\"\"\n is_generated = models.BooleanField(default=False)\n contract_obj = models.OneToOneField(Contract, on_delete=models.CASCADE,\n null=True, blank=True)\n contract_exist_number = models.CharField(default='', max_length=255,\n null=True, blank=True)\n\n @classmethod\n def create(cls, new: bool=False, exist_number: str=''):\n contract_num_obj = cls(is_generated=new)\n if new:\n contract_num_obj.contract_obj = Contract.objects.create()\n else:\n contract_num_obj.contract_exist_number = exist_number\n contract_num_obj.save()\n return contract_num_obj\n\n @property\n def contract_number(self):\n if self.is_generated:\n return str(self.contract_obj)\n else:\n return self.contract_exist_number\n\n def __str__(self):\n return self.contract_number\n\n\n class Meta:\n verbose_name_plural = 'Номера договоров'\n\n\nclass SyncUniqueNumber(models.Model):\n\n def __str__(self):\n return f'{self.pk:08}/01'\n\n\n class Meta:\n verbose_name_plural = 'Номера документов'\n\n\nclass CityModel(models.Model):\n name = models.CharField('Город', max_length=255, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name_plural = 'Города'\n\n\nclass TemplateModel(models.Model):\n template_path = models.CharField('Путь до шаблона', max_length=255)\n city = models.ForeignKey(CityModel, on_delete=models.CASCADE)\n contragent_type = models.IntegerField('Тип контрагента', choices=\n KLASS_TYPES, default=0)\n document_type = models.ForeignKey('DocumentTypeModel', verbose_name=\n 'Тип документа', on_delete=models.CASCADE)\n\n def __str__(self):\n return (\n f'{str(self.document_type)}| {KLASS_TYPES[self.contragent_type][1]}|{self.city}'\n )\n\n\n class Meta:\n verbose_name_plural = 'Шаблоны документов'\n\n\nclass DocumentTypeModel(models.Model):\n doc_type = models.CharField('Тип документа', max_length=255, null=True,\n blank=True)\n is_pack = models.BooleanField('Пакет документов', default=False)\n\n def __str__(self):\n return self.doc_type\n\n\n class Meta:\n verbose_name_plural = 'Типы документов'\n\n\nclass State(models.Model):\n name_state = models.CharField('Состояние', max_length=255)\n departments = models.ManyToManyField('yellowbird.Department',\n verbose_name='Отделы', related_name='available_states')\n is_initial_state = models.BooleanField('Начальное состояние', default=False\n )\n is_final_state = models.BooleanField('Конечное состояние', default=False)\n\n def get_linked_events(self):\n return Event.objects.filter(from_state=self.id)\n\n def _is_dept_permitted(self, department):\n return department in self.departments.all()\n\n def is_permitted(self, user):\n return user.is_superuser or user.is_staff or self._is_dept_permitted(\n user.department)\n\n def __str__(self):\n return self.name_state\n\n\n class Meta:\n verbose_name_plural = 'Состояния'\n\n\nclass Event(models.Model):\n name_event = models.CharField('Событие', max_length=255)\n from_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Исходное состояние', blank=True, null=True,\n related_name='begin_states')\n to_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Конечное состояние', blank=True, null=True,\n related_name='end_states')\n is_move_backward = models.BooleanField('Двигаемся обратно назад',\n default=False)\n\n def __str__(self):\n return self.name_event\n\n\n 
class Meta:\n verbose_name_plural = 'События'\n\n\nclass ListStrategy(ABC):\n\n @abstractmethod\n def execute_list_strategy(self, user):\n raise NotImplementedError\n\n @abstractmethod\n def execute_single_strategy(self, pk, user):\n raise NotImplementedError\n\n\nclass OnlyEmptyRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return [c for c in contragents if not c.active_package]\n\n def execute_single_strategy(self, pk, user):\n try:\n res = Contragent.objects.get(pk=pk)\n return res if not res.active_package else None\n except Contragent.DoesNotExist:\n return None\n\n\nclass OnlyMyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.filter(current_user__contain=user)\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk, current_user__contain=user)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllInDepartmentRecords(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user.department):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_list = [(c.department == user.department) for c in\n contragent.current_user]\n if any(tmp_list):\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nclass MyAndEmptyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and user in c.current_user:\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user\n ) and user in contragent.current_user:\n return contragent\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\n<mask token>\n",
"step-5": "import datetime\nimport os\nimport uuid\nfrom abc import ABC, abstractmethod\n\nfrom django.conf import settings\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.contrib.contenttypes.fields import (GenericForeignKey,\n GenericRelation)\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.db import models\n\nfrom bluebird.templatetags.template_extra_filters import (plur_form,\nproper_last_name)\nfrom bluebird.tasks import calc_create_gen_async\n\nfrom django_q.tasks import async_task\n\nfrom .snippets import str_add_app, KLASS_TYPES, DOC_TYPE\n\n\nNORM_TYPE = [\n (0, '1 м2 общей площади'),\n (1, '1 место'),\n (2, '1 человек'),\n]\n\n\nPOST_TYPE = [\n (0, 'Клиент-менеджер'),\n (1, 'Старший менеджер по работе с ЮЛ'),\n (2, 'Менеджер'),\n]\n\n\nclass Adress(models.Model):\n state = models.CharField(verbose_name=\"Область\", max_length=255)\n city = models.CharField(verbose_name=\"Город\", max_length=255)\n street = models.CharField(verbose_name=\"Улица\", max_length=255)\n block = models.CharField(verbose_name=\"Номер дома\", max_length=10)\n\n\nclass ContragentClass(models.Model):\n name = models.CharField('Наименование', max_length=255)\n\n\nclass Contragent(models.Model):\n \"\"\"\n Класс Контрагента.\n\n \"\"\"\n # klass = models.ForeignKey(ContragentClass, on_delete=models.CASCADE)\n klass = models.IntegerField(choices=KLASS_TYPES, default=0)\n excell_name = models.CharField('Наименование контрагента (из Excell)',\n max_length=255)\n dadata_name = models.CharField('Наименование контрагента (из Dadata)',\n max_length=255, blank=True, null=True)\n debt = models.FloatField('Сумма задолжности', default=0.00)\n debt_period = models.IntegerField('Количество неоплаченных периодов, мес.',\n blank=True, null=True)\n inn = models.BigIntegerField('ИНН контрагента', blank=True, null=True)\n ogrn = models.BigIntegerField('ОГРН контрагента', blank=True, null=True)\n kpp = models.BigIntegerField('КПП контрагента', blank=True, null=True)\n\n rs = models.CharField('Р/с', max_length=255, blank=True, null=True)\n ks = models.CharField('К/с', max_length=255, blank=True, null=True)\n bank = models.CharField('Наименование банка', max_length=255, blank=True,\n null=True)\n bik = models.CharField('БИК', max_length=255, blank=True, null=True)\n opf = models.CharField('ОПФ', max_length=255, blank=True, null=True)\n\n director_status = models.CharField('Директор (физ. лицо либо юр. 
лицо)',\n max_length=255, blank=True, null=True)\n director_name = models.CharField('Имя либо иное наименование директора',\n max_length=255, blank=True, null=True)\n creation_date = models.DateField('Дата создания контрагента (юл)',\n blank=True, null=True)\n is_func = models.BooleanField('Признак активности контрагента',\n default=True)\n okved = models.CharField('ОКВЭД',\n max_length=255, blank=True, null=True)\n\n # TODO REWORK THIS AREA\n physical_address = models.CharField('Физический адресс',\n max_length=255)\n legal_address = models.CharField('Юридический адресс',\n max_length=255, blank=True, null=True)\n # END OF REWORK\n\n norm_value = models.ForeignKey('NormativeCategory',\n related_name='normatives',\n on_delete=models.CASCADE,\n blank=True, null=True)\n stat_value = models.FloatField('Показатель', blank=True, null=True)\n contract_accept_date = models.DateField(\n 'Дата начала оказания услуг',\n default=datetime.date.fromisoformat('2018-07-01'),\n blank=True, null=True\n )\n current_date = models.DateField('Конечная дата оказания услуг',\n default=datetime.date.today, blank=True,\n null=True)\n number_contract = models.OneToOneField('ContractNumberClass',\n on_delete=models.CASCADE,\n max_length=255,\n blank=True, null=True)\n current_contract_date = models.DateField('Дата заключения договора',\n blank=True, null=True)\n signed_user = models.ForeignKey('SignUser', blank=True, null=True,\n on_delete=models.CASCADE,\n related_name='signed')\n\n platform = models.IntegerField('№ площадки',\n blank=True, null=True)\n\n judge_link = models.CharField(verbose_name=\"\", max_length=255,\n blank=True, null=True)\n fss_link = models.CharField(verbose_name=\"\", max_length=255,\n blank=True, null=True)\n\n personal_number = models.CharField(verbose_name=\"Лицевой счет\",\n max_length=255, blank=True, null=True)\n\n passport_number = models.CharField(verbose_name=\"Номер паспорта\",\n max_length=15, blank=True, null=True)\n passport_date = models.DateField(verbose_name=\"Дата выдачи пасспорта\",\n blank=True, null=True)\n passport_origin = models.CharField(verbose_name=\"Кем выдан пасспорт\",\n max_length=15, blank=True, null=True)\n snils = models.CharField(verbose_name=\"СНИЛС\",\n max_length=15, blank=True, null=True)\n\n def create_package_and_folder(self):\n self.check_and_create_parent_folder()\n if not os.path.isdir(self.get_str_as_path()):\n os.mkdir(self.get_str_as_path(), mode=0o777)\n\n def check_and_create_parent_folder(self):\n if not os.path.isdir(os.path.join(settings.MEDIA_ROOT,\n KLASS_TYPES[self.klass][1])):\n os.mkdir(os.path.join(settings.MEDIA_ROOT,\n KLASS_TYPES[self.klass][1]), mode=0o777)\n\n def get_str_as_path(self):\n return os.path.join(os.path.join(settings.MEDIA_ROOT,\n KLASS_TYPES[self.klass][1]),\n f'{self.pk} {self.excell_name}')\n\n @property\n def current_user(self):\n package = self.get_active_package()\n if package:\n res = [user for user in package.package_users.all(\n ) if package.package_state.is_permitted(user)]\n return res\n return None\n\n @current_user.setter\n def current_user(self, user):\n package = self.get_active_package()\n if package and not package.is_user_in_package(user, True):\n package.package_users.add(user)\n package.save()\n\n @property\n def active_package(self):\n return self.get_active_package()\n\n def get_all_packages(self):\n return DocumentsPackage.objects.filter(contragent=self.pk) or None\n\n def get_active_package(self):\n res = DocumentsPackage.get_active_package(self)\n return res\n\n def reset_debt(self):\n 
self.debt = 0\n self.debt_period = 0\n self.save()\n\n def __str__(self):\n return f'{self.excell_name}'\n\n class Meta:\n verbose_name_plural = \"Контрагенты\"\n\n\nclass SignUser(models.Model):\n name = models.CharField('ФИО отвественного лица', max_length=255)\n document = models.IntegerField('Документ основания', choices=DOC_TYPE,\n default=0)\n position = models.IntegerField('Должность', choices=POST_TYPE,\n default=0)\n doc_number = models.CharField('Номер документа', max_length=255)\n doc_date = models.DateField('Дата начала действия документа')\n address = models.CharField('Адресс', max_length=255)\n city = models.ForeignKey('CityModel', on_delete=models.CASCADE,\n blank=True, null=True)\n tel_number = models.CharField('Телефон', max_length=255, default='')\n sign = models.ImageField('Подпись', upload_to='signs/',\n blank=True, null=True)\n\n def __str__(self):\n # return self.name\n return f\"{proper_last_name(self.name)}, {POST_TYPE[self.position][1]}\"\n\n def save(self, *args, **kwargs):\n instance = SignUser.objects.get(id=self.id)\n if self.sign != instance.sign and instance.sign:\n if os.path.exists(instance.sign.url):\n os.remove(instance.sign.url)\n super().save(*args, **kwargs)\n\n class Meta:\n verbose_name_plural = \"Отвественные лица с правом подписи\"\n\n\nclass Commentary(models.Model):\n user = models.ForeignKey(settings.AUTH_USER_MODEL,\n on_delete=models.CASCADE, blank=True, null=True)\n commentary_text = models.TextField('Комментарий', blank=True, null=True)\n creation_date = models.DateTimeField('Дата создания', auto_now_add=True)\n\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n\n\nclass AbstractFileModel(models.Model):\n file_name = models.CharField('Название файла', max_length=255,\n null=True, blank=True)\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n creation_date = models.DateField('Дата создания файла',\n blank=True, null=True)\n\n # Подгрузка произвольного количества файлов\n content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)\n object_id = models.PositiveIntegerField()\n content_object = GenericForeignKey('content_type', 'object_id')\n\n file_type = models.ForeignKey('DocumentTypeModel',\n on_delete=models.CASCADE)\n\n def delete(self, using=None, keep_parents=False):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n return super().delete(using=using, keep_parents=keep_parents)\n\n class Meta:\n abstract = True\n\n\nclass SingleFile(AbstractFileModel):\n\n def __str__(self):\n return str(self.file_type)\n\n class Meta:\n verbose_name_plural = \"Единичные файлы\"\n\n\nclass PackFile(AbstractFileModel):\n unique_number = models.ForeignKey('SyncUniqueNumber',\n on_delete=models.CASCADE,\n null=True, blank=True)\n\n class Meta:\n abstract = False\n verbose_name_plural = \"Фаилы набора\"\n\n def initialize_folder(self, path: str):\n if self.file_type:\n tmp_str_path = plur_form(self.file_type.doc_type)\n if not os.path.isdir(f'{path}/{tmp_str_path}/'):\n os.makedirs(f'{path}/{tmp_str_path}/')\n else:\n raise AttributeError()\n\n def get_files_path(self, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n self.initialize_folder(tmp_path)\n return os.path.join(tmp_path, f'{plur_form(self.file_type.doc_type)}/')\n\n\ndef other_files_directory_path(instance, filename):\n p = instance.content_object.get_save_path()\n 
return '{0}/прочие/{1}'.format(p, filename)\n\n\nclass OtherFile(AbstractFileModel):\n file_obj = models.FileField('Произвольные файлы',\n upload_to=other_files_directory_path,\n max_length=500)\n\n commentary = GenericRelation(Commentary, related_query_name='file')\n\n class Meta:\n verbose_name_plural = \"Прочие файлы\"\n\n\nclass ActExam(models.Model):\n FOLDER = 'Акт осмотра/'\n\n file_path = models.CharField('Путь', max_length=255, blank=True, null=True)\n file_name = models.CharField('Название файла', max_length=255,\n null=True, blank=True)\n\n @classmethod\n def initialize_folder(cls, path: str):\n tmp_path = f'{path}/{cls.FOLDER}'\n if not os.path.isdir(tmp_path):\n os.makedirs(tmp_path)\n\n @classmethod\n def get_files_path(cls, package: 'DocumentsPackage'):\n tmp_path = package.get_save_path()\n ActExam.initialize_folder(tmp_path)\n return os.path.join(tmp_path, cls.FOLDER)\n\n def clear_file(self):\n if os.path.exists(str_add_app(self.file_path)):\n os.remove(str_add_app(self.file_path))\n self.file_path = None\n self.file_name = None\n self.save()\n\n def delete(self, using=None, keep_parents=False):\n self.clear_file()\n return super().delete(using=using, keep_parents=keep_parents)\n\n\nclass DocumentsPackage(models.Model):\n \"\"\" Модель пакета документов.\n contragent - ID контрагента\n name_uuid - Уникальный ID пакета (каждый раз новый)\n is_active - Является ли пакет активным. Если True, то пакет в работе. Если\n False, то пакет закрыт.\n is_automatic - Создан ли пакет автоматически или пользователь может\n редактировать наборы файлов и некоторые характеристики. Если\n True, то нельзя подгружать свои договора и редактировать\n debt_plan. Если False, то редактирование возможно.\n creation_date - Дата создания пакета.\n debt_plan - Сумма долга. Если is_automatic == True, то значение не\n редактируется. Если is_automatic == False, то значение\n необходимо заполнить.\n debt_fact - Сумма долга по факту. Заполняется при сторнировании или оплате.\n tax_count - Госпошлина. Можно заполнять в любом случае.\n package_users - Все пользователи пакета, работавшие с ним.\n package_state - Состояние пакета.\n package_state_date - Дата изменения состояния пакета.\n single_files - Пакет одиночных документов. 
\n pack_files - Пакет наборов файлов.\n other_files - Произвольные файлы.\n commentary - Комментарии.\n \"\"\"\n contragent = models.ForeignKey(Contragent, on_delete=models.CASCADE,\n related_name='contragents',\n related_query_name='contragent',\n null=True, blank=True)\n name_uuid = models.CharField('Идентификатор пакета', max_length=255,\n default=uuid.uuid4, null=True, blank=True,\n editable=False)\n is_active = models.BooleanField('Активный пакет', default=True)\n is_automatic = models.BooleanField('Создан автоматически', default=True)\n creation_date = models.DateField('Дата создания пакета', auto_now_add=True)\n\n debt_plan = models.FloatField('Сумма задолжности (плановая)',\n default=0.00)\n debt_fact = models.FloatField('Сумма задолжности (фактическая)',\n default=0.00)\n tax_count = models.FloatField('Госпошлина', default=0.00)\n\n package_users = models.ManyToManyField(settings.AUTH_USER_MODEL,\n related_name='packages')\n\n package_state = models.ForeignKey('State', on_delete=models.CASCADE,\n null=True, blank=True)\n\n package_state_date = models.DateField('Дата последнего действия',\n null=True, blank=True)\n\n single_files = GenericRelation(SingleFile)\n\n pack_files = GenericRelation(PackFile)\n\n other_files = GenericRelation(OtherFile)\n\n commentary = GenericRelation(Commentary, related_query_name='package')\n \n act = models.ForeignKey(ActExam, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return f'Пакет {self.name_uuid}'\n\n def get_save_path(self):\n if self.contragent:\n return os.path.join(self.contragent.get_str_as_path(),\n str(self.name_uuid))\n else:\n return f'{self.name_uuid}'\n\n @classmethod\n def get_active_package(cls, contragent: Contragent):\n try:\n res = cls.objects.get(contragent__id=contragent.pk, is_active=True)\n return res\n except ObjectDoesNotExist:\n return None\n\n def initialize_sub_folders(self):\n os.makedirs(str(self.get_save_path()), exist_ok=True)\n\n def is_user_in_package(self, user, use_department=False):\n users = self.package_users.all()\n if use_department:\n depts = [tmp_user.department for tmp_user in users]\n return (user.department in depts) or (user in users)\n return user in users\n\n def set_inactive(self):\n self.is_active = False\n self.save()\n\n def change_state_to(self, new_state, is_backward):\n self.package_state = new_state\n self.package_state_date = datetime.date.today()\n if not is_backward:\n async_task(calc_create_gen_async, self.contragent, self, False,\n group=self.name_uuid)\n # TODO Journal log here!\n self.save()\n\n class Meta:\n verbose_name_plural = \"Пакеты документов\"\n\n\nclass DocumentStateEntity(models.Model):\n documents = models.ManyToManyField('DocumentTypeModel',\n related_name='document_type')\n states = models.ForeignKey('State', related_name='states',\n on_delete=models.CASCADE,\n blank=True, null=True)\n template = models.ForeignKey('DocumentFileTemplate',\n on_delete=models.CASCADE,\n blank=True, null=True)\n\n\nclass DocumentFileTemplate(models.Model):\n contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n is_package = models.BooleanField('Набор файлов', default=False)\n\n def __str__(self):\n return KLASS_TYPES[self.contagent_type][1]\n\n class Meta:\n verbose_name_plural = \"Шаблоны файлов\"\n\n# class SingleFilesTemplate(models.Model):\n# contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n\n# def __str__(self):\n# return KLASS_TYPES[self.contagent_type][1]\n\n# class Meta:\n# verbose_name_plural = \"Шаблоны единичных 
файлов\"\n\n\n# class PackFilesTemplate(models.Model):\n# contagent_type = models.IntegerField(choices=KLASS_TYPES, default=0)\n# documents = models.ManyToManyField('DocumentTypeModel',\n# related_name='document_type_pack')\n\n# def __str__(self):\n# return KLASS_TYPES[self.contagent_type][1]\n\n# class Meta:\n# verbose_name_plural = \"Шаблоны наборов файлов\"\n\n\nclass NormativeCategory(models.Model):\n \"\"\" Класс Категории норматива \"\"\"\n name = models.CharField('Вид объекта',\n max_length=255)\n norm_type = models.IntegerField('Показатель расчета', default=0,\n choices=NORM_TYPE, blank=True, null=True)\n normative = models.ManyToManyField('Normative', related_name='normatives',\n verbose_name='Нормативы')\n\n def __str__(self):\n return self.name\n\n @property\n def print_norm_type(self):\n return NORM_TYPE[self.norm_type][1]\n\n class Meta:\n verbose_name_plural = \"Категории нормативов\"\n\n\nclass Normative(models.Model):\n \"\"\" Класс норматива \"\"\"\n since_date = models.DateField('Дата начала действия норматива',\n null=True, blank=True)\n up_to_date = models.DateField('Дата окончания действия норматива',\n null=True, blank=True)\n value = models.FloatField('Значение норматива (год.)',\n null=True, blank=True)\n\n def __str__(self):\n return (f'Норматив: {self.value}/год.,'\n + f' действующий с {self.since_date.strftime(\"%d.%m.%Y\")}'\n + f' по {self.up_to_date.strftime(\"%d.%m.%Y\")}')\n\n class Meta:\n verbose_name_plural = \"Нормативы\"\n\n\nclass Contract(models.Model):\n \"\"\" Класс контракта. Нужен что бы получать уникальный номер контракта.\n Сохраняет дату когда был создан, для корректной генерации строкового\n представления.\n \"\"\"\n date_field = models.DateField(auto_now_add=True)\n\n def __str__(self):\n return f'{self.pk:06}-{(self.date_field).year}/ТКО/01'\n\n class Meta:\n verbose_name_plural = \"Сгенерированые номера договоров\"\n\n\nclass ContractNumberClass(models.Model):\n \"\"\" Модель класса прокси для соединения класса документа и контрагента.\n\n Принимает на вход необязательные параметры:\n new - определяем, надо генерировать новый номер или есть\n старый. Булево значение. True = генерируем;\n exist_number - существующий номер договора. Строка;\n\n У класса есть такие поля как:\n is_generated - хранит булево значение. Определяет был ли сгенерирован\n номер или взят из внешних источников;\n contract_obj - объект модели самого номера контракта;\n contract_exist_number - существующий номер контракта. 
Пустая строка,\n если мы сгенерировали новый номер;\n contract_number - возвращает строковое представление номера, независимо\n от того, сгенерирован код или получен из внешнего\n источника.\n \"\"\"\n is_generated = models.BooleanField(default=False)\n contract_obj = models.OneToOneField(Contract,\n on_delete=models.CASCADE,\n null=True, blank=True)\n contract_exist_number = models.CharField(default='',\n max_length=255,\n null=True, blank=True)\n\n @classmethod\n def create(cls, new: bool = False, exist_number: str = ''):\n contract_num_obj = cls(is_generated=new)\n if new:\n contract_num_obj.contract_obj = Contract.objects.create()\n else:\n contract_num_obj.contract_exist_number = exist_number\n contract_num_obj.save()\n return contract_num_obj\n\n @property\n def contract_number(self):\n if self.is_generated:\n return str(self.contract_obj)\n else:\n return self.contract_exist_number\n\n def __str__(self):\n return self.contract_number\n\n class Meta:\n verbose_name_plural = \"Номера договоров\"\n\n\nclass SyncUniqueNumber(models.Model):\n\n def __str__(self):\n return f'{self.pk:08}/01'\n\n class Meta:\n verbose_name_plural = \"Номера документов\"\n\n\nclass CityModel(models.Model):\n name = models.CharField('Город', max_length=255, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n class Meta:\n verbose_name_plural = \"Города\"\n\n\nclass TemplateModel(models.Model):\n template_path = models.CharField('Путь до шаблона', max_length=255)\n city = models.ForeignKey(CityModel, on_delete=models.CASCADE)\n contragent_type = models.IntegerField('Тип контрагента',\n choices=KLASS_TYPES, default=0)\n document_type = models.ForeignKey('DocumentTypeModel',\n verbose_name='Тип документа',\n on_delete=models.CASCADE)\n\n def __str__(self):\n return f'{str(self.document_type)}|\\\n {KLASS_TYPES[self.contragent_type][1]}|{self.city}'\n\n class Meta:\n verbose_name_plural = \"Шаблоны документов\"\n\n\nclass DocumentTypeModel(models.Model):\n doc_type = models.CharField('Тип документа', max_length=255,\n null=True, blank=True)\n is_pack = models.BooleanField('Пакет документов', default=False)\n\n def __str__(self):\n return self.doc_type\n\n class Meta:\n verbose_name_plural = \"Типы документов\"\n\n\n#########\n# State #\n#########\n\nclass State(models.Model):\n name_state = models.CharField('Состояние', max_length=255)\n departments = models.ManyToManyField('yellowbird.Department',\n verbose_name='Отделы',\n related_name='available_states')\n is_initial_state = models.BooleanField('Начальное состояние',\n default=False)\n is_final_state = models.BooleanField('Конечное состояние', default=False)\n\n def get_linked_events(self):\n return Event.objects.filter(from_state=self.id)\n\n def _is_dept_permitted(self, department):\n return department in self.departments.all()\n\n def is_permitted(self, user):\n return (user.is_superuser or user.is_staff\n or self._is_dept_permitted(user.department))\n\n def __str__(self):\n return self.name_state\n\n class Meta:\n verbose_name_plural = 'Состояния'\n\n\nclass Event(models.Model):\n name_event = models.CharField('Событие', max_length=255)\n from_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Исходное состояние',\n blank=True, null=True,\n related_name='begin_states')\n to_state = models.ForeignKey(State, on_delete=models.CASCADE,\n verbose_name='Конечное состояние',\n blank=True, null=True,\n related_name='end_states')\n is_move_backward = models.BooleanField('Двигаемся обратно назад',\n default=False)\n\n def 
__str__(self):\n return self.name_event\n\n class Meta:\n verbose_name_plural = 'События'\n\n##############\n# Strategies #\n##############\n\n\nclass ListStrategy(ABC):\n\n @abstractmethod\n def execute_list_strategy(self, user):\n raise NotImplementedError\n\n @abstractmethod\n def execute_single_strategy(self, pk, user):\n raise NotImplementedError\n\n\nclass OnlyEmptyRecords(ListStrategy):\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return [c for c in contragents if not c.active_package]\n\n def execute_single_strategy(self, pk, user):\n try:\n res = Contragent.objects.get(pk=pk)\n return res if (not res.active_package) else None\n except Contragent.DoesNotExist:\n return None\n\n\nclass OnlyMyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.filter(current_user__contain=user)\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk, current_user__contain=user)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllRecords(ListStrategy):\n def execute_list_strategy(self, user):\n contragents = Contragent.objects.all()\n return contragents\n\n def execute_single_strategy(self, pk, user):\n try:\n return Contragent.objects.get(pk=pk)\n except Contragent.DoesNotExist:\n return None\n\n\nclass AllInDepartmentRecords(ListStrategy):\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user.department):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_list = [c.department == user.department\n for c in contragent.current_user]\n if any(tmp_list):\n return contragent\n return None\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nclass MyAndEmptyRecordsStrategy(ListStrategy):\n\n def execute_list_strategy(self, user):\n res = list()\n contragents = Contragent.objects.all()\n for c in contragents:\n tmp_pack = c.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and (\n user in c.current_user):\n res.append(c)\n else:\n res.append(c)\n else:\n res.append(c)\n return res\n\n def execute_single_strategy(self, pk, user):\n try:\n contragent = Contragent.objects.get(pk=pk)\n tmp_pack = contragent.get_active_package()\n if tmp_pack:\n tmp_state = tmp_pack.package_state\n if tmp_state:\n if tmp_state.is_permitted(user) and (\n user in contragent.current_user):\n return contragent\n return contragent\n except Contragent.DoesNotExist:\n return None\n\n\nSTRATEGIES_LIST = ['Мои записи и пустые', 'Все по отделу', 'Все',\n 'Только мои записи', 'Только пустые записи']\n\nSTRATEGIES_TUPLES = list(enumerate(STRATEGIES_LIST))\n\nSTRATEGIES_FUNCTIONS = [MyAndEmptyRecordsStrategy, AllInDepartmentRecords,\n AllRecords, OnlyMyRecordsStrategy, OnlyEmptyRecords]\n\nSTRATEGIES = dict(zip(STRATEGIES_LIST, STRATEGIES_FUNCTIONS))\n\nZIP_FILES_ACTIONS = {\n 0: \"Скачать весь пакет\",\n 1: \"Скачать основные файлы\",\n 2: \"Скачать акты\",\n 3: \"Скачать счета\",\n 4: \"Скачать счета фактуры\",\n 5: \"Скачать прочие файлы\",\n}\n",
"step-ids": [
76,
93,
95,
98,
116
]
}
|
[
76,
93,
95,
98,
116
] |
#!/usr/bin/env python
"""
haxor
Unofficial Python wrapper for official Hacker News API
@author avinash sajjanshetty
@email [email protected]
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import json
import sys
import requests
from .settings import supported_api_versions
__all__ = [
'User',
'Item',
'HackerNews',
'InvalidAPIVersion',
'InvalidItemID',
'InvalidUserID']
class InvalidItemID(Exception):
pass
class InvalidUserID(Exception):
pass
class InvalidAPIVersion(Exception):
pass
class HTTPError(Exception):
pass
class HackerNews(object):
def __init__(self, version='v0'):
"""
Args:
version (string): specifies Hacker News API version. Default is `v0`.
Raises:
InvalidAPIVersion: If Hacker News version is not supported.
"""
try:
self.base_url = supported_api_versions[version]
except KeyError:
raise InvalidAPIVersion
def _get(self, url):
"""Internal method used for GET requests
Args:
url (string): URL to send GET.
Returns:
requests' response object
Raises:
HTTPError: If HTTP request failed.
"""
response = requests.get(url)
if response.status_code == requests.codes.ok:
return response
else:
raise HTTPError
def _get_page(self, page):
return self._get('{0}{1}.json'.format(self.base_url, page))
def _get_page_param(self, page, param):
return self._get('{0}{1}/{2}.json'.format(self.base_url, page, param))
def get_item(self, item_id):
"""Returns Hacker News `Item` object.
Args:
item_id (int or string): Unique item id of Hacker News story, comment etc.
Returns:
`Item` object representing Hacker News item.
Raises:
InvalidItemID: If corresponding Hacker News story does not exist.
"""
response = self._get_page_param('item', item_id).json()
if not response:
raise InvalidItemID
return Item(response)
def get_user(self, user_id):
"""Returns Hacker News `User` object.
Args:
user_id (string): unique user id of a Hacker News user.
Returns:
`User` object representing a user on Hacker News.
Raises:
InvalidUserID: If no such user exists on Hacker News.
"""
response = self._get_page_param('user', user_id).json()
if not response:
raise InvalidUserID
return User(response)
def top_stories(self, limit=None):
"""Returns list of item ids of current top stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`list` object containing ids of top stories.
"""
return self._get_page('topstories').json()[:limit]
def new_stories(self, limit=None):
"""Returns list of item ids of current new stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`list` object containing ids of new stories.
"""
return self._get_page('newstories').json()[:limit]
def ask_stories(self, limit=None):
"""Returns list of item ids of latest Ask HN stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`list` object containing ids of Ask HN stories.
"""
return self._get_page('askstories').json()[:limit]
def show_stories(self, limit=None):
"""Returns list of item ids of latest Show HN stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`list` object containing ids of Show HN stories.
"""
return self._get_page('showstories').json()[:limit]
def job_stories(self, limit=None):
"""Returns list of item ids of latest Job stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`list` object containing ids of Job stories.
"""
return self._get_page('jobstories').json()[:limit]
def updates(self):
"""Returns list of item ids and user ids that have been
changed/updated recently.
Returns:
`dict` with two keys whose values are `list` objects
"""
return self._get_page('updates').json()
def get_max_item(self):
"""Returns list of item ids of current top stories
Args:
limit (int): specifies the number of stories to be returned.
Returns:
`int` if successful.
"""
return self._get_page('maxitem').json()
class Item(object):
"""
Represents stories, comments, jobs, Ask HNs and polls
"""
def __init__(self, data):
self.item_id = data.get('id')
self.deleted = data.get('deleted')
self.item_type = data.get('type')
self.by = data.get('by')
self.submission_time = datetime.datetime.fromtimestamp(
data.get(
'time',
0))
self.text = data.get('text')
self.dead = data.get('dead')
self.parent = data.get('parent')
self.kids = data.get('kids')
self.descendants = data.get('descendants')
self.url = data.get('url')
self.score = data.get('score')
self.title = data.get('title')
self.parts = data.get('parts')
self.raw = json.dumps(data)
def __repr__(self):
retval = '<hackernews.Item: {0} - {1}>'.format(
self.item_id, self.title)
if sys.version_info.major < 3:
return retval.encode('utf-8', errors='backslashreplace')
return retval
class User(object):
"""
Represents a hacker i.e. a user on Hacker News
"""
def __init__(self, data):
self.user_id = data.get('id')
self.delay = data.get('delay')
self.created = datetime.datetime.fromtimestamp(data.get('created', 0))
self.karma = data.get('karma')
self.about = data.get('about')
self.submitted = data.get('submitted')
self.raw = json.dumps(data)
def __repr__(self):
retval = '<hackernews.User: {0}>'.format(self.user_id)
if sys.version_info.major < 3:
return retval.encode('utf-8', errors='backslashreplace')
return retval
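# --- Usage sketch (not part of the original module) ---
# A minimal example of the wrapper above; it assumes network access to the
# official Hacker News Firebase API and the default 'v0' version from
# supported_api_versions. The user id 'pg' is only illustrative.
if __name__ == '__main__':
    hn = HackerNews()
    # Print title and score for the five highest-ranked stories.
    for story_id in hn.top_stories(limit=5):
        item = hn.get_item(story_id)
        print(item.title, item.score)
    # Look up a profile; raises InvalidUserID if the user does not exist.
    user = hn.get_user('pg')
    print(user.user_id, user.karma)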
|
normal
|
{
"blob_id": "e14c7eb11c06d6de5c2f9f8adfb8b742fcb432e1",
"index": 8073,
"step-1": "<mask token>\n\n\nclass HackerNews(object):\n <mask token>\n\n def _get(self, url):\n \"\"\"Internal method used for GET requests\n\n Args:\n url (string): URL to send GET.\n\n Returns:\n requests' response object\n\n Raises:\n HTTPError: If HTTP request failed.\n\n \"\"\"\n response = requests.get(url)\n if response.status_code == requests.codes.ok:\n return response\n else:\n raise HTTPError\n <mask token>\n <mask token>\n <mask token>\n\n def get_user(self, user_id):\n \"\"\"Returns Hacker News `User` object.\n\n Args:\n user_id (string): unique user id of a Hacker News user.\n\n Returns:\n `User` object representing a user on Hacker News.\n\n Raises:\n InvalidUserID: If no such user exists on Hacker News.\n\n \"\"\"\n response = self._get_page_param('user', user_id).json()\n if not response:\n raise InvalidUserID\n return User(response)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Item(object):\n \"\"\"\n Represents stories, comments, jobs, Ask HNs and polls\n \"\"\"\n\n def __init__(self, data):\n self.item_id = data.get('id')\n self.deleted = data.get('deleted')\n self.item_type = data.get('type')\n self.by = data.get('by')\n self.submission_time = datetime.datetime.fromtimestamp(data.get(\n 'time', 0))\n self.text = data.get('text')\n self.dead = data.get('dead')\n self.parent = data.get('parent')\n self.kids = data.get('kids')\n self.descendants = data.get('descendants')\n self.url = data.get('url')\n self.score = data.get('score')\n self.title = data.get('title')\n self.parts = data.get('parts')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.Item: {0} - {1}>'.format(self.item_id, self.title\n )\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n\n\nclass User(object):\n \"\"\"\n Represents a hacker i.e. a user on Hacker News\n \"\"\"\n\n def __init__(self, data):\n self.user_id = data.get('id')\n self.delay = data.get('delay')\n self.created = datetime.datetime.fromtimestamp(data.get('created', 0))\n self.karma = data.get('karma')\n self.about = data.get('about')\n self.submitted = data.get('submitted')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.User: {0}>'.format(self.user_id)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n",
"step-2": "<mask token>\n\n\nclass HackerNews(object):\n <mask token>\n\n def _get(self, url):\n \"\"\"Internal method used for GET requests\n\n Args:\n url (string): URL to send GET.\n\n Returns:\n requests' response object\n\n Raises:\n HTTPError: If HTTP request failed.\n\n \"\"\"\n response = requests.get(url)\n if response.status_code == requests.codes.ok:\n return response\n else:\n raise HTTPError\n\n def _get_page(self, page):\n return self._get('{0}{1}.json'.format(self.base_url, page))\n\n def _get_page_param(self, page, param):\n return self._get('{0}{1}/{2}.json'.format(self.base_url, page, param))\n\n def get_item(self, item_id):\n \"\"\"Returns Hacker News `Item` object.\n\n Args:\n item_id (int or string): Unique item id of Hacker News story, comment etc.\n\n Returns:\n `Item` object representing Hacker News item.\n\n Raises:\n InvalidItemID: If corresponding Hacker News story does not exist.\n\n \"\"\"\n response = self._get_page_param('item', item_id).json()\n if not response:\n raise InvalidItemID\n return Item(response)\n\n def get_user(self, user_id):\n \"\"\"Returns Hacker News `User` object.\n\n Args:\n user_id (string): unique user id of a Hacker News user.\n\n Returns:\n `User` object representing a user on Hacker News.\n\n Raises:\n InvalidUserID: If no such user exists on Hacker News.\n\n \"\"\"\n response = self._get_page_param('user', user_id).json()\n if not response:\n raise InvalidUserID\n return User(response)\n <mask token>\n <mask token>\n\n def ask_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Ask HN stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Ask HN stories.\n \"\"\"\n return self._get_page('askstories').json()[:limit]\n <mask token>\n <mask token>\n\n def updates(self):\n \"\"\"Returns list of item ids and user ids that have been\n changed/updated recently.\n\n Returns:\n `dict` with two keys whose values are `list` objects\n \"\"\"\n return self._get_page('updates').json()\n <mask token>\n\n\nclass Item(object):\n \"\"\"\n Represents stories, comments, jobs, Ask HNs and polls\n \"\"\"\n\n def __init__(self, data):\n self.item_id = data.get('id')\n self.deleted = data.get('deleted')\n self.item_type = data.get('type')\n self.by = data.get('by')\n self.submission_time = datetime.datetime.fromtimestamp(data.get(\n 'time', 0))\n self.text = data.get('text')\n self.dead = data.get('dead')\n self.parent = data.get('parent')\n self.kids = data.get('kids')\n self.descendants = data.get('descendants')\n self.url = data.get('url')\n self.score = data.get('score')\n self.title = data.get('title')\n self.parts = data.get('parts')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.Item: {0} - {1}>'.format(self.item_id, self.title\n )\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n\n\nclass User(object):\n \"\"\"\n Represents a hacker i.e. a user on Hacker News\n \"\"\"\n\n def __init__(self, data):\n self.user_id = data.get('id')\n self.delay = data.get('delay')\n self.created = datetime.datetime.fromtimestamp(data.get('created', 0))\n self.karma = data.get('karma')\n self.about = data.get('about')\n self.submitted = data.get('submitted')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.User: {0}>'.format(self.user_id)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n",
"step-3": "<mask token>\n\n\nclass HackerNews(object):\n\n def __init__(self, version='v0'):\n \"\"\"\n Args:\n version (string): specifies Hacker News API version. Default is `v0`.\n\n Raises:\n InvalidAPIVersion: If Hacker News version is not supported.\n\n \"\"\"\n try:\n self.base_url = supported_api_versions[version]\n except KeyError:\n raise InvalidAPIVersion\n\n def _get(self, url):\n \"\"\"Internal method used for GET requests\n\n Args:\n url (string): URL to send GET.\n\n Returns:\n requests' response object\n\n Raises:\n HTTPError: If HTTP request failed.\n\n \"\"\"\n response = requests.get(url)\n if response.status_code == requests.codes.ok:\n return response\n else:\n raise HTTPError\n\n def _get_page(self, page):\n return self._get('{0}{1}.json'.format(self.base_url, page))\n\n def _get_page_param(self, page, param):\n return self._get('{0}{1}/{2}.json'.format(self.base_url, page, param))\n\n def get_item(self, item_id):\n \"\"\"Returns Hacker News `Item` object.\n\n Args:\n item_id (int or string): Unique item id of Hacker News story, comment etc.\n\n Returns:\n `Item` object representing Hacker News item.\n\n Raises:\n InvalidItemID: If corresponding Hacker News story does not exist.\n\n \"\"\"\n response = self._get_page_param('item', item_id).json()\n if not response:\n raise InvalidItemID\n return Item(response)\n\n def get_user(self, user_id):\n \"\"\"Returns Hacker News `User` object.\n\n Args:\n user_id (string): unique user id of a Hacker News user.\n\n Returns:\n `User` object representing a user on Hacker News.\n\n Raises:\n InvalidUserID: If no such user exists on Hacker News.\n\n \"\"\"\n response = self._get_page_param('user', user_id).json()\n if not response:\n raise InvalidUserID\n return User(response)\n <mask token>\n <mask token>\n\n def ask_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Ask HN stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Ask HN stories.\n \"\"\"\n return self._get_page('askstories').json()[:limit]\n <mask token>\n <mask token>\n\n def updates(self):\n \"\"\"Returns list of item ids and user ids that have been\n changed/updated recently.\n\n Returns:\n `dict` with two keys whose values are `list` objects\n \"\"\"\n return self._get_page('updates').json()\n <mask token>\n\n\nclass Item(object):\n \"\"\"\n Represents stories, comments, jobs, Ask HNs and polls\n \"\"\"\n\n def __init__(self, data):\n self.item_id = data.get('id')\n self.deleted = data.get('deleted')\n self.item_type = data.get('type')\n self.by = data.get('by')\n self.submission_time = datetime.datetime.fromtimestamp(data.get(\n 'time', 0))\n self.text = data.get('text')\n self.dead = data.get('dead')\n self.parent = data.get('parent')\n self.kids = data.get('kids')\n self.descendants = data.get('descendants')\n self.url = data.get('url')\n self.score = data.get('score')\n self.title = data.get('title')\n self.parts = data.get('parts')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.Item: {0} - {1}>'.format(self.item_id, self.title\n )\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n\n\nclass User(object):\n \"\"\"\n Represents a hacker i.e. 
a user on Hacker News\n \"\"\"\n\n def __init__(self, data):\n self.user_id = data.get('id')\n self.delay = data.get('delay')\n self.created = datetime.datetime.fromtimestamp(data.get('created', 0))\n self.karma = data.get('karma')\n self.about = data.get('about')\n self.submitted = data.get('submitted')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.User: {0}>'.format(self.user_id)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n",
"step-4": "<mask token>\n\n\nclass HackerNews(object):\n\n def __init__(self, version='v0'):\n \"\"\"\n Args:\n version (string): specifies Hacker News API version. Default is `v0`.\n\n Raises:\n InvalidAPIVersion: If Hacker News version is not supported.\n\n \"\"\"\n try:\n self.base_url = supported_api_versions[version]\n except KeyError:\n raise InvalidAPIVersion\n\n def _get(self, url):\n \"\"\"Internal method used for GET requests\n\n Args:\n url (string): URL to send GET.\n\n Returns:\n requests' response object\n\n Raises:\n HTTPError: If HTTP request failed.\n\n \"\"\"\n response = requests.get(url)\n if response.status_code == requests.codes.ok:\n return response\n else:\n raise HTTPError\n\n def _get_page(self, page):\n return self._get('{0}{1}.json'.format(self.base_url, page))\n\n def _get_page_param(self, page, param):\n return self._get('{0}{1}/{2}.json'.format(self.base_url, page, param))\n\n def get_item(self, item_id):\n \"\"\"Returns Hacker News `Item` object.\n\n Args:\n item_id (int or string): Unique item id of Hacker News story, comment etc.\n\n Returns:\n `Item` object representing Hacker News item.\n\n Raises:\n InvalidItemID: If corresponding Hacker News story does not exist.\n\n \"\"\"\n response = self._get_page_param('item', item_id).json()\n if not response:\n raise InvalidItemID\n return Item(response)\n\n def get_user(self, user_id):\n \"\"\"Returns Hacker News `User` object.\n\n Args:\n user_id (string): unique user id of a Hacker News user.\n\n Returns:\n `User` object representing a user on Hacker News.\n\n Raises:\n InvalidUserID: If no such user exists on Hacker News.\n\n \"\"\"\n response = self._get_page_param('user', user_id).json()\n if not response:\n raise InvalidUserID\n return User(response)\n\n def top_stories(self, limit=None):\n \"\"\"Returns list of item ids of current top stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of top stories.\n \"\"\"\n return self._get_page('topstories').json()[:limit]\n\n def new_stories(self, limit=None):\n \"\"\"Returns list of item ids of current new stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of new stories.\n \"\"\"\n return self._get_page('newstories').json()[:limit]\n\n def ask_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Ask HN stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Ask HN stories.\n \"\"\"\n return self._get_page('askstories').json()[:limit]\n <mask token>\n <mask token>\n\n def updates(self):\n \"\"\"Returns list of item ids and user ids that have been\n changed/updated recently.\n\n Returns:\n `dict` with two keys whose values are `list` objects\n \"\"\"\n return self._get_page('updates').json()\n\n def get_max_item(self):\n \"\"\"Returns list of item ids of current top stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `int` if successful.\n \"\"\"\n return self._get_page('maxitem').json()\n\n\nclass Item(object):\n \"\"\"\n Represents stories, comments, jobs, Ask HNs and polls\n \"\"\"\n\n def __init__(self, data):\n self.item_id = data.get('id')\n self.deleted = data.get('deleted')\n self.item_type = data.get('type')\n self.by = data.get('by')\n self.submission_time = datetime.datetime.fromtimestamp(data.get(\n 'time', 0))\n self.text = data.get('text')\n self.dead = data.get('dead')\n 
self.parent = data.get('parent')\n self.kids = data.get('kids')\n self.descendants = data.get('descendants')\n self.url = data.get('url')\n self.score = data.get('score')\n self.title = data.get('title')\n self.parts = data.get('parts')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.Item: {0} - {1}>'.format(self.item_id, self.title\n )\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n\n\nclass User(object):\n \"\"\"\n Represents a hacker i.e. a user on Hacker News\n \"\"\"\n\n def __init__(self, data):\n self.user_id = data.get('id')\n self.delay = data.get('delay')\n self.created = datetime.datetime.fromtimestamp(data.get('created', 0))\n self.karma = data.get('karma')\n self.about = data.get('about')\n self.submitted = data.get('submitted')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.User: {0}>'.format(self.user_id)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n",
"step-5": "#!/usr/bin/env python\n\n\"\"\"\nhaxor\nUnofficial Python wrapper for official Hacker News API\n\n@author avinash sajjanshetty\n@email [email protected]\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import unicode_literals\nimport datetime\nimport json\nimport sys\n\nimport requests\n\nfrom .settings import supported_api_versions\n\n__all__ = [\n 'User',\n 'Item',\n 'HackerNews',\n 'InvalidAPIVersion',\n 'InvalidItemID',\n 'InvalidUserID']\n\n\nclass InvalidItemID(Exception):\n pass\n\n\nclass InvalidUserID(Exception):\n pass\n\n\nclass InvalidAPIVersion(Exception):\n pass\n\n\nclass HTTPError(Exception):\n pass\n\n\nclass HackerNews(object):\n\n def __init__(self, version='v0'):\n \"\"\"\n Args:\n version (string): specifies Hacker News API version. Default is `v0`.\n\n Raises:\n InvalidAPIVersion: If Hacker News version is not supported.\n\n \"\"\"\n try:\n self.base_url = supported_api_versions[version]\n except KeyError:\n raise InvalidAPIVersion\n\n def _get(self, url):\n \"\"\"Internal method used for GET requests\n\n Args:\n url (string): URL to send GET.\n\n Returns:\n requests' response object\n\n Raises:\n HTTPError: If HTTP request failed.\n\n \"\"\"\n response = requests.get(url)\n if response.status_code == requests.codes.ok:\n return response\n else:\n raise HTTPError\n\n def _get_page(self, page):\n return self._get('{0}{1}.json'.format(self.base_url, page))\n\n def _get_page_param(self, page, param):\n return self._get('{0}{1}/{2}.json'.format(self.base_url, page, param))\n\n def get_item(self, item_id):\n \"\"\"Returns Hacker News `Item` object.\n\n Args:\n item_id (int or string): Unique item id of Hacker News story, comment etc.\n\n Returns:\n `Item` object representing Hacker News item.\n\n Raises:\n InvalidItemID: If corresponding Hacker News story does not exist.\n\n \"\"\"\n\n response = self._get_page_param('item', item_id).json()\n\n if not response:\n raise InvalidItemID\n\n return Item(response)\n\n def get_user(self, user_id):\n \"\"\"Returns Hacker News `User` object.\n\n Args:\n user_id (string): unique user id of a Hacker News user.\n\n Returns:\n `User` object representing a user on Hacker News.\n\n Raises:\n InvalidUserID: If no such user exists on Hacker News.\n\n \"\"\"\n response = self._get_page_param('user', user_id).json()\n\n if not response:\n raise InvalidUserID\n\n return User(response)\n\n def top_stories(self, limit=None):\n \"\"\"Returns list of item ids of current top stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of top stories.\n \"\"\"\n return self._get_page('topstories').json()[:limit]\n\n def new_stories(self, limit=None):\n \"\"\"Returns list of item ids of current new stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of new stories.\n \"\"\"\n return self._get_page('newstories').json()[:limit]\n\n def ask_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Ask HN stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Ask HN stories.\n \"\"\"\n return self._get_page('askstories').json()[:limit]\n\n def show_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Show HN stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Show HN stories.\n \"\"\"\n return 
self._get_page('showstories').json()[:limit]\n\n def job_stories(self, limit=None):\n \"\"\"Returns list of item ids of latest Job stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `list` object containing ids of Job stories.\n \"\"\"\n return self._get_page('jobstories').json()[:limit]\n\n def updates(self):\n \"\"\"Returns list of item ids and user ids that have been\n changed/updated recently.\n\n Returns:\n `dict` with two keys whose values are `list` objects\n \"\"\"\n return self._get_page('updates').json()\n\n def get_max_item(self):\n \"\"\"Returns list of item ids of current top stories\n\n Args:\n limit (int): specifies the number of stories to be returned.\n\n Returns:\n `int` if successful.\n \"\"\"\n return self._get_page('maxitem').json()\n\n\nclass Item(object):\n\n \"\"\"\n Represents stories, comments, jobs, Ask HNs and polls\n \"\"\"\n\n def __init__(self, data):\n self.item_id = data.get('id')\n self.deleted = data.get('deleted')\n self.item_type = data.get('type')\n self.by = data.get('by')\n self.submission_time = datetime.datetime.fromtimestamp(\n data.get(\n 'time',\n 0))\n self.text = data.get('text')\n self.dead = data.get('dead')\n self.parent = data.get('parent')\n self.kids = data.get('kids')\n self.descendants = data.get('descendants')\n self.url = data.get('url')\n self.score = data.get('score')\n self.title = data.get('title')\n self.parts = data.get('parts')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.Item: {0} - {1}>'.format(\n self.item_id, self.title)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n\n\nclass User(object):\n\n \"\"\"\n Represents a hacker i.e. a user on Hacker News\n \"\"\"\n\n def __init__(self, data):\n self.user_id = data.get('id')\n self.delay = data.get('delay')\n self.created = datetime.datetime.fromtimestamp(data.get('created', 0))\n self.karma = data.get('karma')\n self.about = data.get('about')\n self.submitted = data.get('submitted')\n self.raw = json.dumps(data)\n\n def __repr__(self):\n retval = '<hackernews.User: {0}>'.format(self.user_id)\n if sys.version_info.major < 3:\n return retval.encode('utf-8', errors='backslashreplace')\n return retval\n",
"step-ids": [
11,
16,
17,
20,
29
]
}
|
[
11,
16,
17,
20,
29
] |
from __future__ import annotations
import asyncio
import signal
from functools import wraps
from typing import TYPE_CHECKING, Awaitable, Callable
import click
from .utils import import_obj
if TYPE_CHECKING:
from donald.manager import Donald
from .types import TV
def import_manager(path: str) -> Donald:
"""Import a manager from a python path."""
manager: Donald = import_obj(path)
return manager
def process_await(fn: Callable[..., Awaitable[TV]]) -> Callable[..., TV]:
    """Wrap an async click command so it runs on the shared event loop."""
    @wraps(fn)
    @click.pass_context
    def wrapper(ctx, *args, **kwargs):
loop = ctx.obj["loop"]
return loop.run_until_complete(fn(ctx, *args, **kwargs))
return wrapper
@click.group()
@click.option(
"-M",
"--manager",
"manager",
required=True,
help="Python path to the manager",
)
@click.pass_context
def cli(ctx: click.Context, manager: str):
ctx.obj["manager"] = import_manager(manager)
@cli.command(help="Launch a worker")
@click.option("-S", "--scheduler", "scheduler", is_flag=True, help="Start a scheduler")
@process_await
async def worker(ctx: click.Context, *, scheduler: bool = False, **params):
"""Launch a worker."""
loop = ctx.obj["loop"]
    async def stop():
        # Remove the handlers first so a repeated signal can't re-enter shutdown.
        loop.remove_signal_handler(signal.SIGTERM)
        loop.remove_signal_handler(signal.SIGINT)
        # `worker` and `manager` are bound below; `stop` only runs once they exist.
        await worker.stop()
        if scheduler:
            await manager.scheduler.stop()
        await manager.stop()
    # Shut down gracefully on Ctrl-C or SIGTERM.
    loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))
    loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))
manager: Donald = ctx.obj["manager"]
await manager.start()
if scheduler:
manager.scheduler.start()
worker = manager.create_worker(show_banner=True, **params)
worker.start()
await worker.wait()
@cli.command(help="Launch a scheduler")
@process_await
async def scheduler(ctx: click.Context):
loop = ctx.obj["loop"]
async def stop():
loop.remove_signal_handler(signal.SIGTERM)
loop.remove_signal_handler(signal.SIGINT)
await manager.scheduler.stop()
await manager.stop()
loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))
loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))
manager: Donald = ctx.obj["manager"]
await manager.start()
manager.scheduler.start()
await manager.scheduler.wait()
def main():
    # asyncio.get_event_loop() is deprecated when no loop is running;
    # create and register a fresh loop explicitly instead.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    cli(obj={"loop": loop})
if __name__ == "__main__":
main()
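# --- Invocation sketch (not part of the original module) ---
# Drives the CLI in-process via click's real testing helper, CliRunner.
# The manager path "myapp.tasks.manager" is a hypothetical placeholder, so
# this only illustrates the wiring (the worker command would otherwise run
# until signalled); it is not a runnable deployment.
def _demo_invoke():
    from click.testing import CliRunner
    loop = asyncio.new_event_loop()
    runner = CliRunner()
    # Equivalent to the shell command: donald -M myapp.tasks.manager worker -S
    result = runner.invoke(
        cli, ["-M", "myapp.tasks.manager", "worker", "-S"], obj={"loop": loop}
    )
    print(result.exit_code, result.output)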
|
normal
|
{
"blob_id": "3da4896f368f067a339db5cc89201c93ba8166ce",
"index": 6220,
"step-1": "<mask token>\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\[email protected]()\[email protected]('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\[email protected]_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n<mask token>\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\[email protected]()\[email protected]('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\[email protected]_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n<mask token>\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\n<mask token>\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from donald.manager import Donald\n from .types import TV\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\[email protected]()\[email protected]('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\[email protected]_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\[email protected](help='Launch a worker')\[email protected]('-S', '--scheduler', 'scheduler', is_flag=True, help=\n 'Start a scheduler')\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool=False, **params):\n \"\"\"Launch a worker.\"\"\"\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n await worker.wait()\n\n\[email protected](help='Launch a scheduler')\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from __future__ import annotations\nimport asyncio\nimport signal\nfrom functools import wraps\nfrom typing import TYPE_CHECKING, Awaitable, Callable\nimport click\nfrom .utils import import_obj\nif TYPE_CHECKING:\n from donald.manager import Donald\n from .types import TV\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\[email protected]()\[email protected]('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\[email protected]_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\[email protected](help='Launch a worker')\[email protected]('-S', '--scheduler', 'scheduler', is_flag=True, help=\n 'Start a scheduler')\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool=False, **params):\n \"\"\"Launch a worker.\"\"\"\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n await worker.wait()\n\n\[email protected](help='Launch a scheduler')\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from __future__ import annotations\n\nimport asyncio\nimport signal\nfrom functools import wraps\nfrom typing import TYPE_CHECKING, Awaitable, Callable\n\nimport click\n\nfrom .utils import import_obj\n\nif TYPE_CHECKING:\n from donald.manager import Donald\n\n from .types import TV\n\n\ndef import_manager(path: str) -> Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) -> Callable[..., TV]:\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj[\"loop\"]\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n\n return wrapper\n\n\[email protected]()\[email protected](\n \"-M\",\n \"--manager\",\n \"manager\",\n required=True,\n help=\"Python path to the manager\",\n)\[email protected]_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj[\"manager\"] = import_manager(manager)\n\n\[email protected](help=\"Launch a worker\")\[email protected](\"-S\", \"--scheduler\", \"scheduler\", is_flag=True, help=\"Start a scheduler\")\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool = False, **params):\n \"\"\"Launch a worker.\"\"\"\n\n loop = ctx.obj[\"loop\"]\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n\n loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))\n\n manager: Donald = ctx.obj[\"manager\"]\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n\n await worker.wait()\n\n\[email protected](help=\"Launch a scheduler\")\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj[\"loop\"]\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n\n loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))\n\n manager: Donald = ctx.obj[\"manager\"]\n await manager.start()\n\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={\"loop\": loop})\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
'''
fix a time and then draw the instant geopotential (contour) from
/gws/nopw/j04/ncas_generic/users/renql/ERA5_subdaily/ERA5_NH_z_1989.nc,
spatial filtered relative vorticity (shaded) from
~/ERA5-1HR-lev/ERA5_VOR850_1hr_1995_DET/ERA5_VOR850_1hr_1995_DET_T63filt.nc
and identified feature points from
~/ERA5-1HR-lev/ERA5_VOR850_1hr_1995_DET/fft_trs_pos
Loop through the height (850, 500, 250)
20211116
'''
import sys
import subprocess
import xarray as xr
import numpy as np
import pandas as pd
from datetime import datetime
import gc #garbage collector
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import colors
import cartopy.crs as ccrs
import cartopy.feature as cfeat
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import cmaps
from PIL import Image, ImageDraw, ImageSequence
def calc_frames(new_time):
old_time = datetime(new_time.year-1, 11, 30, 23)
days = (new_time - old_time).days
sec = (new_time - old_time).seconds
hours = days * 24 + sec/3600
return int(hours)
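# For example (assuming the filtered files count frames from 23:00 on 30 Nov
# of the previous year): calc_frames(datetime(1995, 1, 1, 0)) returns 745,
# i.e. 31 days * 24 h + 1 h.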
def read_point_fixtime(filname,fixtime,flonl,flonr,flats,flatn):
ff = open(filname,"r")
line1 = ff.readline()
line2 = ff.readline()
line3 = ff.readline()
line4 = ff.readline()
plat = []
plon = []
line = ff.readline()
while line:
if line.strip().split(" ")[0] == "TRACK_ID":
num = int(ff.readline().strip().split(" ")[-1])
for nl in range(0,num,1):
data = list(map(float,ff.readline().strip().split(" ")))
if str(int(data[0])) == fixtime and \
data[1]<=flonr and data[1] >= flonl and data[2]<=flatn and data[2]>=flats :
plat.append(data[2])
plon.append(data[1])
line = ff.readline()
ff.close()
print("%s total feature point in %s : %d"%(filname,fixtime,len(plat)))
return plat, plon
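# The parser above assumes TRACK-style output: a four-line header, "TRACK_ID"
# records, and point lines whose first three fields are time (YYYYMMDDHH),
# longitude and latitude.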
lonl=0 #0 #
lonr=150#360#
lats=15 #0 #
latn=70 #90 #
lat_sp = 20
lon_sp = 30
nrow = 3
ncol = 1
bmlo = 0.1
title_font=18
label_font=14
dtime = pd.date_range(start='1995-01-01 00',periods=60, freq='6H',closed=None)
#dtime = pd.date_range(start='1995-01-01 00',end='1995-01-15 00', freq='6H',closed=None)
create_gif = True #False#
nfilt="T63"
lev = [850,500,250]
cnlvl =[[-8 ,1 ]]
cnlvl2 = [30,50,100]
varname = 'z'
path = '/home/users/qd201969/ERA5-1HR-lev/'
datapath = "/gws/nopw/j04/ncas_generic/users/renql/"#t/ERA5_NH_t_1989.nc
figdir = "/home/users/qd201969/uor_track/fig/"
f = xr.open_dataset("%sERA5_subdaily/%s/ERA5_NH_%s_%d.nc"%(datapath,varname,varname,dtime[0].year))
lat = f['latitude'].data
lon = f['longitude'].data
ilon = lon[(lon>=lonl) & (lon<=lonr)]
ilat = lat[(lat>=lats) & (lat<=latn)]
ds = xr.open_dataset("/home/users/qd201969/gtopo30_0.9x1.25.nc")
phis = ds['PHIS'].sel(lon=ilon,lat=ilat,method="nearest").load()
phis = phis/9.8 # transfer from m2/s2 to m
del ds
gc.collect()
nl = 0
fcolors = cmaps.BlueDarkRed18
cnlevels = np.arange(cnlvl[nl][0], cnlvl[nl][0]+cnlvl[nl][1]*(fcolors.N-1), cnlvl[nl][1])
norm = colors.BoundaryNorm(boundaries=cnlevels, ncolors=fcolors.N,extend='both')
params = {'legend.fontsize': label_font,
'axes.labelsize': label_font,
'axes.titlesize':label_font,
'xtick.labelsize':label_font,
'ytick.labelsize':label_font}
plt.rcParams.update(params)
for nt in range(len(dtime)):
fig = plt.figure(figsize=(12,12),dpi=100)
ax = fig.subplots(nrow,ncol, subplot_kw=dict(projection=ccrs.PlateCarree())) #sharex=True, sharey=True
for nl in range(len(lev)):
var = f[varname].sel(time=dtime[nt],level=lev[nl],longitude=ilon,latitude=ilat)
var.data = var.data/9.8
path2 = "%sERA5_VOR%d_1hr_%d_DET/"%(path,lev[nl],dtime[nt].year)
plat, plon = read_point_fixtime(path2+"fft_trs_pos",dtime[nt].strftime('%Y%m%d%H'),lonl,lonr,lats,latn)
fvor = xr.open_dataset("%sERA5_VOR%d_1hr_%d_DET_%sfilt.nc"%(path2,lev[nl],dtime[nt].year,nfilt))
var1 = fvor['var'].sel(time=calc_frames(dtime[nt]),level = 1,lon=ilon,lat=ilat,method="nearest").load()
#fvor = xr.open_dataset("%sERA5_VOR_1h_dec_jan/ERA5_VOR%d_1hr_dec-jan%d_DET.nc"%(datapath,lev[nl],dtime[nt].year))
#var1 = fvor['var138'].sel(time=dtime[nt],lev=float(lev[nl]*100),lat=ilat,lon=ilon,method="nearest").load()
var1.values = var1.values*1e5
axe = ax[nl]
axe.add_feature(cfeat.COASTLINE.with_scale('110m'),edgecolor='black', linewidth=0.8, zorder=1)
axe.set_title("%s %dhPa (%d)"%(dtime[nt].strftime('%Y-%m-%d-%H:00'), lev[nl], len(plat)),fontsize=title_font)
shad = axe.contourf(ilon, ilat, var1, cnlevels,
transform=ccrs.PlateCarree(),cmap=fcolors,extend='both',norm=norm)
cont = axe.contour(ilon, ilat, var, np.arange(1000,15000,cnlvl2[nl]),
transform=ccrs.PlateCarree(), colors='gray', linewidths=1.5)
#pint = axe.plot(plon,plat,color='darkviolet', marker='o', markersize=12, transform=ccrs.PlateCarree())
pint = axe.scatter(plon,plat,10.0**2,color='k', marker='o', transform=ccrs.PlateCarree())
topo = axe.contour(ilon, ilat, phis, [1500,3000],
transform=ccrs.PlateCarree(),colors='black',linewidths=1.2)
axe.set_yticks(np.arange(lats,latn,lat_sp), crs=ccrs.PlateCarree())
axe.yaxis.set_major_formatter(LatitudeFormatter(degree_symbol=''))
axe.set_xticks(np.arange(lonl,lonr,lon_sp), crs=ccrs.PlateCarree())
axe.xaxis.set_major_formatter(LongitudeFormatter(degree_symbol=''))
position = fig.add_axes([0.85, bmlo+0.1, 0.015, 0.7]) #left, bottom, width, height
cb = plt.colorbar(shad, cax=position ,orientation='vertical')#, shrink=.9)
cb.set_label(label='T5~63 Relative Vort (1e5)', size=label_font) #, weight='bold'
plt.tight_layout(rect=(0,bmlo,1,1))
plt.savefig(figdir+"filt_vor_%s.png"%(dtime[nt].strftime('%Y%m%d%H')), bbox_inches='tight',pad_inches=0.01)
if create_gif == True:
figname = figdir+"filt_vor_*.png"
fn_stream = subprocess.check_output("ls "+figname, shell=True).decode('utf-8')
fn_list = fn_stream.split()
print(fn_list[0])
print('filenumber : '+str(len(fn_list)))
gif_name = figname.rsplit("_",1)[0]+".gif"
frames = []
for itm in fn_list:
frame = Image.open(itm)
frames.append(frame)
frames[0].save(gif_name, save_all=True, append_images=frames[1:],\
duration = 1000, loop=0, disposal=1)
subprocess.run('rm -f %s'%(figname),shell=True)
|
normal
|
{
"blob_id": "09a468e11651eb60e0805c151bda270e0ebecca9",
"index": 4853,
"step-1": "<mask token>\n\n\ndef calc_frames(new_time):\n old_time = datetime(new_time.year - 1, 11, 30, 23)\n days = (new_time - old_time).days\n sec = (new_time - old_time).seconds\n hours = days * 24 + sec / 3600\n return int(hours)\n\n\ndef read_point_fixtime(filname, fixtime, flonl, flonr, flats, flatn):\n ff = open(filname, 'r')\n line1 = ff.readline()\n line2 = ff.readline()\n line3 = ff.readline()\n line4 = ff.readline()\n plat = []\n plon = []\n line = ff.readline()\n while line:\n if line.strip().split(' ')[0] == 'TRACK_ID':\n num = int(ff.readline().strip().split(' ')[-1])\n for nl in range(0, num, 1):\n data = list(map(float, ff.readline().strip().split(' ')))\n if str(int(data[0])) == fixtime and data[1] <= flonr and data[1\n ] >= flonl and data[2] <= flatn and data[2] >= flats:\n plat.append(data[2])\n plon.append(data[1])\n line = ff.readline()\n ff.close()\n print('%s total feature point in %s : %d' % (filname, fixtime, len(plat)))\n return plat, plon\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef calc_frames(new_time):\n old_time = datetime(new_time.year - 1, 11, 30, 23)\n days = (new_time - old_time).days\n sec = (new_time - old_time).seconds\n hours = days * 24 + sec / 3600\n return int(hours)\n\n\ndef read_point_fixtime(filname, fixtime, flonl, flonr, flats, flatn):\n ff = open(filname, 'r')\n line1 = ff.readline()\n line2 = ff.readline()\n line3 = ff.readline()\n line4 = ff.readline()\n plat = []\n plon = []\n line = ff.readline()\n while line:\n if line.strip().split(' ')[0] == 'TRACK_ID':\n num = int(ff.readline().strip().split(' ')[-1])\n for nl in range(0, num, 1):\n data = list(map(float, ff.readline().strip().split(' ')))\n if str(int(data[0])) == fixtime and data[1] <= flonr and data[1\n ] >= flonl and data[2] <= flatn and data[2] >= flats:\n plat.append(data[2])\n plon.append(data[1])\n line = ff.readline()\n ff.close()\n print('%s total feature point in %s : %d' % (filname, fixtime, len(plat)))\n return plat, plon\n\n\n<mask token>\ndel ds\ngc.collect()\n<mask token>\nplt.rcParams.update(params)\nfor nt in range(len(dtime)):\n fig = plt.figure(figsize=(12, 12), dpi=100)\n ax = fig.subplots(nrow, ncol, subplot_kw=dict(projection=ccrs.\n PlateCarree()))\n for nl in range(len(lev)):\n var = f[varname].sel(time=dtime[nt], level=lev[nl], longitude=ilon,\n latitude=ilat)\n var.data = var.data / 9.8\n path2 = '%sERA5_VOR%d_1hr_%d_DET/' % (path, lev[nl], dtime[nt].year)\n plat, plon = read_point_fixtime(path2 + 'fft_trs_pos', dtime[nt].\n strftime('%Y%m%d%H'), lonl, lonr, lats, latn)\n fvor = xr.open_dataset('%sERA5_VOR%d_1hr_%d_DET_%sfilt.nc' % (path2,\n lev[nl], dtime[nt].year, nfilt))\n var1 = fvor['var'].sel(time=calc_frames(dtime[nt]), level=1, lon=\n ilon, lat=ilat, method='nearest').load()\n var1.values = var1.values * 100000.0\n axe = ax[nl]\n axe.add_feature(cfeat.COASTLINE.with_scale('110m'), edgecolor=\n 'black', linewidth=0.8, zorder=1)\n axe.set_title('%s %dhPa (%d)' % (dtime[nt].strftime(\n '%Y-%m-%d-%H:00'), lev[nl], len(plat)), fontsize=title_font)\n shad = axe.contourf(ilon, ilat, var1, cnlevels, transform=ccrs.\n PlateCarree(), cmap=fcolors, extend='both', norm=norm)\n cont = axe.contour(ilon, ilat, var, np.arange(1000, 15000, cnlvl2[\n nl]), transform=ccrs.PlateCarree(), colors='gray', linewidths=1.5)\n pint = axe.scatter(plon, plat, 10.0 ** 2, color='k', marker='o',\n transform=ccrs.PlateCarree())\n topo = axe.contour(ilon, ilat, phis, [1500, 3000], transform=ccrs.\n PlateCarree(), colors='black', linewidths=1.2)\n axe.set_yticks(np.arange(lats, latn, lat_sp), crs=ccrs.PlateCarree())\n axe.yaxis.set_major_formatter(LatitudeFormatter(degree_symbol=''))\n axe.set_xticks(np.arange(lonl, lonr, lon_sp), crs=ccrs.PlateCarree())\n axe.xaxis.set_major_formatter(LongitudeFormatter(degree_symbol=''))\n position = fig.add_axes([0.85, bmlo + 0.1, 0.015, 0.7])\n cb = plt.colorbar(shad, cax=position, orientation='vertical')\n cb.set_label(label='T5~63 Relative Vort (1e5)', size=label_font)\n plt.tight_layout(rect=(0, bmlo, 1, 1))\n plt.savefig(figdir + 'filt_vor_%s.png' % dtime[nt].strftime('%Y%m%d%H'),\n bbox_inches='tight', pad_inches=0.01)\nif create_gif == True:\n figname = figdir + 'filt_vor_*.png'\n fn_stream = subprocess.check_output('ls ' + figname, shell=True).decode(\n 'utf-8')\n fn_list = fn_stream.split()\n print(fn_list[0])\n print('filenumber : ' + str(len(fn_list)))\n gif_name = figname.rsplit('_', 1)[0] + '.gif'\n frames = []\n for itm in fn_list:\n frame = Image.open(itm)\n frames.append(frame)\n frames[0].save(gif_name, 
save_all=True, append_images=frames[1:],\n duration=1000, loop=0, disposal=1)\n subprocess.run('rm -f %s' % figname, shell=True)\n",
"step-3": "<mask token>\n\n\ndef calc_frames(new_time):\n old_time = datetime(new_time.year - 1, 11, 30, 23)\n days = (new_time - old_time).days\n sec = (new_time - old_time).seconds\n hours = days * 24 + sec / 3600\n return int(hours)\n\n\ndef read_point_fixtime(filname, fixtime, flonl, flonr, flats, flatn):\n ff = open(filname, 'r')\n line1 = ff.readline()\n line2 = ff.readline()\n line3 = ff.readline()\n line4 = ff.readline()\n plat = []\n plon = []\n line = ff.readline()\n while line:\n if line.strip().split(' ')[0] == 'TRACK_ID':\n num = int(ff.readline().strip().split(' ')[-1])\n for nl in range(0, num, 1):\n data = list(map(float, ff.readline().strip().split(' ')))\n if str(int(data[0])) == fixtime and data[1] <= flonr and data[1\n ] >= flonl and data[2] <= flatn and data[2] >= flats:\n plat.append(data[2])\n plon.append(data[1])\n line = ff.readline()\n ff.close()\n print('%s total feature point in %s : %d' % (filname, fixtime, len(plat)))\n return plat, plon\n\n\nlonl = 0\nlonr = 150\nlats = 15\nlatn = 70\nlat_sp = 20\nlon_sp = 30\nnrow = 3\nncol = 1\nbmlo = 0.1\ntitle_font = 18\nlabel_font = 14\ndtime = pd.date_range(start='1995-01-01 00', periods=60, freq='6H', closed=None\n )\ncreate_gif = True\nnfilt = 'T63'\nlev = [850, 500, 250]\ncnlvl = [[-8, 1]]\ncnlvl2 = [30, 50, 100]\nvarname = 'z'\npath = '/home/users/qd201969/ERA5-1HR-lev/'\ndatapath = '/gws/nopw/j04/ncas_generic/users/renql/'\nfigdir = '/home/users/qd201969/uor_track/fig/'\nf = xr.open_dataset('%sERA5_subdaily/%s/ERA5_NH_%s_%d.nc' % (datapath,\n varname, varname, dtime[0].year))\nlat = f['latitude'].data\nlon = f['longitude'].data\nilon = lon[(lon >= lonl) & (lon <= lonr)]\nilat = lat[(lat >= lats) & (lat <= latn)]\nds = xr.open_dataset('/home/users/qd201969/gtopo30_0.9x1.25.nc')\nphis = ds['PHIS'].sel(lon=ilon, lat=ilat, method='nearest').load()\nphis = phis / 9.8\ndel ds\ngc.collect()\nnl = 0\nfcolors = cmaps.BlueDarkRed18\ncnlevels = np.arange(cnlvl[nl][0], cnlvl[nl][0] + cnlvl[nl][1] * (fcolors.N -\n 1), cnlvl[nl][1])\nnorm = colors.BoundaryNorm(boundaries=cnlevels, ncolors=fcolors.N, extend=\n 'both')\nparams = {'legend.fontsize': label_font, 'axes.labelsize': label_font,\n 'axes.titlesize': label_font, 'xtick.labelsize': label_font,\n 'ytick.labelsize': label_font}\nplt.rcParams.update(params)\nfor nt in range(len(dtime)):\n fig = plt.figure(figsize=(12, 12), dpi=100)\n ax = fig.subplots(nrow, ncol, subplot_kw=dict(projection=ccrs.\n PlateCarree()))\n for nl in range(len(lev)):\n var = f[varname].sel(time=dtime[nt], level=lev[nl], longitude=ilon,\n latitude=ilat)\n var.data = var.data / 9.8\n path2 = '%sERA5_VOR%d_1hr_%d_DET/' % (path, lev[nl], dtime[nt].year)\n plat, plon = read_point_fixtime(path2 + 'fft_trs_pos', dtime[nt].\n strftime('%Y%m%d%H'), lonl, lonr, lats, latn)\n fvor = xr.open_dataset('%sERA5_VOR%d_1hr_%d_DET_%sfilt.nc' % (path2,\n lev[nl], dtime[nt].year, nfilt))\n var1 = fvor['var'].sel(time=calc_frames(dtime[nt]), level=1, lon=\n ilon, lat=ilat, method='nearest').load()\n var1.values = var1.values * 100000.0\n axe = ax[nl]\n axe.add_feature(cfeat.COASTLINE.with_scale('110m'), edgecolor=\n 'black', linewidth=0.8, zorder=1)\n axe.set_title('%s %dhPa (%d)' % (dtime[nt].strftime(\n '%Y-%m-%d-%H:00'), lev[nl], len(plat)), fontsize=title_font)\n shad = axe.contourf(ilon, ilat, var1, cnlevels, transform=ccrs.\n PlateCarree(), cmap=fcolors, extend='both', norm=norm)\n cont = axe.contour(ilon, ilat, var, np.arange(1000, 15000, cnlvl2[\n nl]), transform=ccrs.PlateCarree(), colors='gray', linewidths=1.5)\n 
pint = axe.scatter(plon, plat, 10.0 ** 2, color='k', marker='o',\n transform=ccrs.PlateCarree())\n topo = axe.contour(ilon, ilat, phis, [1500, 3000], transform=ccrs.\n PlateCarree(), colors='black', linewidths=1.2)\n axe.set_yticks(np.arange(lats, latn, lat_sp), crs=ccrs.PlateCarree())\n axe.yaxis.set_major_formatter(LatitudeFormatter(degree_symbol=''))\n axe.set_xticks(np.arange(lonl, lonr, lon_sp), crs=ccrs.PlateCarree())\n axe.xaxis.set_major_formatter(LongitudeFormatter(degree_symbol=''))\n position = fig.add_axes([0.85, bmlo + 0.1, 0.015, 0.7])\n cb = plt.colorbar(shad, cax=position, orientation='vertical')\n cb.set_label(label='T5~63 Relative Vort (1e5)', size=label_font)\n plt.tight_layout(rect=(0, bmlo, 1, 1))\n plt.savefig(figdir + 'filt_vor_%s.png' % dtime[nt].strftime('%Y%m%d%H'),\n bbox_inches='tight', pad_inches=0.01)\nif create_gif == True:\n figname = figdir + 'filt_vor_*.png'\n fn_stream = subprocess.check_output('ls ' + figname, shell=True).decode(\n 'utf-8')\n fn_list = fn_stream.split()\n print(fn_list[0])\n print('filenumber : ' + str(len(fn_list)))\n gif_name = figname.rsplit('_', 1)[0] + '.gif'\n frames = []\n for itm in fn_list:\n frame = Image.open(itm)\n frames.append(frame)\n frames[0].save(gif_name, save_all=True, append_images=frames[1:],\n duration=1000, loop=0, disposal=1)\n subprocess.run('rm -f %s' % figname, shell=True)\n",
"step-4": "<mask token>\nimport sys\nimport subprocess\nimport xarray as xr\nimport numpy as np\nimport pandas as pd\nfrom datetime import datetime\nimport gc\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom matplotlib import colors\nimport cartopy.crs as ccrs\nimport cartopy.feature as cfeat\nfrom cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\nimport cmaps\nfrom PIL import Image, ImageDraw, ImageSequence\n\n\ndef calc_frames(new_time):\n old_time = datetime(new_time.year - 1, 11, 30, 23)\n days = (new_time - old_time).days\n sec = (new_time - old_time).seconds\n hours = days * 24 + sec / 3600\n return int(hours)\n\n\ndef read_point_fixtime(filname, fixtime, flonl, flonr, flats, flatn):\n ff = open(filname, 'r')\n line1 = ff.readline()\n line2 = ff.readline()\n line3 = ff.readline()\n line4 = ff.readline()\n plat = []\n plon = []\n line = ff.readline()\n while line:\n if line.strip().split(' ')[0] == 'TRACK_ID':\n num = int(ff.readline().strip().split(' ')[-1])\n for nl in range(0, num, 1):\n data = list(map(float, ff.readline().strip().split(' ')))\n if str(int(data[0])) == fixtime and data[1] <= flonr and data[1\n ] >= flonl and data[2] <= flatn and data[2] >= flats:\n plat.append(data[2])\n plon.append(data[1])\n line = ff.readline()\n ff.close()\n print('%s total feature point in %s : %d' % (filname, fixtime, len(plat)))\n return plat, plon\n\n\nlonl = 0\nlonr = 150\nlats = 15\nlatn = 70\nlat_sp = 20\nlon_sp = 30\nnrow = 3\nncol = 1\nbmlo = 0.1\ntitle_font = 18\nlabel_font = 14\ndtime = pd.date_range(start='1995-01-01 00', periods=60, freq='6H', closed=None\n )\ncreate_gif = True\nnfilt = 'T63'\nlev = [850, 500, 250]\ncnlvl = [[-8, 1]]\ncnlvl2 = [30, 50, 100]\nvarname = 'z'\npath = '/home/users/qd201969/ERA5-1HR-lev/'\ndatapath = '/gws/nopw/j04/ncas_generic/users/renql/'\nfigdir = '/home/users/qd201969/uor_track/fig/'\nf = xr.open_dataset('%sERA5_subdaily/%s/ERA5_NH_%s_%d.nc' % (datapath,\n varname, varname, dtime[0].year))\nlat = f['latitude'].data\nlon = f['longitude'].data\nilon = lon[(lon >= lonl) & (lon <= lonr)]\nilat = lat[(lat >= lats) & (lat <= latn)]\nds = xr.open_dataset('/home/users/qd201969/gtopo30_0.9x1.25.nc')\nphis = ds['PHIS'].sel(lon=ilon, lat=ilat, method='nearest').load()\nphis = phis / 9.8\ndel ds\ngc.collect()\nnl = 0\nfcolors = cmaps.BlueDarkRed18\ncnlevels = np.arange(cnlvl[nl][0], cnlvl[nl][0] + cnlvl[nl][1] * (fcolors.N -\n 1), cnlvl[nl][1])\nnorm = colors.BoundaryNorm(boundaries=cnlevels, ncolors=fcolors.N, extend=\n 'both')\nparams = {'legend.fontsize': label_font, 'axes.labelsize': label_font,\n 'axes.titlesize': label_font, 'xtick.labelsize': label_font,\n 'ytick.labelsize': label_font}\nplt.rcParams.update(params)\nfor nt in range(len(dtime)):\n fig = plt.figure(figsize=(12, 12), dpi=100)\n ax = fig.subplots(nrow, ncol, subplot_kw=dict(projection=ccrs.\n PlateCarree()))\n for nl in range(len(lev)):\n var = f[varname].sel(time=dtime[nt], level=lev[nl], longitude=ilon,\n latitude=ilat)\n var.data = var.data / 9.8\n path2 = '%sERA5_VOR%d_1hr_%d_DET/' % (path, lev[nl], dtime[nt].year)\n plat, plon = read_point_fixtime(path2 + 'fft_trs_pos', dtime[nt].\n strftime('%Y%m%d%H'), lonl, lonr, lats, latn)\n fvor = xr.open_dataset('%sERA5_VOR%d_1hr_%d_DET_%sfilt.nc' % (path2,\n lev[nl], dtime[nt].year, nfilt))\n var1 = fvor['var'].sel(time=calc_frames(dtime[nt]), level=1, lon=\n ilon, lat=ilat, method='nearest').load()\n var1.values = var1.values * 100000.0\n axe = ax[nl]\n axe.add_feature(cfeat.COASTLINE.with_scale('110m'), edgecolor=\n 
'black', linewidth=0.8, zorder=1)\n axe.set_title('%s %dhPa (%d)' % (dtime[nt].strftime(\n '%Y-%m-%d-%H:00'), lev[nl], len(plat)), fontsize=title_font)\n shad = axe.contourf(ilon, ilat, var1, cnlevels, transform=ccrs.\n PlateCarree(), cmap=fcolors, extend='both', norm=norm)\n cont = axe.contour(ilon, ilat, var, np.arange(1000, 15000, cnlvl2[\n nl]), transform=ccrs.PlateCarree(), colors='gray', linewidths=1.5)\n pint = axe.scatter(plon, plat, 10.0 ** 2, color='k', marker='o',\n transform=ccrs.PlateCarree())\n topo = axe.contour(ilon, ilat, phis, [1500, 3000], transform=ccrs.\n PlateCarree(), colors='black', linewidths=1.2)\n axe.set_yticks(np.arange(lats, latn, lat_sp), crs=ccrs.PlateCarree())\n axe.yaxis.set_major_formatter(LatitudeFormatter(degree_symbol=''))\n axe.set_xticks(np.arange(lonl, lonr, lon_sp), crs=ccrs.PlateCarree())\n axe.xaxis.set_major_formatter(LongitudeFormatter(degree_symbol=''))\n position = fig.add_axes([0.85, bmlo + 0.1, 0.015, 0.7])\n cb = plt.colorbar(shad, cax=position, orientation='vertical')\n cb.set_label(label='T5~63 Relative Vort (1e5)', size=label_font)\n plt.tight_layout(rect=(0, bmlo, 1, 1))\n plt.savefig(figdir + 'filt_vor_%s.png' % dtime[nt].strftime('%Y%m%d%H'),\n bbox_inches='tight', pad_inches=0.01)\nif create_gif == True:\n figname = figdir + 'filt_vor_*.png'\n fn_stream = subprocess.check_output('ls ' + figname, shell=True).decode(\n 'utf-8')\n fn_list = fn_stream.split()\n print(fn_list[0])\n print('filenumber : ' + str(len(fn_list)))\n gif_name = figname.rsplit('_', 1)[0] + '.gif'\n frames = []\n for itm in fn_list:\n frame = Image.open(itm)\n frames.append(frame)\n frames[0].save(gif_name, save_all=True, append_images=frames[1:],\n duration=1000, loop=0, disposal=1)\n subprocess.run('rm -f %s' % figname, shell=True)\n",
"step-5": "#!/usr/bin/env python\n'''\nfix a time and then draw the instant geopotential (contour) from \n/gws/nopw/j04/ncas_generic/users/renql/ERA5_subdaily/ERA5_NH_z_1989.nc,\n\nspatial filtered relative vorticity (shaded) from \n~/ERA5-1HR-lev/ERA5_VOR850_1hr_1995_DET/ERA5_VOR850_1hr_1995_DET_T63filt.nc\n\nand identified feature points from \n~/ERA5-1HR-lev/ERA5_VOR850_1hr_1995_DET/fft_trs_pos\n\nLoop through the height (850, 500, 250)\n\n20211116\n'''\nimport sys\nimport subprocess\nimport xarray as xr\nimport numpy as np\nimport pandas as pd\nfrom datetime import datetime\nimport gc #garbage collector\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom matplotlib import colors\nimport cartopy.crs as ccrs\nimport cartopy.feature as cfeat\nfrom cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\nimport cmaps\nfrom PIL import Image, ImageDraw, ImageSequence\n\ndef calc_frames(new_time):\n old_time = datetime(new_time.year-1, 11, 30, 23)\n days = (new_time - old_time).days\n sec = (new_time - old_time).seconds\n hours = days * 24 + sec/3600\n return int(hours)\n\ndef read_point_fixtime(filname,fixtime,flonl,flonr,flats,flatn):\n ff = open(filname,\"r\") \n line1 = ff.readline()\n line2 = ff.readline()\n line3 = ff.readline()\n line4 = ff.readline()\n \n plat = []\n plon = []\n line = ff.readline()\n while line:\n if line.strip().split(\" \")[0] == \"TRACK_ID\":\n num = int(ff.readline().strip().split(\" \")[-1])\n for nl in range(0,num,1):\n data = list(map(float,ff.readline().strip().split(\" \")))\n if str(int(data[0])) == fixtime and \\\n data[1]<=flonr and data[1] >= flonl and data[2]<=flatn and data[2]>=flats :\n plat.append(data[2])\n plon.append(data[1])\n line = ff.readline()\n ff.close()\n print(\"%s total feature point in %s : %d\"%(filname,fixtime,len(plat)))\n return plat, plon \n\nlonl=0 #0 #\nlonr=150#360#\nlats=15 #0 #\nlatn=70 #90 #\nlat_sp = 20\nlon_sp = 30\n\nnrow = 3\nncol = 1\nbmlo = 0.1\ntitle_font=18\nlabel_font=14\n\ndtime = pd.date_range(start='1995-01-01 00',periods=60, freq='6H',closed=None)\n#dtime = pd.date_range(start='1995-01-01 00',end='1995-01-15 00', freq='6H',closed=None)\ncreate_gif = True #False#\nnfilt=\"T63\"\nlev = [850,500,250]\ncnlvl =[[-8 ,1 ]]\ncnlvl2 = [30,50,100]\nvarname = 'z'\npath = '/home/users/qd201969/ERA5-1HR-lev/'\ndatapath = \"/gws/nopw/j04/ncas_generic/users/renql/\"#t/ERA5_NH_t_1989.nc\nfigdir = \"/home/users/qd201969/uor_track/fig/\"\n\nf = xr.open_dataset(\"%sERA5_subdaily/%s/ERA5_NH_%s_%d.nc\"%(datapath,varname,varname,dtime[0].year))\nlat = f['latitude'].data\nlon = f['longitude'].data\nilon = lon[(lon>=lonl) & (lon<=lonr)]\nilat = lat[(lat>=lats) & (lat<=latn)]\nds = xr.open_dataset(\"/home/users/qd201969/gtopo30_0.9x1.25.nc\")\nphis = ds['PHIS'].sel(lon=ilon,lat=ilat,method=\"nearest\").load()\nphis = phis/9.8 # transfer from m2/s2 to m\ndel ds\ngc.collect()\n\nnl = 0\nfcolors = cmaps.BlueDarkRed18\ncnlevels = np.arange(cnlvl[nl][0], cnlvl[nl][0]+cnlvl[nl][1]*(fcolors.N-1), cnlvl[nl][1])\nnorm = colors.BoundaryNorm(boundaries=cnlevels, ncolors=fcolors.N,extend='both')\n\nparams = {'legend.fontsize': label_font,\n 'axes.labelsize': label_font,\n 'axes.titlesize':label_font,\n 'xtick.labelsize':label_font,\n 'ytick.labelsize':label_font}\nplt.rcParams.update(params)\n\nfor nt in range(len(dtime)):\n fig = plt.figure(figsize=(12,12),dpi=100)\n ax = fig.subplots(nrow,ncol, subplot_kw=dict(projection=ccrs.PlateCarree())) #sharex=True, sharey=True\n for nl in range(len(lev)):\n var = 
f[varname].sel(time=dtime[nt],level=lev[nl],longitude=ilon,latitude=ilat)\n var.data = var.data/9.8\n\n path2 = \"%sERA5_VOR%d_1hr_%d_DET/\"%(path,lev[nl],dtime[nt].year)\n plat, plon = read_point_fixtime(path2+\"fft_trs_pos\",dtime[nt].strftime('%Y%m%d%H'),lonl,lonr,lats,latn)\n \n fvor = xr.open_dataset(\"%sERA5_VOR%d_1hr_%d_DET_%sfilt.nc\"%(path2,lev[nl],dtime[nt].year,nfilt))\n var1 = fvor['var'].sel(time=calc_frames(dtime[nt]),level = 1,lon=ilon,lat=ilat,method=\"nearest\").load()\n #fvor = xr.open_dataset(\"%sERA5_VOR_1h_dec_jan/ERA5_VOR%d_1hr_dec-jan%d_DET.nc\"%(datapath,lev[nl],dtime[nt].year))\n #var1 = fvor['var138'].sel(time=dtime[nt],lev=float(lev[nl]*100),lat=ilat,lon=ilon,method=\"nearest\").load()\n var1.values = var1.values*1e5\n\n axe = ax[nl]\n axe.add_feature(cfeat.COASTLINE.with_scale('110m'),edgecolor='black', linewidth=0.8, zorder=1) \n axe.set_title(\"%s %dhPa (%d)\"%(dtime[nt].strftime('%Y-%m-%d-%H:00'), lev[nl], len(plat)),fontsize=title_font)\n\n shad = axe.contourf(ilon, ilat, var1, cnlevels,\n transform=ccrs.PlateCarree(),cmap=fcolors,extend='both',norm=norm)\n \n cont = axe.contour(ilon, ilat, var, np.arange(1000,15000,cnlvl2[nl]), \n transform=ccrs.PlateCarree(), colors='gray', linewidths=1.5)\n \n #pint = axe.plot(plon,plat,color='darkviolet', marker='o', markersize=12, transform=ccrs.PlateCarree())\n pint = axe.scatter(plon,plat,10.0**2,color='k', marker='o', transform=ccrs.PlateCarree())\n\n topo = axe.contour(ilon, ilat, phis, [1500,3000],\n transform=ccrs.PlateCarree(),colors='black',linewidths=1.2)\n\n axe.set_yticks(np.arange(lats,latn,lat_sp), crs=ccrs.PlateCarree())\n axe.yaxis.set_major_formatter(LatitudeFormatter(degree_symbol=''))\n axe.set_xticks(np.arange(lonl,lonr,lon_sp), crs=ccrs.PlateCarree())\n axe.xaxis.set_major_formatter(LongitudeFormatter(degree_symbol=''))\n\n position = fig.add_axes([0.85, bmlo+0.1, 0.015, 0.7]) #left, bottom, width, height\n cb = plt.colorbar(shad, cax=position ,orientation='vertical')#, shrink=.9)\n cb.set_label(label='T5~63 Relative Vort (1e5)', size=label_font) #, weight='bold'\n\n plt.tight_layout(rect=(0,bmlo,1,1))\n plt.savefig(figdir+\"filt_vor_%s.png\"%(dtime[nt].strftime('%Y%m%d%H')), bbox_inches='tight',pad_inches=0.01)\n\nif create_gif == True:\n figname = figdir+\"filt_vor_*.png\"\n fn_stream = subprocess.check_output(\"ls \"+figname, shell=True).decode('utf-8')\n fn_list = fn_stream.split()\n print(fn_list[0])\n print('filenumber : '+str(len(fn_list)))\n gif_name = figname.rsplit(\"_\",1)[0]+\".gif\" \n\n frames = []\n for itm in fn_list:\n frame = Image.open(itm)\n frames.append(frame)\n\n frames[0].save(gif_name, save_all=True, append_images=frames[1:],\\\n duration = 1000, loop=0, disposal=1)\n subprocess.run('rm -f %s'%(figname),shell=True)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
"""
This module contains RestrictBlend context manager.
"""
__all__ = (
"RestrictBlend",
)
import bpy as _bpy
class _RestrictContext:
__slots__ = ()
_real_data = _bpy.data
# safe, the pointer never changes
_real_pref = _bpy.context.preferences
@property
def window_manager(self):
return self._real_data.window_managers[0]
@property
def preferences(self):
return self._real_pref
class _RestrictData:
__slots__ = ()
_context_restrict = _RestrictContext()
_data_restrict = _RestrictData()
class RestrictBlend:
__slots__ = ("context", "data")
def __enter__(self):
self.data = _bpy.data
self.context = _bpy.context
_bpy.data = _data_restrict
_bpy.context = _context_restrict
def __exit__(self, type, value, traceback):
_bpy.data = self.data
_bpy.context = self.context
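# Minimal usage sketch (the registration call is hypothetical): while the
# context manager is active, touching bpy.data or any context attribute other
# than window_manager/preferences raises AttributeError, which is the point
# of the restriction.
#
#   with RestrictBlend():
#       register_addon_classes()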
|
normal
|
{
"blob_id": "aa4226c377368d1ece4e556db9b7fdd0134472c9",
"index": 5450,
"step-1": "<mask token>\n\n\nclass _RestrictData:\n __slots__ = ()\n\n\n<mask token>\n\n\nclass RestrictBlend:\n __slots__ = 'context', 'data'\n\n def __enter__(self):\n self.data = _bpy.data\n self.context = _bpy.context\n _bpy.data = _data_restrict\n _bpy.context = _context_restrict\n\n def __exit__(self, type, value, traceback):\n _bpy.data = self.data\n _bpy.context = self.context\n",
"step-2": "<mask token>\n\n\nclass _RestrictContext:\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def window_manager(self):\n return self._real_data.window_managers[0]\n\n @property\n def preferences(self):\n return self._real_pref\n\n\nclass _RestrictData:\n __slots__ = ()\n\n\n<mask token>\n\n\nclass RestrictBlend:\n __slots__ = 'context', 'data'\n\n def __enter__(self):\n self.data = _bpy.data\n self.context = _bpy.context\n _bpy.data = _data_restrict\n _bpy.context = _context_restrict\n\n def __exit__(self, type, value, traceback):\n _bpy.data = self.data\n _bpy.context = self.context\n",
"step-3": "<mask token>\n\n\nclass _RestrictContext:\n __slots__ = ()\n _real_data = _bpy.data\n _real_pref = _bpy.context.preferences\n\n @property\n def window_manager(self):\n return self._real_data.window_managers[0]\n\n @property\n def preferences(self):\n return self._real_pref\n\n\nclass _RestrictData:\n __slots__ = ()\n\n\n<mask token>\n\n\nclass RestrictBlend:\n __slots__ = 'context', 'data'\n\n def __enter__(self):\n self.data = _bpy.data\n self.context = _bpy.context\n _bpy.data = _data_restrict\n _bpy.context = _context_restrict\n\n def __exit__(self, type, value, traceback):\n _bpy.data = self.data\n _bpy.context = self.context\n",
"step-4": "<mask token>\n__all__ = 'RestrictBlend',\n<mask token>\n\n\nclass _RestrictContext:\n __slots__ = ()\n _real_data = _bpy.data\n _real_pref = _bpy.context.preferences\n\n @property\n def window_manager(self):\n return self._real_data.window_managers[0]\n\n @property\n def preferences(self):\n return self._real_pref\n\n\nclass _RestrictData:\n __slots__ = ()\n\n\n_context_restrict = _RestrictContext()\n_data_restrict = _RestrictData()\n\n\nclass RestrictBlend:\n __slots__ = 'context', 'data'\n\n def __enter__(self):\n self.data = _bpy.data\n self.context = _bpy.context\n _bpy.data = _data_restrict\n _bpy.context = _context_restrict\n\n def __exit__(self, type, value, traceback):\n _bpy.data = self.data\n _bpy.context = self.context\n",
"step-5": "# ##### BEGIN GPL LICENSE BLOCK #####\n#\n# This program is free software; you can redistribute it and/or\n# modify it under the terms of the GNU General Public License\n# as published by the Free Software Foundation; either version 2\n# of the License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software Foundation,\n# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n#\n# ##### END GPL LICENSE BLOCK #####\n\n# <pep8-80 compliant>\n\n\"\"\"\nThis module contains RestrictBlend context manager.\n\"\"\"\n\n__all__ = (\n \"RestrictBlend\",\n)\n\nimport bpy as _bpy\n\n\nclass _RestrictContext:\n __slots__ = ()\n _real_data = _bpy.data\n # safe, the pointer never changes\n _real_pref = _bpy.context.preferences\n\n @property\n def window_manager(self):\n return self._real_data.window_managers[0]\n\n @property\n def preferences(self):\n return self._real_pref\n\n\nclass _RestrictData:\n __slots__ = ()\n\n\n_context_restrict = _RestrictContext()\n_data_restrict = _RestrictData()\n\n\nclass RestrictBlend:\n __slots__ = (\"context\", \"data\")\n\n def __enter__(self):\n self.data = _bpy.data\n self.context = _bpy.context\n _bpy.data = _data_restrict\n _bpy.context = _context_restrict\n\n def __exit__(self, type, value, traceback):\n _bpy.data = self.data\n _bpy.context = self.context\n",
"step-ids": [
6,
9,
10,
11,
13
]
}
|
[
6,
9,
10,
11,
13
] |
N, M, T = map(int, input().split())
AB = [list(map(int, input().split())) for i in range(M)]
now_time = 0
battery = N
ans = 'Yes'
for a, b in AB:
# battery used getting to the cafe
battery -= a-now_time
if battery <= 0:
ans = 'No'
break
# recharging at the cafe
battery += b-a
battery = min(battery, N)
# update the current time to when we leave the cafe
now_time = b
# battery used from leaving the last cafe until getting home
battery -= T-now_time
if battery <= 0:
ans = 'No'
print(ans)
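# Worked example with assumed input: N=10, M=2, T=20, cafes (5, 9) and
# (12, 15). Battery: 10 -> 5 at t=5, recharged to 9 by t=9, 6 at t=12,
# recharged to 9 by t=15, and 9 - (20 - 15) = 4 > 0, so the answer is Yes.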
|
normal
|
{
"blob_id": "15a7f6a63536ed24b6cf17395643476c689ec99b",
"index": 8499,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor a, b in AB:\n battery -= a - now_time\n if battery <= 0:\n ans = 'No'\n break\n battery += b - a\n battery = min(battery, N)\n now_time = b\nbattery -= T - now_time\nif battery <= 0:\n ans = 'No'\nprint(ans)\n",
"step-3": "N, M, T = map(int, input().split())\nAB = [list(map(int, input().split())) for i in range(M)]\nnow_time = 0\nbattery = N\nans = 'Yes'\nfor a, b in AB:\n battery -= a - now_time\n if battery <= 0:\n ans = 'No'\n break\n battery += b - a\n battery = min(battery, N)\n now_time = b\nbattery -= T - now_time\nif battery <= 0:\n ans = 'No'\nprint(ans)\n",
"step-4": "N, M, T = map(int, input().split())\nAB = [list(map(int, input().split())) for i in range(M)]\n\nnow_time = 0\nbattery = N\n\nans = 'Yes'\nfor a, b in AB:\n\n # カフェに付くまでにの消費\n battery -= a-now_time\n if battery <= 0:\n ans = 'No'\n break\n\n # カフェでの充電\n battery += b-a\n battery = min(battery, N)\n\n # 現在時刻をカフェを出る時間に更新\n now_time = b\n\n# 最後のカフェを出てから帰宅までの消費\nbattery -= T-now_time\nif battery <= 0:\n ans = 'No'\nprint(ans)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#! /usr/bin/env python
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import data_helpers
from text_rnn import TextRNN
from tensorflow.contrib import learn
# Parameters
# ==================================================
# Data loading params
flags = tf.app.flags
FLAGS = flags.FLAGS
# Model Hyperparameters
tf.flags.DEFINE_integer("embedding_dim", 100, "Dimensionality of character embedding (default: 100)")
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (default: 0.5)")
# Training parameters
tf.flags.DEFINE_integer("batch_size", 128, "Batch Size (default: 64)")
tf.flags.DEFINE_integer("num_epochs", 100, "Number of training epochs (default: 200)")
tf.flags.DEFINE_integer("evaluate_every", 500, "Evaluate model on dev set after this many steps (default: 100)")
tf.flags.DEFINE_integer("checkpoint_every", 500, "Save model after this many steps (default: 100)")
tf.flags.DEFINE_integer("num_checkpoints", 3, "Number of checkpoints to store (default: 5)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
# Data Preparation
# ==================================================
# Load data
print("\nLoading train data...")
x_train, y_train = data_helpers.load_splitted_data_and_labels('../data/toxic_yes_train.txt', '../data/toxic_no_train.txt')
print("x_train length:{0}, y_train shape:{1}".format(len(x_train), y_train.shape))
print(x_train[0], y_train[0])
print("\nLoading dev data...")
x_dev, y_dev = data_helpers.load_splitted_data_and_labels('../data/toxic_yes_dev.txt', '../data/toxic_no_dev.txt')
print("x_dev length:{0}, y_dev shape:{1}".format(len(x_dev), y_dev.shape))
print(x_dev[-1], y_dev[-1])
x = x_train+x_dev
print("x length:{0}".format(len(x)))
# Build vocabulary
# max_sent_length, sent = max([(len(i.split(" ")),i) for i in x])
# print("Max sent length = {0}".format(max_sent_length))
# print("Sent with max length = {0}".format(sent))
max_sent_length = 80
vocab_processor = learn.preprocessing.VocabularyProcessor(max_sent_length)
x = np.array(list(vocab_processor.fit_transform(x))) # fit_transform yields one id sequence per sentence; the result is a [n_samples, max_sent_length] word-id matrix.
print("Shape of word-id matrix: {0}".format(x.shape))
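# Note: VocabularyProcessor pads or truncates every sentence to
# max_sent_length, so shorter sentences are filled with the padding id 0
# (assumed default of the contrib implementation).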
#Transform x_train and x_dev to word-id matrix
x_train = np.array(list(vocab_processor.transform(x_train)))
print("Shape of x_train matrix: {0}".format(x_train.shape))
x_dev = np.array(list(vocab_processor.transform(x_dev)))
print("Shape of x_dev matrix: {0}".format(x_dev.shape))
# Randomly shuffle data
np.random.seed(10)
shuffle_indices = np.random.permutation(np.arange(len(y_train)))
x_train = x_train[shuffle_indices]
y_train = y_train[shuffle_indices]
del x
vocabsize = len(vocab_processor.vocabulary_)
print("Vocabulary Size: {:d}".format(vocabsize))
# Training
# ==================================================
with tf.Graph().as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_conf)
with sess.as_default():
rnn = TextRNN(
sequence_length=x_train.shape[1],
num_classes=y_train.shape[1],
vocab_size=vocabsize,
embedding_size=FLAGS.embedding_dim)
# Define Training procedure
global_step = tf.Variable(0, name="global_step", trainable=False)
optimizer = tf.train.AdamOptimizer(1e-3)
grads_and_vars = optimizer.compute_gradients(rnn.loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
# Keep track of gradient values and sparsity (optional)
grad_summaries = []
for g, v in grads_and_vars:
if g is not None:
grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summaries_merged = tf.summary.merge(grad_summaries)
# Output directory for models and summaries
# timestamp = str(int(time.time()))
# out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs"))
print("Writing to {}\n".format(out_dir))
# Summaries for loss and accuracy
loss_summary = tf.summary.scalar("loss", rnn.loss)
acc_summary = tf.summary.scalar("accuracy", rnn.accuracy)
# Train Summaries
train_summary_op = tf.summary.merge([loss_summary, acc_summary, grad_summaries_merged])
train_summary_dir = os.path.join(out_dir, "summaries", "train")
train_summary_writer = tf.summary.FileWriter(train_summary_dir, sess.graph)
# Dev summaries
dev_summary_op = tf.summary.merge([loss_summary, acc_summary])
dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)
# Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it
checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints"))
checkpoint_prefix = os.path.join(checkpoint_dir, "model")
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.num_checkpoints)
# Write vocabulary
vocab_processor.save(os.path.join(out_dir, "vocab"))
# Initialize all variables
sess.run(tf.global_variables_initializer())
vocabulary = vocab_processor.vocabulary_
initEmbeddings = data_helpers.load_embedding_vectors_glove(vocabulary)
sess.run(rnn.W_embed.assign(initEmbeddings))
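# The assignment above overwrites the randomly initialized embedding matrix
# with pretrained GloVe vectors before training (W_embed is assumed to be
# the embedding variable defined inside TextRNN).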
for v in tf.trainable_variables():
print(v.name)
def train_step(x_batch, y_batch):
"""
A single training step
"""
feed_dict = {
rnn.input_x: x_batch,
rnn.input_y: y_batch,
rnn.dropout_keep_prob: FLAGS.dropout_keep_prob
}
_, step, summaries, loss, accuracy = sess.run(
[train_op, global_step, train_summary_op, rnn.loss, rnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
# print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
train_summary_writer.add_summary(summaries, step)
return loss,accuracy
def dev_step(x_batch, y_batch, writer=None):
"""
Evaluates model on a dev set
"""
feed_dict = {
rnn.input_x: x_batch,
rnn.input_y: y_batch,
rnn.dropout_keep_prob: 1.0
}
step, summaries, loss, accuracy = sess.run(
[global_step, dev_summary_op, rnn.loss, rnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
if writer:
writer.add_summary(summaries, step)
return accuracy
# Create batches agnostic of class distributions
batches = data_helpers.batch_iter(list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)
# Create batches aware of imbalance in class distributions
# batches = data_helpers.makeBatches(x_train, y_train[:,1].tolist(), FLAGS.batch_size, FLAGS.num_epochs)
# Training loop. For each batch...
prev_val_acc = 0
for batch in batches:
x_batch, y_batch = zip(*batch)
train_loss, train_acc = train_step(x_batch, y_batch)
current_step = tf.train.global_step(sess, global_step)
if current_step % FLAGS.evaluate_every == 0:
print("\nTrain loss:{0}, Train accuracy:{1}".format(train_loss, train_acc))
print("Evaluation:")
val_acc = dev_step(x_dev, y_dev, writer=dev_summary_writer)
if val_acc > 0.95 and val_acc > prev_val_acc:
save_path = saver.save(sess, checkpoint_prefix, global_step=current_step)
print("Model checkpoint saved at {0}, accuracy={1}".format(save_path, round(val_acc, 3)))
prev_val_acc = val_acc
print("")
|
normal
|
{
"blob_id": "aa1a7de92b971b6d10d09b2f8ca2c55516e538e4",
"index": 9904,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntf.flags.DEFINE_integer('embedding_dim', 100,\n 'Dimensionality of character embedding (default: 100)')\ntf.flags.DEFINE_float('dropout_keep_prob', 0.5,\n 'Dropout keep probability (default: 0.5)')\ntf.flags.DEFINE_integer('batch_size', 128, 'Batch Size (default: 64)')\ntf.flags.DEFINE_integer('num_epochs', 100,\n 'Number of training epochs (default: 200)')\ntf.flags.DEFINE_integer('evaluate_every', 500,\n 'Evaluate model on dev set after this many steps (default: 100)')\ntf.flags.DEFINE_integer('checkpoint_every', 500,\n 'Save model after this many steps (default: 100)')\ntf.flags.DEFINE_integer('num_checkpoints', 3,\n 'Number of checkpoints to store (default: 5)')\ntf.flags.DEFINE_boolean('allow_soft_placement', True,\n 'Allow device soft device placement')\ntf.flags.DEFINE_boolean('log_device_placement', False,\n 'Log placement of ops on devices')\nprint(\"\"\"\nLoading train data...\"\"\")\n<mask token>\nprint('x_train length:{0}, y_train shape:{1}'.format(len(x_train), y_train.\n shape))\nprint(x_train[0], y_train[0])\nprint(\"\"\"\nLoading dev data...\"\"\")\n<mask token>\nprint('x_dev length:{0}, y_dev shape:{1}'.format(len(x_dev), y_dev.shape))\nprint(x_dev[-1], y_dev[-1])\n<mask token>\nprint('x length:{0}'.format(len(x)))\n<mask token>\nprint('Shape of word-id matrix: {0}'.format(x.shape))\n<mask token>\nprint('Shape of x_train matrix: {0}'.format(x_train.shape))\n<mask token>\nprint('Shape of x_dev matrix: {0}'.format(x_dev.shape))\nnp.random.seed(10)\n<mask token>\ndel x\n<mask token>\nprint('Vocabulary Size: {:d}'.format(vocabsize))\nwith tf.Graph().as_default():\n session_conf = tf.ConfigProto(allow_soft_placement=FLAGS.\n allow_soft_placement, log_device_placement=FLAGS.log_device_placement)\n sess = tf.Session(config=session_conf)\n with sess.as_default():\n rnn = TextRNN(sequence_length=x_train.shape[1], num_classes=y_train\n .shape[1], vocab_size=vocabsize, embedding_size=FLAGS.embedding_dim\n )\n global_step = tf.Variable(0, name='global_step', trainable=False)\n optimizer = tf.train.AdamOptimizer(0.001)\n grads_and_vars = optimizer.compute_gradients(rnn.loss)\n train_op = optimizer.apply_gradients(grads_and_vars, global_step=\n global_step)\n grad_summaries = []\n for g, v in grads_and_vars:\n if g is not None:\n grad_hist_summary = tf.summary.histogram('{}/grad/hist'.\n format(v.name), g)\n sparsity_summary = tf.summary.scalar('{}/grad/sparsity'.\n format(v.name), tf.nn.zero_fraction(g))\n grad_summaries.append(grad_hist_summary)\n grad_summaries.append(sparsity_summary)\n grad_summaries_merged = tf.summary.merge(grad_summaries)\n out_dir = os.path.abspath(os.path.join(os.path.curdir, 'runs'))\n print('Writing to {}\\n'.format(out_dir))\n loss_summary = tf.summary.scalar('loss', rnn.loss)\n acc_summary = tf.summary.scalar('accuracy', rnn.accuracy)\n train_summary_op = tf.summary.merge([loss_summary, acc_summary,\n grad_summaries_merged])\n train_summary_dir = os.path.join(out_dir, 'summaries', 'train')\n train_summary_writer = tf.summary.FileWriter(train_summary_dir,\n sess.graph)\n dev_summary_op = tf.summary.merge([loss_summary, acc_summary])\n dev_summary_dir = os.path.join(out_dir, 'summaries', 'dev')\n dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)\n checkpoint_dir = os.path.abspath(os.path.join(out_dir, 'checkpoints'))\n checkpoint_prefix = os.path.join(checkpoint_dir, 'model')\n if not os.path.exists(checkpoint_dir):\n os.makedirs(checkpoint_dir)\n saver = tf.train.Saver(tf.global_variables(), 
max_to_keep=FLAGS.\n num_checkpoints)\n vocab_processor.save(os.path.join(out_dir, 'vocab'))\n sess.run(tf.global_variables_initializer())\n vocabulary = vocab_processor.vocabulary_\n initEmbeddings = data_helpers.load_embedding_vectors_glove(vocabulary)\n sess.run(rnn.W_embed.assign(initEmbeddings))\n for v in tf.trainable_variables():\n print(v.name)\n\n def train_step(x_batch, y_batch):\n \"\"\"\n A single training step\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: FLAGS.dropout_keep_prob}\n _, step, summaries, loss, accuracy = sess.run([train_op,\n global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n train_summary_writer.add_summary(summaries, step)\n return loss, accuracy\n\n def dev_step(x_batch, y_batch, writer=None):\n \"\"\"\n Evaluates model on a dev set\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: 1.0}\n step, summaries, loss, accuracy = sess.run([global_step,\n dev_summary_op, rnn.loss, rnn.accuracy], feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print('{}: step {}, loss {:g}, acc {:g}'.format(time_str, step,\n loss, accuracy))\n if writer:\n writer.add_summary(summaries, step)\n return accuracy\n batches = data_helpers.batch_iter(list(zip(x_train, y_train)),\n FLAGS.batch_size, FLAGS.num_epochs)\n prev_val_acc = 0\n for batch in batches:\n x_batch, y_batch = zip(*batch)\n train_loss, train_acc = train_step(x_batch, y_batch)\n current_step = tf.train.global_step(sess, global_step)\n if current_step % FLAGS.evaluate_every == 0:\n print('\\nTrain loss:{0}, Train accuracy:{1}'.format(\n train_loss, train_acc))\n print('Evaluation:')\n val_acc = dev_step(x_dev, y_dev, writer=dev_summary_writer)\n if val_acc > 0.95 and val_acc > prev_val_acc:\n save_path = saver.save(sess, checkpoint_prefix,\n global_step=current_step)\n print('Model checkpoint saved at {0}, accuracy={1}'.\n format(save_path, round(val_acc, 3)))\n prev_val_acc = val_acc\n print('')\n",
"step-3": "<mask token>\nflags = tf.app.flags\nFLAGS = flags.FLAGS\ntf.flags.DEFINE_integer('embedding_dim', 100,\n 'Dimensionality of character embedding (default: 100)')\ntf.flags.DEFINE_float('dropout_keep_prob', 0.5,\n 'Dropout keep probability (default: 0.5)')\ntf.flags.DEFINE_integer('batch_size', 128, 'Batch Size (default: 64)')\ntf.flags.DEFINE_integer('num_epochs', 100,\n 'Number of training epochs (default: 200)')\ntf.flags.DEFINE_integer('evaluate_every', 500,\n 'Evaluate model on dev set after this many steps (default: 100)')\ntf.flags.DEFINE_integer('checkpoint_every', 500,\n 'Save model after this many steps (default: 100)')\ntf.flags.DEFINE_integer('num_checkpoints', 3,\n 'Number of checkpoints to store (default: 5)')\ntf.flags.DEFINE_boolean('allow_soft_placement', True,\n 'Allow device soft device placement')\ntf.flags.DEFINE_boolean('log_device_placement', False,\n 'Log placement of ops on devices')\nprint(\"\"\"\nLoading train data...\"\"\")\nx_train, y_train = data_helpers.load_splitted_data_and_labels(\n '../data/toxic_yes_train.txt', '../data/toxic_no_train.txt')\nprint('x_train length:{0}, y_train shape:{1}'.format(len(x_train), y_train.\n shape))\nprint(x_train[0], y_train[0])\nprint(\"\"\"\nLoading dev data...\"\"\")\nx_dev, y_dev = data_helpers.load_splitted_data_and_labels(\n '../data/toxic_yes_dev.txt', '../data/toxic_no_dev.txt')\nprint('x_dev length:{0}, y_dev shape:{1}'.format(len(x_dev), y_dev.shape))\nprint(x_dev[-1], y_dev[-1])\nx = x_train + x_dev\nprint('x length:{0}'.format(len(x)))\nmax_sent_length = 80\nvocab_processor = learn.preprocessing.VocabularyProcessor(max_sent_length)\nx = np.array(list(vocab_processor.fit_transform(x)))\nprint('Shape of word-id matrix: {0}'.format(x.shape))\nx_train = np.array(list(vocab_processor.transform(x_train)))\nprint('Shape of x_train matrix: {0}'.format(x_train.shape))\nx_dev = np.array(list(vocab_processor.transform(x_dev)))\nprint('Shape of x_dev matrix: {0}'.format(x_dev.shape))\nnp.random.seed(10)\nshuffle_indices = np.random.permutation(np.arange(len(y_train)))\nx_train = x_train[shuffle_indices]\ny_train = y_train[shuffle_indices]\ndel x\nvocabsize = len(vocab_processor.vocabulary_)\nprint('Vocabulary Size: {:d}'.format(vocabsize))\nwith tf.Graph().as_default():\n session_conf = tf.ConfigProto(allow_soft_placement=FLAGS.\n allow_soft_placement, log_device_placement=FLAGS.log_device_placement)\n sess = tf.Session(config=session_conf)\n with sess.as_default():\n rnn = TextRNN(sequence_length=x_train.shape[1], num_classes=y_train\n .shape[1], vocab_size=vocabsize, embedding_size=FLAGS.embedding_dim\n )\n global_step = tf.Variable(0, name='global_step', trainable=False)\n optimizer = tf.train.AdamOptimizer(0.001)\n grads_and_vars = optimizer.compute_gradients(rnn.loss)\n train_op = optimizer.apply_gradients(grads_and_vars, global_step=\n global_step)\n grad_summaries = []\n for g, v in grads_and_vars:\n if g is not None:\n grad_hist_summary = tf.summary.histogram('{}/grad/hist'.\n format(v.name), g)\n sparsity_summary = tf.summary.scalar('{}/grad/sparsity'.\n format(v.name), tf.nn.zero_fraction(g))\n grad_summaries.append(grad_hist_summary)\n grad_summaries.append(sparsity_summary)\n grad_summaries_merged = tf.summary.merge(grad_summaries)\n out_dir = os.path.abspath(os.path.join(os.path.curdir, 'runs'))\n print('Writing to {}\\n'.format(out_dir))\n loss_summary = tf.summary.scalar('loss', rnn.loss)\n acc_summary = tf.summary.scalar('accuracy', rnn.accuracy)\n train_summary_op = tf.summary.merge([loss_summary, 
acc_summary,\n grad_summaries_merged])\n train_summary_dir = os.path.join(out_dir, 'summaries', 'train')\n train_summary_writer = tf.summary.FileWriter(train_summary_dir,\n sess.graph)\n dev_summary_op = tf.summary.merge([loss_summary, acc_summary])\n dev_summary_dir = os.path.join(out_dir, 'summaries', 'dev')\n dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)\n checkpoint_dir = os.path.abspath(os.path.join(out_dir, 'checkpoints'))\n checkpoint_prefix = os.path.join(checkpoint_dir, 'model')\n if not os.path.exists(checkpoint_dir):\n os.makedirs(checkpoint_dir)\n saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.\n num_checkpoints)\n vocab_processor.save(os.path.join(out_dir, 'vocab'))\n sess.run(tf.global_variables_initializer())\n vocabulary = vocab_processor.vocabulary_\n initEmbeddings = data_helpers.load_embedding_vectors_glove(vocabulary)\n sess.run(rnn.W_embed.assign(initEmbeddings))\n for v in tf.trainable_variables():\n print(v.name)\n\n def train_step(x_batch, y_batch):\n \"\"\"\n A single training step\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: FLAGS.dropout_keep_prob}\n _, step, summaries, loss, accuracy = sess.run([train_op,\n global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n train_summary_writer.add_summary(summaries, step)\n return loss, accuracy\n\n def dev_step(x_batch, y_batch, writer=None):\n \"\"\"\n Evaluates model on a dev set\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: 1.0}\n step, summaries, loss, accuracy = sess.run([global_step,\n dev_summary_op, rnn.loss, rnn.accuracy], feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print('{}: step {}, loss {:g}, acc {:g}'.format(time_str, step,\n loss, accuracy))\n if writer:\n writer.add_summary(summaries, step)\n return accuracy\n batches = data_helpers.batch_iter(list(zip(x_train, y_train)),\n FLAGS.batch_size, FLAGS.num_epochs)\n prev_val_acc = 0\n for batch in batches:\n x_batch, y_batch = zip(*batch)\n train_loss, train_acc = train_step(x_batch, y_batch)\n current_step = tf.train.global_step(sess, global_step)\n if current_step % FLAGS.evaluate_every == 0:\n print('\\nTrain loss:{0}, Train accuracy:{1}'.format(\n train_loss, train_acc))\n print('Evaluation:')\n val_acc = dev_step(x_dev, y_dev, writer=dev_summary_writer)\n if val_acc > 0.95 and val_acc > prev_val_acc:\n save_path = saver.save(sess, checkpoint_prefix,\n global_step=current_step)\n print('Model checkpoint saved at {0}, accuracy={1}'.\n format(save_path, round(val_acc, 3)))\n prev_val_acc = val_acc\n print('')\n",
"step-4": "import tensorflow as tf\nimport numpy as np\nimport os\nimport time\nimport datetime\nimport data_helpers\nfrom text_rnn import TextRNN\nfrom tensorflow.contrib import learn\nflags = tf.app.flags\nFLAGS = flags.FLAGS\ntf.flags.DEFINE_integer('embedding_dim', 100,\n 'Dimensionality of character embedding (default: 100)')\ntf.flags.DEFINE_float('dropout_keep_prob', 0.5,\n 'Dropout keep probability (default: 0.5)')\ntf.flags.DEFINE_integer('batch_size', 128, 'Batch Size (default: 64)')\ntf.flags.DEFINE_integer('num_epochs', 100,\n 'Number of training epochs (default: 200)')\ntf.flags.DEFINE_integer('evaluate_every', 500,\n 'Evaluate model on dev set after this many steps (default: 100)')\ntf.flags.DEFINE_integer('checkpoint_every', 500,\n 'Save model after this many steps (default: 100)')\ntf.flags.DEFINE_integer('num_checkpoints', 3,\n 'Number of checkpoints to store (default: 5)')\ntf.flags.DEFINE_boolean('allow_soft_placement', True,\n 'Allow device soft device placement')\ntf.flags.DEFINE_boolean('log_device_placement', False,\n 'Log placement of ops on devices')\nprint(\"\"\"\nLoading train data...\"\"\")\nx_train, y_train = data_helpers.load_splitted_data_and_labels(\n '../data/toxic_yes_train.txt', '../data/toxic_no_train.txt')\nprint('x_train length:{0}, y_train shape:{1}'.format(len(x_train), y_train.\n shape))\nprint(x_train[0], y_train[0])\nprint(\"\"\"\nLoading dev data...\"\"\")\nx_dev, y_dev = data_helpers.load_splitted_data_and_labels(\n '../data/toxic_yes_dev.txt', '../data/toxic_no_dev.txt')\nprint('x_dev length:{0}, y_dev shape:{1}'.format(len(x_dev), y_dev.shape))\nprint(x_dev[-1], y_dev[-1])\nx = x_train + x_dev\nprint('x length:{0}'.format(len(x)))\nmax_sent_length = 80\nvocab_processor = learn.preprocessing.VocabularyProcessor(max_sent_length)\nx = np.array(list(vocab_processor.fit_transform(x)))\nprint('Shape of word-id matrix: {0}'.format(x.shape))\nx_train = np.array(list(vocab_processor.transform(x_train)))\nprint('Shape of x_train matrix: {0}'.format(x_train.shape))\nx_dev = np.array(list(vocab_processor.transform(x_dev)))\nprint('Shape of x_dev matrix: {0}'.format(x_dev.shape))\nnp.random.seed(10)\nshuffle_indices = np.random.permutation(np.arange(len(y_train)))\nx_train = x_train[shuffle_indices]\ny_train = y_train[shuffle_indices]\ndel x\nvocabsize = len(vocab_processor.vocabulary_)\nprint('Vocabulary Size: {:d}'.format(vocabsize))\nwith tf.Graph().as_default():\n session_conf = tf.ConfigProto(allow_soft_placement=FLAGS.\n allow_soft_placement, log_device_placement=FLAGS.log_device_placement)\n sess = tf.Session(config=session_conf)\n with sess.as_default():\n rnn = TextRNN(sequence_length=x_train.shape[1], num_classes=y_train\n .shape[1], vocab_size=vocabsize, embedding_size=FLAGS.embedding_dim\n )\n global_step = tf.Variable(0, name='global_step', trainable=False)\n optimizer = tf.train.AdamOptimizer(0.001)\n grads_and_vars = optimizer.compute_gradients(rnn.loss)\n train_op = optimizer.apply_gradients(grads_and_vars, global_step=\n global_step)\n grad_summaries = []\n for g, v in grads_and_vars:\n if g is not None:\n grad_hist_summary = tf.summary.histogram('{}/grad/hist'.\n format(v.name), g)\n sparsity_summary = tf.summary.scalar('{}/grad/sparsity'.\n format(v.name), tf.nn.zero_fraction(g))\n grad_summaries.append(grad_hist_summary)\n grad_summaries.append(sparsity_summary)\n grad_summaries_merged = tf.summary.merge(grad_summaries)\n out_dir = os.path.abspath(os.path.join(os.path.curdir, 'runs'))\n print('Writing to {}\\n'.format(out_dir))\n 
loss_summary = tf.summary.scalar('loss', rnn.loss)\n acc_summary = tf.summary.scalar('accuracy', rnn.accuracy)\n train_summary_op = tf.summary.merge([loss_summary, acc_summary,\n grad_summaries_merged])\n train_summary_dir = os.path.join(out_dir, 'summaries', 'train')\n train_summary_writer = tf.summary.FileWriter(train_summary_dir,\n sess.graph)\n dev_summary_op = tf.summary.merge([loss_summary, acc_summary])\n dev_summary_dir = os.path.join(out_dir, 'summaries', 'dev')\n dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)\n checkpoint_dir = os.path.abspath(os.path.join(out_dir, 'checkpoints'))\n checkpoint_prefix = os.path.join(checkpoint_dir, 'model')\n if not os.path.exists(checkpoint_dir):\n os.makedirs(checkpoint_dir)\n saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.\n num_checkpoints)\n vocab_processor.save(os.path.join(out_dir, 'vocab'))\n sess.run(tf.global_variables_initializer())\n vocabulary = vocab_processor.vocabulary_\n initEmbeddings = data_helpers.load_embedding_vectors_glove(vocabulary)\n sess.run(rnn.W_embed.assign(initEmbeddings))\n for v in tf.trainable_variables():\n print(v.name)\n\n def train_step(x_batch, y_batch):\n \"\"\"\n A single training step\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: FLAGS.dropout_keep_prob}\n _, step, summaries, loss, accuracy = sess.run([train_op,\n global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n train_summary_writer.add_summary(summaries, step)\n return loss, accuracy\n\n def dev_step(x_batch, y_batch, writer=None):\n \"\"\"\n Evaluates model on a dev set\n \"\"\"\n feed_dict = {rnn.input_x: x_batch, rnn.input_y: y_batch, rnn.\n dropout_keep_prob: 1.0}\n step, summaries, loss, accuracy = sess.run([global_step,\n dev_summary_op, rnn.loss, rnn.accuracy], feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print('{}: step {}, loss {:g}, acc {:g}'.format(time_str, step,\n loss, accuracy))\n if writer:\n writer.add_summary(summaries, step)\n return accuracy\n batches = data_helpers.batch_iter(list(zip(x_train, y_train)),\n FLAGS.batch_size, FLAGS.num_epochs)\n prev_val_acc = 0\n for batch in batches:\n x_batch, y_batch = zip(*batch)\n train_loss, train_acc = train_step(x_batch, y_batch)\n current_step = tf.train.global_step(sess, global_step)\n if current_step % FLAGS.evaluate_every == 0:\n print('\\nTrain loss:{0}, Train accuracy:{1}'.format(\n train_loss, train_acc))\n print('Evaluation:')\n val_acc = dev_step(x_dev, y_dev, writer=dev_summary_writer)\n if val_acc > 0.95 and val_acc > prev_val_acc:\n save_path = saver.save(sess, checkpoint_prefix,\n global_step=current_step)\n print('Model checkpoint saved at {0}, accuracy={1}'.\n format(save_path, round(val_acc, 3)))\n prev_val_acc = val_acc\n print('')\n",
"step-5": "#! /usr/bin/env python\n\nimport tensorflow as tf\nimport numpy as np\nimport os\nimport time\nimport datetime\nimport data_helpers\nfrom text_rnn import TextRNN\nfrom tensorflow.contrib import learn\n\n\n# Parameters\n# ==================================================\n\n# Data loading params\nflags = tf.app.flags\nFLAGS = flags.FLAGS\n\n\n# Model Hyperparameters\ntf.flags.DEFINE_integer(\"embedding_dim\", 100, \"Dimensionality of character embedding (default: 100)\")\ntf.flags.DEFINE_float(\"dropout_keep_prob\", 0.5, \"Dropout keep probability (default: 0.5)\")\n\n# Training parameters\ntf.flags.DEFINE_integer(\"batch_size\", 128, \"Batch Size (default: 64)\")\ntf.flags.DEFINE_integer(\"num_epochs\", 100, \"Number of training epochs (default: 200)\")\ntf.flags.DEFINE_integer(\"evaluate_every\", 500, \"Evaluate model on dev set after this many steps (default: 100)\")\ntf.flags.DEFINE_integer(\"checkpoint_every\", 500, \"Save model after this many steps (default: 100)\")\ntf.flags.DEFINE_integer(\"num_checkpoints\", 3, \"Number of checkpoints to store (default: 5)\")\n\n# Misc Parameters\ntf.flags.DEFINE_boolean(\"allow_soft_placement\", True, \"Allow device soft device placement\")\ntf.flags.DEFINE_boolean(\"log_device_placement\", False, \"Log placement of ops on devices\")\n\n\n# Data Preparation\n# ==================================================\n# Load data\nprint(\"\\nLoading train data...\")\nx_train, y_train = data_helpers.load_splitted_data_and_labels('../data/toxic_yes_train.txt', '../data/toxic_no_train.txt')\nprint(\"x_train length:{0}, y_train shape:{1}\".format(len(x_train), y_train.shape))\nprint(x_train[0], y_train[0])\n\nprint(\"\\nLoading dev data...\")\nx_dev, y_dev = data_helpers.load_splitted_data_and_labels('../data/toxic_yes_dev.txt', '../data/toxic_no_dev.txt')\nprint(\"x_dev length:{0}, y_dev shape:{1}\".format(len(x_dev), y_dev.shape))\nprint(x_dev[-1], y_dev[-1])\n\nx = x_train+x_dev\nprint(\"x length:{0}\".format(len(x)))\n\n# Build vocabulary\n# max_sent_length, sent = max([(len(i.split(\" \")),i) for i in x])\n# print(\"Max sent length = {0}\".format(max_sent_length))\n# print(\"Sent with max length = {0}\".format(sent))\nmax_sent_length = 80\nvocab_processor = learn.preprocessing.VocabularyProcessor(max_sent_length)\nx = np.array(list(vocab_processor.fit_transform(x))) #x is an iterable, [n_samples, max_sent_length] Word-id matrix.\nprint(\"Shape of word-id matrix: {0}\".format(x.shape))\n\n#Transform x_train and x_dev to word-id matrix\nx_train = np.array(list(vocab_processor.transform(x_train)))\nprint(\"Shape of x_train matrix: {0}\".format(x_train.shape))\nx_dev = np.array(list(vocab_processor.transform(x_dev)))\nprint(\"Shape of x_dev matrix: {0}\".format(x_dev.shape))\n\n# Randomly shuffle data\nnp.random.seed(10)\nshuffle_indices = np.random.permutation(np.arange(len(y_train)))\nx_train = x_train[shuffle_indices]\ny_train = y_train[shuffle_indices]\n\ndel x\n\nvocabsize = len(vocab_processor.vocabulary_)\nprint(\"Vocabulary Size: {:d}\".format(vocabsize))\n\n# Training\n# ==================================================\n\nwith tf.Graph().as_default():\n session_conf = tf.ConfigProto(\n allow_soft_placement=FLAGS.allow_soft_placement,\n log_device_placement=FLAGS.log_device_placement)\n sess = tf.Session(config=session_conf)\n with sess.as_default():\n rnn = TextRNN(\n sequence_length=x_train.shape[1],\n num_classes=y_train.shape[1],\n vocab_size=vocabsize,\n embedding_size=FLAGS.embedding_dim)\n\n # Define Training procedure\n 
global_step = tf.Variable(0, name=\"global_step\", trainable=False)\n optimizer = tf.train.AdamOptimizer(1e-3)\n grads_and_vars = optimizer.compute_gradients(rnn.loss)\n train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)\n\n # Keep track of gradient values and sparsity (optional)\n grad_summaries = []\n for g, v in grads_and_vars:\n if g is not None:\n grad_hist_summary = tf.summary.histogram(\"{}/grad/hist\".format(v.name), g)\n sparsity_summary = tf.summary.scalar(\"{}/grad/sparsity\".format(v.name), tf.nn.zero_fraction(g))\n grad_summaries.append(grad_hist_summary)\n grad_summaries.append(sparsity_summary)\n grad_summaries_merged = tf.summary.merge(grad_summaries)\n\n # Output directory for models and summaries\n # timestamp = str(int(time.time()))\n # out_dir = os.path.abspath(os.path.join(os.path.curdir, \"runs\", timestamp))\n out_dir = os.path.abspath(os.path.join(os.path.curdir, \"runs\"))\n print(\"Writing to {}\\n\".format(out_dir))\n\n # Summaries for loss and accuracy\n loss_summary = tf.summary.scalar(\"loss\", rnn.loss)\n acc_summary = tf.summary.scalar(\"accuracy\", rnn.accuracy)\n\n # Train Summaries\n train_summary_op = tf.summary.merge([loss_summary, acc_summary, grad_summaries_merged])\n train_summary_dir = os.path.join(out_dir, \"summaries\", \"train\")\n train_summary_writer = tf.summary.FileWriter(train_summary_dir, sess.graph)\n\n # Dev summaries\n dev_summary_op = tf.summary.merge([loss_summary, acc_summary])\n dev_summary_dir = os.path.join(out_dir, \"summaries\", \"dev\")\n dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)\n\n # Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it\n checkpoint_dir = os.path.abspath(os.path.join(out_dir, \"checkpoints\"))\n checkpoint_prefix = os.path.join(checkpoint_dir, \"model\")\n if not os.path.exists(checkpoint_dir):\n os.makedirs(checkpoint_dir)\n saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.num_checkpoints)\n\n # Write vocabulary\n vocab_processor.save(os.path.join(out_dir, \"vocab\"))\n\n # Initialize all variables\n sess.run(tf.global_variables_initializer())\n\n vocabulary = vocab_processor.vocabulary_\n initEmbeddings = data_helpers.load_embedding_vectors_glove(vocabulary)\n sess.run(rnn.W_embed.assign(initEmbeddings))\n\n for v in tf.trainable_variables():\n print(v.name)\n\n def train_step(x_batch, y_batch):\n \"\"\"\n A single training step\n \"\"\"\n feed_dict = {\n rnn.input_x: x_batch,\n rnn.input_y: y_batch,\n rnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n # print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)\n\n return loss,accuracy\n\n def dev_step(x_batch, y_batch, writer=None):\n \"\"\"\n Evaluates model on a dev set\n \"\"\"\n feed_dict = {\n rnn.input_x: x_batch,\n rnn.input_y: y_batch,\n rnn.dropout_keep_prob: 1.0\n }\n step, summaries, loss, accuracy = sess.run(\n [global_step, dev_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n if writer:\n writer.add_summary(summaries, step)\n\n return accuracy\n\n # Create batches agnostic of class distributions\n batches = 
data_helpers.batch_iter(list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)\n\n # Create batches aware of imbalance in class distributions\n # batches = data_helpers.makeBatches(x_train, y_train[:,1].tolist(), FLAGS.batch_size, FLAGS.num_epochs)\n\n # Training loop. For each batch...\n prev_val_acc = 0\n for batch in batches:\n x_batch, y_batch = zip(*batch)\n train_loss, train_acc = train_step(x_batch, y_batch)\n current_step = tf.train.global_step(sess, global_step)\n if current_step % FLAGS.evaluate_every == 0:\n print(\"\\nTrain loss:{0}, Train accuracy:{1}\".format(train_loss, train_acc))\n print(\"Evaluation:\")\n val_acc = dev_step(x_dev, y_dev, writer=dev_summary_writer)\n if val_acc > 0.95 and val_acc > prev_val_acc:\n save_path = saver.save(sess, checkpoint_prefix, global_step=current_step)\n print(\"Model checkpoint saved at {0}, accuracy={1}\".format(save_path, round(val_acc, 3)))\n prev_val_acc = val_acc\n\n print(\"\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
n = int(input("enter the number\n"))
total = 0
for i in range(1, n):  # sum the proper divisors of n
    if n % i == 0:
        total = total + i
if total == n:
    print("the number is perfect")
else:
    print("the number is not perfect")
|
normal
|
{
"blob_id": "5721786b61cf8706b1d401a46d06f2d32153df8b",
"index": 765,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, n - 1):\n rem = n % i\n if rem == 0:\n sum = sum + i\nif sum == n:\n print('the number is perfect')\nelse:\n print('not prime')\n",
"step-3": "n = int(input('enter the number\\n'))\nsum = 0\nfor i in range(1, n - 1):\n rem = n % i\n if rem == 0:\n sum = sum + i\nif sum == n:\n print('the number is perfect')\nelse:\n print('not prime')\n",
"step-4": "n=int(input(\"enter the number\\n\"))\nsum=0\nfor i in range(1,n-1):\n rem=n%i\n if(rem==0):\n sum=sum+i\nif(sum==n):\n print(\"the number is perfect\")\nelse:\n print(\"not prime\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
##Problem 10 «The number of even elements of the sequence» (Medium)
##Statement
##Determine the number of even elements in the sequence ending with the number 0.
a = True
i = 0
while a:
    x = int(input())
    if x != 0:
        if x % 2 == 0:
            i = i + 1
    else:
        a = False
print(i)
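# Worked example (added): the inputs 2, 5, 8, 0 stop at the 0 and print 2,
# since exactly two of the entered values (2 and 8) are even.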
|
normal
|
{
"blob_id": "2eddd446dc59695b185be368b359bae78a868b90",
"index": 9918,
"step-1": "\n##Problem 10 «The number of even elements of the sequence» (Medium)\n##Statement\n##Determine the number of even elements in the sequence ending with the number 0. \n\n\na = True\ni = 0\nwhile a is True:\n x = int(input())\n if x != 0:\n if x%2 == 0:\n i = i+1\n else:\n a =False\nprint(i)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import subprocess
import re
class Command:
InputSize = 1
OutputSize = 2
MultiThreadable = True
ShareResources = False
def __init__(self, bin, config, showerr=False):
self.travatar = subprocess.Popen([bin, "-config_file", config, "-trace_out", "STDOUT", "-in_format", "egret", "-buffer", "false"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None if showerr else subprocess.PIPE, universal_newlines=True)
self.span_reg = re.compile(r"\[([0-9]+), ([0-9]+)\]")
def routine(self, instream):
egret_tree = instream[0]
if not egret_tree.startswith("success\n"):
return (egret_tree, "",)
egret_tree = egret_tree[8:]
self.travatar.stdin.write(egret_tree)
self.travatar.stdin.flush()
travatar_trace = self.travatar.stdout.readline()
spltrace = travatar_trace.split(" ||| ")
m = self.span_reg.match(spltrace[1])
inputlen = int(m.group(2))
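        # The matched span (presumably "[0, N]" on the root rule) gives the input
        # length; the loop below counts quoted terminal symbols in each trace line
        # until all N input words have been accounted for.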
while True:
travatar_trace_line = self.travatar.stdout.readline()
spltrace = travatar_trace_line.split(" ||| ")
spltree = spltrace[2].split(" ")
for x in spltree:
if x and x[0] == x[-1] == "\"":
inputlen -= 1
spltrace[4] = ".\n"
travatar_trace += " ||| ".join(spltrace)
if not inputlen:
break
travatar_output = self.travatar.stdout.readline().rstrip("\n")
return ("success\n" + travatar_output + "\n" + travatar_trace, travatar_output,)
|
normal
|
{
"blob_id": "91cef72962332e7efcc86f1b19da4382bd72a466",
"index": 9278,
"step-1": "<mask token>\n\n\nclass Command:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Command:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, bin, config, showerr=False):\n self.travatar = subprocess.Popen([bin, '-config_file', config,\n '-trace_out', 'STDOUT', '-in_format', 'egret', '-buffer',\n 'false'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr\n =None if showerr else subprocess.PIPE, universal_newlines=True)\n self.span_reg = re.compile('\\\\[([0-9]+), ([0-9]+)\\\\]')\n\n def routine(self, instream):\n egret_tree = instream[0]\n if not egret_tree.startswith('success\\n'):\n return egret_tree, ''\n egret_tree = egret_tree[8:]\n self.travatar.stdin.write(egret_tree)\n self.travatar.stdin.flush()\n travatar_trace = self.travatar.stdout.readline()\n spltrace = travatar_trace.split(' ||| ')\n m = self.span_reg.match(spltrace[1])\n inputlen = int(m.group(2))\n while True:\n travatar_trace_line = self.travatar.stdout.readline()\n spltrace = travatar_trace_line.split(' ||| ')\n spltree = spltrace[2].split(' ')\n for x in spltree:\n if x and x[0] == x[-1] == '\"':\n inputlen -= 1\n spltrace[4] = '.\\n'\n travatar_trace += ' ||| '.join(spltrace)\n if not inputlen:\n break\n travatar_output = self.travatar.stdout.readline().rstrip('\\n')\n return ('success\\n' + travatar_output + '\\n' + travatar_trace,\n travatar_output)\n",
"step-3": "<mask token>\n\n\nclass Command:\n InputSize = 1\n OutputSize = 2\n MultiThreadable = True\n ShareResources = False\n\n def __init__(self, bin, config, showerr=False):\n self.travatar = subprocess.Popen([bin, '-config_file', config,\n '-trace_out', 'STDOUT', '-in_format', 'egret', '-buffer',\n 'false'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr\n =None if showerr else subprocess.PIPE, universal_newlines=True)\n self.span_reg = re.compile('\\\\[([0-9]+), ([0-9]+)\\\\]')\n\n def routine(self, instream):\n egret_tree = instream[0]\n if not egret_tree.startswith('success\\n'):\n return egret_tree, ''\n egret_tree = egret_tree[8:]\n self.travatar.stdin.write(egret_tree)\n self.travatar.stdin.flush()\n travatar_trace = self.travatar.stdout.readline()\n spltrace = travatar_trace.split(' ||| ')\n m = self.span_reg.match(spltrace[1])\n inputlen = int(m.group(2))\n while True:\n travatar_trace_line = self.travatar.stdout.readline()\n spltrace = travatar_trace_line.split(' ||| ')\n spltree = spltrace[2].split(' ')\n for x in spltree:\n if x and x[0] == x[-1] == '\"':\n inputlen -= 1\n spltrace[4] = '.\\n'\n travatar_trace += ' ||| '.join(spltrace)\n if not inputlen:\n break\n travatar_output = self.travatar.stdout.readline().rstrip('\\n')\n return ('success\\n' + travatar_output + '\\n' + travatar_trace,\n travatar_output)\n",
"step-4": "import subprocess\nimport re\n\n\nclass Command:\n InputSize = 1\n OutputSize = 2\n MultiThreadable = True\n ShareResources = False\n\n def __init__(self, bin, config, showerr=False):\n self.travatar = subprocess.Popen([bin, '-config_file', config,\n '-trace_out', 'STDOUT', '-in_format', 'egret', '-buffer',\n 'false'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr\n =None if showerr else subprocess.PIPE, universal_newlines=True)\n self.span_reg = re.compile('\\\\[([0-9]+), ([0-9]+)\\\\]')\n\n def routine(self, instream):\n egret_tree = instream[0]\n if not egret_tree.startswith('success\\n'):\n return egret_tree, ''\n egret_tree = egret_tree[8:]\n self.travatar.stdin.write(egret_tree)\n self.travatar.stdin.flush()\n travatar_trace = self.travatar.stdout.readline()\n spltrace = travatar_trace.split(' ||| ')\n m = self.span_reg.match(spltrace[1])\n inputlen = int(m.group(2))\n while True:\n travatar_trace_line = self.travatar.stdout.readline()\n spltrace = travatar_trace_line.split(' ||| ')\n spltree = spltrace[2].split(' ')\n for x in spltree:\n if x and x[0] == x[-1] == '\"':\n inputlen -= 1\n spltrace[4] = '.\\n'\n travatar_trace += ' ||| '.join(spltrace)\n if not inputlen:\n break\n travatar_output = self.travatar.stdout.readline().rstrip('\\n')\n return ('success\\n' + travatar_output + '\\n' + travatar_trace,\n travatar_output)\n",
"step-5": "import subprocess\nimport re\n\n\nclass Command:\n\n InputSize = 1\n OutputSize = 2\n MultiThreadable = True\n ShareResources = False\n\n def __init__(self, bin, config, showerr=False):\n self.travatar = subprocess.Popen([bin, \"-config_file\", config, \"-trace_out\", \"STDOUT\", \"-in_format\", \"egret\", \"-buffer\", \"false\"],\n stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None if showerr else subprocess.PIPE, universal_newlines=True)\n\n self.span_reg = re.compile(r\"\\[([0-9]+), ([0-9]+)\\]\")\n\n def routine(self, instream):\n egret_tree = instream[0]\n if not egret_tree.startswith(\"success\\n\"):\n return (egret_tree, \"\",)\n\n egret_tree = egret_tree[8:]\n self.travatar.stdin.write(egret_tree)\n self.travatar.stdin.flush()\n\n travatar_trace = self.travatar.stdout.readline()\n spltrace = travatar_trace.split(\" ||| \")\n m = self.span_reg.match(spltrace[1])\n\n inputlen = int(m.group(2))\n\n while True:\n travatar_trace_line = self.travatar.stdout.readline()\n spltrace = travatar_trace_line.split(\" ||| \")\n spltree = spltrace[2].split(\" \")\n for x in spltree:\n if x and x[0] == x[-1] == \"\\\"\":\n inputlen -= 1\n spltrace[4] = \".\\n\"\n travatar_trace += \" ||| \".join(spltrace)\n if not inputlen:\n break\n \n travatar_output = self.travatar.stdout.readline().rstrip(\"\\n\")\n\n return (\"success\\n\" + travatar_output + \"\\n\" + travatar_trace, travatar_output,)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# uploadops.py
# CS304-Final Project
# Created by: Megan Shum, Maxine Hood, Mina Hattori
#!/usr/local/bin/python2.7
# This file handles all the SQL calls for the upload page.
import sys
import MySQLdb
import dbconn2
def uploadPost(conn, username, description, location, time_stamp, pathname):
'''Inserts post in Posts table'''
curs = conn.cursor(MySQLdb.cursors.DictCursor) # results as Dictionaries
curs.execute('insert into posts(username, description, location, time_stamp, pic) values(%s, %s, %s, %s, %s)', [username, description, location, time_stamp, pathname])
# ================================================================
# This starts the ball rolling, *if* the script is run as a script,
# rather than just being imported.
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: {name} nm".format(name=sys.argv[0])
else:
DSN = dbconn2.read_cnf()
DSN['db'] = 'mmm_db' # the database we want to connect to
dbconn2.connect(DSN)
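        # NOTE (added): lookupByNM is not defined or imported in this module;
        # as written the next line would raise a NameError. It presumably lives
        # in a sibling module, or is leftover from the course template.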
print lookupByNM(sys.argv[1])
|
normal
|
{
"blob_id": "f0deb8ccaf50ea0abb9e1632eaa4354a4f21dece",
"index": 5794,
"step-1": "# uploadops.py\n# CS304-Final Project\n# Created by: Megan Shum, Maxine Hood, Mina Hattori\n#!/usr/local/bin/python2.7\n# This file handles all the SQL calls for the upload page.\n\nimport sys\nimport MySQLdb\nimport dbconn2\n\ndef uploadPost(conn, username, description, location, time_stamp, pathname):\n '''Inserts post in Posts table'''\n curs = conn.cursor(MySQLdb.cursors.DictCursor) # results as Dictionaries\n curs.execute('insert into posts(username, description, location, time_stamp, pic) values(%s, %s, %s, %s, %s)', [username, description, location, time_stamp, pathname])\n\n# ================================================================\n# This starts the ball rolling, *if* the script is run as a script,\n# rather than just being imported.\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print \"Usage: {name} nm\".format(name=sys.argv[0])\n else:\n DSN = dbconn2.read_cnf()\n DSN['db'] = 'mmm_db' # the database we want to connect to\n dbconn2.connect(DSN)\n print lookupByNM(sys.argv[1])\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from .entity import EventBase, event_class
from .. import LOG as _LOG
LOG = _LOG.getChild('entity.event')
@event_class()
class FunctionCallEvent(EventBase):
"""
function call
"""
deferred = True
def parse_jsondict(self, jsdict):
assert 'func_name' in jsdict['option'], 'func_name required'
super(FunctionCallEvent, self).parse_jsondict(jsdict)
@event_class()
class PacketEvent(EventBase):
"""
L7 packet message
"""
deferred = True
@classmethod
def from_message(cls, src_process, dst_process, message):
inst = cls()
# we do not set inst.process here
inst.option = {
'src_process': src_process,
'dst_process': dst_process,
'message': message
}
return inst
@event_class()
class LogEvent(EventBase):
"""
syslog (not deferrable)
"""
deferred = False
@event_class()
class InspectionEndEvent(EventBase):
"""
Inspection end (not deferrable)
"""
deferred = False
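# Illustrative usage (added; based only on the from_message() signature above):
#   pkt = PacketEvent.from_message('proc_a', 'proc_b', 'hello')
#   pkt.option['dst_process']   # -> 'proc_b'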
|
normal
|
{
"blob_id": "9a665d126d7b48adbd876b48c3d8806eabea1108",
"index": 3716,
"step-1": "<mask token>\n\n\n@event_class()\nclass FunctionCallEvent(EventBase):\n <mask token>\n <mask token>\n <mask token>\n\n\n@event_class()\nclass PacketEvent(EventBase):\n \"\"\"\n L7 packet message\n \"\"\"\n deferred = True\n\n @classmethod\n def from_message(cls, src_process, dst_process, message):\n inst = cls()\n inst.option = {'src_process': src_process, 'dst_process':\n dst_process, 'message': message}\n return inst\n\n\n@event_class()\nclass LogEvent(EventBase):\n \"\"\"\n syslog (not deferrable)\n \"\"\"\n deferred = False\n\n\n@event_class()\nclass InspectionEndEvent(EventBase):\n \"\"\"\n Inspection end (not deferrable)\n \"\"\"\n deferred = False\n",
"step-2": "<mask token>\n\n\n@event_class()\nclass FunctionCallEvent(EventBase):\n <mask token>\n <mask token>\n\n def parse_jsondict(self, jsdict):\n assert 'func_name' in jsdict['option'], 'func_name required'\n super(FunctionCallEvent, self).parse_jsondict(jsdict)\n\n\n@event_class()\nclass PacketEvent(EventBase):\n \"\"\"\n L7 packet message\n \"\"\"\n deferred = True\n\n @classmethod\n def from_message(cls, src_process, dst_process, message):\n inst = cls()\n inst.option = {'src_process': src_process, 'dst_process':\n dst_process, 'message': message}\n return inst\n\n\n@event_class()\nclass LogEvent(EventBase):\n \"\"\"\n syslog (not deferrable)\n \"\"\"\n deferred = False\n\n\n@event_class()\nclass InspectionEndEvent(EventBase):\n \"\"\"\n Inspection end (not deferrable)\n \"\"\"\n deferred = False\n",
"step-3": "<mask token>\n\n\n@event_class()\nclass FunctionCallEvent(EventBase):\n <mask token>\n deferred = True\n\n def parse_jsondict(self, jsdict):\n assert 'func_name' in jsdict['option'], 'func_name required'\n super(FunctionCallEvent, self).parse_jsondict(jsdict)\n\n\n@event_class()\nclass PacketEvent(EventBase):\n \"\"\"\n L7 packet message\n \"\"\"\n deferred = True\n\n @classmethod\n def from_message(cls, src_process, dst_process, message):\n inst = cls()\n inst.option = {'src_process': src_process, 'dst_process':\n dst_process, 'message': message}\n return inst\n\n\n@event_class()\nclass LogEvent(EventBase):\n \"\"\"\n syslog (not deferrable)\n \"\"\"\n deferred = False\n\n\n@event_class()\nclass InspectionEndEvent(EventBase):\n \"\"\"\n Inspection end (not deferrable)\n \"\"\"\n deferred = False\n",
"step-4": "<mask token>\nLOG = _LOG.getChild('entity.event')\n\n\n@event_class()\nclass FunctionCallEvent(EventBase):\n \"\"\"\n function call\n \"\"\"\n deferred = True\n\n def parse_jsondict(self, jsdict):\n assert 'func_name' in jsdict['option'], 'func_name required'\n super(FunctionCallEvent, self).parse_jsondict(jsdict)\n\n\n@event_class()\nclass PacketEvent(EventBase):\n \"\"\"\n L7 packet message\n \"\"\"\n deferred = True\n\n @classmethod\n def from_message(cls, src_process, dst_process, message):\n inst = cls()\n inst.option = {'src_process': src_process, 'dst_process':\n dst_process, 'message': message}\n return inst\n\n\n@event_class()\nclass LogEvent(EventBase):\n \"\"\"\n syslog (not deferrable)\n \"\"\"\n deferred = False\n\n\n@event_class()\nclass InspectionEndEvent(EventBase):\n \"\"\"\n Inspection end (not deferrable)\n \"\"\"\n deferred = False\n",
"step-5": "from .entity import EventBase, event_class\nfrom .. import LOG as _LOG\nLOG = _LOG.getChild('entity.event')\n\n@event_class()\nclass FunctionCallEvent(EventBase):\n \"\"\"\n function call\n \"\"\"\n deferred = True\n def parse_jsondict(self, jsdict):\n assert 'func_name' in jsdict['option'], 'func_name required'\n super(FunctionCallEvent, self).parse_jsondict(jsdict)\n\n \n@event_class()\nclass PacketEvent(EventBase):\n \"\"\"\n L7 packet message\n \"\"\"\n deferred = True\n\n @classmethod\n def from_message(cls, src_process, dst_process, message):\n inst = cls()\n # we do not set inst.process here\n inst.option = {\n 'src_process': src_process,\n 'dst_process': dst_process,\n 'message': message\n } \n return inst\n\n\n@event_class()\nclass LogEvent(EventBase):\n \"\"\"\n syslog (not deferrable)\n \"\"\"\n deferred = False\n\n\n@event_class()\nclass InspectionEndEvent(EventBase):\n \"\"\"\n Inspection end (not deferrable)\n \"\"\"\n deferred = False\n \n",
"step-ids": [
11,
12,
13,
15,
17
]
}
|
[
11,
12,
13,
15,
17
] |
from quantopian.algorithm import order_optimal_portfolio
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.factors import SimpleMovingAverage
from quantopian.pipeline.filters import QTradableStocksUS
import quantopian.optimize as opt
from quantopian.pipeline.factors import Returns
def initialize(context):
# Schedule our rebalance function to run at the end of
# each day, when the market closes
#set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))
#set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))
#set_slippage(slippage.VolumeShareSlippage(0))
schedule_function(
my_rebalance,
date_rules.every_day(),
        time_rules.market_close(minutes=1)
)
# Create our pipeline and attach it to our algorithm.
my_pipe = make_pipeline()
attach_pipeline(my_pipe, 'my_pipeline')
def make_pipeline():
#longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())
#shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())
longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())
shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS())
return Pipeline(
columns={
'longs': longs,
'shorts': shorts,
},
screen=QTradableStocksUS()& (shorts | longs)
)
def compute_target_weights(context, data):
"""
Compute ordering weights.
"""
# Initialize empty target weights dictionary.
# This will map securities to their target weight.
weights = {}
# If there are securities in our longs and shorts lists,
# compute even target weights for each security.
    if context.longs:
long_weight = 0.5 / len(context.longs)
if context.shorts:
short_weight = -0.5 / len(context.shorts)
#if ~(context.longs & context.shorts):
# return weights
# Exit positions in our portfolio if they are not
# in our longs or shorts lists.
for security in context.portfolio.positions:
if security not in context.longs and security not in context.shorts and data.can_trade(security):
weights[security] = 0
for security in context.longs:
weights[security] = long_weight
for security in context.shorts:
weights[security] = short_weight
return weights
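# Worked example (added): with 2 securities in context.longs and 4 in
# context.shorts, each long gets a weight of 0.5/2 = 0.25 and each short
# -0.5/4 = -0.125, so the targets are 50% long and 50% short overall.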
def before_trading_start(context, data):
"""
Get pipeline results.
"""
# Gets our pipeline output every day.
pipe_results = pipeline_output('my_pipeline')
# Go long in securities for which the 'longs' value is True,
# and check if they can be traded.
context.longs = []
for sec in pipe_results[pipe_results['longs']].index.tolist():
if data.can_trade(sec):
context.longs.append(sec)
#print(context.longs)
#print('Longs: ')
#print(context.longs)
# Go short in securities for which the 'shorts' value is True,
# and check if they can be traded.
context.shorts = []
for sec in pipe_results[pipe_results['shorts']].index.tolist():
if data.can_trade(sec):
context.shorts.append(sec)
#print('Shorts: ')
#print(context.shorts)
def my_rebalance(context, data):
"""
Rebalance daily
"""
for stock in context.portfolio.positions:
#print('selling everything')
#print(stock)
order_target_percent(stock, 0.0)
# Calculate target weights to rebalance
#print(context)
target_weights = compute_target_weights(context, data)
#print(target_weights)
# If we have target weights, rebalance our portfolio
if target_weights:
order_optimal_portfolio(
objective=opt.TargetWeights(target_weights),
constraints=[],
)
|
normal
|
{
"blob_id": "c447d1fe38a4af43de39e05d46dacbe88249d427",
"index": 3654,
"step-1": "<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-4": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\ndef make_pipeline():\n longs = Returns(window_length=2).bottom(1, mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1, mask=QTradableStocksUS())\n return Pipeline(columns={'longs': longs, 'shorts': shorts}, screen=\n QTradableStocksUS() & (shorts | longs))\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-5": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\ndef initialize(context):\n # Schedule our rebalance function to run at the end of\n # each day, when the market closes\n #set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))\n #set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))\n #set_slippage(slippage.VolumeShareSlippage(0))\n schedule_function(\n my_rebalance,\n date_rules.every_day(),\n time_rules.market_close(minutes=1 )\n )\n\n # Create our pipeline and attach it to our algorithm.\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n\ndef make_pipeline():\n \n #longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())\n #shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())\n longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS()) \n\n return Pipeline(\n columns={\n 'longs': longs,\n 'shorts': shorts,\n },\n screen=QTradableStocksUS()& (shorts | longs)\n )\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n\n # Initialize empty target weights dictionary.\n # This will map securities to their target weight.\n weights = {}\n\n # If there are securities in our longs and shorts lists,\n # compute even target weights for each security.\n if context.longs :\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n #if ~(context.longs & context.shorts):\n # return weights\n\n # Exit positions in our portfolio if they are not\n # in our longs or shorts lists.\n for security in context.portfolio.positions:\n if security not in context.longs and security not in context.shorts and data.can_trade(security):\n weights[security] = 0\n\n for security in context.longs:\n weights[security] = long_weight\n\n for security in context.shorts:\n weights[security] = short_weight\n\n return weights\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n\n # Gets our pipeline output every day.\n pipe_results = pipeline_output('my_pipeline')\n\n # Go long in securities for which the 'longs' value is True,\n # and check if they can be traded.\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n #print(context.longs)\n #print('Longs: ') \n #print(context.longs)\n # Go short in securities for which the 'shorts' value is True,\n # and check if they can be traded.\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n #print('Shorts: ')\n #print(context.shorts)\n \n \n \ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n #print('selling everything')\n #print(stock)\n order_target_percent(stock, 0.0) \n # Calculate target weights to rebalance\n #print(context)\n target_weights = compute_target_weights(context, data)\n #print(target_weights)\n\n # If we have target 
weights, rebalance our portfolio\n if target_weights:\n order_optimal_portfolio(\n objective=opt.TargetWeights(target_weights),\n constraints=[],\n )\n",
"step-ids": [
1,
2,
4,
6,
7
]
}
|
[
1,
2,
4,
6,
7
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 24 12:16:15 2020
@author: zhangjuefei
"""
import sys
sys.path.append('../..')
import numpy as np
from sklearn.datasets import fetch_openml
from sklearn.preprocessing import OneHotEncoder
import matrixslow as ms
# Load the MNIST dataset, take a subset of samples and normalise
X, y = fetch_openml('mnist_784', version=1, return_X_y=True)
X, y = X[:1000] / 255, y.astype(np.int)[:1000]
# Convert the integer labels into one-hot encoding
oh = OneHotEncoder(sparse=False)
one_hot_label = oh.fit_transform(y.values.reshape(-1, 1))
# Input image size
img_shape = (28, 28)
# Input image
x = ms.core.Variable(img_shape, init=False, trainable=False)
# One-hot label
one_hot = ms.core.Variable(dim=(10, 1), init=False, trainable=False)
# First convolutional layer
conv1 = ms.layer.conv([x], img_shape, 3, (5, 5), "ReLU")
# First pooling layer
pooling1 = ms.layer.pooling(conv1, (3, 3), (2, 2))
# Second convolutional layer
conv2 = ms.layer.conv(pooling1, (14, 14), 3, (3, 3), "ReLU")
# Second pooling layer
pooling2 = ms.layer.pooling(conv2, (3, 3), (2, 2))
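# After two stride-2 poolings the 28x28 input shrinks to 14x14 and then 7x7,
# so the 3 feature maps flatten to 3 * 7 * 7 = 147 inputs for the fc layer below.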
# Fully connected layer
fc1 = ms.layer.fc(ms.ops.Concat(*pooling2), 147, 120, "ReLU")
# Output layer
output = ms.layer.fc(fc1, 120, 10, "None")
# Classification probabilities
predict = ms.ops.SoftMax(output)
# Cross-entropy loss
loss = ms.ops.loss.CrossEntropyWithSoftMax(output, one_hot)
# Learning rate
learning_rate = 0.005
# Optimizer
optimizer = ms.optimizer.Adam(ms.default_graph, loss, learning_rate)
# Batch size
batch_size = 32
# Training loop
for epoch in range(60):
batch_count = 0
for i in range(len(X)):
feature = np.mat(X.values[i]).reshape(img_shape)
label = np.mat(one_hot_label[i]).T
x.set_value(feature)
one_hot.set_value(label)
optimizer.one_step()
batch_count += 1
if batch_count >= batch_size:
print("epoch: {:d}, iteration: {:d}, loss: {:.3f}".format(epoch + 1, i + 1, loss.value[0, 0]))
optimizer.update()
batch_count = 0
pred = []
for i in range(len(X)):
        feature = np.mat(X.values[i]).reshape(img_shape)
x.set_value(feature)
predict.forward()
pred.append(predict.value.A.ravel())
pred = np.array(pred).argmax(axis=1)
accuracy = (y == pred).astype(np.int).sum() / len(X)
print("epoch: {:d}, accuracy: {:.3f}".format(epoch + 1, accuracy))
|
normal
|
{
"blob_id": "63f155f7da958e9b6865007c701f7cf986b0cbac",
"index": 7800,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../..')\n<mask token>\nfor epoch in range(60):\n batch_count = 0\n for i in range(len(X)):\n feature = np.mat(X.values[i]).reshape(img_shape)\n label = np.mat(one_hot_label[i]).T\n x.set_value(feature)\n one_hot.set_value(label)\n optimizer.one_step()\n batch_count += 1\n if batch_count >= batch_size:\n print('epoch: {:d}, iteration: {:d}, loss: {:.3f}'.format(epoch +\n 1, i + 1, loss.value[0, 0]))\n optimizer.update()\n batch_count = 0\n pred = []\n for i in range(len(X)):\n feature = np.mat(X[i]).reshape(img_shape)\n x.set_value(feature)\n predict.forward()\n pred.append(predict.value.A.ravel())\n pred = np.array(pred).argmax(axis=1)\n accuracy = (y == pred).astype(np.int).sum() / len(X)\n print('epoch: {:d}, accuracy: {:.3f}'.format(epoch + 1, accuracy))\n",
"step-3": "<mask token>\nsys.path.append('../..')\n<mask token>\nX, y = fetch_openml('mnist_784', version=1, return_X_y=True)\nX, y = X[:1000] / 255, y.astype(np.int)[:1000]\noh = OneHotEncoder(sparse=False)\none_hot_label = oh.fit_transform(y.values.reshape(-1, 1))\nimg_shape = 28, 28\nx = ms.core.Variable(img_shape, init=False, trainable=False)\none_hot = ms.core.Variable(dim=(10, 1), init=False, trainable=False)\nconv1 = ms.layer.conv([x], img_shape, 3, (5, 5), 'ReLU')\npooling1 = ms.layer.pooling(conv1, (3, 3), (2, 2))\nconv2 = ms.layer.conv(pooling1, (14, 14), 3, (3, 3), 'ReLU')\npooling2 = ms.layer.pooling(conv2, (3, 3), (2, 2))\nfc1 = ms.layer.fc(ms.ops.Concat(*pooling2), 147, 120, 'ReLU')\noutput = ms.layer.fc(fc1, 120, 10, 'None')\npredict = ms.ops.SoftMax(output)\nloss = ms.ops.loss.CrossEntropyWithSoftMax(output, one_hot)\nlearning_rate = 0.005\noptimizer = ms.optimizer.Adam(ms.default_graph, loss, learning_rate)\nbatch_size = 32\nfor epoch in range(60):\n batch_count = 0\n for i in range(len(X)):\n feature = np.mat(X.values[i]).reshape(img_shape)\n label = np.mat(one_hot_label[i]).T\n x.set_value(feature)\n one_hot.set_value(label)\n optimizer.one_step()\n batch_count += 1\n if batch_count >= batch_size:\n print('epoch: {:d}, iteration: {:d}, loss: {:.3f}'.format(epoch +\n 1, i + 1, loss.value[0, 0]))\n optimizer.update()\n batch_count = 0\n pred = []\n for i in range(len(X)):\n feature = np.mat(X[i]).reshape(img_shape)\n x.set_value(feature)\n predict.forward()\n pred.append(predict.value.A.ravel())\n pred = np.array(pred).argmax(axis=1)\n accuracy = (y == pred).astype(np.int).sum() / len(X)\n print('epoch: {:d}, accuracy: {:.3f}'.format(epoch + 1, accuracy))\n",
"step-4": "<mask token>\nimport sys\nsys.path.append('../..')\nimport numpy as np\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.preprocessing import OneHotEncoder\nimport matrixslow as ms\nX, y = fetch_openml('mnist_784', version=1, return_X_y=True)\nX, y = X[:1000] / 255, y.astype(np.int)[:1000]\noh = OneHotEncoder(sparse=False)\none_hot_label = oh.fit_transform(y.values.reshape(-1, 1))\nimg_shape = 28, 28\nx = ms.core.Variable(img_shape, init=False, trainable=False)\none_hot = ms.core.Variable(dim=(10, 1), init=False, trainable=False)\nconv1 = ms.layer.conv([x], img_shape, 3, (5, 5), 'ReLU')\npooling1 = ms.layer.pooling(conv1, (3, 3), (2, 2))\nconv2 = ms.layer.conv(pooling1, (14, 14), 3, (3, 3), 'ReLU')\npooling2 = ms.layer.pooling(conv2, (3, 3), (2, 2))\nfc1 = ms.layer.fc(ms.ops.Concat(*pooling2), 147, 120, 'ReLU')\noutput = ms.layer.fc(fc1, 120, 10, 'None')\npredict = ms.ops.SoftMax(output)\nloss = ms.ops.loss.CrossEntropyWithSoftMax(output, one_hot)\nlearning_rate = 0.005\noptimizer = ms.optimizer.Adam(ms.default_graph, loss, learning_rate)\nbatch_size = 32\nfor epoch in range(60):\n batch_count = 0\n for i in range(len(X)):\n feature = np.mat(X.values[i]).reshape(img_shape)\n label = np.mat(one_hot_label[i]).T\n x.set_value(feature)\n one_hot.set_value(label)\n optimizer.one_step()\n batch_count += 1\n if batch_count >= batch_size:\n print('epoch: {:d}, iteration: {:d}, loss: {:.3f}'.format(epoch +\n 1, i + 1, loss.value[0, 0]))\n optimizer.update()\n batch_count = 0\n pred = []\n for i in range(len(X)):\n feature = np.mat(X[i]).reshape(img_shape)\n x.set_value(feature)\n predict.forward()\n pred.append(predict.value.A.ravel())\n pred = np.array(pred).argmax(axis=1)\n accuracy = (y == pred).astype(np.int).sum() / len(X)\n print('epoch: {:d}, accuracy: {:.3f}'.format(epoch + 1, accuracy))\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Mar 24 12:16:15 2020\n\n@author: zhangjuefei\n\"\"\"\n\nimport sys\nsys.path.append('../..')\n\nimport numpy as np\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.preprocessing import OneHotEncoder\nimport matrixslow as ms\n\n# 加载MNIST数据集,取一部分样本并归一化\nX, y = fetch_openml('mnist_784', version=1, return_X_y=True)\nX, y = X[:1000] / 255, y.astype(np.int)[:1000]\n\n# 将整数形式的标签转换成One-Hot编码\noh = OneHotEncoder(sparse=False)\none_hot_label = oh.fit_transform(y.values.reshape(-1, 1))\n\n# 输入图像尺寸\nimg_shape = (28, 28)\n\n# 输入图像\nx = ms.core.Variable(img_shape, init=False, trainable=False)\n\n# One-Hot标签\none_hot = ms.core.Variable(dim=(10, 1), init=False, trainable=False)\n\n# 第一卷积层\nconv1 = ms.layer.conv([x], img_shape, 3, (5, 5), \"ReLU\")\n\n# 第一池化层\npooling1 = ms.layer.pooling(conv1, (3, 3), (2, 2))\n\n# 第二卷积层\nconv2 = ms.layer.conv(pooling1, (14, 14), 3, (3, 3), \"ReLU\")\n\n# 第二池化层\npooling2 = ms.layer.pooling(conv2, (3, 3), (2, 2))\n\n# 全连接层\nfc1 = ms.layer.fc(ms.ops.Concat(*pooling2), 147, 120, \"ReLU\")\n\n# 输出层\noutput = ms.layer.fc(fc1, 120, 10, \"None\")\n\n# 分类概率\npredict = ms.ops.SoftMax(output)\n\n# 交叉熵损失\nloss = ms.ops.loss.CrossEntropyWithSoftMax(output, one_hot)\n\n# 学习率\nlearning_rate = 0.005\n\n# 优化器\noptimizer = ms.optimizer.Adam(ms.default_graph, loss, learning_rate)\n\n# 批大小\nbatch_size = 32\n\n# 训练\nfor epoch in range(60):\n \n batch_count = 0\n \n for i in range(len(X)):\n \n feature = np.mat(X.values[i]).reshape(img_shape)\n label = np.mat(one_hot_label[i]).T\n \n x.set_value(feature)\n one_hot.set_value(label)\n \n\n optimizer.one_step()\n \n\n batch_count += 1\n if batch_count >= batch_size:\n \n print(\"epoch: {:d}, iteration: {:d}, loss: {:.3f}\".format(epoch + 1, i + 1, loss.value[0, 0]))\n\n optimizer.update()\n batch_count = 0\n \n\n pred = []\n for i in range(len(X)):\n \n feature = np.mat(X[i]).reshape(img_shape)\n x.set_value(feature)\n \n predict.forward()\n pred.append(predict.value.A.ravel())\n \n pred = np.array(pred).argmax(axis=1)\n accuracy = (y == pred).astype(np.int).sum() / len(X)\n \n print(\"epoch: {:d}, accuracy: {:.3f}\".format(epoch + 1, accuracy))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Example 1:
Input: J = "aA", S = "aAAbbbb"
Output: 3
Example 2:
Input: J = "z", S = "ZZ"
Output: 0
Note:
S and J will consist of letters and have length at most 50.
The characters in J are distinct.
Sum, over every character in J, the number of times it appears in S.
Improvement:
J may contain duplicate characters.
Test data:
https://leetcode.com/problems/jewels-and-stones/description/
"""
class Solution(object):
    def numJewelsInStones(self, J, S):
        """
        :type J: str
        :type S: str
        :rtype: int
        """
        S_dict = {i: S.count(i) for i in set(S)}
        return sum(S_dict.get(i, 0) for i in J)
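# Quick check (added) against the examples in the docstring:
#   Solution().numJewelsInStones("aA", "aAAbbbb")  # -> 3
#   Solution().numJewelsInStones("z", "ZZ")        # -> 0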
|
normal
|
{
"blob_id": "8a04447f12a9cb6ba31a21d43629d887a0d1f411",
"index": 3097,
"step-1": "\"\"\"\nExample 1:\n\nInput: J = \"aA\", S = \"aAAbbbb\"\nOutput: 3\nExample 2:\n\nInput: J = \"z\", S = \"ZZ\"\nOutput: 0\nNote:\n\nS and J will consist of letters and have length at most 50.\nThe characters in J are distinct.\n\n查找J中的每个字符在 S 出现的次数的总和。\n\n改进:\nJ有可能有重复的数。\n\n测试数据:\nhttps://leetcode.com/problems/jewels-and-stones/description/\n\n\"\"\"\n\nc.. Solution o..\n ___ numJewelsInStones J, S\n \"\"\"\n :type J: str\n :type S: str\n :rtype: int\n \"\"\"\n S_dict = {i:S.c..(i) ___ i __ s..(S)}\n \n r_ s..((S_dict.get(i, 0) ___ i __ J))\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from lmfit import Parameters
import numpy as np
from cls.cls import *
from reading.ellipseOutput import readEllipseOutput
def readInputModel(txt, equivalentAxisFit, Settings):
psfwing_02pxscale_datatab = None
psfwing_logscale_datatab = None
componentslist = []
params = Parameters()
data = open(txt)
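    # Expected line format (inferred from the parsing below; added for clarity):
    #   <component number> <component name> <par1> <True|False> <par2> <True|False> [...]
    # e.g. a Sersic component line: "1 sersic 10.0 True 20.0 True 4.0 True"
    # Each parameter tuple below follows lmfit's add_many() convention:
    #   (name, value, vary, min, max, expr)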
for line in data:
        if line.strip() and not line.startswith('#'):  # skip blank lines and comments
comp = Component()
comp.number = int(line.split()[0])
comp.name = str(line.split()[1])
#components with 4 parameters
if (comp.name == 'ferrer'):
par1name = 'par1_' + str(line.split()[0])
par2name = 'par2_' + str(line.split()[0])
par3name = 'par3_' + str(line.split()[0])
par4name = 'par4_' + str(line.split()[0])
p1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_out
if (line.split()[3] == 'False'):
p1 = (par1name, float(line.split()[2]), False, 0.01, None, None)
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
p3 = (par3name, float(line.split()[6]), True, 0.01, 4.0, None) # alpha
if (line.split()[7] == 'False'):
p3 = (par3name, float(line.split()[6]), False, 0.01, 4.0, None)
p4 = (par4name, float(line.split()[8]), True, 0.01, 1.999, None) # beta
if (line.split()[9] == 'False'):
p4 = (par4name, float(line.split()[8]), False, 0.01, 1.999, None)
comp.parameters.add_many(p1, p2, p3, p4)
params.add_many(p1, p2, p3, p4)
componentslist.append(comp)
if (comp.name == 'tsersic'):
par1name = 'par1_' + str(line.split()[0])
par2name = 'par2_' + str(line.split()[0])
par3name = 'par3_' + str(line.split()[0])
par4name = 'par4_' + str(line.split()[0])
p1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_e
if (line.split()[3] == 'False'):
p1 = (par1name, float(line.split()[2]), False, 0.01, None, None)
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_e
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
p3 = (par3name, float(line.split()[6]), True, 0.01, 20.0, None) # n
if (line.split()[7] == 'False'):
p3 = (par3name, float(line.split()[6]), False, 0.01, 20.0, None)
p4 = (par4name, float(line.split()[8]), True, 0.01, None, None) # r_out
if (line.split()[9] == 'False'):
p4 = (par4name, float(line.split()[8]), False, 0.01, None, None)
comp.parameters.add_many(p1, p2, p3, p4)
params.add_many(p1, p2, p3, p4)
componentslist.append(comp)
#components with 3 parameters
if (comp.name == 'sersic'):
par1name = 'par1_' + str(line.split()[0])
par2name = 'par2_' + str(line.split()[0])
par3name = 'par3_' + str(line.split()[0])
p1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_e
if (line.split()[3] == 'False'):
p1 = (par1name, float(line.split()[2]), False, 0.01, None, None)
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_e
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
p3 = (par3name, float(line.split()[6]), True, 0.01, 20.0, None) # n
if (line.split()[7] == 'False'):
p3 = (par3name, float(line.split()[6]), False, 0.01, 20.0, None)
comp.parameters.add_many(p1, p2, p3)
params.add_many(p1, p2, p3)
componentslist.append(comp)
if (comp.name == 'tdisc' or comp.name == 'gring'):
par1name = 'par1_' + str(line.split()[0])
par2name = 'par2_' + str(line.split()[0])
par3name = 'par3_' + str(line.split()[0])
p1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # h # fwhm
if (line.split()[3] == 'False'):
p1 = (par1name, float(line.split()[2]), False, 0.01, None, None)
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 # mu_0
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
p3 = (par3name, float(line.split()[6]), True, 0.01, None, None) # r_out # r_0
if (line.split()[7] == 'False'):
p3 = (par3name, float(line.split()[6]), False, 0.01, None, None)
comp.parameters.add_many(p1, p2, p3)
params.add_many(p1, p2, p3)
componentslist.append(comp)
#components with two parameters
elif (comp.name == 'gaussian' or comp.name == 'disc'):
par1name = 'par1_' + str(line.split()[0])
par2name = 'par2_' + str(line.split()[0])
p1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # h or fwhm ..
if (line.split()[3] == 'False'):
p1 = (par1name, float(line.split()[2]), False, 0.01, None, None)
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 or mag
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
comp.parameters.add_many(p1, p2)
params.add_many(p1, p2)
componentslist.append(comp)
#components with one parameter
elif (comp.name == 'psf' or comp.name == 'psfwing'):
par2name = 'par2_' + str(line.split()[0])
p2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 or mag
if (line.split()[5] == 'False'):
p2 = (par2name, float(line.split()[4]), False, None, 35.0, None)
comp.parameters.add_many(p2)
params.add_many(p2)
componentslist.append(comp)
if (comp.name == 'psfwing'):
#psfwing_02pxscale_datatab = readEllipseOutput('PSFtinytim_centered_resc_linscale05px.ell')
psfwing_02pxscale_datatab = readEllipseOutput('star_02pxscale.ell')
psfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * Settings.pxlToArcsec
if equivalentAxisFit:
psfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * np.sqrt(1 - psfwing_02pxscale_datatab['ellip'])
#if minorAxisFit:
# psfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * (1 - psfwing_02pxscale_datatab['ellip'])
psfwing_02pxscale_datatab['intens'] = psfwing_02pxscale_datatab['intens'] / Settings.pxlToArcsec**2
psfwing_02pxscale_datatab['intens'] = psfwing_02pxscale_datatab['intens'] / max(psfwing_02pxscale_datatab['intens'])
#psfwing_logscale_datatab = readEllipseOutput('PSFtinytim_centered_resc_logscale.ell')
psfwing_logscale_datatab = readEllipseOutput('star_logscale.ell')
psfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * Settings.pxlToArcsec
if equivalentAxisFit:
psfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * np.sqrt(1 - psfwing_logscale_datatab['ellip'])
#if minorAxisFit:
# psfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * (1 - psfwing_logscale_datatab['ellip'])
psfwing_logscale_datatab['intens'] = psfwing_logscale_datatab['intens'] / Settings.pxlToArcsec**2
psfwing_logscale_datatab['intens'] = psfwing_logscale_datatab['intens'] / max(psfwing_logscale_datatab['intens'])
return componentslist, params, psfwing_02pxscale_datatab, psfwing_logscale_datatab
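
# Editor's note: each component branch above repeats the same pattern of
# building an lmfit parameter tuple and honoring a 'False' vary flag at the
# next token. A minimal helper along these lines (hypothetical, not part of
# the original module) could replace each value/flag pair:
def make_param(name, tokens, pos, lo=None, hi=None):
    # tokens is line.split(); the vary flag sits right after the value
    vary = tokens[pos + 1] != 'False'
    return (name, float(tokens[pos]), vary, lo, hi, None)
# e.g. p1 = make_param(par1name, line.split(), 2, lo=0.01)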
|
normal
|
{
"blob_id": "219b22b6ad685fc316b1df02cc924a1cfec89f5b",
"index": 650,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef readInputModel(txt, equivalentAxisFit, Settings):\n psfwing_02pxscale_datatab = None\n psfwing_logscale_datatab = None\n componentslist = []\n params = Parameters()\n data = open(txt)\n for line in data:\n if line[0] != '#':\n comp = Component()\n comp.number = int(line.split()[0])\n comp.name = str(line.split()[1])\n if comp.name == 'ferrer':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n par4name = 'par4_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 4.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 4.0, None\n p4 = par4name, float(line.split()[8]), True, 0.01, 1.999, None\n if line.split()[9] == 'False':\n p4 = par4name, float(line.split()[8]\n ), False, 0.01, 1.999, None\n comp.parameters.add_many(p1, p2, p3, p4)\n params.add_many(p1, p2, p3, p4)\n componentslist.append(comp)\n if comp.name == 'tsersic':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n par4name = 'par4_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 20.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 20.0, None\n p4 = par4name, float(line.split()[8]), True, 0.01, None, None\n if line.split()[9] == 'False':\n p4 = par4name, float(line.split()[8]\n ), False, 0.01, None, None\n comp.parameters.add_many(p1, p2, p3, p4)\n params.add_many(p1, p2, p3, p4)\n componentslist.append(comp)\n if comp.name == 'sersic':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 20.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 20.0, None\n comp.parameters.add_many(p1, p2, p3)\n params.add_many(p1, p2, p3)\n componentslist.append(comp)\n if comp.name == 'tdisc' or comp.name == 'gring':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 
35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, None, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, None, None\n comp.parameters.add_many(p1, p2, p3)\n params.add_many(p1, p2, p3)\n componentslist.append(comp)\n elif comp.name == 'gaussian' or comp.name == 'disc':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n comp.parameters.add_many(p1, p2)\n params.add_many(p1, p2)\n componentslist.append(comp)\n elif comp.name == 'psf' or comp.name == 'psfwing':\n par2name = 'par2_' + str(line.split()[0])\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n comp.parameters.add_many(p2)\n params.add_many(p2)\n componentslist.append(comp)\n if comp.name == 'psfwing':\n psfwing_02pxscale_datatab = readEllipseOutput(\n 'star_02pxscale.ell')\n psfwing_02pxscale_datatab['sma'\n ] = psfwing_02pxscale_datatab['sma'\n ] * Settings.pxlToArcsec\n if equivalentAxisFit:\n psfwing_02pxscale_datatab['sma'\n ] = psfwing_02pxscale_datatab['sma'] * np.sqrt(\n 1 - psfwing_02pxscale_datatab['ellip'])\n psfwing_02pxscale_datatab['intens'\n ] = psfwing_02pxscale_datatab['intens'\n ] / Settings.pxlToArcsec ** 2\n psfwing_02pxscale_datatab['intens'\n ] = psfwing_02pxscale_datatab['intens'] / max(\n psfwing_02pxscale_datatab['intens'])\n psfwing_logscale_datatab = readEllipseOutput(\n 'star_logscale.ell')\n psfwing_logscale_datatab['sma'] = psfwing_logscale_datatab[\n 'sma'] * Settings.pxlToArcsec\n if equivalentAxisFit:\n psfwing_logscale_datatab['sma'\n ] = psfwing_logscale_datatab['sma'] * np.sqrt(1 -\n psfwing_logscale_datatab['ellip'])\n psfwing_logscale_datatab['intens'\n ] = psfwing_logscale_datatab['intens'\n ] / Settings.pxlToArcsec ** 2\n psfwing_logscale_datatab['intens'\n ] = psfwing_logscale_datatab['intens'] / max(\n psfwing_logscale_datatab['intens'])\n return (componentslist, params, psfwing_02pxscale_datatab,\n psfwing_logscale_datatab)\n",
"step-3": "from lmfit import Parameters\nimport numpy as np\nfrom cls.cls import *\nfrom reading.ellipseOutput import readEllipseOutput\n\n\ndef readInputModel(txt, equivalentAxisFit, Settings):\n psfwing_02pxscale_datatab = None\n psfwing_logscale_datatab = None\n componentslist = []\n params = Parameters()\n data = open(txt)\n for line in data:\n if line[0] != '#':\n comp = Component()\n comp.number = int(line.split()[0])\n comp.name = str(line.split()[1])\n if comp.name == 'ferrer':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n par4name = 'par4_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 4.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 4.0, None\n p4 = par4name, float(line.split()[8]), True, 0.01, 1.999, None\n if line.split()[9] == 'False':\n p4 = par4name, float(line.split()[8]\n ), False, 0.01, 1.999, None\n comp.parameters.add_many(p1, p2, p3, p4)\n params.add_many(p1, p2, p3, p4)\n componentslist.append(comp)\n if comp.name == 'tsersic':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n par4name = 'par4_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 20.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 20.0, None\n p4 = par4name, float(line.split()[8]), True, 0.01, None, None\n if line.split()[9] == 'False':\n p4 = par4name, float(line.split()[8]\n ), False, 0.01, None, None\n comp.parameters.add_many(p1, p2, p3, p4)\n params.add_many(p1, p2, p3, p4)\n componentslist.append(comp)\n if comp.name == 'sersic':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, 20.0, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, 20.0, None\n comp.parameters.add_many(p1, p2, p3)\n params.add_many(p1, p2, p3)\n componentslist.append(comp)\n if comp.name == 'tdisc' or comp.name == 'gring':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n par3name = 'par3_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, 
None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n p3 = par3name, float(line.split()[6]), True, 0.01, None, None\n if line.split()[7] == 'False':\n p3 = par3name, float(line.split()[6]\n ), False, 0.01, None, None\n comp.parameters.add_many(p1, p2, p3)\n params.add_many(p1, p2, p3)\n componentslist.append(comp)\n elif comp.name == 'gaussian' or comp.name == 'disc':\n par1name = 'par1_' + str(line.split()[0])\n par2name = 'par2_' + str(line.split()[0])\n p1 = par1name, float(line.split()[2]), True, 0.01, None, None\n if line.split()[3] == 'False':\n p1 = par1name, float(line.split()[2]\n ), False, 0.01, None, None\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n comp.parameters.add_many(p1, p2)\n params.add_many(p1, p2)\n componentslist.append(comp)\n elif comp.name == 'psf' or comp.name == 'psfwing':\n par2name = 'par2_' + str(line.split()[0])\n p2 = par2name, float(line.split()[4]), True, None, 35.0, None\n if line.split()[5] == 'False':\n p2 = par2name, float(line.split()[4]\n ), False, None, 35.0, None\n comp.parameters.add_many(p2)\n params.add_many(p2)\n componentslist.append(comp)\n if comp.name == 'psfwing':\n psfwing_02pxscale_datatab = readEllipseOutput(\n 'star_02pxscale.ell')\n psfwing_02pxscale_datatab['sma'\n ] = psfwing_02pxscale_datatab['sma'\n ] * Settings.pxlToArcsec\n if equivalentAxisFit:\n psfwing_02pxscale_datatab['sma'\n ] = psfwing_02pxscale_datatab['sma'] * np.sqrt(\n 1 - psfwing_02pxscale_datatab['ellip'])\n psfwing_02pxscale_datatab['intens'\n ] = psfwing_02pxscale_datatab['intens'\n ] / Settings.pxlToArcsec ** 2\n psfwing_02pxscale_datatab['intens'\n ] = psfwing_02pxscale_datatab['intens'] / max(\n psfwing_02pxscale_datatab['intens'])\n psfwing_logscale_datatab = readEllipseOutput(\n 'star_logscale.ell')\n psfwing_logscale_datatab['sma'] = psfwing_logscale_datatab[\n 'sma'] * Settings.pxlToArcsec\n if equivalentAxisFit:\n psfwing_logscale_datatab['sma'\n ] = psfwing_logscale_datatab['sma'] * np.sqrt(1 -\n psfwing_logscale_datatab['ellip'])\n psfwing_logscale_datatab['intens'\n ] = psfwing_logscale_datatab['intens'\n ] / Settings.pxlToArcsec ** 2\n psfwing_logscale_datatab['intens'\n ] = psfwing_logscale_datatab['intens'] / max(\n psfwing_logscale_datatab['intens'])\n return (componentslist, params, psfwing_02pxscale_datatab,\n psfwing_logscale_datatab)\n",
"step-4": "from lmfit import Parameters\nimport numpy as np\n\nfrom cls.cls import *\n\nfrom reading.ellipseOutput import readEllipseOutput\n\ndef readInputModel(txt, equivalentAxisFit, Settings):\n\n\tpsfwing_02pxscale_datatab = None\n\tpsfwing_logscale_datatab = None\n\n\tcomponentslist = []\n\tparams = Parameters()\n\t\n\tdata = open(txt)\n\tfor line in data:\n\t\tif (line[0] != '#'):\n\t\t\tcomp = Component()\n\t\t\tcomp.number = int(line.split()[0])\n\t\t\tcomp.name = str(line.split()[1])\n\t\t\t\n\t\t\t#components with 4 parameters\n\t\t\tif (comp.name == 'ferrer'):\n\t\t\t\n\t\t\t\tpar1name = 'par1_' + str(line.split()[0])\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\tpar3name = 'par3_' + str(line.split()[0])\n\t\t\t\tpar4name = 'par4_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_out \n\t\t\t\tif (line.split()[3] == 'False'):\n\t\t\t\t\tp1 = (par1name, float(line.split()[2]), False, 0.01, None, None) \n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 \n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 = (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tp3 = (par3name, float(line.split()[6]), True, 0.01, 4.0, None) # alpha\n\t\t\t\tif (line.split()[7] == 'False'):\n\t\t\t\t\tp3 = (par3name, float(line.split()[6]), False, 0.01, 4.0, None) \t\n\t\t\t\tp4 = (par4name, float(line.split()[8]), True, 0.01, 1.999, None) # beta\n\t\t\t\tif (line.split()[9] == 'False'):\n\t\t\t\t\tp4 = (par4name, float(line.split()[8]), False, 0.01, 1.999, None) \t\n\t\t\t\t\t\t\n\t\t\t\tcomp.parameters.add_many(p1, p2, p3, p4) \n\t\t\t\tparams.add_many(p1, p2, p3, p4)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\n\t\t\tif (comp.name == 'tsersic'):\n\t\t\t\n\t\t\t\tpar1name = 'par1_' + str(line.split()[0])\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\tpar3name = 'par3_' + str(line.split()[0])\n\t\t\t\tpar4name = 'par4_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_e \n\t\t\t\tif (line.split()[3] == 'False'):\n\t\t\t\t\tp1 = (par1name, float(line.split()[2]), False, 0.01, None, None) \n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_e \n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 = (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tp3 = (par3name, float(line.split()[6]), True, 0.01, 20.0, None) # n\n\t\t\t\tif (line.split()[7] == 'False'):\n\t\t\t\t\tp3 = (par3name, float(line.split()[6]), False, 0.01, 20.0, None) \t\t\n\t\t\t\tp4 = (par4name, float(line.split()[8]), True, 0.01, None, None) # r_out \n\t\t\t\tif (line.split()[9] == 'False'):\n\t\t\t\t\tp4 = (par4name, float(line.split()[8]), False, 0.01, None, None) \n\t\t\t\t\t\t\n\t\t\t\tcomp.parameters.add_many(p1, p2, p3, p4) \n\t\t\t\tparams.add_many(p1, p2, p3, p4)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\n\t\t\t#components with 3 parameters\n\t\t\tif (comp.name == 'sersic'):\n\t\t\t\n\t\t\t\tpar1name = 'par1_' + str(line.split()[0])\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\tpar3name = 'par3_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # r_e \n\t\t\t\tif (line.split()[3] == 'False'):\n\t\t\t\t\tp1 = (par1name, float(line.split()[2]), False, 0.01, None, None) \n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_e \n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 
= (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tp3 = (par3name, float(line.split()[6]), True, 0.01, 20.0, None) # n\n\t\t\t\tif (line.split()[7] == 'False'):\n\t\t\t\t\tp3 = (par3name, float(line.split()[6]), False, 0.01, 20.0, None) \t\t\n\t\t\t\tcomp.parameters.add_many(p1, p2, p3) \n\t\t\t\tparams.add_many(p1, p2, p3)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\n\t\t\tif (comp.name == 'tdisc' or comp.name == 'gring'):\n\t\t\t\n\t\t\t\tpar1name = 'par1_' + str(line.split()[0])\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\tpar3name = 'par3_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # h # fwhm\n\t\t\t\tif (line.split()[3] == 'False'):\n\t\t\t\t\tp1 = (par1name, float(line.split()[2]), False, 0.01, None, None) \n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 # mu_0 \n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 = (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tp3 = (par3name, float(line.split()[6]), True, 0.01, None, None) # r_out # r_0\n\t\t\t\tif (line.split()[7] == 'False'):\n\t\t\t\t\tp3 = (par3name, float(line.split()[6]), False, 0.01, None, None) \t\t\n\t\t\t\tcomp.parameters.add_many(p1, p2, p3) \n\t\t\t\tparams.add_many(p1, p2, p3)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\n\t\t\t#components with two parameters\t\n\t\t\telif (comp.name == 'gaussian' or comp.name == 'disc'):\n\t\t\t\n\t\t\t\tpar1name = 'par1_' + str(line.split()[0])\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp1 = (par1name, float(line.split()[2]), True, 0.01, None, None) # h or fwhm ..\n\t\t\t\tif (line.split()[3] == 'False'):\n\t\t\t\t\tp1 = (par1name, float(line.split()[2]), False, 0.01, None, None) \n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 or mag\n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 = (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tcomp.parameters.add_many(p1, p2) \n\t\t\t\tparams.add_many(p1, p2)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\n\t\t\t#components with one parameter\t\n\t\t\telif (comp.name == 'psf' or comp.name == 'psfwing'):\n\t\t\t\n\t\t\t\tpar2name = 'par2_' + str(line.split()[0])\n\t\t\t\n\t\t\t\tp2 = (par2name, float(line.split()[4]), True, None, 35.0, None) # mu_0 or mag\n\t\t\t\tif (line.split()[5] == 'False'):\n\t\t\t\t\tp2 = (par2name, float(line.split()[4]), False, None, 35.0, None) \t\t\n\t\t\t\tcomp.parameters.add_many(p2) \n\t\t\t\tparams.add_many(p2)\n\t\n\t\t\t\tcomponentslist.append(comp)\n\t\t\t\t\n\t\t\t\tif (comp.name == 'psfwing'):\n\t\t\t\t\n\t\t\t\t\t#psfwing_02pxscale_datatab = readEllipseOutput('PSFtinytim_centered_resc_linscale05px.ell')\n\t\t\t\t\tpsfwing_02pxscale_datatab = readEllipseOutput('star_02pxscale.ell')\n\t\t\t\t\tpsfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * Settings.pxlToArcsec\n\t\t\t\t\tif equivalentAxisFit:\n\t\t\t\t\t\tpsfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * np.sqrt(1 - psfwing_02pxscale_datatab['ellip'])\n\t\t\t\t\t#if minorAxisFit:\n\t\t\t\t\t#\tpsfwing_02pxscale_datatab['sma'] = psfwing_02pxscale_datatab['sma'] * (1 - psfwing_02pxscale_datatab['ellip'])\n\t\t\t\t\tpsfwing_02pxscale_datatab['intens'] = psfwing_02pxscale_datatab['intens'] / Settings.pxlToArcsec**2\n\t\t\t\t\tpsfwing_02pxscale_datatab['intens'] = psfwing_02pxscale_datatab['intens'] / 
max(psfwing_02pxscale_datatab['intens'])\n\t\t\t\t\t\n\t\t\t\t\t#psfwing_logscale_datatab = readEllipseOutput('PSFtinytim_centered_resc_logscale.ell')\n\t\t\t\t\tpsfwing_logscale_datatab = readEllipseOutput('star_logscale.ell')\n\t\t\t\t\tpsfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * Settings.pxlToArcsec\n\t\t\t\t\tif equivalentAxisFit:\t\n\t\t\t\t\t\tpsfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * np.sqrt(1 - psfwing_logscale_datatab['ellip'])\n\t\t\t\t\t#if minorAxisFit:\n\t\t\t\t\t#\tpsfwing_logscale_datatab['sma'] = psfwing_logscale_datatab['sma'] * (1 - psfwing_logscale_datatab['ellip'])\n\t\t\t\t\tpsfwing_logscale_datatab['intens'] = psfwing_logscale_datatab['intens'] / Settings.pxlToArcsec**2\n\t\t\t\t\tpsfwing_logscale_datatab['intens'] = psfwing_logscale_datatab['intens'] / max(psfwing_logscale_datatab['intens'])\n\t\t\t\n\treturn componentslist, params, psfwing_02pxscale_datatab, psfwing_logscale_datatab\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from mp_data_scrapper.items import MpDataScrapperItem
class MininovaSpider(CrawlSpider):
name = 'mp'
allowed_domains = ['india.gov.in']
start_urls = ['http://india.gov.in/my-government/indian-parliament/lok-sabha',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=1',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=2',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=3',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=4',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=5',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=6',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=7',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=8',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=9',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=10',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=11',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=12',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=13',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=14',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=15',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=16',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=17',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=18',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=19',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=20',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=21',
]
rules = [Rule(SgmlLinkExtractor(allow=['/my-government/indian-parliament/[^?]+'], deny=['my-government/indian-parliament/lok-sabha', 'my-government/indian-parliament/rajya-sabha'], unique=True), process_links='process_links', callback='parse_mp', follow=True)]
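    # Editor's note: the paginated start_urls above could equivalently be
    # generated, e.g.:
    #   base = 'http://india.gov.in/my-government/indian-parliament/lok-sabha'
    #   start_urls = [base] + [base + '?page=%d' % i for i in range(1, 22)]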
def parse_mp(self, response):
mp = MpDataScrapperItem()
try:
mp['name'] = response.xpath("//h1/text()").extract()[0]
except IndexError:
pass
try:
mp['constituency'] = response.xpath("//span[@class='views-label views-label-field-const-name-value']/following::span[1]/text()").extract()[0]
#mp['constituency'] = response.xpath("//span[contains(concat(' ',normalize-space(@class),' '),' views-label-field-const-name-value ')]/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['party'] = response.xpath("//span[@class='views-label views-label-field-party-fname-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['father'] = response.xpath("//span[@class='views-label views-label-field-father-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['mother'] = response.xpath("//span[@class='views-label views-label-field-mother-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['dob'] = response.xpath("//span[@class='views-label views-label-field-dob-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['birth_place'] = response.xpath("//span[@class='views-label views-label-field-birth-place-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['marital_status'] = response.xpath("//span[@class='views-label views-label-field-marital-status-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['spouse_name'] = response.xpath("//span[@class='views-label views-label-field-spouse-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['num_sons'] = response.xpath("//span[@class='views-label views-label-field-sons-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['num_daughters'] = response.xpath("//span[@class='views-label views-label-field-daughters-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['state'] = response.xpath("//span[@class='views-label views-label-field-state-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['permanent_address'] = response.xpath("//span[@class='views-label views-label-phpcode-1']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['present_address'] = response.xpath("//span[@class='views-label views-label-phpcode-2']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['email'] = response.xpath("//span[@class='views-label views-label-field-email-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['education'] = response.xpath("//span[@class='views-label views-label-phpcode-5']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['positions_held'] = response.xpath("//span[@class='views-label views-label-phpcode']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['social_cultural_activities'] = response.xpath("//span[@class='views-label views-label-phpcode-7']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['sports_clubs'] = response.xpath("//span[@class='views-label views-label-phpcode-8']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['pastimes_recreation'] = response.xpath("//span[@class='views-label views-label-phpcode-9']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['countries_visited'] = response.xpath("//span[@class='views-label views-label-phpcode-4']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['other_info'] = response.xpath("//span[@class='views-label views-label-phpcode-3']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['photo'] = 'http://india.gov.in' + response.xpath("//div[@class='views-field views-field-phpcode-10']/child::span[1]/child::img[1]/@src").extract()[0]
except IndexError:
pass
return mp
def process_links(self,links):
for i, w in enumerate(links):
            print(w.url)
#w.url = w.url.replace("http://india.gov.in/my-government/indian-parliament/lok-sabha", "http://india.gov.in")
links[i] = w
return links
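
# Editor's note: parse_mp repeats one try/except block per field. A
# data-driven sketch of the same idea (xpaths abridged; extend the dict with
# the remaining fields used in parse_mp above):
FIELD_XPATHS = {
    'name': "//h1/text()",
    'party': "//span[@class='views-label views-label-field-party-fname-value']"
             "/following::span[1]/text()",
}

def extract_fields(response, item):
    for field, xpath in FIELD_XPATHS.items():
        values = response.xpath(xpath).extract()
        if values:
            item[field] = values[0]
    return item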
|
normal
|
{
"blob_id": "94e9d67095dde4d3bf7ddb207ac17a4c250a2bfc",
"index": 1986,
"step-1": "from scrapy.contrib.spiders import CrawlSpider, Rule\nfrom scrapy.contrib.linkextractors import LinkExtractor\nfrom scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor\nfrom mp_data_scrapper.items import MpDataScrapperItem\n\nclass MininovaSpider(CrawlSpider):\n\n name = 'mp'\n allowed_domains = ['india.gov.in']\n start_urls = ['http://india.gov.in/my-government/indian-parliament/lok-sabha',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=1',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=2',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=3',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=4',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=5',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=6',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=7',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=8',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=9',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=10',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=11',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=12',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=13',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=14',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=15',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=16',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=17',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=18',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=19',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=20',\n 'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=21',\n ]\n rules = [Rule(SgmlLinkExtractor(allow=['/my-government/indian-parliament/[^?]+'], deny=['my-government/indian-parliament/lok-sabha', 'my-government/indian-parliament/rajya-sabha'], unique=True), process_links='process_links', callback='parse_mp', follow=True)]\n\n def parse_mp(self, response):\n mp = MpDataScrapperItem()\n\ttry:\n mp['name'] = response.xpath(\"//h1/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['constituency'] = response.xpath(\"//span[@class='views-label views-label-field-const-name-value']/following::span[1]/text()\").extract()[0]\n #mp['constituency'] = response.xpath(\"//span[contains(concat(' ',normalize-space(@class),' '),' views-label-field-const-name-value ')]/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['party'] = response.xpath(\"//span[@class='views-label views-label-field-party-fname-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['father'] = response.xpath(\"//span[@class='views-label views-label-field-father-name-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['mother'] = response.xpath(\"//span[@class='views-label views-label-field-mother-name-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['dob'] = response.xpath(\"//span[@class='views-label views-label-field-dob-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['birth_place'] = response.xpath(\"//span[@class='views-label 
views-label-field-birth-place-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['marital_status'] = response.xpath(\"//span[@class='views-label views-label-field-marital-status-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['spouse_name'] = response.xpath(\"//span[@class='views-label views-label-field-spouse-name-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['num_sons'] = response.xpath(\"//span[@class='views-label views-label-field-sons-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['num_daughters'] = response.xpath(\"//span[@class='views-label views-label-field-daughters-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['state'] = response.xpath(\"//span[@class='views-label views-label-field-state-name-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['permanent_address'] = response.xpath(\"//span[@class='views-label views-label-phpcode-1']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['present_address'] = response.xpath(\"//span[@class='views-label views-label-phpcode-2']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['email'] = response.xpath(\"//span[@class='views-label views-label-field-email-value']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['education'] = response.xpath(\"//span[@class='views-label views-label-phpcode-5']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['positions_held'] = response.xpath(\"//span[@class='views-label views-label-phpcode']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['social_cultural_activities'] = response.xpath(\"//span[@class='views-label views-label-phpcode-7']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['sports_clubs'] = response.xpath(\"//span[@class='views-label views-label-phpcode-8']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['pastimes_recreation'] = response.xpath(\"//span[@class='views-label views-label-phpcode-9']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['countries_visited'] = response.xpath(\"//span[@class='views-label views-label-phpcode-4']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['other_info'] = response.xpath(\"//span[@class='views-label views-label-phpcode-3']/following::span[1]/text()\").extract()[0]\n\texcept IndexError:\n\t pass\n\ttry:\n mp['photo'] = 'http://india.gov.in' + response.xpath(\"//div[@class='views-field views-field-phpcode-10']/child::span[1]/child::img[1]/@src\").extract()[0]\n\texcept IndexError:\n\t pass\n return mp\n\n def process_links(self,links):\n for i, w in enumerate(links):\n print w.url\n #w.url = w.url.replace(\"http://india.gov.in/my-government/indian-parliament/lok-sabha\", \"http://india.gov.in\")\n links[i] = w\n return links\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from Global import *
import ShuntingYard
from Thompson import *
def check_string(automaton, word):
    inicial = automaton['s'].closure
    for i in word:
        inicial = state_list_delta(inicial, i)
    return automaton['f'] in inicial


def create_AFND(re):
    # worklist of (origin state, regex subtree) pairs still to be expanded
    deltas = []

    initial_node = ShuntingYard.create_tree(ShuntingYard.to_rpn(re))

    s = State('s')
    f = State('f')
    automaton = {s.name: s, f.name: f}

    s.add_transition(initial_node, f)
    deltas.append((s, initial_node))

    while deltas:
        origin, simbol = deltas.pop()

        if origin not in automaton.values():
            automaton.setdefault(origin.name, origin)

        if isinstance(simbol, ShuntingYard.Node):
            aux_deltas = Thompson.generic(origin, simbol)
            for t in aux_deltas:
                deltas.insert(0, t)

    for state_name in automaton:
        automaton[state_name].update_closure()

    return automaton
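
if __name__ == '__main__':
    # Illustrative usage; assumes ShuntingYard's parser accepts '|', '*'
    # and implicit concatenation in the input expression.
    automaton = create_AFND('(a|b)*abb')
    print(check_string(automaton, 'aababb'))  # True if the word is accepted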
|
normal
|
{
"blob_id": "9cf0174a8bd2bccbd8e5d0be1f0b031a1a23c9df",
"index": 4691,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_string(automaton, word):\n inicial = automata['s'].closure\n for i in word:\n inicial = state_list_delta(inicial, i)\n return automaton['f'] in inicial\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef check_string(automaton, word):\n inicial = automata['s'].closure\n for i in word:\n inicial = state_list_delta(inicial, i)\n return automaton['f'] in inicial\n\n\ndef create_AFND(re):\n deltas = []\n initial_node = ShuntingYard.create_tree(ShuntingYard.to_rpn(re))\n s = State('s')\n f = State('f')\n automaton = {s.name: s, f.name: f}\n s.add_transition(initial_node, f)\n deltas.append((s, initial_node))\n while len(deltas) > 0:\n origin, simbol = deltas.pop()\n if not origin in automaton.values():\n automaton.setdefault(origin.name, origin)\n if isinstance(simbol, ShuntingYard.Node):\n aux_deltas = Thompson.generic(origin, simbol)\n for t in aux_deltas:\n deltas.insert(0, t)\n for state_name in automaton:\n automaton[state_name].update_closure()\n return automaton\n",
"step-4": "from Global import *\nimport ShuntingYard\nfrom Thompson import *\n\n\ndef check_string(automaton, word):\n inicial = automata['s'].closure\n for i in word:\n inicial = state_list_delta(inicial, i)\n return automaton['f'] in inicial\n\n\ndef create_AFND(re):\n deltas = []\n initial_node = ShuntingYard.create_tree(ShuntingYard.to_rpn(re))\n s = State('s')\n f = State('f')\n automaton = {s.name: s, f.name: f}\n s.add_transition(initial_node, f)\n deltas.append((s, initial_node))\n while len(deltas) > 0:\n origin, simbol = deltas.pop()\n if not origin in automaton.values():\n automaton.setdefault(origin.name, origin)\n if isinstance(simbol, ShuntingYard.Node):\n aux_deltas = Thompson.generic(origin, simbol)\n for t in aux_deltas:\n deltas.insert(0, t)\n for state_name in automaton:\n automaton[state_name].update_closure()\n return automaton\n",
"step-5": "from Global import *\nimport ShuntingYard\nfrom Thompson import *\n\ndef check_string(automaton, word):\n\tinicial = automata['s'].closure\n\tfor i in word:\n\t\tinicial = state_list_delta(inicial, i)\n\treturn automaton['f'] in inicial\n\ndef create_AFND(re):\n\tdeltas = []\n\n\tinitial_node = ShuntingYard.create_tree(ShuntingYard.to_rpn(re))\n\n\ts = State('s')\n\tf = State('f')\n\tautomaton = {s.name: s, f.name: f}\n\t#automaton = {s.name: s}\n\n\ts.add_transition(initial_node, f);\n\tdeltas.append((s,initial_node))\n\n\twhile len(deltas) > 0:\n\t\t(origin, simbol) = deltas.pop()\n\t\t\n\t\tif not origin in automaton.values():\n\t\t\tautomaton.setdefault(origin.name, origin)\n\n\t\tif isinstance(simbol, ShuntingYard.Node):\n\t\t\taux_deltas = Thompson.generic(origin, simbol)\n\t\t\tfor t in aux_deltas:\n\t\t\t\tdeltas.insert(0, t)\n\n\tfor state_name in automaton:\n\t\tautomaton[state_name].update_closure()\n\n\treturn automaton\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
import sys
import numpy as np
import random
import matplotlib.pyplot as plt
def loadData(fileDj):
data = []
fid = open(fileDj)
for line in fid:
line = line.strip()
m = [float(x) for x in line.split(' ')]
data.append(m)
return data
## K-means functions
def getInitialCentroids(X, k):
    initialCentroids = []
    for i in range(k):
        # random.randint is inclusive at both ends, so cap at len(X) - 1
        index = random.randint(0, len(X) - 1)
        initialCentroids.append(X[index])
    return initialCentroids
def visualizeClusters(clusters):
for i in range(len(clusters)):
clusters[i] = np.array(clusters[i])
plt.plot(clusters[0][:,0], clusters[0][:,1], 'rs', clusters[1][:,0], clusters[1][:,1], 'bs')
plt.show()
return
def has_converged(centroids, old_centroids, iterations):
MAX_ITERATIONS = 100
if iterations > MAX_ITERATIONS:
return True
return old_centroids == centroids
def euclidean_dist(data, centroids, clusters):
centroids = np.array(centroids)
for instance in data:
instance = np.array(instance)
mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) \
for i in enumerate(centroids)], key=lambda t: t[1])[0]
try:
clusters[mu_index].append(instance)
except KeyError:
clusters[mu_index] = [instance]
    for cluster in clusters:
        if not cluster:
            # reseed an empty cluster with a random data point; data is a
            # plain list, so index with a scalar rather than an array
            cluster.append(data[np.random.randint(0, len(data))])
return clusters
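
# Editor's note: a NumPy-vectorized alternative to the per-instance loop in
# euclidean_dist (a sketch, not used by the functions below); X and centroids
# are any sequences convertible to arrays of shape (n, d) and (k, d).
def assign_clusters(X, centroids):
    X = np.asarray(X)
    C = np.asarray(centroids)
    # squared distance between every point and every centroid, shape (n, k)
    d2 = ((X[:, None, :] - C[None, :, :]) ** 2).sum(axis=2)
    return d2.argmin(axis=1)  # index of the nearest centroid per point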
def kmeans(X, k, maxIter=1000):
centroids = getInitialCentroids(X,k)
old_centroids = [[] for i in range(k)]
iterations = 0
while not (has_converged(centroids, old_centroids, iterations)):
iterations += 1
clusters = [[] for i in range(k)]
# assign data points to clusters
clusters = euclidean_dist(X, centroids, clusters)
# recalculate centroids
index = 0
for cluster in clusters:
old_centroids[index] = centroids[index]
centroids[index] = np.mean(cluster, axis=0).tolist()
index += 1
visualizeClusters(clusters)
return clusters
def kmeans_(X, k, maxIter=1000):
centroids = getInitialCentroids(X,k)
old_centroids = [[] for i in range(k)]
iterations = 0
while not (has_converged(centroids, old_centroids, iterations)):
iterations += 1
clusters = [[] for i in range(k)]
# assign data points to clusters
clusters = euclidean_dist(X, centroids, clusters)
# recalculate centroids
index = 0
for cluster in clusters:
old_centroids[index] = centroids[index]
centroids[index] = np.mean(cluster, axis=0).tolist()
index += 1
#visualizeClusters(clusters)
return clusters
def Func(clusters):
center = []
for i in range(len(clusters)):
center.append(clusters[i][0])
distSum = 0
for i in range(len(clusters)):
for j in range(1, len(clusters[i])):
distSum += np.linalg.norm(center[i] - clusters[i][j])
return distSum
def kneeFinding(X,kList):
obj = []
for i in kList:
obj.append(Func(kmeans_(X, i)))
    plt.plot(kList, obj)
plt.show()
return
def purity(X, clusters):
purities = []
for i in range(2):
count = 0
for idx in range(len(clusters[i])):
if(int(clusters[i][idx][2]) == 1):
count += 1
purity = count*1.0 / len(clusters[i])
if purity > 0.5:
purities.append(purity)
else:
purities.append(1-purity)
    # example result: [0.9724249797242498, 0.999000999000999]
return purities
'''
## GMM functions
#calculate the initial covariance matrix
#covType: diag, full
def getInitialsGMM(X,k,covType):
if covType == 'full':
dataArray = np.transpose(np.array([pt[0:-1] for pt in X]))
covMat = np.cov(dataArray)
else:
covMatList = []
for i in range(len(X[0])-1):
data = [pt[i] for pt in X]
cov = np.asscalar(np.cov(data))
covMatList.append(cov)
covMat = np.diag(covMatList)
initialClusters = {}
#Your code here
return initialClusters
def calcLogLikelihood(X,clusters,k):
loglikelihood = 0
#Your code here
return loglikelihood
#E-step
def updateEStep(X,clusters,k):
EMatrix = []
#Your code here
return EMatrix
#M-step
def updateMStep(X,clusters,EMatrix):
#Your code here
return clusters
def visualizeClustersGMM(X,labels,clusters,covType):
#Your code here
def gmmCluster(X, k, covType, maxIter=1000):
#initial clusters
clustersGMM = getInitialsGMM(X,k,covType)
labels = []
#Your code here
visualizeClustersGMM(X,labels,clustersGMM,covType)
return labels,clustersGMM
def purityGMM(X, clusters, labels):
purities = []
#Your code here
return purities
'''
def main():
#######dataset path
#datadir = sys.argv[1]
datadir = ''
pathDataset1 = datadir+'humanData.txt'
#pathDataset2 = datadir+'/audioData.txt'
dataset1 = loadData(pathDataset1)
#dataset2 = loadData(pathDataset2)
#Q4
kneeFinding(dataset1,range(1,7))
#Q5
clusters = kmeans(dataset1, 2, maxIter=1000)
purity(dataset1,clusters)
'''
#Q7
labels11,clustersGMM11 = gmmCluster(dataset1, 2, 'diag')
labels12,clustersGMM12 = gmmCluster(dataset1, 2, 'full')
#Q8
labels21,clustersGMM21 = gmmCluster(dataset2, 2, 'diag')
labels22,clustersGMM22 = gmmCluster(dataset2, 2, 'full')
#Q9
purities11 = purityGMM(dataset1, clustersGMM11, labels11)
purities12 = purityGMM(dataset1, clustersGMM12, labels12)
purities21 = purityGMM(dataset2, clustersGMM21, labels21)
purities22 = purityGMM(dataset2, clustersGMM22, labels22)
'''
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "000dd63089fd0c6184fd032fe75ccc920beee7a8",
"index": 127,
"step-1": "<mask token>\n\n\ndef loadData(fileDj):\n data = []\n fid = open(fileDj)\n for line in fid:\n line = line.strip()\n m = [float(x) for x in line.split(' ')]\n data.append(m)\n return data\n\n\ndef getInitialCentroids(X, k):\n initialCentroids = []\n for i in range(k):\n index = random.randint(0, len(X))\n initialCentroids.append(X[index])\n return initialCentroids\n\n\ndef visualizeClusters(clusters):\n for i in range(len(clusters)):\n clusters[i] = np.array(clusters[i])\n plt.plot(clusters[0][:, 0], clusters[0][:, 1], 'rs', clusters[1][:, 0],\n clusters[1][:, 1], 'bs')\n plt.show()\n return\n\n\n<mask token>\n\n\ndef euclidean_dist(data, centroids, clusters):\n centroids = np.array(centroids)\n for instance in data:\n instance = np.array(instance)\n mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) for\n i in enumerate(centroids)], key=lambda t: t[1])[0]\n try:\n clusters[mu_index].append(instance)\n except KeyError:\n clusters[mu_index] = [instance]\n for cluster in clusters:\n if not cluster:\n cluster.append(data[np.random.randint(0, len(data), size=1)].\n flatten().tolist())\n return clusters\n\n\ndef kmeans(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n visualizeClusters(clusters)\n return clusters\n\n\ndef kmeans_(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n return clusters\n\n\ndef Func(clusters):\n center = []\n for i in range(len(clusters)):\n center.append(clusters[i][0])\n distSum = 0\n for i in range(len(clusters)):\n for j in range(1, len(clusters[i])):\n distSum += np.linalg.norm(center[i] - clusters[i][j])\n return distSum\n\n\ndef kneeFinding(X, kList):\n obj = []\n for i in kList:\n obj.append(Func(kmeans_(X, i)))\n plt.plot(range(1, 7), obj)\n plt.show()\n return\n\n\ndef purity(X, clusters):\n purities = []\n for i in range(2):\n count = 0\n for idx in range(len(clusters[i])):\n if int(clusters[i][idx][2]) == 1:\n count += 1\n purity = count * 1.0 / len(clusters[i])\n if purity > 0.5:\n purities.append(purity)\n else:\n purities.append(1 - purity)\n return purities\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef loadData(fileDj):\n data = []\n fid = open(fileDj)\n for line in fid:\n line = line.strip()\n m = [float(x) for x in line.split(' ')]\n data.append(m)\n return data\n\n\ndef getInitialCentroids(X, k):\n initialCentroids = []\n for i in range(k):\n index = random.randint(0, len(X))\n initialCentroids.append(X[index])\n return initialCentroids\n\n\ndef visualizeClusters(clusters):\n for i in range(len(clusters)):\n clusters[i] = np.array(clusters[i])\n plt.plot(clusters[0][:, 0], clusters[0][:, 1], 'rs', clusters[1][:, 0],\n clusters[1][:, 1], 'bs')\n plt.show()\n return\n\n\n<mask token>\n\n\ndef euclidean_dist(data, centroids, clusters):\n centroids = np.array(centroids)\n for instance in data:\n instance = np.array(instance)\n mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) for\n i in enumerate(centroids)], key=lambda t: t[1])[0]\n try:\n clusters[mu_index].append(instance)\n except KeyError:\n clusters[mu_index] = [instance]\n for cluster in clusters:\n if not cluster:\n cluster.append(data[np.random.randint(0, len(data), size=1)].\n flatten().tolist())\n return clusters\n\n\ndef kmeans(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n visualizeClusters(clusters)\n return clusters\n\n\ndef kmeans_(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n return clusters\n\n\ndef Func(clusters):\n center = []\n for i in range(len(clusters)):\n center.append(clusters[i][0])\n distSum = 0\n for i in range(len(clusters)):\n for j in range(1, len(clusters[i])):\n distSum += np.linalg.norm(center[i] - clusters[i][j])\n return distSum\n\n\ndef kneeFinding(X, kList):\n obj = []\n for i in kList:\n obj.append(Func(kmeans_(X, i)))\n plt.plot(range(1, 7), obj)\n plt.show()\n return\n\n\ndef purity(X, clusters):\n purities = []\n for i in range(2):\n count = 0\n for idx in range(len(clusters[i])):\n if int(clusters[i][idx][2]) == 1:\n count += 1\n purity = count * 1.0 / len(clusters[i])\n if purity > 0.5:\n purities.append(purity)\n else:\n purities.append(1 - purity)\n return purities\n\n\n<mask token>\n\n\ndef main():\n datadir = ''\n pathDataset1 = datadir + 'humanData.txt'\n dataset1 = loadData(pathDataset1)\n kneeFinding(dataset1, range(1, 7))\n clusters = kmeans(dataset1, 2, maxIter=1000)\n purity(dataset1, clusters)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef loadData(fileDj):\n data = []\n fid = open(fileDj)\n for line in fid:\n line = line.strip()\n m = [float(x) for x in line.split(' ')]\n data.append(m)\n return data\n\n\ndef getInitialCentroids(X, k):\n initialCentroids = []\n for i in range(k):\n index = random.randint(0, len(X))\n initialCentroids.append(X[index])\n return initialCentroids\n\n\ndef visualizeClusters(clusters):\n for i in range(len(clusters)):\n clusters[i] = np.array(clusters[i])\n plt.plot(clusters[0][:, 0], clusters[0][:, 1], 'rs', clusters[1][:, 0],\n clusters[1][:, 1], 'bs')\n plt.show()\n return\n\n\ndef has_converged(centroids, old_centroids, iterations):\n MAX_ITERATIONS = 100\n if iterations > MAX_ITERATIONS:\n return True\n return old_centroids == centroids\n\n\ndef euclidean_dist(data, centroids, clusters):\n centroids = np.array(centroids)\n for instance in data:\n instance = np.array(instance)\n mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) for\n i in enumerate(centroids)], key=lambda t: t[1])[0]\n try:\n clusters[mu_index].append(instance)\n except KeyError:\n clusters[mu_index] = [instance]\n for cluster in clusters:\n if not cluster:\n cluster.append(data[np.random.randint(0, len(data), size=1)].\n flatten().tolist())\n return clusters\n\n\ndef kmeans(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n visualizeClusters(clusters)\n return clusters\n\n\ndef kmeans_(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n return clusters\n\n\ndef Func(clusters):\n center = []\n for i in range(len(clusters)):\n center.append(clusters[i][0])\n distSum = 0\n for i in range(len(clusters)):\n for j in range(1, len(clusters[i])):\n distSum += np.linalg.norm(center[i] - clusters[i][j])\n return distSum\n\n\ndef kneeFinding(X, kList):\n obj = []\n for i in kList:\n obj.append(Func(kmeans_(X, i)))\n plt.plot(range(1, 7), obj)\n plt.show()\n return\n\n\ndef purity(X, clusters):\n purities = []\n for i in range(2):\n count = 0\n for idx in range(len(clusters[i])):\n if int(clusters[i][idx][2]) == 1:\n count += 1\n purity = count * 1.0 / len(clusters[i])\n if purity > 0.5:\n purities.append(purity)\n else:\n purities.append(1 - purity)\n return purities\n\n\n<mask token>\n\n\ndef main():\n datadir = ''\n pathDataset1 = datadir + 'humanData.txt'\n dataset1 = loadData(pathDataset1)\n kneeFinding(dataset1, range(1, 7))\n clusters = kmeans(dataset1, 2, maxIter=1000)\n purity(dataset1, clusters)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nimport numpy as np\nimport random\nimport matplotlib.pyplot as plt\n\n\ndef loadData(fileDj):\n data = []\n fid = open(fileDj)\n for line in fid:\n line = line.strip()\n m = [float(x) for x in line.split(' ')]\n data.append(m)\n return data\n\n\ndef getInitialCentroids(X, k):\n initialCentroids = []\n for i in range(k):\n index = random.randint(0, len(X))\n initialCentroids.append(X[index])\n return initialCentroids\n\n\ndef visualizeClusters(clusters):\n for i in range(len(clusters)):\n clusters[i] = np.array(clusters[i])\n plt.plot(clusters[0][:, 0], clusters[0][:, 1], 'rs', clusters[1][:, 0],\n clusters[1][:, 1], 'bs')\n plt.show()\n return\n\n\ndef has_converged(centroids, old_centroids, iterations):\n MAX_ITERATIONS = 100\n if iterations > MAX_ITERATIONS:\n return True\n return old_centroids == centroids\n\n\ndef euclidean_dist(data, centroids, clusters):\n centroids = np.array(centroids)\n for instance in data:\n instance = np.array(instance)\n mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) for\n i in enumerate(centroids)], key=lambda t: t[1])[0]\n try:\n clusters[mu_index].append(instance)\n except KeyError:\n clusters[mu_index] = [instance]\n for cluster in clusters:\n if not cluster:\n cluster.append(data[np.random.randint(0, len(data), size=1)].\n flatten().tolist())\n return clusters\n\n\ndef kmeans(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n visualizeClusters(clusters)\n return clusters\n\n\ndef kmeans_(X, k, maxIter=1000):\n centroids = getInitialCentroids(X, k)\n old_centroids = [[] for i in range(k)]\n iterations = 0\n while not has_converged(centroids, old_centroids, iterations):\n iterations += 1\n clusters = [[] for i in range(k)]\n clusters = euclidean_dist(X, centroids, clusters)\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n return clusters\n\n\ndef Func(clusters):\n center = []\n for i in range(len(clusters)):\n center.append(clusters[i][0])\n distSum = 0\n for i in range(len(clusters)):\n for j in range(1, len(clusters[i])):\n distSum += np.linalg.norm(center[i] - clusters[i][j])\n return distSum\n\n\ndef kneeFinding(X, kList):\n obj = []\n for i in kList:\n obj.append(Func(kmeans_(X, i)))\n plt.plot(range(1, 7), obj)\n plt.show()\n return\n\n\ndef purity(X, clusters):\n purities = []\n for i in range(2):\n count = 0\n for idx in range(len(clusters[i])):\n if int(clusters[i][idx][2]) == 1:\n count += 1\n purity = count * 1.0 / len(clusters[i])\n if purity > 0.5:\n purities.append(purity)\n else:\n purities.append(1 - purity)\n return purities\n\n\n<mask token>\n\n\ndef main():\n datadir = ''\n pathDataset1 = datadir + 'humanData.txt'\n dataset1 = loadData(pathDataset1)\n kneeFinding(dataset1, range(1, 7))\n clusters = kmeans(dataset1, 2, maxIter=1000)\n purity(dataset1, clusters)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/python\n\nimport sys\nimport numpy as np\nimport random\nimport matplotlib.pyplot as plt\n#Your code here\n\ndef loadData(fileDj):\n data = []\n fid = open(fileDj)\n for line in fid:\n line = line.strip()\n m = [float(x) for x in line.split(' ')]\n data.append(m)\n\n\n return data\n\n## K-means functions \n\ndef getInitialCentroids(X, k):\n initialCentroids = []\n\n for i in range(k):\n index = random.randint(0, len(X))\n initialCentroids.append(X[index])\n\n #Your code here\n return initialCentroids\n\n\ndef visualizeClusters(clusters):\n\n for i in range(len(clusters)):\n clusters[i] = np.array(clusters[i])\n\n plt.plot(clusters[0][:,0], clusters[0][:,1], 'rs', clusters[1][:,0], clusters[1][:,1], 'bs')\n plt.show()\n return\n\ndef has_converged(centroids, old_centroids, iterations):\n MAX_ITERATIONS = 100\n if iterations > MAX_ITERATIONS:\n return True\n return old_centroids == centroids\n\ndef euclidean_dist(data, centroids, clusters):\n centroids = np.array(centroids)\n for instance in data:\n instance = np.array(instance)\n\n mu_index = min([(i[0], np.linalg.norm(instance - centroids[i[0]])) \\\n for i in enumerate(centroids)], key=lambda t: t[1])[0]\n try:\n clusters[mu_index].append(instance)\n except KeyError:\n clusters[mu_index] = [instance]\n\n for cluster in clusters:\n if not cluster:\n cluster.append(data[np.random.randint(0, len(data), size=1)].flatten().tolist())\n\n return clusters\n\n\ndef kmeans(X, k, maxIter=1000):\n\n centroids = getInitialCentroids(X,k)\n\n old_centroids = [[] for i in range(k)]\n\n iterations = 0\n while not (has_converged(centroids, old_centroids, iterations)):\n iterations += 1\n\n clusters = [[] for i in range(k)]\n\n # assign data points to clusters\n clusters = euclidean_dist(X, centroids, clusters)\n\n # recalculate centroids\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n\n visualizeClusters(clusters)\n\n return clusters\n\ndef kmeans_(X, k, maxIter=1000):\n\n centroids = getInitialCentroids(X,k)\n\n old_centroids = [[] for i in range(k)]\n\n iterations = 0\n while not (has_converged(centroids, old_centroids, iterations)):\n iterations += 1\n\n clusters = [[] for i in range(k)]\n\n # assign data points to clusters\n clusters = euclidean_dist(X, centroids, clusters)\n\n # recalculate centroids\n index = 0\n for cluster in clusters:\n old_centroids[index] = centroids[index]\n centroids[index] = np.mean(cluster, axis=0).tolist()\n index += 1\n\n #visualizeClusters(clusters)\n\n return clusters\n\n\ndef Func(clusters):\n center = []\n for i in range(len(clusters)):\n center.append(clusters[i][0])\n\n distSum = 0\n\n for i in range(len(clusters)):\n for j in range(1, len(clusters[i])):\n distSum += np.linalg.norm(center[i] - clusters[i][j])\n\n return distSum\n\ndef kneeFinding(X,kList):\n obj = []\n\n for i in kList:\n obj.append(Func(kmeans_(X, i)))\n\n plt.plot(range(1,7), obj)\n plt.show()\n\n return\n\ndef purity(X, clusters):\n purities = []\n #Your code\n for i in range(2):\n count = 0\n for idx in range(len(clusters[i])):\n if(int(clusters[i][idx][2]) == 1):\n count += 1\n\n purity = count*1.0 / len(clusters[i])\n if purity > 0.5:\n purities.append(purity)\n else:\n purities.append(1-purity)\n\n #<type 'list'>: [0.9724249797242498, 0.999000999000999]\n return purities\n\n'''\n\n## GMM functions \n\n#calculate the initial covariance matrix\n#covType: diag, full\ndef getInitialsGMM(X,k,covType):\n if covType == 
'full':\n dataArray = np.transpose(np.array([pt[0:-1] for pt in X]))\n covMat = np.cov(dataArray)\n else:\n covMatList = []\n for i in range(len(X[0])-1):\n data = [pt[i] for pt in X]\n cov = np.asscalar(np.cov(data))\n covMatList.append(cov)\n covMat = np.diag(covMatList)\n\n initialClusters = {}\n #Your code here\n return initialClusters\n\n\ndef calcLogLikelihood(X,clusters,k):\n loglikelihood = 0\n #Your code here\n return loglikelihood\n\n#E-step\ndef updateEStep(X,clusters,k):\n EMatrix = []\n #Your code here\n return EMatrix\n\n#M-step\ndef updateMStep(X,clusters,EMatrix):\n #Your code here\n return clusters\n\ndef visualizeClustersGMM(X,labels,clusters,covType):\n #Your code here\n\n\ndef gmmCluster(X, k, covType, maxIter=1000):\n #initial clusters\n clustersGMM = getInitialsGMM(X,k,covType)\n labels = []\n #Your code here\n visualizeClustersGMM(X,labels,clustersGMM,covType)\n return labels,clustersGMM\n\n\ndef purityGMM(X, clusters, labels):\n purities = []\n #Your code here\n return purities\n\n\n'''\n\ndef main():\n #######dataset path\n #datadir = sys.argv[1]\n datadir = ''\n pathDataset1 = datadir+'humanData.txt'\n #pathDataset2 = datadir+'/audioData.txt'\n dataset1 = loadData(pathDataset1)\n #dataset2 = loadData(pathDataset2)\n\n\n #Q4\n kneeFinding(dataset1,range(1,7))\n\n #Q5\n clusters = kmeans(dataset1, 2, maxIter=1000)\n purity(dataset1,clusters)\n'''\n #Q7\n labels11,clustersGMM11 = gmmCluster(dataset1, 2, 'diag')\n labels12,clustersGMM12 = gmmCluster(dataset1, 2, 'full')\n\n #Q8\n labels21,clustersGMM21 = gmmCluster(dataset2, 2, 'diag')\n labels22,clustersGMM22 = gmmCluster(dataset2, 2, 'full')\n\n #Q9\n purities11 = purityGMM(dataset1, clustersGMM11, labels11)\n purities12 = purityGMM(dataset1, clustersGMM12, labels12)\n purities21 = purityGMM(dataset2, clustersGMM21, labels21)\n purities22 = purityGMM(dataset2, clustersGMM22, labels22)\n'''\nif __name__ == \"__main__\":\n main()",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.dispatch import Signal
from djangochannelsrestframework.observer.base_observer import BaseObserver
class Observer(BaseObserver):
def __init__(self, func, signal: Signal = None, kwargs=None):
super().__init__(func)
if kwargs is None:
kwargs = {}
self.signal = signal
self.signal_kwargs = kwargs
self._serializer = None
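        # Subscribe as soon as the observer is constructed so handle()
        # receives every dispatch of the wrapped signal.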
self.signal.connect(self.handle, **self.signal_kwargs)
def handle(self, signal, *args, **kwargs):
message = self.serialize(signal, *args, **kwargs)
channel_layer = get_channel_layer()
for group_name in self.group_names_for_signal(*args, message=message, **kwargs):
async_to_sync(channel_layer.group_send)(group_name, message)
def group_names(self, *args, **kwargs):
yield "{}-{}-signal-{}".format(
self._uuid,
self.func.__name__.replace("_", "."),
".".join(
arg.lower().replace("_", ".") for arg in self.signal.providing_args
),
)
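# --- Illustrative usage sketch (an addition, not part of the original record).
# The signal and handler below are hypothetical; real projects normally wire
# observers through djangochannelsrestframework's decorator helpers instead.
#
# from django.dispatch import Signal
#
# my_signal = Signal(providing_args=["payload"])
#
# def on_event(signal, *args, **kwargs):
#     return {"type": "notify", "payload": kwargs.get("payload")}
#
# observer = Observer(on_event, signal=my_signal)
# my_signal.send(sender=None, payload="hello")  # handle() serializes the event
#                                               # and fans it out per group name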
|
normal
|
{
"blob_id": "66e93295d2797ca9e08100a0a1f28619acb72aa4",
"index": 3397,
"step-1": "<mask token>\n\n\nclass Observer(BaseObserver):\n <mask token>\n\n def handle(self, signal, *args, **kwargs):\n message = self.serialize(signal, *args, **kwargs)\n channel_layer = get_channel_layer()\n for group_name in self.group_names_for_signal(*args, message=\n message, **kwargs):\n async_to_sync(channel_layer.group_send)(group_name, message)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Observer(BaseObserver):\n\n def __init__(self, func, signal: Signal=None, kwargs=None):\n super().__init__(func)\n if kwargs is None:\n kwargs = {}\n self.signal = signal\n self.signal_kwargs = kwargs\n self._serializer = None\n self.signal.connect(self.handle, **self.signal_kwargs)\n\n def handle(self, signal, *args, **kwargs):\n message = self.serialize(signal, *args, **kwargs)\n channel_layer = get_channel_layer()\n for group_name in self.group_names_for_signal(*args, message=\n message, **kwargs):\n async_to_sync(channel_layer.group_send)(group_name, message)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Observer(BaseObserver):\n\n def __init__(self, func, signal: Signal=None, kwargs=None):\n super().__init__(func)\n if kwargs is None:\n kwargs = {}\n self.signal = signal\n self.signal_kwargs = kwargs\n self._serializer = None\n self.signal.connect(self.handle, **self.signal_kwargs)\n\n def handle(self, signal, *args, **kwargs):\n message = self.serialize(signal, *args, **kwargs)\n channel_layer = get_channel_layer()\n for group_name in self.group_names_for_signal(*args, message=\n message, **kwargs):\n async_to_sync(channel_layer.group_send)(group_name, message)\n\n def group_names(self, *args, **kwargs):\n yield '{}-{}-signal-{}'.format(self._uuid, self.func.__name__.\n replace('_', '.'), '.'.join(arg.lower().replace('_', '.') for\n arg in self.signal.providing_args))\n",
"step-4": "from asgiref.sync import async_to_sync\nfrom channels.layers import get_channel_layer\nfrom django.dispatch import Signal\nfrom djangochannelsrestframework.observer.base_observer import BaseObserver\n\n\nclass Observer(BaseObserver):\n\n def __init__(self, func, signal: Signal=None, kwargs=None):\n super().__init__(func)\n if kwargs is None:\n kwargs = {}\n self.signal = signal\n self.signal_kwargs = kwargs\n self._serializer = None\n self.signal.connect(self.handle, **self.signal_kwargs)\n\n def handle(self, signal, *args, **kwargs):\n message = self.serialize(signal, *args, **kwargs)\n channel_layer = get_channel_layer()\n for group_name in self.group_names_for_signal(*args, message=\n message, **kwargs):\n async_to_sync(channel_layer.group_send)(group_name, message)\n\n def group_names(self, *args, **kwargs):\n yield '{}-{}-signal-{}'.format(self._uuid, self.func.__name__.\n replace('_', '.'), '.'.join(arg.lower().replace('_', '.') for\n arg in self.signal.providing_args))\n",
"step-5": "from asgiref.sync import async_to_sync\nfrom channels.layers import get_channel_layer\nfrom django.dispatch import Signal\n\nfrom djangochannelsrestframework.observer.base_observer import BaseObserver\n\n\nclass Observer(BaseObserver):\n def __init__(self, func, signal: Signal = None, kwargs=None):\n super().__init__(func)\n if kwargs is None:\n kwargs = {}\n self.signal = signal\n self.signal_kwargs = kwargs\n self._serializer = None\n self.signal.connect(self.handle, **self.signal_kwargs)\n\n def handle(self, signal, *args, **kwargs):\n message = self.serialize(signal, *args, **kwargs)\n channel_layer = get_channel_layer()\n for group_name in self.group_names_for_signal(*args, message=message, **kwargs):\n async_to_sync(channel_layer.group_send)(group_name, message)\n\n def group_names(self, *args, **kwargs):\n yield \"{}-{}-signal-{}\".format(\n self._uuid,\n self.func.__name__.replace(\"_\", \".\"),\n \".\".join(\n arg.lower().replace(\"_\", \".\") for arg in self.signal.providing_args\n ),\n )\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed
from booli import booliwood
from models import add_bosta, get_all_bostas, Bosta
from django import forms
import time
class BostaForm(forms.Form):
maxPrice = forms.IntegerField()
livingArea = forms.IntegerField()
room = forms.IntegerField()
class BostaIdForm(forms.Form):
bostaId = forms.IntegerField()
class SearchBosta(forms.Form):
search_query = forms.CharField()
def show(request):
if request.method == 'POST':
form = BostaForm(request.POST)
if form.is_valid():
maxPrice = form.cleaned_data['maxPrice']
livingArea = form.cleaned_data['livingArea']
room = form.cleaned_data['room']
bostas = Bosta.objects \
.filter(listPrice__lte=maxPrice) \
.filter(livingArea__gte=livingArea) \
.filter(rooms__gte=room) \
.exclude(listPrice=0) \
			.order_by('soldDate')
		else:
			# Keep 'bostas' defined even when the submitted form is invalid.
			bostas = get_all_bostas()
else:
form = BostaForm()
bostas = get_all_bostas()
for bosta in bostas:
if bosta.livingArea == 0:
bosta.sek_m2 = 0
elif bosta.soldPrice == 0:
bosta.sek_m2 = bosta.listPrice / bosta.livingArea
else:
bosta.sek_m2 = bosta.soldPrice / bosta.livingArea
data = {
'bostas': bostas,
'form': form,
}
return render(request, 'main.html', data)
def update(request):
totalListing = 0
totalSold = 0
form = SearchBosta()
data = {
'totalListing': totalListing,
'totalSold': totalSold,
'countListing': 0,
'countSold': 0,
'form': form
}
if request.method == 'POST':
form = SearchBosta(request.POST)
if form.is_valid():
q = form.cleaned_data['search_query'].encode('utf8')
d1 = search("listings", q)
if d1:
data['totalListing'] = d1['total']
data['countListing'] = d1['count']
d1 = search("sold", q)
if d1:
data['totalSold'] = d1['total']
data['countSold'] = d1['count']
return render(request, 'update.html', data)
def search(type_search, q):
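	# Page through the Booli results 50 at a time, persisting each listing,
	# until result['totalCount'] entries have been fetched.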
total = 0
while True:
result = booliwood(q, total, 50, type_search)
for listing in result[type_search]:
add_bosta(listing)
total = total + result['count']
if total >= result['totalCount']:
break
time.sleep(1)
data = {
'total': total,
'count': result['totalCount'],
}
return data
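# --- Illustrative URL wiring (an addition; module paths are hypothetical). ---
# A minimal urls.py for the two views above, in the same old-style Django
# idiom the module already uses:
#
# from django.conf.urls import url
# import views
#
# urlpatterns = [
#     url(r'^$', views.show),
#     url(r'^update/$', views.update),
# ]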
|
normal
|
{
"blob_id": "53573a21364e9dfef9ed1164185ab441dbc29601",
"index": 123,
"step-1": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\ndef search(type_search, q):\n total = 0\n while True:\n result = booliwood(q, total, 50, type_search)\n for listing in result[type_search]:\n add_bosta(listing)\n total = total + result['count']\n if total >= result['totalCount']:\n break\n time.sleep(1)\n data = {'total': total, 'count': result['totalCount']}\n return data\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed\nfrom booli import booliwood\nfrom models import add_bosta, get_all_bostas, Bosta\nfrom django import forms\nimport time\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\ndef search(type_search, q):\n total = 0\n while True:\n result = booliwood(q, total, 50, type_search)\n for listing in result[type_search]:\n add_bosta(listing)\n total = total + result['count']\n if total >= result['totalCount']:\n break\n time.sleep(1)\n data = {'total': total, 'count': result['totalCount']}\n return data\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed\nfrom booli import booliwood\nfrom models import add_bosta, get_all_bostas, Bosta\nfrom django import forms\nimport time\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\ndef show(request):\n\tif request.method == 'POST':\n\t\tform = BostaForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tmaxPrice = form.cleaned_data['maxPrice']\n\t\t\tlivingArea = form.cleaned_data['livingArea']\n\t\t\troom = form.cleaned_data['room']\n\t\t\tbostas = Bosta.objects \\\n\t\t\t.filter(listPrice__lte=maxPrice) \\\n\t\t\t.filter(livingArea__gte=livingArea) \\\n\t\t\t.filter(rooms__gte=room) \\\n\t\t\t.exclude(listPrice=0) \\\n\t\t\t.order_by('soldDate') \n\telse:\n\t\tform = BostaForm()\n\t\tbostas = get_all_bostas()\n\tfor bosta in bostas:\n\t\tif bosta.livingArea == 0:\n\t\t\tbosta.sek_m2 = 0\n\t\telif bosta.soldPrice == 0:\n\t\t\tbosta.sek_m2 = bosta.listPrice / bosta.livingArea\n\t\telse:\n\t\t\tbosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n\n\tdata = { \n\t'bostas': bostas,\n\t'form': form,\n\t}\n\treturn render(request, 'main.html', data)\n\ndef update(request):\n\ttotalListing = 0\n\ttotalSold = 0\n\tform = SearchBosta()\n\tdata = {\n\t'totalListing': totalListing,\n\t'totalSold': totalSold,\n\t'countListing': 0,\n\t'countSold': 0,\n\t'form': form\n\t}\n\tif request.method == 'POST':\n\t\tform = SearchBosta(request.POST)\n\t\tif form.is_valid():\n\t\t\tq = form.cleaned_data['search_query'].encode('utf8')\n\t\t\td1 = search(\"listings\", q)\n\t\t\tif d1:\n\t\t\t\tdata['totalListing'] = d1['total']\n\t\t\t\tdata['countListing'] = d1['count']\n\t\t\td1 = search(\"sold\", q)\n\t\t\tif d1:\n\t\t\t\tdata['totalSold'] = d1['total']\n\t\t\t\tdata['countSold'] = d1['count']\n\n\treturn render(request, 'update.html', data)\n\ndef search(type_search, q):\n\ttotal = 0\n\twhile True:\n\t\tresult = booliwood(q, total, 50, type_search)\n\t\tfor listing in result[type_search]:\n\t\t\tadd_bosta(listing)\n\t\ttotal = total + result['count']\n\t\tif total >= result['totalCount']:\n\t\t\tbreak\n\t\ttime.sleep(1)\n\tdata = {\n\t'total': total,\n\t'count': result['totalCount'],\n\t}\n\treturn data\n\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
from lredit import *
# customization of MainWindow
def configure(window):
#----------------------------------------------
# Generic edit config
# tab width and indent width
Mode.tab_width = 4
# make TAB character visible
Mode.show_tab = True
# make space character visible
Mode.show_space = False
# make full-width space character visible
Mode.show_wspace = True
# make line-end visible
Mode.show_lineend = True
    # make end-of-file line visible
Mode.show_fileend = True
# cancel selection when text is copied into clipboard
Mode.cancel_selection_on_copy = False
# copy current line if text is not selected
Mode.copy_line_if_not_selected = True
# cut current line if text is not selected
Mode.cut_line_if_not_selected = True
#----------------------------------------------
# Specific mode config
# use space character instead of TAB
PythonMode.tab_by_space = True
#----------------------------------------------
# key binding
# F3 : search next
window.keymap[ "F3" ] = window.command.SearchNext
# Shift-F3 : search previous
window.keymap[ "S-F3" ] = window.command.SearchPrev
#----------------------------------------------
# extension menu
window.ext_menu_items = [
( "Another Pane", "C-W", window.command.AnotherPane ),
( "Project Files", "C-P", window.command.ProjectFileList ),
( "Recent Files", "C-H", window.command.RecentFileList ),
( "Bookmark List", "C-M", window.command.BookmarkList ),
( "Document List", "C-D", window.command.DocumentList ),
( "Outline Analysis", "C-O", window.command.Outline ),
( "Search Result", "C-S", window.command.SearchResultList ),
]
#----------------------------------------------
# user defined command
def command_MyTool1(info):
# print to log pane
print( "Hello World!" )
def command_MyTool2(info):
# insert text into active edit
edit = window.activeEditPane().edit
edit.modifyText( text="Hello World!" )
window.launcher.command_list += [
( "MyTool1", command_MyTool1 ),
( "MyTool2", command_MyTool2 ),
]
#----------------------------------------------
# user menu
def command_MyMenu(info):
items = [
( "My Tool 1", "C-1", command_MyTool1 ),
( "My Tool 2", "C-2", command_MyTool2 ),
]
window.menu( None, items )
window.keymap[ "C-T" ] = command_MyMenu
#----------------------------------------------
# customization of menu bar
# add [Tool] > [Extra]
window.insertMenu( ("tool","custom_tools_end"),
MenuNode(
"extra", "&Extra",
items=[
MenuNode( "focus_left", "Focus to &Left", window.command.FocusLeftEdit ),
MenuNode( "focus_right", "Focus to &Right", window.command.FocusRightEdit ),
]
)
)
# open specified document
class command_SwitchDocument:
def __init__( self, doc ):
self.doc = doc
def __call__( self, info ):
window.activeOpen( doc=self.doc )
# function to display opened documents
def menuitems_Documents():
items = []
i = 0
items.append( MenuNode( separator=True ) )
for edit in window.edit_list:
name = edit.doc.getName()
items.append( MenuNode( "doc_%d"%i, name, command_SwitchDocument(edit.doc) ) )
i+=1
items.append( MenuNode( separator=True ) )
return items
# add menu items of documents at the bottom of [View] menu
window.appendMenu( ("view",), menuitems_Documents )
#----------------------------------------------
# misc tools
    # remove consecutive duplicated lines
def command_Unique(info):
edit = window.activePane().edit
previous_line = [None]
def func( text, info ):
if previous_line[0]==text:
return False
else:
previous_line[0]=text
return True
edit.filterLines(func)
# search by previous condition and bookmark the found lines
def command_SearchAndBookmark(info):
if not window.search_object: return
edit = window.activePane().edit
point = edit.pointDocumentBegin()
count = 0
while point:
point = edit.search( search_object=window.search_object, point=point, direction=1, move_cursor=False, select=False, hitmark=False, paint=False, message=False )
if point:
edit.bookmark( point.line, [ 1 ], paint=False )
point.line += 1
point.index = 0
count += 1
msg = "found %d lines" % ( count )
window.setStatusMessage( msg, 3000 )
window.paint()
window.launcher.command_list += [
( "Unique", command_Unique ),
( "SearchAndBookmark", command_SearchAndBookmark ),
]
#----------------------------------------------
# association between filename pattern and mode
window.fileext_list = [
( "*.ini", "ini" ),
( "*.py *.pyw *.pys", "python" ),
( "*.pl", "perl" ),
( "*.js", "javascript" ),
( "*.cpp *.cc *.cxx *.hpp *.hh *.hxx *.h", "c++" ),
( "*.c *.h", "c" ),
( "*.mm *.h", "objective-c++" ),
( "*.m *.h", "objective-c" ),
( "*.cs", "c#" ),
( "*.java", "java" ),
( "*.vert *.frag *.geo", "glsl" ),
( "*.xml", "xml" ),
( "*.html *.htm", "html" ),
( "makefile *.mk", "makefile" ),
( "*.bat", "batch" ),
( "*.sql", "sql" ),
( "*", "text" ),
]
#----------------------------------------------
# add mode
# lexer class of Ini file
class IniLexer(RegexLexer):
def __init__(self):
RegexLexer.__init__(self)
self.rule_map['root'] = [
(r'\s+', Token_Text),
(r'[;#].*?$', Token_Comment),
(r'\[.*?\]$', Token_Keyword),
(r'(.*?)([ \t]*=[ \t]*)(.*?)$', ( Token_Name, Token_Text, Token_String) ),
(None, Token_Text)
]
# mode definition of Ini file
class IniMode(Mode):
name = "ini"
def __init__(self):
Mode.__init__(self)
self.lexer = IniLexer()
self.completion = WordCompletion()
@staticmethod
def staticconfigure(window):
Mode.staticconfigure(window)
callConfigFunc("staticconfigure_IniMode",window)
def configure( self, edit ):
Mode.configure( self, edit )
callConfigFunc("configure_IniMode",self,edit)
# add ini file mode
window.mode_list.append( IniMode )
# association of ini filename pattern
window.fileext_list.insert( 0, ( "*.ini", "ini" ) )
#----------------------------------------------
# customization of PythonMode
# configuration of PythonMode object (such as key binding)
def configure_PythonMode( mode, edit ):
# F6 : output 'Hello' in log pane
def command_Print( info ):
print( "Hello" )
edit.keymap[ "F6" ] = command_Print
# static configuration of PythonMode class (such as menu bar customization)
def staticconfigure_PythonMode(window):
# command to insert 'Hello Python!' into active edit area
def command_InsertHello(info):
edit = window.activeEditPane().edit
edit.modifyText( text="Hello Python!" )
# function to check if the active edit area is Python mode
def isPythonMode():
return isinstance( window.activeEditPaneMode(), PythonMode )
# add menu item to display only when active mode is Python mode
window.insertMenu( ("tool","custom_tools_end"), MenuNode( "insert_hello", "Insert &Hello", command_InsertHello, visible=isPythonMode ) )
|
normal
|
{
"blob_id": "d8e2613b45b3f4a24db0b07a01061c6057c9feed",
"index": 4973,
"step-1": "from lredit import *\n\n\n# customization of MainWindow\ndef configure(window):\n\n\n #----------------------------------------------\n # Generic edit config\n\n # tab width and indent width\n Mode.tab_width = 4\n\n # make TAB character visible\n Mode.show_tab = True\n\n # make space character visible\n Mode.show_space = False\n\n # make full-width space character visible\n Mode.show_wspace = True\n\n # make line-end visible\n Mode.show_lineend = True\n\n # make end-of-fileline visible\n Mode.show_fileend = True\n\n # cancel selection when text is copied into clipboard\n Mode.cancel_selection_on_copy = False\n\n # copy current line if text is not selected\n Mode.copy_line_if_not_selected = True\n\n # cut current line if text is not selected\n Mode.cut_line_if_not_selected = True\n\n\n #----------------------------------------------\n # Specific mode config\n\n # use space character instead of TAB\n PythonMode.tab_by_space = True\n\n\n #----------------------------------------------\n # key binding\n\n # F3 : search next\n window.keymap[ \"F3\" ] = window.command.SearchNext\n\n # Shift-F3 : search previous\n window.keymap[ \"S-F3\" ] = window.command.SearchPrev\n\n\n #----------------------------------------------\n # extension menu\n\n window.ext_menu_items = [\n ( \"Another Pane\", \"C-W\", window.command.AnotherPane ),\n ( \"Project Files\", \"C-P\", window.command.ProjectFileList ),\n ( \"Recent Files\", \"C-H\", window.command.RecentFileList ),\n ( \"Bookmark List\", \"C-M\", window.command.BookmarkList ),\n ( \"Document List\", \"C-D\", window.command.DocumentList ),\n ( \"Outline Analysis\", \"C-O\", window.command.Outline ),\n ( \"Search Result\", \"C-S\", window.command.SearchResultList ),\n ]\n\n\n #----------------------------------------------\n # user defined command\n\n def command_MyTool1(info):\n # print to log pane\n print( \"Hello World!\" )\n\n def command_MyTool2(info):\n # insert text into active edit\n edit = window.activeEditPane().edit\n edit.modifyText( text=\"Hello World!\" )\n\n window.launcher.command_list += [\n ( \"MyTool1\", command_MyTool1 ),\n ( \"MyTool2\", command_MyTool2 ),\n ]\n\n #----------------------------------------------\n # user menu\n\n def command_MyMenu(info):\n\n items = [\n ( \"My Tool 1\", \"C-1\", command_MyTool1 ),\n ( \"My Tool 2\", \"C-2\", command_MyTool2 ),\n ]\n\n window.menu( None, items )\n\n window.keymap[ \"C-T\" ] = command_MyMenu\n\n\n #----------------------------------------------\n # customization of menu bar\n\n # add [Tool] > [Extra]\n window.insertMenu( (\"tool\",\"custom_tools_end\"),\n MenuNode(\n \"extra\", \"&Extra\",\n items=[\n MenuNode( \"focus_left\", \"Focus to &Left\", window.command.FocusLeftEdit ),\n MenuNode( \"focus_right\", \"Focus to &Right\", window.command.FocusRightEdit ),\n ]\n )\n )\n\n # open specified document\n class command_SwitchDocument:\n\n def __init__( self, doc ):\n self.doc = doc\n\n def __call__( self, info ):\n window.activeOpen( doc=self.doc )\n\n # function to display opened documents\n def menuitems_Documents():\n items = []\n i = 0\n items.append( MenuNode( separator=True ) )\n for edit in window.edit_list:\n name = edit.doc.getName()\n items.append( MenuNode( \"doc_%d\"%i, name, command_SwitchDocument(edit.doc) ) )\n i+=1\n items.append( MenuNode( separator=True ) )\n return items\n\n # add menu items of documents at the bottom of [View] menu\n window.appendMenu( (\"view\",), menuitems_Documents )\n\n\n #----------------------------------------------\n # misc tools\n\n # 
remove continuing overlapped lines\n def command_Unique(info):\n\n edit = window.activePane().edit\n\n previous_line = [None]\n def func( text, info ):\n if previous_line[0]==text:\n return False\n else:\n previous_line[0]=text\n return True\n\n edit.filterLines(func)\n\n # search by previous condition and bookmark the found lines\n def command_SearchAndBookmark(info):\n\n if not window.search_object: return\n\n edit = window.activePane().edit\n point = edit.pointDocumentBegin()\n count = 0\n \n while point:\n point = edit.search( search_object=window.search_object, point=point, direction=1, move_cursor=False, select=False, hitmark=False, paint=False, message=False )\n if point:\n edit.bookmark( point.line, [ 1 ], paint=False )\n point.line += 1\n point.index = 0\n count += 1\n \n msg = \"found %d lines\" % ( count )\n window.setStatusMessage( msg, 3000 )\n\n window.paint()\n\n window.launcher.command_list += [\n ( \"Unique\", command_Unique ),\n ( \"SearchAndBookmark\", command_SearchAndBookmark ),\n ]\n\n\n #----------------------------------------------\n # association between filename pattern and mode\n\n window.fileext_list = [\n ( \"*.ini\", \"ini\" ),\n ( \"*.py *.pyw *.pys\", \"python\" ),\n ( \"*.pl\", \"perl\" ),\n ( \"*.js\", \"javascript\" ),\n ( \"*.cpp *.cc *.cxx *.hpp *.hh *.hxx *.h\", \"c++\" ),\n ( \"*.c *.h\", \"c\" ),\n ( \"*.mm *.h\", \"objective-c++\" ),\n ( \"*.m *.h\", \"objective-c\" ),\n ( \"*.cs\", \"c#\" ),\n ( \"*.java\", \"java\" ),\n ( \"*.vert *.frag *.geo\", \"glsl\" ),\n ( \"*.xml\", \"xml\" ),\n ( \"*.html *.htm\", \"html\" ),\n ( \"makefile *.mk\", \"makefile\" ),\n ( \"*.bat\", \"batch\" ),\n ( \"*.sql\", \"sql\" ),\n ( \"*\", \"text\" ),\n ]\n\n #----------------------------------------------\n # add mode\n\n # lexer class of Ini file\n class IniLexer(RegexLexer):\n\n def __init__(self):\n\n RegexLexer.__init__(self)\n\n self.rule_map['root'] = [\n (r'\\s+', Token_Text),\n (r'[;#].*?$', Token_Comment),\n (r'\\[.*?\\]$', Token_Keyword),\n (r'(.*?)([ \\t]*=[ \\t]*)(.*?)$', ( Token_Name, Token_Text, Token_String) ),\n (None, Token_Text)\n ]\n\n # mode definition of Ini file\n class IniMode(Mode):\n\n name = \"ini\"\n\n def __init__(self):\n Mode.__init__(self)\n self.lexer = IniLexer()\n self.completion = WordCompletion()\n\n @staticmethod\n def staticconfigure(window):\n Mode.staticconfigure(window)\n callConfigFunc(\"staticconfigure_IniMode\",window)\n\n def configure( self, edit ):\n Mode.configure( self, edit )\n callConfigFunc(\"configure_IniMode\",self,edit)\n\n # add ini file mode\n window.mode_list.append( IniMode )\n\n # association of ini filename pattern\n window.fileext_list.insert( 0, ( \"*.ini\", \"ini\" ) )\n\n\n\n#----------------------------------------------\n# customization of PythonMode\n\n# configuration of PythonMode object (such as key binding)\ndef configure_PythonMode( mode, edit ):\n\n # F6 : output 'Hello' in log pane\n def command_Print( info ):\n print( \"Hello\" )\n\n edit.keymap[ \"F6\" ] = command_Print\n\n\n# static configuration of PythonMode class (such as menu bar customization)\ndef staticconfigure_PythonMode(window):\n\n # command to insert 'Hello Python!' 
into active edit area\n def command_InsertHello(info):\n edit = window.activeEditPane().edit\n edit.modifyText( text=\"Hello Python!\" )\n\n # function to check if the active edit area is Python mode\n def isPythonMode():\n return isinstance( window.activeEditPaneMode(), PythonMode )\n\n # add menu item to display only when active mode is Python mode\n window.insertMenu( (\"tool\",\"custom_tools_end\"), MenuNode( \"insert_hello\", \"Insert &Hello\", command_InsertHello, visible=isPythonMode ) )\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
def climb_ways(n, k):
    # The original record leaves this stub empty. A plausible intent, judging
    # from the name, is the classic DP: count the ways to climb n stairs
    # taking between 1 and k steps at a time (an assumption, not given).
    dp = [0] * (n + 1)
    dp[0] = 1
    for i in range(1, n + 1):
        dp[i] = sum(dp[max(0, i - k):i])
    return dp[n]
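# Example under the stair-climbing assumption above:
# climb_ways(4, 2) == 5   # 1+1+1+1, 1+1+2, 1+2+1, 2+1+1, 2+2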
|
normal
|
{
"blob_id": "05144338cc9c0c65010e0b8a3dd6fb50f6343214",
"index": 6641,
"step-1": "def climb_ways(n, k):",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from collections import namedtuple
import argparse
import pdb
import traceback
import sys
import os
from qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer
import hexfilter
description = \
"Tool used to analyze hexdumps produced by a qca wireless kernel " \
"driver (such as ath6kl, ath10k or qcacld2.0). " \
"The hexdumps are assumed to contain dumps of the traffic " \
"between the driver and the target. " \
"No special preprocessing of the log files is required. " \
"Filter strings (description strings) can be used to limit the output " \
"(only RX or TX etc.). " \
"The driver must of course be configured to log all necessary debug " \
"data (for ath6kl and ath10k this means a proper debug mask). "
wmi_ctrl_help = \
"Subcommand for WMI control message parsing. " \
"This subcommand is used to extract WMI control messages from the input. "
wmi_ctrl_description = \
"Extracts WMI control message hexdata from an input (--input-file). " \
"The extracted messages will be printed to the output (--output -file). " \
"--ep-id is used to determine from which HTC endpoint the data will " \
"be extracted (see description of that option below). " \
"All valid WMI control message ID's will be printed together with the " \
"message enum string (from ath6kl source code). " \
"The --wmi-old option must be used if the driver does not use the WMI " \
"unified protocol (ath6kl). " \
"The WMI control message payload will also be printed together with " \
"message ID's if the --print-data option is used."
htc_ctrl_help = \
"Subcommand for HTC control message parsing. " \
"This subcommand is used to extract HTC control messages from the input. "
htc_ctrl_description = \
"Extracts HTC control message hexdata from an input (--input-file). " \
"The extracted messages will be printed to the output (--output -file). " \
"All valid HTC control message ID's will be printed together with the " \
"message enum string (from ath6kl source code). " \
"The message payload will also be printed together with the " \
"message ID's if the --print-data option is used. " \
"HTC control messages will always be extracted from endpoint 0."
htt_help = \
"Subcommand for HTT message parsing. " \
"This subcommand is used to extract HTT messages from the input. "
htt_description = \
"Extracts HTT message hexdata from an input (--input-file). " \
"The extracted messages will be printed to the output (--output -file). " \
"--ep-id is used to determine from which HTC endpoint the data will " \
"be extracted (see description of that option below). " \
"All valid HTT message ID's will be printed together with the " \
"message enum string (from ath10k source code). " \
"The message payload will also be printed together with " \
"message ID's if the --print-data option is used."
all_help = \
"Subcommand for parsing of all supported message types. " \
"This subcommand is used to extract both WMI control, " \
"HTC control and HTT messages from the input. "
all_description = \
"Extracts message hexdata from an input (--input-file). " \
"The extracted messages will be printed to the output (--output-file). " \
"The messages can be any of the supported message types " \
"(currently only WMI controli, HTC control and HTT). " \
"--wmi-ctrl-ep-id and --htt-ep-id is used to determine from which " \
"endpoints WMI and HTT data will be extracted " \
"(see description of those options below). " \
"HTC control messages will always be extracted from ep 0. " \
"All valid message ID's will be printed together " \
"with a corresponding message enum string. " \
"The message payload will also be printed together with " \
"message ID's if the --print-data option is used."
def auto_int(x):
return int(x, 0)
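# auto_int accepts any base-prefixed literal, e.g. auto_int('0x1f') == 31 and
# auto_int('31') == 31, so --msg-id / --skip-msg-id take IDs in hex or decimal.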
def load_options():
global parsed_args
base_parser = argparse.ArgumentParser(add_help=False)
base_parser.add_argument('-i', '--input-file',
help="Input (log) file. If omitted, "
"stdin will be read.")
base_parser.add_argument('-o', '--output-file',
help="Output file. If omitted, "
"the output will be written to stdout.")
base_parser.add_argument('-n', '--no-timestamps', action="store_true",
help="Specifies whether or not the input file "
"contains timestamps. ")
base_parser.add_argument('-d', '--desc-str', nargs='+', type=str,
help="Description string(s) of the dumps. "
"Only dumps with a prefix "
"matching any of the provided desc strings "
"will be analyzed. "
"If no --desc-str option is given, no "
"description filtering will be performed. "
"The prefix of a hexdump is the short "
"description string before the address "
"in each line of the dump, i.e the hexdump "
"prefix. "
"--desc-str is normally used to select "
"between RX and TX logs and should be "
"combined with a proper --data-direction "
"option.")
base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,
help="This option is used to specify how the "
"hexdata should be interpreted. "
"Valid values are: "
"t2h (target to host) or h2t (host to target). "
"With t2h, RX trailers will be printed if "
"--print-data is used. h2t is default. "
"This option should be combined with an "
"applicable --desc-str option. ")
base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,
help="Description string(s) of the dumps to be. "
"excluded. Similar to --desc-str, but all "
"matching prefixes will be excluded from "
"the analysis.")
base_parser.add_argument('-s', '--short-htc-header', action="store_true",
help="Use 6 byte HTC header (\"old\" format) "
"instead of 8 bytes.")
base_parser.add_argument('-t', '--keep-timestamps', action="store_true",
help="Keep the timestamps associated with each "
"hexdump in the output. "
"This option will only have effect if the "
"log file contains timestamps.")
parser = argparse.ArgumentParser(prog="qca_hex_analyzer",
description=description,
parents=[base_parser])
subparsers = parser.add_subparsers(dest="subparser_name")
parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl',
help=wmi_ctrl_help,
description=wmi_ctrl_description,
parents=[base_parser])
parser_wmi_ctrl.add_argument('--wmi-old', action="store_true",
help="Specifies whether or not the WMI messages "
"are according to the \"old\" WMI protocol. "
"If not set, the messages will be interpreted "
"according to the unified WMI format")
parser_wmi_ctrl.add_argument('-p', '--print-data', action="store_true",
help="Print WMI data message payload (and not just "
"WMI message ID) for all encountered messages. ")
parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
type=int, default=[2],
help="WMI control service endpoint ID. "
"This is the endpoint where the WMI control data is "
"expected to be present. Make sure the endpoint "
"matches the endpoint id associated with the "
"control service endpoint (service id 0x100) "
"of the driver (the endpoint received from the "
"target in the HTC service connect response). "
"If this option is omitted a default value of 2 "
"will be used.")
parser_wmi_ctrl.add_argument('--tlv', action="store_true",
help="TLV analysis."
"Each WMI message will be interpreted as a TLV "
"message and the content of the message will be. "
"written out in text (instead of hexdump). "
"If the encountered message is not supported by "
"the parser, the hex data will be printed instead.")
parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID',
nargs='+', type=auto_int,
help="WMI message id filter. "
"Only WMI messages with an id matching any of the "
"provided id's will be included in the output. "
"If no --id | --msg-id option is given, no "
"filtering will be performed. ")
parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',
nargs='+', type=auto_int,
help="WMI message id exclude filter. "
"Similar to --id | --msg-id, but all matching "
"id's will be excluded from the output. ")
parser_htc_ctrl = subparsers.add_parser('htc-ctrl',
help=htc_ctrl_help,
description=htc_ctrl_description,
parents=[base_parser])
parser_htc_ctrl.add_argument('-p', '--print-data', action="store_true",
help="Print HTC ctrl data message payload (and not just "
"message ID) for all encountered messages. ")
parser_htt = subparsers.add_parser('htt',
help=htt_help,
description=htt_description,
parents=[base_parser])
parser_htt.add_argument('-p', '--print-data', action="store_true",
help="Print HTT data message payload (and not just "
"HTT message ID) for all encountered messages. ")
parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
type=int, default=[1],
help="HTT service endpoint ID. "
"This is the endpoint where the HTT data is "
"expected to be present. Make sure the endpoint "
"matches the endpoint id associated with the "
"HTT endpoint (service id 0x300) "
"of the driver (the endpoint received from the "
"target in the HTC service connect response). "
"If this option is omitted a default value of 1 "
"will be used.")
parser_all = subparsers.add_parser('all',
help=all_help,
description=all_description,
parents=[base_parser])
parser_all.add_argument('-p', '--print-data', action="store_true",
help="Print message payload (and not just "
"message ID) for all encountered messages. ")
parser_all.add_argument('--wmi-old', action="store_true",
help="Specifies whether or not the WMI messages "
"are according to the \"old\" WMI protocol. "
"If not set, the messages will be interpreted "
"according to the unified WMI format")
parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1,
type=int, default=[1],
help="HTT service endpoint ID. "
"This is the endpoint where the HTT data is "
"expected to be present. Make sure the endpoint "
"matches the endpoint id associated with the "
"HTT endpoint (service id 0x300) "
"of the driver (the endpoint received from the "
"target in the HTC service connect response). "
"If this option is omitted a default value of 1 "
"will be used.")
parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1,
type=int, default=[2],
help="WMI control service endpoint ID. "
"This is the endpoint where the WMI control data is "
"expected to be present. Make sure the endpoint "
"matches the endpoint id associated with the "
"control service endpoint (service id 0x100) "
"of the driver (the endpoint received from the "
"target in the HTC service connect response). "
"If this option is omitted a default value of 2 "
"will be used.")
parsed_args = parser.parse_args()
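# --- Illustrative invocations (an addition; file and script names are
# hypothetical). ---
#   python qca_hex_analyzer.py wmi-ctrl -i driver.log -p -d "wmi tx"
#   python qca_hex_analyzer.py htt -a t2h -e 1 -i driver.log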
def main():
global parsed_args
load_options()
try:
if parsed_args.input_file:
infp = open(parsed_args.input_file, "r")
else:
infp = sys.stdin
if parsed_args.output_file:
outfp = open(parsed_args.output_file, "w")
else:
outfp = sys.stdout
if parsed_args.data_direction:
if parsed_args.data_direction[0] == 't2h':
t2h = True
elif parsed_args.data_direction[0] == 'h2t':
t2h = False
else:
sys.stderr.write('Unsupported data direction: {}\n'.format(parsed_args.data_direction[0]))
exit(1)
else:
# Interpret the data as host -> target is the default behaviour
t2h = False
hf = hexfilter.HexFilterLinux(skip_timestamps=(not parsed_args.keep_timestamps),
abs_timestamps=True,
dump_desc=parsed_args.desc_str,
dump_desc_invert=parsed_args.desc_str_invert,
log_has_timestamps=(not parsed_args.no_timestamps),
include_dump_desc_in_output=False,
remove_ascii_part=True)
if parsed_args.subparser_name == 'wmi-ctrl':
analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],
wmi_unified=(not parsed_args.wmi_old),
short_htc_hdr=parsed_args.short_htc_header,
timestamps=parsed_args.keep_timestamps,
t2h=t2h,
tlv_analysis=parsed_args.tlv,
msg_id_filter=parsed_args.id,
msg_id_exclude_filter=parsed_args.skip_id)
if parsed_args.tlv:
parsed_args.print_data = True
elif parsed_args.subparser_name == 'htc-ctrl':
analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.short_htc_header,
timestamps=parsed_args.keep_timestamps,
t2h=t2h)
elif parsed_args.subparser_name == 'htt':
analyzer = HttAnalyzer(eid=parsed_args.ep_id[0],
short_htc_hdr=parsed_args.short_htc_header,
timestamps=parsed_args.keep_timestamps,
t2h=t2h)
elif parsed_args.subparser_name == 'all':
analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[0],
htt_eid=parsed_args.htt_ep_id[0],
wmi_unified=(not parsed_args.wmi_old),
short_htc_hdr=parsed_args.short_htc_header,
timestamps=parsed_args.keep_timestamps,
t2h=t2h)
        else:
            sys.stderr.write('Unsupported subcommand: {}\n'.format(parsed_args.subparser_name))
            # Bail out here; the loop below would otherwise hit an
            # undefined 'analyzer'.
            return
for line in infp:
if hf.parse_line(line):
hexdata = hf.get_hex()
if analyzer.parse_hexdata(hexdata):
str = analyzer.get_id_str()
outfp.write(str)
if parsed_args.print_data:
analyzer.print_data(outfp)
except IOError as err:
sys.stderr.write('{}\n'.format(err))
except:
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "3b381668dbb9b4e5a2e323dc4d6b5e3951736882",
"index": 1804,
"step-1": "<mask token>\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. ')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). 
If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. \"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). 
If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\ndescription = (\n 'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '\n )\nwmi_ctrl_help = (\n 'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '\n )\nwmi_ctrl_description = (\n \"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nhtc_ctrl_help = (\n 'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '\n )\nhtc_ctrl_description = (\n \"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0.\"\n )\nhtt_help = (\n 'Subcommand for HTT message parsing. This subcommand is used to extract HTT messages from the input. '\n )\nhtt_description = (\n \"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nall_help = (\n 'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '\n )\nall_description = (\n \"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. 
The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. ')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. 
written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. \"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). 
If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from collections import namedtuple\nimport argparse\nimport pdb\nimport traceback\nimport sys\nimport os\nfrom qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer\nimport hexfilter\ndescription = (\n 'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '\n )\nwmi_ctrl_help = (\n 'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '\n )\nwmi_ctrl_description = (\n \"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nhtc_ctrl_help = (\n 'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '\n )\nhtc_ctrl_description = (\n \"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0.\"\n )\nhtt_help = (\n 'Subcommand for HTT message parsing. This subcommand is used to extract HTT messages from the input. '\n )\nhtt_description = (\n \"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nall_help = (\n 'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '\n )\nall_description = (\n \"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. 
The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. ')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. 
written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. \"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). 
If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from collections import namedtuple\n\nimport argparse\nimport pdb\nimport traceback\nimport sys\nimport os\nfrom qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer\nimport hexfilter\n\ndescription = \\\n \"Tool used to analyze hexdumps produced by a qca wireless kernel \" \\\n \"driver (such as ath6kl, ath10k or qcacld2.0). \" \\\n \"The hexdumps are assumed to contain dumps of the traffic \" \\\n \"between the driver and the target. \" \\\n \"No special preprocessing of the log files is required. \" \\\n \"Filter strings (description strings) can be used to limit the output \" \\\n \"(only RX or TX etc.). \" \\\n \"The driver must of course be configured to log all necessary debug \" \\\n \"data (for ath6kl and ath10k this means a proper debug mask). \"\n\nwmi_ctrl_help = \\\n \"Subcommand for WMI control message parsing. \" \\\n \"This subcommand is used to extract WMI control messages from the input. \"\n\nwmi_ctrl_description = \\\n \"Extracts WMI control message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"--ep-id is used to determine from which HTC endpoint the data will \" \\\n \"be extracted (see description of that option below). \" \\\n \"All valid WMI control message ID's will be printed together with the \" \\\n \"message enum string (from ath6kl source code). \" \\\n \"The --wmi-old option must be used if the driver does not use the WMI \" \\\n \"unified protocol (ath6kl). \" \\\n \"The WMI control message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\nhtc_ctrl_help = \\\n \"Subcommand for HTC control message parsing. \" \\\n \"This subcommand is used to extract HTC control messages from the input. \"\n\nhtc_ctrl_description = \\\n \"Extracts HTC control message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"All valid HTC control message ID's will be printed together with the \" \\\n \"message enum string (from ath6kl source code). \" \\\n \"The message payload will also be printed together with the \" \\\n \"message ID's if the --print-data option is used. \" \\\n \"HTC control messages will always be extracted from endpoint 0.\"\n\nhtt_help = \\\n \"Subcommand for HTT message parsing. \" \\\n \"This subcommand is used to extract HTT messages from the input. \"\n\nhtt_description = \\\n \"Extracts HTT message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"--ep-id is used to determine from which HTC endpoint the data will \" \\\n \"be extracted (see description of that option below). \" \\\n \"All valid HTT message ID's will be printed together with the \" \\\n \"message enum string (from ath10k source code). \" \\\n \"The message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\nall_help = \\\n \"Subcommand for parsing of all supported message types. \" \\\n \"This subcommand is used to extract both WMI control, \" \\\n \"HTC control and HTT messages from the input. \"\n\nall_description = \\\n \"Extracts message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output-file). \" \\\n \"The messages can be any of the supported message types \" \\\n \"(currently only WMI controli, HTC control and HTT). 
\" \\\n \"--wmi-ctrl-ep-id and --htt-ep-id is used to determine from which \" \\\n \"endpoints WMI and HTT data will be extracted \" \\\n \"(see description of those options below). \" \\\n \"HTC control messages will always be extracted from ep 0. \" \\\n \"All valid message ID's will be printed together \" \\\n \"with a corresponding message enum string. \" \\\n \"The message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\n\ndef auto_int(x):\n\n return int(x, 0)\n\n\ndef load_options():\n\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n\n base_parser.add_argument('-i', '--input-file',\n help=\"Input (log) file. If omitted, \"\n \"stdin will be read.\")\n base_parser.add_argument('-o', '--output-file',\n help=\"Output file. If omitted, \"\n \"the output will be written to stdout.\")\n base_parser.add_argument('-n', '--no-timestamps', action=\"store_true\",\n help=\"Specifies whether or not the input file \"\n \"contains timestamps. \")\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str,\n help=\"Description string(s) of the dumps. \"\n \"Only dumps with a prefix \"\n \"matching any of the provided desc strings \"\n \"will be analyzed. \"\n \"If no --desc-str option is given, no \"\n \"description filtering will be performed. \"\n \"The prefix of a hexdump is the short \"\n \"description string before the address \"\n \"in each line of the dump, i.e the hexdump \"\n \"prefix. \"\n \"--desc-str is normally used to select \"\n \"between RX and TX logs and should be \"\n \"combined with a proper --data-direction \"\n \"option.\")\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\"This option is used to specify how the \"\n \"hexdata should be interpreted. \"\n \"Valid values are: \"\n \"t2h (target to host) or h2t (host to target). \"\n \"With t2h, RX trailers will be printed if \"\n \"--print-data is used. h2t is default. \"\n \"This option should be combined with an \"\n \"applicable --desc-str option. \")\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\"Description string(s) of the dumps to be. \"\n \"excluded. Similar to --desc-str, but all \"\n \"matching prefixes will be excluded from \"\n \"the analysis.\")\n base_parser.add_argument('-s', '--short-htc-header', action=\"store_true\",\n help=\"Use 6 byte HTC header (\\\"old\\\" format) \"\n \"instead of 8 bytes.\")\n base_parser.add_argument('-t', '--keep-timestamps', action=\"store_true\",\n help=\"Keep the timestamps associated with each \"\n \"hexdump in the output. \"\n \"This option will only have effect if the \"\n \"log file contains timestamps.\")\n\n parser = argparse.ArgumentParser(prog=\"qca_hex_analyzer\",\n description=description,\n parents=[base_parser])\n\n subparsers = parser.add_subparsers(dest=\"subparser_name\")\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl',\n help=wmi_ctrl_help,\n description=wmi_ctrl_description,\n parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action=\"store_true\",\n help=\"Specifies whether or not the WMI messages \"\n \"are according to the \\\"old\\\" WMI protocol. \"\n \"If not set, the messages will be interpreted \"\n \"according to the unified WMI format\")\n parser_wmi_ctrl.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print WMI data message payload (and not just \"\n \"WMI message ID) for all encountered messages. 
\")\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2],\n help=\"WMI control service endpoint ID. \"\n \"This is the endpoint where the WMI control data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"control service endpoint (service id 0x100) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 2 \"\n \"will be used.\")\n parser_wmi_ctrl.add_argument('--tlv', action=\"store_true\",\n help=\"TLV analysis.\"\n \"Each WMI message will be interpreted as a TLV \"\n \"message and the content of the message will be. \"\n \"written out in text (instead of hexdump). \"\n \"If the encountered message is not supported by \"\n \"the parser, the hex data will be printed instead.\")\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID',\n nargs='+', type=auto_int,\n help=\"WMI message id filter. \"\n \"Only WMI messages with an id matching any of the \"\n \"provided id's will be included in the output. \"\n \"If no --id | --msg-id option is given, no \"\n \"filtering will be performed. \")\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int,\n help=\"WMI message id exclude filter. \"\n \"Similar to --id | --msg-id, but all matching \"\n \"id's will be excluded from the output. \")\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl',\n help=htc_ctrl_help,\n description=htc_ctrl_description,\n parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print HTC ctrl data message payload (and not just \"\n \"message ID) for all encountered messages. \")\n parser_htt = subparsers.add_parser('htt',\n help=htt_help,\n description=htt_description,\n parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print HTT data message payload (and not just \"\n \"HTT message ID) for all encountered messages. \")\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[1],\n help=\"HTT service endpoint ID. \"\n \"This is the endpoint where the HTT data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"HTT endpoint (service id 0x300) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 1 \"\n \"will be used.\")\n parser_all = subparsers.add_parser('all',\n help=all_help,\n description=all_description,\n parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print message payload (and not just \"\n \"message ID) for all encountered messages. \")\n parser_all.add_argument('--wmi-old', action=\"store_true\",\n help=\"Specifies whether or not the WMI messages \"\n \"are according to the \\\"old\\\" WMI protocol. \"\n \"If not set, the messages will be interpreted \"\n \"according to the unified WMI format\")\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1,\n type=int, default=[1],\n help=\"HTT service endpoint ID. \"\n \"This is the endpoint where the HTT data is \"\n \"expected to be present. 
Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"HTT endpoint (service id 0x300) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 1 \"\n \"will be used.\")\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1,\n type=int, default=[2],\n help=\"WMI control service endpoint ID. \"\n \"This is the endpoint where the WMI control data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"control service endpoint (service id 0x100) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 2 \"\n \"will be used.\")\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, \"r\")\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, \"w\")\n else:\n outfp = sys.stdout\n\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(parsed_args.data_direction[0]))\n exit(1)\n else:\n # Interpret the data as host -> target is the default behaviour\n t2h = False\n\n hf = hexfilter.HexFilterLinux(skip_timestamps=(not parsed_args.keep_timestamps),\n abs_timestamps=True,\n dump_desc=parsed_args.desc_str,\n dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=(not parsed_args.no_timestamps),\n include_dump_desc_in_output=False,\n remove_ascii_part=True)\n\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=(not parsed_args.wmi_old),\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h,\n tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id,\n msg_id_exclude_filter=parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0],\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[0],\n htt_eid=parsed_args.htt_ep_id[0],\n wmi_unified=(not parsed_args.wmi_old),\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(parsed_args.subparser_name))\n\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
# Python 2.7 Doritobot Vision System
# EECS 498 Purple Team, 2014
# Written by Cody Hyman ([email protected])
# Written against OpenCV 3.0.0-alpha
import sys
import os
import cv2
import numpy as np
from uvcinterface import UVCInterface as uvc
from visionUtil import VisionUtil as vu
from collections import deque
from math import *
# Calibration state 'Enumeration'
class CalState(object):
UNCAL = 1
CAL_PROG = 2
CALIBRATED = 3
### Vision System Class ###
class VisionSystem(object):
# Window names
CAM_FEED_NAME = 'Camera Feed'
CAL_NAME = 'Calibrated Image'
PROC_NAME = 'Vision Processing'
CTL_NAME = 'Filter Controls'
# Constants
G_CENTER = 52
R_CENTER = 0
SMIN = 50
VMIN = 80
#HISTORY_LENGTH = 15
EMPTY_KERNEL = [0, 0, 0, 0, 0, 0, 0]
RAW_KERNEL = np.array([1, 2, 3, 6, 10, 18, 20], dtype = np.float32)
FIR_KERNEL = np.multiply(RAW_KERNEL,1/np.linalg.norm(RAW_KERNEL,1)) # Normalized kernel
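	# A quick sanity check of the smoothing scheme (my reading of filterPoints
	# below): the histories are kept oldest-first, so the heaviest weight (20)
	# lands on the newest sample. Because the 1-norm scaling makes the weights
	# sum to 1, a constant input passes through the filter unchanged, e.g.
	# sum(FIR_KERNEL * [c]*7) == c.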
def __init__(self, camera):
### Instance Value initialization ###
self.camera = camera
self.calstate = CalState.UNCAL
self.calpts = []
self.XSIZE = 1000
self.YSIZE = 1000
self.x_est = -1
self.y_est = -1
self.theta_est = -1
# Drawing storage
self.waypointEst = [(300,300)] # Waypoint estimates for UI
self.tagLoc = (10,10) # Tag location estimate
self.fVectorStart = (0,0)
self.fVectorEnd = (0,0)
#self.worldpts = np.float32([
# [0,self.YSIZE/2],
# [0,0],
# [self.XSIZE,0],
# [self.XSIZE,self.YSIZE/2]
# ])
# ===== ***** Calibration points from world *****===== #
'''self.worldpts = np.float32([
[-5, -1. * -105], #22
[90, -1. * -100], #27
[90, -1. * 110], #26
[0, -1. * 107] #25
])#*self.IMG_SCALE + self.IMG_OFFSET'''
# Swap x-y coordinates (WTF!)
'''self.worldpts = np.float32([
[-105,-5], #22
[-100, 90], #27
[110, 90], #26
[107, 0] #25
])#*self.IMG_SCALE + self.IMG_OFFSET'''
self.worldpts = np.float32([
[-104,-2], #22
[-104,85], #27
[115,84], #26
[115,3] #25
])
self.worldpts = vu.toImageCoordinates(self.worldpts)
testPts = vu.toWaypointCoordinates(self.worldpts)
print 'TestWorldPts', str(testPts)
# ===== *************** ===== #
### Camera initialization ###
print 'Opening Camera ' + str(camera)
self.vidcap = cv2.VideoCapture(camera)# Open up specified camera
# Check if camera is opened and exit if not
if self.vidcap.isOpened():
print 'Camera ' + str(camera) + ' opened successfully'
else:
print 'ERROR: Camera ' + str(camera) + ' not opened'
		    return # __init__ must not return a value (return False raises TypeError); callers can re-check vidcap.isOpened()
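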
# Set camera autoexposure
uvc.set(self.camera, uvc.EXPOSURE_AUTO, 1)
uvc.set(self.camera, uvc.EXPOSURE_AUTO_PRIORITY, 0)
### Initialize UI elements ###
# Filter Controls Window
ctlWindow = cv2.namedWindow(self.CTL_NAME)
cv2.createTrackbar('Blue', self.CTL_NAME, 88, 180, self.trackbarChangeHandler)
cv2.createTrackbar('Green', self.CTL_NAME, 41, 180, self.trackbarChangeHandler)
cv2.createTrackbar('Red', self.CTL_NAME, 172, 180, self.trackbarChangeHandler)
cv2.createTrackbar('B Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)
cv2.createTrackbar('G Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)
cv2.createTrackbar('R Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)
cv2.createTrackbar('Sat Cutoff', self.CTL_NAME, 100, 255, self.trackbarChangeHandler)
cv2.createTrackbar('Show Background', self.CTL_NAME, 1, 1, self.trackbarChangeHandler)
# Camera input window
camWindow = cv2.namedWindow(self.CAM_FEED_NAME)
cv2.createTrackbar('Gain', self.CAM_FEED_NAME, 128, 255, self.gainChanged)
cv2.createTrackbar('Exposure', self.CAM_FEED_NAME, 1600, 2000, self.exposureChanged)
cv2.createTrackbar('Saturation', self.CAM_FEED_NAME, 128, 255, self.saturationChanged)
cv2.setMouseCallback(self.CAM_FEED_NAME, self.mouseClickHandler) # Set mouse callbacks for calibration
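		# Calibration flow (handled in mouseClickHandler below): a right-click
		# on this window resets any existing calibration, then four left-clicks
		# on the arena corners, in the same order as self.worldpts, fit the
		# perspective transform.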
# Rectified/Calibrated Image window
#calWindow = cv2.namedWindow(self.CAL_NAME)
#cv2.setMouseCallback(self.CAL_NAME, self.colorClickHandler)
# Image processing Window 2
procWindow = cv2.namedWindow(self.PROC_NAME)
# History for filter bank
self.xHistory = deque(self.EMPTY_KERNEL)
self.yHistory = deque(self.EMPTY_KERNEL)
self.thetaHistory = deque(self.EMPTY_KERNEL)
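		# Note: seeding the deques with EMPTY_KERNEL (seven zeros) pulls the
		# first few filtered estimates toward zero until seven real samples
		# have been pushed; a warm-up that only filters once the history holds
		# real data would avoid that start-up bias.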
# Run vision on a frame
def processFrame(self):
### Main processing loop ###
#while(True):
frameRet, self.camImg = self.vidcap.read()
#Img = self.drawCalMarkers()
cv2.imshow(self.CAM_FEED_NAME, self.drawCalMarkers())
if(self.calstate == CalState.CALIBRATED):
self.remapImage() # Apply perspective warp
bl = cv2.getTrackbarPos('Blue', self.CTL_NAME)
gr = cv2.getTrackbarPos('Green', self.CTL_NAME)
rd = cv2.getTrackbarPos('Red', self.CTL_NAME)
bvmin = cv2.getTrackbarPos('B Cutoff', self.CTL_NAME)
gvmin = cv2.getTrackbarPos('G Cutoff', self.CTL_NAME)
rvmin = cv2.getTrackbarPos('R Cutoff', self.CTL_NAME)
smin = cv2.getTrackbarPos('Sat Cutoff', self.CTL_NAME)
bgroundFlag = cv2.getTrackbarPos('Show Background', self.CTL_NAME)
bCentroid, self.bTagImg = self.findMarker(self.warpImg, bl, 10, smin, bvmin)
gCentroid, self.gTagImg = self.findMarker(self.warpImg, gr, 10, smin, gvmin)
rCentroid, self.rTagImg = self.findMarker(self.warpImg, rd, 10, smin, rvmin)
#vu.printCentroids(gCentroid, rCentroid)
if(bgroundFlag):
self.rgbImg = vu.comboImage(self.bTagImg, self.gTagImg, self.rTagImg, self.warpImg)
else:
self.rgbImg = vu.comboImage(self.bTagImg, self.gTagImg, self.rTagImg)
ctr, theta, bCtr, gCtr, rCtr = vu.localizeRobot(bCentroid, gCentroid, rCentroid)
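			# vu.localizeRobot fuses the three tag centroids into a robot pose
			# estimate; any of its outputs can be None when a tag was not seen
			# this frame, hence the None checks below.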
if((ctr != None) and (theta != None)):
fctr, ftheta = self.filterPoints(ctr, theta)
self.x_est = ctr[0]
self.y_est = ctr[1]
# print 'Theta IN:', theta
			    self.theta_est = theta # ftheta (the FIR-filtered angle) is bypassed; raw theta is used
self.tagLoc = vu.computeTagLocation(ctr, bCtr) # Compute tag location
vu.drawSquareMarker(self.rgbImg, int(fctr[0]), int(fctr[1]), 5, (255,0,255))
if(gCentroid != None):
vu.drawSquareMarker(self.rgbImg, int(gCentroid[0]), int(gCentroid[1]), 5, (0,0,255))
if(rCentroid != None):
vu.drawSquareMarker(self.rgbImg, int(rCentroid[0]), int(rCentroid[1]), 5, (255,0,0))
if(bCentroid != None):
vu.drawSquareMarker(self.rgbImg, int(bCentroid[0]), int(bCentroid[1]), 5, (255,255,0))
wpIndex = 0
for wp in self.waypointEst:
wpIndex = wpIndex + 1
if(wpIndex == 1):
wpcolor = (0,0,255)
else:
wpcolor = (0,255,255)
			    vu.drawFilledCircleMarker(self.rgbImg, wp[0], wp[1], 10, wpcolor)
vu.drawTextIndex(self.rgbImg, wp[0], wp[1], str(wpIndex)) # Draw waypoint index
if(self.tagLoc[0] != None):
vu.drawFilledCircleMarker(self.rgbImg, self.tagLoc[0], self.tagLoc[1], 5, (0,0,160))
#vu.drawVector(self.rgbImg, self.fVectorStart, self.fVectorEnd, (255,128,255))
#cv2.imshow(self.CAL_NAME, self.warpImg)
cv2.imshow(self.PROC_NAME, self.rgbImg)
#if cv2.waitKey(20) & 0xFF == ord('q'):
# break
# Use current perspective transform to remap image
def remapImage(self):
if(self.calstate == CalState.CALIBRATED):
self.warpImg = cv2.warpPerspective(self.camImg, self.warp,(int(300*vu.IMG_SCALE),int(300*vu.IMG_SCALE)))
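			# The (300*IMG_SCALE)-square output appears to assume the arena
			# maps to a 300x300 working area in vu's image units (worldpts is
			# run through vu.toImageCoordinates above); the Gaussian and
			# median blurs below knock down sensor noise before the HSV
			# thresholding in findMarker().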
self.warpImg = cv2.GaussianBlur(self.warpImg, (9,9), 1)
self.warpImg = cv2.medianBlur(self.warpImg, 5)
else:
print 'Transform not calibrated'
# Draws calibration markers on the camera image
def drawCalMarkers(self):
markedImg = self.camImg.copy()
for pt in self.calpts:
vu.drawSquareMarker(markedImg, pt[0], pt[1], 5, (255,0,255))
return markedImg
# Finds a marker's central moment
def findMarker(self, image, hueCenter, hueWidth, satMin, valMin):
hsvImg = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
markerImg = cv2.inRange(hsvImg, np.array([hueCenter-hueWidth/2, satMin, valMin]), np.array([hueCenter+hueWidth/2, 255, 255]))
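		# Caveat: with R_CENTER = 0 the lower hue bound goes negative, so reds
		# that wrap around to hue ~175-179 in OpenCV's 0-179 hue space are
		# missed. A common fix (not applied here, to keep behavior unchanged)
		# is to take inRange() over two bands, [0, w/2] and [180-w/2, 179],
		# and OR the masks together.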
cleanElement = cv2.getStructuringElement(cv2.MORPH_CROSS, (3,3))
markerImg = cv2.erode(markerImg, cleanElement) # Clean up marker image w/ erode-dilate-median
markerImg = cv2.dilate(markerImg, cleanElement)
markerImg = cv2.medianBlur(markerImg, 3)
mMoments = cv2.moments(markerImg) # Compute moments
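		# Centroid = (m10/m00, m01/m00); for the 0/255 mask from inRange, m00
		# is proportional to blob area, so the 0.1 guard below simply rejects
		# empty masks (the 255 scale cancels in the ratio).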
m00 = mMoments['m00']
if(m00 > 0.1):
return (mMoments['m10']/m00, mMoments['m01']/m00), markerImg
return None, markerImg
# FIR on centers and angles
def filterPoints(self, ctr, theta):
if((ctr != None) and (theta != None)):
if(len(self.xHistory) == len(self.FIR_KERNEL)):
self.xHistory.popleft()
if(len(self.yHistory) == len(self.FIR_KERNEL)):
self.yHistory.popleft()
if(len(self.thetaHistory) == len(self.FIR_KERNEL)):
self.thetaHistory.popleft()
self.xHistory.append(ctr[0])
self.yHistory.append(ctr[1])
self.thetaHistory.append(theta)
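			# Caveat: np.linalg.norm(v, 1) sums absolute values, so the three
			# lines below equal a FIR weighted average only while every sample
			# is non-negative. x/y stay positive in image coordinates, but if
			# theta can go negative (e.g. atan2 output), np.dot(self.FIR_KERNEL,
			# np.array(self.thetaHistory)) would be the sign-safe form.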
xFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.xHistory)),1)
yFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.yHistory)),1)
thetaFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.thetaHistory)),1)
#print 'Filtered Phi:', phiFilter, ' Raw Theta:', theta
return (xFilter, yFilter), thetaFilter
# Interface to get current state estimates
def getState(self):
# Give estimated [x,y,theta]
if(self.tagLoc != None):
tx = self.tagLoc[0]
ty = self.tagLoc[1]
else:
tx = None
ty = None
return [self.x_est, self.y_est, self.theta_est, tx, ty]
### Event Handlers ###
# Camera input mouseclick handler
def mouseClickHandler(self, event, x, y, flags, param):
if event == cv2.EVENT_RBUTTONDOWN:
print 'Recalibration requested'
self.calstate = CalState.CAL_PROG
self.calpts = [] # Reset calibration points
if event == cv2.EVENT_LBUTTONDOWN:
print 'Mouse left click event at ' + str(x) + ',' + str(y)
if(self.calstate == CalState.UNCAL):
self.calstate = CalState.CAL_PROG
print 'Adding calibration point at (' + str(x) + ',' + str(y) + ')'
self.calpts.append([x,y])
elif(self.calstate == CalState.CAL_PROG):
if(len(self.calpts) < 4):
print 'Adding calibration point at (' + str(x) + ',' + str(y) + ')'
self.calpts.append([x,y])
# Finish
if(len(self.calpts) == 4):
print 'Calibrated'
self.warp = cv2.getPerspectiveTransform(np.float32(self.calpts), self.worldpts)
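				# getPerspectiveTransform solves the 3x3 homography pairing the
				# four clicked pixel locations with self.worldpts in order, so
				# warpPerspective in remapImage() then renders a top-down,
				# world-scaled view of the arena.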
print str(self.calpts)
self.calstate = CalState.CALIBRATED
elif(self.calstate == CalState.CALIBRATED):
print 'Already calibrated'
# Color click handler for cal window
def colorClickHandler(self, event, x, y, flags, param):
if event == cv2.EVENT_LBUTTONDOWN:
print 'Checking marker 1 color at ', str(x), ',', str(y)
pass # Get color at point
if event == cv2.EVENT_RBUTTONDOWN:
print 'Checking marker 2 color at ', str(x), ',', str(y)
pass # Get color at point
	# Generic do-nothing slider handler (placeholder callback for trackbars that are read by polling)
def trackbarChangeHandler(self, x):
pass
# Gain slider handler
def gainChanged(self, gain):
uvc.set(self.camera, uvc.GAIN, gain)
# Saturation slider handler
def saturationChanged(self, sat):
uvc.set(self.camera, uvc.SATURATION, sat)
# Exposure slider handler
def exposureChanged(self, exp):
uvc.set(self.camera, uvc.EXPOSURE_ABS, exp)
# Sets the waypoint list for rendering on overlay
def setWaypoints(self, waypointEst):
self.waypointEst = vu.toImageCoordinates(waypointEst)
# Sets the estimated tag location for rendering on the overlay
def setTagLocation(self, tagEst):
self.tagLoc = (int(tagEst[0]),int(tagEst[1]))
# Stops the vision process
def stop(self):
	    self.vidcap.release()
	    cv2.destroyAllWindows() # cv2 has no module-level release(); destroying the windows is the correct cleanup
# Main function to run vision system as standalone
def main():
    print 'Args:', str(sys.argv)
    ncam = 0 # Default camera index when no -c argument is supplied
    for x in range(len(sys.argv)):
        if(sys.argv[x] == '-c'):
            ncam = int(sys.argv[x+1])
    vs = VisionSystem(ncam)
    # Restore the per-frame loop sketched in the comments inside processFrame:
    # run until 'q' is pressed, then release the camera and windows via stop()
    while(True):
        vs.processFrame()
        if cv2.waitKey(20) & 0xFF == ord('q'):
            break
    vs.stop()
if __name__ == '__main__':
main()
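
# Example standalone invocation (script name and camera index are placeholder
# assumptions; -c selects the device index passed to cv2.VideoCapture):
#   python visionsystem.py -c 1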
|
normal
|
{
"blob_id": "324030a976af29dc93fdb637583bfaab93671cc2",
"index": 8515,
"step-1": "# Python 2.7 Doritobot Vision System\n# EECS 498 Purple Team, 2014\n# Written by Cody Hyman ([email protected])\n# Written against OpenCV 3.0.0-alpha\n\nimport sys\nimport os\n\nimport cv2\nimport numpy as np\n\nfrom uvcinterface import UVCInterface as uvc\nfrom visionUtil import VisionUtil as vu\nfrom collections import deque\nfrom math import *\n# Calibration state 'Enumeration'\nclass CalState(object):\n UNCAL = 1\n CAL_PROG = 2\n CALIBRATED = 3\n\n### Vision System Class ###\nclass VisionSystem(object):\n\t# Window names\n\tCAM_FEED_NAME = 'Camera Feed'\n\tCAL_NAME = 'Calibrated Image'\n\tPROC_NAME = 'Vision Processing'\n\tCTL_NAME = 'Filter Controls'\n\n\t# Constants\n\tG_CENTER = 52\n\tR_CENTER = 0\n\tSMIN = 50\n\tVMIN = 80\n\n\t#HISTORY_LENGTH = 15\n\tEMPTY_KERNEL = [0, 0, 0, 0, 0, 0, 0]\n\tRAW_KERNEL = np.array([1, 2, 3, 6, 10, 18, 20], dtype = np.float32)\n\tFIR_KERNEL = np.multiply(RAW_KERNEL,1/np.linalg.norm(RAW_KERNEL,1)) # Normalized kernel\n\t\n\tdef __init__(self, camera):\n\t\t### Instance Value initialization ###\n\t\tself.camera = camera\n\t\tself.calstate = CalState.UNCAL\n\t\tself.calpts = []\n\t\tself.XSIZE = 1000\n\t\tself.YSIZE = 1000\n\t\tself.x_est = -1\n\t\tself.y_est = -1\n\t\tself.theta_est = -1\n\t\t\n\t\t# Drawing storage\n\t\tself.waypointEst = [(300,300)] # Waypoint estimates for UI\n\t\tself.tagLoc = (10,10) # Tag location estimate\n\t\tself.fVectorStart = (0,0)\n\t\tself.fVectorEnd = (0,0)\n\t\t\n\t\t\n\t\t#self.worldpts = np.float32([\n\t\t# [0,self.YSIZE/2],\n\t\t# [0,0],\n\t\t# [self.XSIZE,0],\n\t\t# [self.XSIZE,self.YSIZE/2]\n\t\t# ])\n\t\t\n\t\t# ===== ***** Calibration points from world *****===== #\n\t\t'''self.worldpts = np.float32([\n\t\t [-5, -1. * -105],\t\t#22\n\t\t [90, -1. * -100],\t\t#27\n\t\t [90, -1. * 110],\t\t#26\n\t\t [0, -1. 
* 107]\t\t#25\n\t\t ])#*self.IMG_SCALE + self.IMG_OFFSET'''\n\t\t\n\t\t# Swap x-y coordinates (WTF!)\n\t\t'''self.worldpts = np.float32([\n\t\t [-105,-5],\t\t#22\n\t\t [-100, 90],\t\t#27\n\t\t [110, 90],\t\t#26\n\t\t [107, 0]\t\t#25\n\t\t ])#*self.IMG_SCALE + self.IMG_OFFSET'''\n\t\t\t\t\n\t\tself.worldpts = np.float32([\n\t\t [-104,-2], #22\n\t\t [-104,85], #27\n\t\t [115,84], #26\n\t\t [115,3] #25\n\t\t ])\n\t\tself.worldpts = vu.toImageCoordinates(self.worldpts)\n\t\ttestPts = vu.toWaypointCoordinates(self.worldpts)\n\t\tprint 'TestWorldPts', str(testPts)\n\t\t# ===== *************** ===== #\n\t\t \n\t\t### Camera initialization ###\n\t\tprint 'Opening Camera ' + str(camera)\n\t\tself.vidcap = cv2.VideoCapture(camera)# Open up specified camera\n\t\t# Check if camera is opened and exit if not\n\t\tif self.vidcap.isOpened():\n\t\t print 'Camera ' + str(camera) + ' opened successfully'\n\t\telse:\n\t\t print 'ERROR: Camera ' + str(camera) + ' not opened'\n\t\t return False\n\n\t\t# Set camera autoexposure\n\t\tuvc.set(self.camera, uvc.EXPOSURE_AUTO, 1)\n\t\tuvc.set(self.camera, uvc.EXPOSURE_AUTO_PRIORITY, 0)\n\n\t\t### Initialize UI elements ###\n\t\t# Filter Controls Window\n\t\tctlWindow = cv2.namedWindow(self.CTL_NAME)\n\t\tcv2.createTrackbar('Blue', self.CTL_NAME, 88, 180, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('Green', self.CTL_NAME, 41, 180, self.trackbarChangeHandler) \n\t\tcv2.createTrackbar('Red', self.CTL_NAME, 172, 180, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('B Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('G Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('R Cutoff', self.CTL_NAME, 110, 255, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('Sat Cutoff', self.CTL_NAME, 100, 255, self.trackbarChangeHandler)\n\t\tcv2.createTrackbar('Show Background', self.CTL_NAME, 1, 1, self.trackbarChangeHandler)\n\t\t\n\t\t# Camera input window\n\t\tcamWindow = cv2.namedWindow(self.CAM_FEED_NAME)\n\t\tcv2.createTrackbar('Gain', self.CAM_FEED_NAME, 128, 255, self.gainChanged)\n\t\tcv2.createTrackbar('Exposure', self.CAM_FEED_NAME, 1600, 2000, self.exposureChanged)\n\t\tcv2.createTrackbar('Saturation', self.CAM_FEED_NAME, 128, 255, self.saturationChanged)\n\t\tcv2.setMouseCallback(self.CAM_FEED_NAME, self.mouseClickHandler) # Set mouse callbacks for calibration\n\t\t\n\t\t# Rectified/Calibrated Image window\n\t\t#calWindow = cv2.namedWindow(self.CAL_NAME)\n\t\t#cv2.setMouseCallback(self.CAL_NAME, self.colorClickHandler)\n\t\t\n\t\t# Image processing Window 2\n\t\tprocWindow = cv2.namedWindow(self.PROC_NAME)\n\n\t\t# History for filter bank\n\t\tself.xHistory = deque(self.EMPTY_KERNEL)\n\t\tself.yHistory = deque(self.EMPTY_KERNEL)\n\t\tself.thetaHistory = deque(self.EMPTY_KERNEL)\n\n\t# Run vision on a frame\n\tdef processFrame(self):\n\t### Main processing loop ###\n\t#while(True):\n\t frameRet, self.camImg = self.vidcap.read()\n\t #Img = self.drawCalMarkers()\n\t cv2.imshow(self.CAM_FEED_NAME, self.drawCalMarkers())\n\t if(self.calstate == CalState.CALIBRATED):\n\t\t\tself.remapImage() # Apply perspective warp\n\t\t\tbl = cv2.getTrackbarPos('Blue', self.CTL_NAME)\n\t\t\tgr = cv2.getTrackbarPos('Green', self.CTL_NAME)\n\t\t\trd = cv2.getTrackbarPos('Red', self.CTL_NAME)\n\t\t\tbvmin = cv2.getTrackbarPos('B Cutoff', self.CTL_NAME)\n\t\t\tgvmin = cv2.getTrackbarPos('G Cutoff', self.CTL_NAME)\n\t\t\trvmin = cv2.getTrackbarPos('R Cutoff', self.CTL_NAME)\n\t\t\tsmin = cv2.getTrackbarPos('Sat 
Cutoff', self.CTL_NAME)\n\t\t\tbgroundFlag = cv2.getTrackbarPos('Show Background', self.CTL_NAME)\n\t\t\tbCentroid, self.bTagImg = self.findMarker(self.warpImg, bl, 10, smin, bvmin)\n\t\t\tgCentroid, self.gTagImg = self.findMarker(self.warpImg, gr, 10, smin, gvmin)\n\t\t\trCentroid, self.rTagImg = self.findMarker(self.warpImg, rd, 10, smin, rvmin)\n\t\t\t#vu.printCentroids(gCentroid, rCentroid)\n\t\t\tif(bgroundFlag):\n\t\t\t self.rgbImg = vu.comboImage(self.bTagImg, self.gTagImg, self.rTagImg, self.warpImg)\n\t\t\telse:\n\t\t\t self.rgbImg = vu.comboImage(self.bTagImg, self.gTagImg, self.rTagImg)\n\t\t\tctr, theta, bCtr, gCtr, rCtr = vu.localizeRobot(bCentroid, gCentroid, rCentroid)\n\t\t\tif((ctr != None) and (theta != None)):\n\t\t\t fctr, ftheta = self.filterPoints(ctr, theta)\n\t\t\t self.x_est = ctr[0]\n\t\t\t self.y_est = ctr[1]\n\t\t\t # print 'Theta IN:', theta\n\t\t\t self.theta_est = theta#ftheta\n\t\t\t self.tagLoc = vu.computeTagLocation(ctr, bCtr) # Compute tag location\n\t\t\t vu.drawSquareMarker(self.rgbImg, int(fctr[0]), int(fctr[1]), 5, (255,0,255))\n\t\t\tif(gCentroid != None):\n\t\t\t\tvu.drawSquareMarker(self.rgbImg, int(gCentroid[0]), int(gCentroid[1]), 5, (0,0,255))\n\t\t\tif(rCentroid != None):\n\t\t\t\tvu.drawSquareMarker(self.rgbImg, int(rCentroid[0]), int(rCentroid[1]), 5, (255,0,0))\n\t\t\tif(bCentroid != None):\n\t\t\t\tvu.drawSquareMarker(self.rgbImg, int(bCentroid[0]), int(bCentroid[1]), 5, (255,255,0))\n\t\t\twpIndex = 0\n\t\t\tfor wp in self.waypointEst:\n\t\t\t wpIndex = wpIndex + 1\n\t\t\t if(wpIndex == 1):\n\t\t\t\twpcolor = (0,0,255)\n\t\t\t else:\n\t\t\t\twpcolor = (0,255,255)\n\t\t\t vu.drawFilledCircleMarker(self.rgbImg, wp[0], wp[1], 10, wpcolor) #\n\t\t\t vu.drawTextIndex(self.rgbImg, wp[0], wp[1], str(wpIndex)) # Draw waypoint index\n\t\t\tif(self.tagLoc[0] != None):\n\t\t\t vu.drawFilledCircleMarker(self.rgbImg, self.tagLoc[0], self.tagLoc[1], 5, (0,0,160))\n\t\t\t#vu.drawVector(self.rgbImg, self.fVectorStart, self.fVectorEnd, (255,128,255))\n\t\t\t#cv2.imshow(self.CAL_NAME, self.warpImg)\n\t\t\tcv2.imshow(self.PROC_NAME, self.rgbImg)\n\t #if cv2.waitKey(20) & 0xFF == ord('q'):\n\t # break\n\t\n\t# Use current perspective transform to remap image\n\tdef remapImage(self):\n\t\tif(self.calstate == CalState.CALIBRATED):\n\t\t\tself.warpImg = cv2.warpPerspective(self.camImg, self.warp,(int(300*vu.IMG_SCALE),int(300*vu.IMG_SCALE)))\n\t\t\tself.warpImg = cv2.GaussianBlur(self.warpImg, (9,9), 1)\n\t\t\tself.warpImg = cv2.medianBlur(self.warpImg, 5)\n\t\telse:\n\t\t print 'Transform not calibrated'\n\n\t# Draws calibration markers on the camera image \n\tdef drawCalMarkers(self):\n\t\tmarkedImg = self.camImg.copy()\n\t\tfor pt in self.calpts:\n\t\t vu.drawSquareMarker(markedImg, pt[0], pt[1], 5, (255,0,255))\n\t\treturn markedImg\n\n\t# Finds a marker's central moment\n\tdef findMarker(self, image, hueCenter, hueWidth, satMin, valMin):\n\t\thsvImg = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)\n\t\tmarkerImg = cv2.inRange(hsvImg, np.array([hueCenter-hueWidth/2, satMin, valMin]), np.array([hueCenter+hueWidth/2, 255, 255]))\n\t\tcleanElement = cv2.getStructuringElement(cv2.MORPH_CROSS, (3,3))\n\t\tmarkerImg = cv2.erode(markerImg, cleanElement) # Clean up marker image w/ erode-dilate-median\n\t\tmarkerImg = cv2.dilate(markerImg, cleanElement)\n\t\tmarkerImg = cv2.medianBlur(markerImg, 3)\n\t\tmMoments = cv2.moments(markerImg) # Compute moments\n\t\tm00 = mMoments['m00']\n\t\tif(m00 > 0.1):\n\t\t\treturn (mMoments['m10']/m00, mMoments['m01']/m00), 
markerImg\n\t\treturn None, markerImg\n\n\t# FIR on centers and angles\n\tdef filterPoints(self, ctr, theta):\n\t\tif((ctr != None) and (theta != None)):\n\t\t\tif(len(self.xHistory) == len(self.FIR_KERNEL)):\n\t\t\t\tself.xHistory.popleft()\n\t\t\tif(len(self.yHistory) == len(self.FIR_KERNEL)):\n\t\t\t\tself.yHistory.popleft()\n\t\t\tif(len(self.thetaHistory) == len(self.FIR_KERNEL)):\n\t\t\t\tself.thetaHistory.popleft()\n\t\t\tself.xHistory.append(ctr[0])\n\t\t\tself.yHistory.append(ctr[1])\n\t\t\tself.thetaHistory.append(theta)\n\t\t\txFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.xHistory)),1)\n\t\t\tyFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.yHistory)),1)\n\t\t\tthetaFilter = np.linalg.norm(np.multiply(self.FIR_KERNEL, np.array(self.thetaHistory)),1)\n\t\t\t#print 'Filtered Phi:', phiFilter, ' Raw Theta:', theta\n\t\t\treturn (xFilter, yFilter), thetaFilter\n\n\t# Interface to get current state estimates\n\tdef getState(self):\n\t # Give estimated [x,y,theta]\n\t if(self.tagLoc != None):\n\t\ttx = self.tagLoc[0]\n\t\tty = self.tagLoc[1]\n\t else:\n\t\ttx = None\n\t\tty = None\n\t return [self.x_est, self.y_est, self.theta_est, tx, ty] \n\t \n\t### Event Handlers ###\n\t# Camera input mouseclick handler\n\tdef mouseClickHandler(self, event, x, y, flags, param):\n\t\tif event == cv2.EVENT_RBUTTONDOWN:\n\t\t print 'Recalibration requested'\n\t\t self.calstate = CalState.CAL_PROG\n\t\t self.calpts = [] # Reset calibration points\n\t\tif event == cv2.EVENT_LBUTTONDOWN:\n\t\t print 'Mouse left click event at ' + str(x) + ',' + str(y)\n\t\t if(self.calstate == CalState.UNCAL):\n\t\t\tself.calstate = CalState.CAL_PROG\n\t\t\tprint 'Adding calibration point at (' + str(x) + ',' + str(y) + ')'\n\t\t\tself.calpts.append([x,y])\n\t\t elif(self.calstate == CalState.CAL_PROG):\n\t\t\tif(len(self.calpts) < 4):\n\t\t\t print 'Adding calibration point at (' + str(x) + ',' + str(y) + ')'\n\t\t\t self.calpts.append([x,y])\n\t\t\t # Finish\n\t\t\t if(len(self.calpts) == 4):\n\t\t\t\tprint 'Calibrated'\n\t\t\t\tself.warp = cv2.getPerspectiveTransform(np.float32(self.calpts), self.worldpts)\n\t\t\t\tprint str(self.calpts)\n\t\t\t\tself.calstate = CalState.CALIBRATED\n\t\t elif(self.calstate == CalState.CALIBRATED):\n\t\t\tprint 'Already calibrated'\t \n\t\n\t# Color click handler for cal window\n\tdef colorClickHandler(self, event, x, y, flags, param):\n\t\tif event == cv2.EVENT_LBUTTONDOWN:\n\t\t\tprint 'Checking marker 1 color at ', str(x), ',', str(y)\n\t\t\tpass # Get color at point\n\t\tif event == cv2.EVENT_RBUTTONDOWN:\n\t\t\tprint 'Checking marker 2 color at ', str(x), ',', str(y)\n\t\t\tpass # Get color at point\n\n\t# Generic do-nothing slider handler (for )\n\tdef trackbarChangeHandler(self, x):\n\t\tpass\n\n\t# Gain slider handler\n\tdef gainChanged(self, gain):\n\t\tuvc.set(self.camera, uvc.GAIN, gain)\n\t\n\t# Saturation slider handler\n\tdef saturationChanged(self, sat):\n\t\tuvc.set(self.camera, uvc.SATURATION, sat)\n\n\t# Exposure slider handler\n\tdef exposureChanged(self, exp):\n\t\tuvc.set(self.camera, uvc.EXPOSURE_ABS, exp)\n\t\t\n\t# Sets the waypoint list for rendering on overlay\n\tdef setWaypoints(self, waypointEst):\n\t self.waypointEst = vu.toImageCoordinates(waypointEst)\n\t \n\t# Sets the estimated tag location for rendering on the overlay\n\tdef setTagLocation(self, tagEst):\n\t self.tagLoc = (int(tagEst[0]),int(tagEst[1]))\n\t \n\t# Stops the vision process\n\tdef stop(self):\n\t self.vidcap.release()\n\t cv2.release()\n\t 
cv2.destroyAllWindows()\n\n# Main function to run vision system as standalone\ndef main():\n\tprint 'Args:' , str(sys.argv)\n\tfor x in range(len(sys.argv)):\n\t\tif(sys.argv[x] == '-c'):\n\t\t\tncam = int(sys.argv[x+1])\n\tvs = VisionSystem(ncam)\n\tself.vidcap.release()\n\tcv2.release()\n\tcv2.destroyAllWindows()\n\n\t \nif __name__ == '__main__':\n main()\n\n ",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import arabic_reshaper
from scrapy import Spider, Request
from bidi.algorithm import get_display
from websites.items import ArticleItem
from operator import add
from scrapy_splash import SplashRequest
class Blogsaljazeera2Spider(Spider):
name = 'blogsaljazeera2'
allowed_domains = ['blogs.aljazeera.net']
start_urls = ['http://blogs.aljazeera.net/topics/short']
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = """
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
""".format(n, "{html=splash:html()}")
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {
'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute',
}
})
def parse_result(self, response):
for link in response.xpath("//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links, dont_filter=False)
def parse_links(self, response):
rep = int(int(response.xpath("//input[@id='intTotal']/@value").extract_first())/6)+1
yield SplashRequest(url=response.urljoin(''), callback=self.parse_comment, endpoint='execute', args={'lua_source': self.lua_script(rep)})
def parse_comment(self, response):
item = ArticleItem()
title = ""
try:
title = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("//h1[@class='tweet_strip_text']/text()").extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item["title"] = title
author = ""
try:
author = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("null").extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item["author"] = author
item["link"] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("null").extract())))])
except (RuntimeError, TypeError, NameError):
pass
item["description"] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath("//article/p/text()").extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath("//article/div/div/h2/text()").extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath("//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item["comments"] = comment
item["names"] = names
item["feedbacks"] = feeds
return item
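# --- Illustrative note (not part of the original spider): lua_script() fills
# two {} placeholders via str.format -- the loop bound n and the return table.
# For n=2 the rendered Lua clicks the load-more button twice, waiting 1s after
# each click, and ends with:
#
#     for i=1,2,1 do
#         ...
#     end
#     return {html=splash:html()}
#
# Sanity check of the template rendering:
if __name__ == '__main__':
    print(Blogsaljazeera2Spider.lua_script(2))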
|
normal
|
{
"blob_id": "17058b323c0a0974dfa8f124ccd6cb5bf29dd849",
"index": 2065,
"step-1": "<mask token>\n\n\nclass Blogsaljazeera2Spider(Spider):\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n @staticmethod\n def lua_script(n):\n LUA_SCRIPT = (\n \"\"\"\n function main(splash)\n local url = splash.args.url\n assert(splash:go(url))\n assert(splash:wait(1))\n for i=1,{},1 do\n assert(splash:runjs('document.getElementsByTagName(\"button\")[0].click()'))\n assert(splash:wait(1))\n end\n return {}\n end\n \"\"\"\n .format(n, '{html=splash:html()}'))\n return LUA_SCRIPT\n\n def parse(self, response):\n for url in self.start_urls:\n yield Request(response.urljoin(url), self.parse_result, meta={\n 'splash': {'args': {'lua_source': self.lua_script(2)},\n 'endpoint': 'execute'}})\n\n def parse_result(self, response):\n for link in response.xpath(\n \"//*[@id='topics_Artilce_container']/div/a/@href\").extract():\n yield Request(response.urljoin(link), self.parse_links,\n dont_filter=False)\n <mask token>\n\n def parse_comment(self, response):\n item = ArticleItem()\n title = ''\n try:\n title = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath(\n \"//h1[@class='tweet_strip_text']/text()\").extract_first()).\n strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['title'] = title\n author = ''\n try:\n author = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath('null').extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['author'] = author\n item['link'] = response.url\n description = list()\n try:\n description.extend([self.cleanhtml(d) for d in get_display(\n arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath\n ('null').extract())))])\n except (RuntimeError, TypeError, NameError):\n pass\n item['description'] = description\n comment = list()\n names = list()\n feeds = list()\n try:\n comment.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath('//article/p/text()'\n ).extract()])\n names.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath(\n '//article/div/div/h2/text()').extract()])\n feeds.extend([self.cleanhtml(d) for d in response.xpath(\n \"//*[@class='number_likes']/text()\").extract()])\n except (RuntimeError, TypeError, NameError):\n pass\n item['comments'] = comment\n item['names'] = names\n item['feedbacks'] = feeds\n return item\n",
"step-2": "<mask token>\n\n\nclass Blogsaljazeera2Spider(Spider):\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n @staticmethod\n def lua_script(n):\n LUA_SCRIPT = (\n \"\"\"\n function main(splash)\n local url = splash.args.url\n assert(splash:go(url))\n assert(splash:wait(1))\n for i=1,{},1 do\n assert(splash:runjs('document.getElementsByTagName(\"button\")[0].click()'))\n assert(splash:wait(1))\n end\n return {}\n end\n \"\"\"\n .format(n, '{html=splash:html()}'))\n return LUA_SCRIPT\n\n def parse(self, response):\n for url in self.start_urls:\n yield Request(response.urljoin(url), self.parse_result, meta={\n 'splash': {'args': {'lua_source': self.lua_script(2)},\n 'endpoint': 'execute'}})\n\n def parse_result(self, response):\n for link in response.xpath(\n \"//*[@id='topics_Artilce_container']/div/a/@href\").extract():\n yield Request(response.urljoin(link), self.parse_links,\n dont_filter=False)\n\n def parse_links(self, response):\n rep = int(int(response.xpath(\"//input[@id='intTotal']/@value\").\n extract_first()) / 6) + 1\n yield SplashRequest(url=response.urljoin(''), callback=self.\n parse_comment, endpoint='execute', args={'lua_source': self.\n lua_script(rep)})\n\n def parse_comment(self, response):\n item = ArticleItem()\n title = ''\n try:\n title = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath(\n \"//h1[@class='tweet_strip_text']/text()\").extract_first()).\n strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['title'] = title\n author = ''\n try:\n author = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath('null').extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['author'] = author\n item['link'] = response.url\n description = list()\n try:\n description.extend([self.cleanhtml(d) for d in get_display(\n arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath\n ('null').extract())))])\n except (RuntimeError, TypeError, NameError):\n pass\n item['description'] = description\n comment = list()\n names = list()\n feeds = list()\n try:\n comment.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath('//article/p/text()'\n ).extract()])\n names.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath(\n '//article/div/div/h2/text()').extract()])\n feeds.extend([self.cleanhtml(d) for d in response.xpath(\n \"//*[@class='number_likes']/text()\").extract()])\n except (RuntimeError, TypeError, NameError):\n pass\n item['comments'] = comment\n item['names'] = names\n item['feedbacks'] = feeds\n return item\n",
"step-3": "<mask token>\n\n\nclass Blogsaljazeera2Spider(Spider):\n name = 'blogsaljazeera2'\n allowed_domains = ['blogs.aljazeera.net']\n start_urls = ['http://blogs.aljazeera.net/topics/short']\n\n @staticmethod\n def cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n @staticmethod\n def lua_script(n):\n LUA_SCRIPT = (\n \"\"\"\n function main(splash)\n local url = splash.args.url\n assert(splash:go(url))\n assert(splash:wait(1))\n for i=1,{},1 do\n assert(splash:runjs('document.getElementsByTagName(\"button\")[0].click()'))\n assert(splash:wait(1))\n end\n return {}\n end\n \"\"\"\n .format(n, '{html=splash:html()}'))\n return LUA_SCRIPT\n\n def parse(self, response):\n for url in self.start_urls:\n yield Request(response.urljoin(url), self.parse_result, meta={\n 'splash': {'args': {'lua_source': self.lua_script(2)},\n 'endpoint': 'execute'}})\n\n def parse_result(self, response):\n for link in response.xpath(\n \"//*[@id='topics_Artilce_container']/div/a/@href\").extract():\n yield Request(response.urljoin(link), self.parse_links,\n dont_filter=False)\n\n def parse_links(self, response):\n rep = int(int(response.xpath(\"//input[@id='intTotal']/@value\").\n extract_first()) / 6) + 1\n yield SplashRequest(url=response.urljoin(''), callback=self.\n parse_comment, endpoint='execute', args={'lua_source': self.\n lua_script(rep)})\n\n def parse_comment(self, response):\n item = ArticleItem()\n title = ''\n try:\n title = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath(\n \"//h1[@class='tweet_strip_text']/text()\").extract_first()).\n strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['title'] = title\n author = ''\n try:\n author = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath('null').extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['author'] = author\n item['link'] = response.url\n description = list()\n try:\n description.extend([self.cleanhtml(d) for d in get_display(\n arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath\n ('null').extract())))])\n except (RuntimeError, TypeError, NameError):\n pass\n item['description'] = description\n comment = list()\n names = list()\n feeds = list()\n try:\n comment.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath('//article/p/text()'\n ).extract()])\n names.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath(\n '//article/div/div/h2/text()').extract()])\n feeds.extend([self.cleanhtml(d) for d in response.xpath(\n \"//*[@class='number_likes']/text()\").extract()])\n except (RuntimeError, TypeError, NameError):\n pass\n item['comments'] = comment\n item['names'] = names\n item['feedbacks'] = feeds\n return item\n",
"step-4": "from __future__ import unicode_literals\nimport re\nimport arabic_reshaper\nfrom scrapy import Spider, Request\nfrom bidi.algorithm import get_display\nfrom websites.items import ArticleItem\nfrom operator import add\nfrom scrapy_splash import SplashRequest\n\n\nclass Blogsaljazeera2Spider(Spider):\n name = 'blogsaljazeera2'\n allowed_domains = ['blogs.aljazeera.net']\n start_urls = ['http://blogs.aljazeera.net/topics/short']\n\n @staticmethod\n def cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n @staticmethod\n def lua_script(n):\n LUA_SCRIPT = (\n \"\"\"\n function main(splash)\n local url = splash.args.url\n assert(splash:go(url))\n assert(splash:wait(1))\n for i=1,{},1 do\n assert(splash:runjs('document.getElementsByTagName(\"button\")[0].click()'))\n assert(splash:wait(1))\n end\n return {}\n end\n \"\"\"\n .format(n, '{html=splash:html()}'))\n return LUA_SCRIPT\n\n def parse(self, response):\n for url in self.start_urls:\n yield Request(response.urljoin(url), self.parse_result, meta={\n 'splash': {'args': {'lua_source': self.lua_script(2)},\n 'endpoint': 'execute'}})\n\n def parse_result(self, response):\n for link in response.xpath(\n \"//*[@id='topics_Artilce_container']/div/a/@href\").extract():\n yield Request(response.urljoin(link), self.parse_links,\n dont_filter=False)\n\n def parse_links(self, response):\n rep = int(int(response.xpath(\"//input[@id='intTotal']/@value\").\n extract_first()) / 6) + 1\n yield SplashRequest(url=response.urljoin(''), callback=self.\n parse_comment, endpoint='execute', args={'lua_source': self.\n lua_script(rep)})\n\n def parse_comment(self, response):\n item = ArticleItem()\n title = ''\n try:\n title = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath(\n \"//h1[@class='tweet_strip_text']/text()\").extract_first()).\n strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['title'] = title\n author = ''\n try:\n author = get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(response.xpath('null').extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item['author'] = author\n item['link'] = response.url\n description = list()\n try:\n description.extend([self.cleanhtml(d) for d in get_display(\n arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath\n ('null').extract())))])\n except (RuntimeError, TypeError, NameError):\n pass\n item['description'] = description\n comment = list()\n names = list()\n feeds = list()\n try:\n comment.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath('//article/p/text()'\n ).extract()])\n names.extend([get_display(arabic_reshaper.reshape(u'' + self.\n cleanhtml(d))) for d in response.xpath(\n '//article/div/div/h2/text()').extract()])\n feeds.extend([self.cleanhtml(d) for d in response.xpath(\n \"//*[@class='number_likes']/text()\").extract()])\n except (RuntimeError, TypeError, NameError):\n pass\n item['comments'] = comment\n item['names'] = names\n item['feedbacks'] = feeds\n return item\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\nimport re\nimport arabic_reshaper\nfrom scrapy import Spider, Request\nfrom bidi.algorithm import get_display\nfrom websites.items import ArticleItem\nfrom operator import add\nfrom scrapy_splash import SplashRequest\n\n\nclass Blogsaljazeera2Spider(Spider):\n name = 'blogsaljazeera2'\n allowed_domains = ['blogs.aljazeera.net']\n start_urls = ['http://blogs.aljazeera.net/topics/short']\n\n\n @staticmethod\n def cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n @staticmethod\n def lua_script(n):\n LUA_SCRIPT = \"\"\"\n function main(splash)\n local url = splash.args.url\n assert(splash:go(url))\n assert(splash:wait(1))\n for i=1,{},1 do\n assert(splash:runjs('document.getElementsByTagName(\"button\")[0].click()'))\n assert(splash:wait(1))\n end\n return {}\n end\n \"\"\".format(n, \"{html=splash:html()}\")\n return LUA_SCRIPT\n\n def parse(self, response):\n for url in self.start_urls:\n yield Request(response.urljoin(url), self.parse_result, meta={\n 'splash': {\n 'args': {'lua_source': self.lua_script(2)},\n 'endpoint': 'execute',\n }\n })\n\n def parse_result(self, response):\n for link in response.xpath(\"//*[@id='topics_Artilce_container']/div/a/@href\").extract():\n yield Request(response.urljoin(link), self.parse_links, dont_filter=False)\n\n def parse_links(self, response):\n rep = int(int(response.xpath(\"//input[@id='intTotal']/@value\").extract_first())/6)+1\n yield SplashRequest(url=response.urljoin(''), callback=self.parse_comment, endpoint='execute', args={'lua_source': self.lua_script(rep)})\n\n def parse_comment(self, response):\n item = ArticleItem()\n\n title = \"\"\n try:\n title = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath(\"//h1[@class='tweet_strip_text']/text()\").extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item[\"title\"] = title\n\n author = \"\"\n try:\n author = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath(\"null\").extract_first()).strip()))\n except (RuntimeError, TypeError, NameError):\n pass\n item[\"author\"] = author\n\n item[\"link\"] = response.url\n\n description = list()\n try:\n description.extend([self.cleanhtml(d) for d in get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath(\"null\").extract())))])\n except (RuntimeError, TypeError, NameError):\n pass\n item[\"description\"] = description\n\n comment = list()\n names = list()\n feeds = list()\n try:\n comment.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath(\"//article/p/text()\").extract()])\n names.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath(\"//article/div/div/h2/text()\").extract()])\n feeds.extend([self.cleanhtml(d) for d in response.xpath(\"//*[@class='number_likes']/text()\").extract()])\n except (RuntimeError, TypeError, NameError):\n pass\n item[\"comments\"] = comment\n item[\"names\"] = names\n item[\"feedbacks\"] = feeds\n\n return item\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
from django.shortcuts import render
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from .models import Flight, Passenger, Reservation
from .serializers import FlightSerializer, PassengerSerializer, ReservationSerializer
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
# Function Based Views Below
@api_view(['GET'])
def find_flight(request):
bodyData = request.data
req_flight = Flight.objects.filter(
departureCity = bodyData['departureCity'],
arrivalCity = bodyData['arrivalCity'],
dateOfDeparture = bodyData['dateOfDeparture']
)
serialized_flight = FlightSerializer(req_flight, many=True)
return Response(serialized_flight.data)
@api_view(['POST'])
def save_reservation(request):
bodyData = request.data
req_flight = Flight.objects.get(id= bodyData['flightID'])
req_passenger = Passenger()
req_passenger.firstName = bodyData['firstName']
req_passenger.lastName = bodyData['lastName']
req_passenger.middlename = bodyData['middleName']
req_passenger.email = bodyData['email']
req_passenger.phone = bodyData['phone']
req_passenger.save()
req_reservation = Reservation()
req_reservation.flight = req_flight
req_reservation.passenger = req_passenger
req_reservation.save()
return Response(status=status.HTTP_201_CREATED)
# Non Primary based Operations Below
class ListFlight(ListCreateAPIView):
queryset = Flight.objects.all()
serializer_class = FlightSerializer
permission_classes = [IsAuthenticated]
class ListPassengers(ListCreateAPIView):
queryset = Passenger.objects.all()
serializer_class = PassengerSerializer
class ListReservation(ListCreateAPIView):
queryset = Reservation.objects.all()
serializer_class = ReservationSerializer
# Primary Key based Operation Below
class DetailedFlight(RetrieveUpdateDestroyAPIView):
queryset = Flight.objects.all()
serializer_class = FlightSerializer
permission_classes = [IsAuthenticated]
class DetailedPassenger(RetrieveUpdateDestroyAPIView):
queryset = Passenger.objects.all()
serializer_class = PassengerSerializer
class Detailedreservation(RetrieveUpdateDestroyAPIView):
queryset = Reservation.objects.all()
serializer_class = ReservationSerializer
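# --- Illustrative sketch (assumption): the serializers imported above are not
# shown in this module. Minimal ModelSerializer definitions compatible with
# these views might look like the commented sketch below; the field choice
# ('__all__') is hypothetical. ---
#
# from rest_framework import serializers
# from .models import Flight, Passenger, Reservation
#
# class FlightSerializer(serializers.ModelSerializer):
#     class Meta:
#         model = Flight
#         fields = '__all__'
#
# class PassengerSerializer(serializers.ModelSerializer):
#     class Meta:
#         model = Passenger
#         fields = '__all__'
#
# class ReservationSerializer(serializers.ModelSerializer):
#     class Meta:
#         model = Reservation
#         fields = '__all__'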
|
normal
|
{
"blob_id": "d437d77d5a57a6f2f4a2d530be05c3845dce93bc",
"index": 1459,
"step-1": "<mask token>\n\n\nclass Detailedreservation(RetrieveUpdateDestroyAPIView):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ListReservation(ListCreateAPIView):\n <mask token>\n <mask token>\n\n\nclass DetailedFlight(RetrieveUpdateDestroyAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\n\nclass DetailedPassenger(RetrieveUpdateDestroyAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\n\nclass Detailedreservation(RetrieveUpdateDestroyAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n",
"step-3": "<mask token>\n\n\n@api_view(['POST'])\ndef save_reservation(request):\n bodyData = request.data\n req_flight = Flight.objects.get(id=bodyData['flightID'])\n req_passenger = Passenger()\n req_passenger.firstName = bodyData['firstName']\n req_passenger.lastName = bodyData['lastName']\n req_passenger.middlename = bodyData['middleName']\n req_passenger.email = bodyData['email']\n req_passenger.phone = bodyData['phone']\n req_passenger.save()\n req_reservation = Reservation()\n req_reservation.flight = req_flight\n req_reservation.passenger = req_passenger\n req_reservation.save()\n return Response(status=status.HTTP_201_CREATED)\n\n\nclass ListFlight(ListCreateAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\n\nclass ListPassengers(ListCreateAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\n\nclass ListReservation(ListCreateAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n\n\nclass DetailedFlight(RetrieveUpdateDestroyAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\n\nclass DetailedPassenger(RetrieveUpdateDestroyAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\n\nclass Detailedreservation(RetrieveUpdateDestroyAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n",
"step-4": "<mask token>\n\n\n@api_view(['GET'])\ndef find_flight(request):\n bodyData = request.data\n req_flight = Flight.objects.filter(departureCity=bodyData[\n 'departureCity'], arrivalCity=bodyData['arrivalCity'],\n dateOfDeparture=bodyData['dateOfDeparture'])\n serialized_flight = FlightSerializer(req_flight, many=True)\n return Response(serialized_flight.data)\n\n\n@api_view(['POST'])\ndef save_reservation(request):\n bodyData = request.data\n req_flight = Flight.objects.get(id=bodyData['flightID'])\n req_passenger = Passenger()\n req_passenger.firstName = bodyData['firstName']\n req_passenger.lastName = bodyData['lastName']\n req_passenger.middlename = bodyData['middleName']\n req_passenger.email = bodyData['email']\n req_passenger.phone = bodyData['phone']\n req_passenger.save()\n req_reservation = Reservation()\n req_reservation.flight = req_flight\n req_reservation.passenger = req_passenger\n req_reservation.save()\n return Response(status=status.HTTP_201_CREATED)\n\n\nclass ListFlight(ListCreateAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\n\nclass ListPassengers(ListCreateAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\n\nclass ListReservation(ListCreateAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n\n\nclass DetailedFlight(RetrieveUpdateDestroyAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\n\nclass DetailedPassenger(RetrieveUpdateDestroyAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\n\nclass Detailedreservation(RetrieveUpdateDestroyAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n",
"step-5": "from django.shortcuts import render\nfrom rest_framework.response import Response\nfrom rest_framework import status\nfrom rest_framework.decorators import api_view\nfrom rest_framework.permissions import IsAuthenticated\nfrom .models import Flight, Passenger, Reservation\nfrom .serializers import FlightSerializer, PassengerSerializer, ReservationSerializer\nfrom rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView\n\n# Function Based Views Below\n\n@api_view(['GET'])\ndef find_flight(request):\n bodyData = request.data\n req_flight = Flight.objects.filter(\n departureCity = bodyData['departureCity'],\n arrivalCity = bodyData['arrivalCity'],\n dateOfDeparture = bodyData['dateOfDeparture']\n )\n serialized_flight = FlightSerializer(req_flight, many=True)\n return Response(serialized_flight.data)\n\n\n@api_view(['POST'])\ndef save_reservation(request):\n bodyData = request.data\n req_flight = Flight.objects.get(id= bodyData['flightID'])\n\n req_passenger = Passenger()\n req_passenger.firstName = bodyData['firstName']\n req_passenger.lastName = bodyData['lastName']\n req_passenger.middlename = bodyData['middleName']\n req_passenger.email = bodyData['email']\n req_passenger.phone = bodyData['phone']\n req_passenger.save()\n\n req_reservation = Reservation()\n req_reservation.flight = req_flight\n req_reservation.passenger = req_passenger\n req_reservation.save()\n\n return Response(status=status.HTTP_201_CREATED)\n\n\n# Non Primary based Operations Below\n\nclass ListFlight(ListCreateAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\nclass ListPassengers(ListCreateAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\nclass ListReservation(ListCreateAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer\n\n\n# Primary Key based Operation Below \n\n\nclass DetailedFlight(RetrieveUpdateDestroyAPIView):\n queryset = Flight.objects.all()\n serializer_class = FlightSerializer\n permission_classes = [IsAuthenticated]\n\nclass DetailedPassenger(RetrieveUpdateDestroyAPIView):\n queryset = Passenger.objects.all()\n serializer_class = PassengerSerializer\n\nclass Detailedreservation(RetrieveUpdateDestroyAPIView):\n queryset = Reservation.objects.all()\n serializer_class = ReservationSerializer",
"step-ids": [
1,
7,
13,
14,
16
]
}
|
[
1,
7,
13,
14,
16
] |
from typing import List, Any, Callable, Iterable, TypeVar, Tuple
T = TypeVar('T')
def partition(pred: Callable[[T], bool], it: Iterable[T]) \
-> Tuple[List[T], List[T]]: ...
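
# Illustrative implementation of the stub above (not part of the stub itself;
# a .pyi file would keep the body as `...`). Single pass, order-preserving;
# items satisfying pred land in the first list:
def _partition_impl(pred: Callable[[T], bool], it: Iterable[T]) \
        -> Tuple[List[T], List[T]]:
    trues: List[T] = []
    falses: List[T] = []
    for item in it:
        (trues if pred(item) else falses).append(item)
    return trues, falses

# _partition_impl(lambda n: n % 2 == 0, [1, 2, 3, 4])  ->  ([2, 4], [1, 3])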
|
normal
|
{
"blob_id": "8e443d136a4e9fcdd18a106192f9c097928b8c99",
"index": 7340,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef partition(pred: Callable[[T], bool], it: Iterable[T]) ->Tuple[List[T],\n List[T]]:\n ...\n",
"step-3": "<mask token>\nT = TypeVar('T')\n\n\ndef partition(pred: Callable[[T], bool], it: Iterable[T]) ->Tuple[List[T],\n List[T]]:\n ...\n",
"step-4": "from typing import List, Any, Callable, Iterable, TypeVar, Tuple\nT = TypeVar('T')\n\n\ndef partition(pred: Callable[[T], bool], it: Iterable[T]) ->Tuple[List[T],\n List[T]]:\n ...\n",
"step-5": "from typing import List, Any, Callable, Iterable, TypeVar, Tuple\n\nT = TypeVar('T')\n\ndef partition(pred: Callable[[T], bool], it: Iterable[T]) \\\n -> Tuple[List[T], List[T]]: ...\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding:utf-8
__author__ = 'yinzishao'
# dic ={}
class operation():
def GetResult(self):
pass
class operationAdd(operation):
def GetResult(self):
return self.numberA + self.numberB
class operationDev(operation):
def GetResult(self):
# if(self.numberB!=0):
# return self.numberA /self.numberB
# else:
        #     raise "divisor must not be 0"
        try:
            return self.numberA / self.numberB
        except Exception, e:
            print "error: divided by zero"
return 0
class operationMul(operation):
def GetResult(self):
return self.numberA*self.numberB
class operationSub(operation):
def GetResult(self):
return self.numberA-self.numberB
class operationFac():
dic ={}
def __init__(self):
        self.dic = {"+": operationAdd(), "-": operationSub(), "/": operationDev(), "*": operationMul()}
def creatOpe(self,sign):
if sign in self.dic:
return self.dic[sign]
else:
return "faise"
if __name__ =="__main__":
fuhao = raw_input("operator:")
nA= input("a:")
nB= input("b:")
a =operationFac().creatOpe(fuhao)
a.numberA=nA
a.numberB=nB
print a.GetResult()
# dic ={"+":operationAdd(),"-":operationSub(),"/":operationDev(),"*":operationDev()}
# print dic
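
# --- Illustrative alternative (not the original author's code): the same
# dict-dispatch factory, written so each call returns a fresh operation
# instance and unknown operators fail loudly instead of returning a string.
# Sketched in Python 2 syntax to match the rest of this module. ---
#
# class OperationFactory:
#     _ops = {"+": operationAdd, "-": operationSub,
#             "/": operationDev, "*": operationMul}
#
#     @classmethod
#     def create(cls, sign):
#         op_cls = cls._ops.get(sign)
#         if op_cls is None:
#             raise ValueError("unsupported operator: %s" % sign)
#         return op_cls()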
|
normal
|
{
"blob_id": "7e33c6ada3d141ba8067dbf88c2e85a91802a067",
"index": 8446,
"step-1": "# coding:utf-8\n__author__ = 'yinzishao'\n# dic ={}\n\nclass operation():\n def GetResult(self):\n pass\n\nclass operationAdd(operation):\n def GetResult(self):\n return self.numberA + self.numberB\n\nclass operationDev(operation):\n def GetResult(self):\n # if(self.numberB!=0):\n # return self.numberA /self.numberB\n # else:\n # raise \"被除数不能为0\"\n try :\n return self.numberA /self.numberB\n except Exception,e:\n print \"error:divided by zero\"\n return 0\nclass operationMul(operation):\n def GetResult(self):\n return self.numberA*self.numberB\n\nclass operationSub(operation):\n def GetResult(self):\n return self.numberA-self.numberB\n\nclass operationFac():\n dic ={}\n def __init__(self):\n self.dic ={\"+\":operationAdd(),\"-\":operationSub(),\"/\":operationDev(),\"*\":operationDev()}\n def creatOpe(self,sign):\n if sign in self.dic:\n return self.dic[sign]\n else:\n return \"faise\"\n\nif __name__ ==\"__main__\":\n fuhao = raw_input(\"operator:\")\n nA= input(\"a:\")\n nB= input(\"b:\")\n a =operationFac().creatOpe(fuhao)\n a.numberA=nA\n a.numberB=nB\n print a.GetResult()\n # dic ={\"+\":operationAdd(),\"-\":operationSub(),\"/\":operationDev(),\"*\":operationDev()}\n # print dic\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import sys
import utils
#import random
def findNearestPoint(points,no_used , src):
# If no nearest point found, return max.
dest = src
minDist = sys.float_info.max
for i in range(len(points)):
if no_used[i] and i!=src:
dist = utils.length(points[src], points[i])
if dist < minDist:
dest =i
minDist = dist
return dest, minDist
def solve(points):
#get an initial tour by NearestPoint method
tour = [0 for i in range(len(points))]
no_used = [True for i in range(len(points))]
totalDist = 0.0
# src =int( random.random()*(len(points)-1))
# no_used[src] = False
# tour[0]=src
src =0
no_used[0] = False
for i in range(1, len(points)):
dest, minDist = findNearestPoint(points, no_used, src) #find Nearest Point
tour[i] = dest
no_used[dest] = False #have been used
src = dest
totalDist += minDist
#plus distance between last point and initial point
return totalDist + utils.length(points[tour[-1]], points[tour[0]]), tour
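
# --- Usage sketch (assumption: utils.length(a, b) is the Euclidean distance
# between two 2-D points; that helper is imported above but not shown): ---
if __name__ == '__main__':
    square = [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]
    dist, tour = solve(square)
    # Nearest-neighbour from index 0 visits the unit square's corners in
    # order, so the closed tour length should come out as 4.0.
    print(dist, tour)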
|
normal
|
{
"blob_id": "943db90aa7721ddad3d7f5103c4d398fbf4e143b",
"index": 2768,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef findNearestPoint(points, no_used, src):\n dest = src\n minDist = sys.float_info.max\n for i in range(len(points)):\n if no_used[i] and i != src:\n dist = utils.length(points[src], points[i])\n if dist < minDist:\n dest = i\n minDist = dist\n return dest, minDist\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef findNearestPoint(points, no_used, src):\n dest = src\n minDist = sys.float_info.max\n for i in range(len(points)):\n if no_used[i] and i != src:\n dist = utils.length(points[src], points[i])\n if dist < minDist:\n dest = i\n minDist = dist\n return dest, minDist\n\n\ndef solve(points):\n tour = [(0) for i in range(len(points))]\n no_used = [(True) for i in range(len(points))]\n totalDist = 0.0\n src = 0\n no_used[0] = False\n for i in range(1, len(points)):\n dest, minDist = findNearestPoint(points, no_used, src)\n tour[i] = dest\n no_used[dest] = False\n src = dest\n totalDist += minDist\n return totalDist + utils.length(points[tour[-1]], points[tour[0]]), tour\n",
"step-4": "import sys\nimport utils\n\n\ndef findNearestPoint(points, no_used, src):\n dest = src\n minDist = sys.float_info.max\n for i in range(len(points)):\n if no_used[i] and i != src:\n dist = utils.length(points[src], points[i])\n if dist < minDist:\n dest = i\n minDist = dist\n return dest, minDist\n\n\ndef solve(points):\n tour = [(0) for i in range(len(points))]\n no_used = [(True) for i in range(len(points))]\n totalDist = 0.0\n src = 0\n no_used[0] = False\n for i in range(1, len(points)):\n dest, minDist = findNearestPoint(points, no_used, src)\n tour[i] = dest\n no_used[dest] = False\n src = dest\n totalDist += minDist\n return totalDist + utils.length(points[tour[-1]], points[tour[0]]), tour\n",
"step-5": "import sys\nimport utils\n#import random\n\ndef findNearestPoint(points,no_used , src):\n # If no nearest point found, return max.\n \n dest = src\n minDist = sys.float_info.max\n \n for i in range(len(points)):\n if no_used[i] and i!=src:\n\n \n dist = utils.length(points[src], points[i]) \n if dist < minDist:\n dest =i\n minDist = dist \n \n\n return dest, minDist\n \ndef solve(points):\n #get an initial tour by NearestPoint method\n tour = [0 for i in range(len(points))]\n no_used = [True for i in range(len(points))]\n totalDist = 0.0\n \n# src =int( random.random()*(len(points)-1))\n# no_used[src] = False\n# tour[0]=src\n src =0\n no_used[0] = False\n \n for i in range(1, len(points)):\n dest, minDist = findNearestPoint(points, no_used, src) #find Nearest Point\n tour[i] = dest\n no_used[dest] = False #have been used\n src = dest\n totalDist += minDist\n #plus distance between last point and initial point\n return totalDist + utils.length(points[tour[-1]], points[tour[0]]), tour\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.urls import path
from .views import job_upload_view, job_view, job_applicants_view, posted_job_view, bussiness_list_view
app_name = 'jobs'
urlpatterns = [
path('', job_view, name='job-index'),
path('applicants/', job_applicants_view, name='job-applicants'),
path('posted/', posted_job_view, name='job-posted'),
path('business/', bussiness_list_view, name='business'),
path('upload/', job_upload_view, name='job-upload'),
]
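
# With app_name = 'jobs', these routes are reversed through the namespace,
# e.g. (illustrative, assuming this URLconf is included at the site root):
#
#   from django.urls import reverse
#   reverse('jobs:job-upload')  # -> '/upload/'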
|
normal
|
{
"blob_id": "b88af16693eca10d0bd78fd706389f5468c9b99b",
"index": 144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'jobs'\nurlpatterns = [path('', job_view, name='job-index'), path('applicants/',\n job_applicants_view, name='job-applicants'), path('posted/',\n posted_job_view, name='job-posted'), path('business/',\n bussiness_list_view, name='business'), path('upload/', job_upload_view,\n name='job-upload')]\n",
"step-3": "from django.urls import path\nfrom .views import job_upload_view, job_view, job_applicants_view, posted_job_view, bussiness_list_view\napp_name = 'jobs'\nurlpatterns = [path('', job_view, name='job-index'), path('applicants/',\n job_applicants_view, name='job-applicants'), path('posted/',\n posted_job_view, name='job-posted'), path('business/',\n bussiness_list_view, name='business'), path('upload/', job_upload_view,\n name='job-upload')]\n",
"step-4": "from django.urls import path\nfrom .views import job_upload_view, job_view, job_applicants_view, posted_job_view, bussiness_list_view\n\napp_name = 'jobs'\nurlpatterns = [\n path('', job_view, name='job-index'),\n path('applicants/', job_applicants_view, name='job-applicants'),\n path('posted/', posted_job_view, name='job-posted'),\n path('business/', bussiness_list_view, name='business'),\n path('upload/', job_upload_view, name='job-upload'),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from .most_serializers import *
|
normal
|
{
"blob_id": "a718949ed95b7d78f091b1e0f237eed151b102ae",
"index": 2160,
"step-1": "<mask token>\n",
"step-2": "from .most_serializers import *\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import argparse
import tensorboardX as tb
import torch as th
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as D
import data
import mlp
import resnet
import utils
parser = argparse.ArgumentParser()
parser.add_argument('--bst', nargs='+', type=int, help='Batch Size for Training')
parser.add_argument('--bsi', type=int, help='Batch Size for Inference')
parser.add_argument('--ds', type=str, help='DataSet')
parser.add_argument('--gpu', type=int, help='GPU')
parser.add_argument('--id', type=str, help='IDentifier')
parser.add_argument('--log-every', type=int, help='LOG statistics EVERY _ iterations')
parser.add_argument('--loss', type=str, help='LOSS')
parser.add_argument('--lr', type=float, help='Learning Rate')
parser.add_argument('--metric', type=str, help='METRIC')
parser.add_argument('--model', type=str, help='MODEL')
parser.add_argument('--ni', type=int, help='Number of Iterations')
parser.add_argument('--opt', type=str, help='OPTimizer')
parser.add_argument('--ptt', nargs='+', type=int, help='ParTiTion')
parser.add_argument('--tb', action='store_true', help='TensorBoard')
parser.add_argument('--w', type=float, help='Weight')
parser.add_argument('--wd', type=float, help='Weight Decay')
args = parser.parse_args()
x, y = {'adult' : data.load_adult,
'cifar10' : data.load_multi_cifar10,
'cifar100' : data.load_multi_cifar100,
'covtype' : data.load_covtype,
'kddcup08' : data.load_kddcup08,
'letter' : data.load_multi_letter,
'mnist' : data.load_multi_mnist}[args.ds]()
x, y = data.shuffle(x, y)
[[train_xx, train_yy],
[val_xx, val_yy],
[test_xx, test_yy]] = data.partition(x, y, args.ptt)
train_x, val_x, test_x = th.cat(train_xx), th.cat(val_xx), th.cat(test_xx)
train_y, val_y, test_y = th.cat(train_yy), th.cat(val_yy), th.cat(test_yy)
train_x, val_x, test_x = data.normalize([train_x, val_x, test_x])
train_xx = th.split(train_x, [len(x) for x in train_xx])
train_datasets = [D.TensorDataset(x) for x in train_xx]
train_loader = D.DataLoader(D.TensorDataset(train_x, train_y), args.bsi)
val_loader = D.DataLoader(D.TensorDataset(val_x, val_y), args.bsi)
test_loader = D.DataLoader(D.TensorDataset(test_x, test_y), args.bsi)
pclass_list = [len(y) / len(train_y) for y in train_yy]
n_classes = len(train_yy)
if len(args.bst) == n_classes:
bs_list = args.bst
elif len(args.bst) == 1:
bs_list = [args.bst[0]] * n_classes
else:
raise RuntimeError()
train_loaders = [utils.cycle(D.DataLoader(ds, bs, shuffle=True)) \
for ds, bs in zip(train_datasets, bs_list)]
if args.model == 'linear':
model = th.nn.Linear(train_x.size(1), n_classes)
elif args.model == 'mlp':
model = mlp.MLP([train_x.size(1), 64, 64, 64, n_classes], th.relu, bn=True)
elif args.model == 'resnet':
    model = resnet.ResNet(18, n_classes)
else:
raise RuntimeError()
dev = th.device('cpu') if args.gpu < 0 else th.device('cuda:%d' % args.gpu)
model = model.to(dev)
params = list(model.parameters())
kwargs = {'params' : params, 'lr' : args.lr, 'weight_decay' : args.wd}
opt = {'sgd' : optim.SGD(**kwargs),
'adam' : optim.Adam(amsgrad=True, **kwargs)}[args.opt]
metric = getattr(utils, args.metric)
if args.tb:
path = 'tb/%s' % args.id
writer = tb.SummaryWriter(path)
train_writer = tb.SummaryWriter(path + '/a')
val_writer = tb.SummaryWriter(path + '/b')
test_writer = tb.SummaryWriter(path + '/c')
def infer(loader, model):
yy = []
y_barr = []
for x, y in loader:
x, y = x.to(dev), y.to(dev)
y_bar = th.max(model(x), 1)[1]
yy.append(y)
y_barr.append(y_bar)
y = th.cat(yy)
y_bar = th.cat(y_barr)
return y, y_bar
def log(model, i):
mmm = []
for loader in train_loader, val_loader, test_loader:
y, y_bar = infer(loader, model)
a = th.sum(y == y_bar).item() / len(y)
fnfn = utils.fn_mc(y, y_bar, n_classes)
fpfp = utils.fp_mc(y, y_bar, n_classes)
m = metric(pclass_list, fnfn, fpfp)
mmm.append([a, m])
tagg = ['a', args.metric]
placeholder = '0' * (len(str(args.ni)) - len(str(i)))
xx = ['/'.join(['%0.2f' % m for m in mm]) for mm in zip(*mmm)]
x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))
print('[iteration %s%d]%s' % ((placeholder, i, x)))
if args.tb:
for writer, mm in zip([train_writer, val_writer, test_writer], mmm):
for tag, m in zip(tagg, mm):
writer.add_scalar(tag, m, i)
utils.eval(model)
log(model, 0)
for i in range(args.ni):
xx = [next(loader)[0].to(dev) for loader in train_loaders]
x = th.cat(xx)
utils.train(model)
z = F.softmax(model(x), 1)
zz = th.split(z, [len(x) for x in xx])
pneg_list = [1 - th.mean(z[:, i]) for i, z in enumerate(zz)]
fnfn = [p_class * p_neg for p_class, p_neg in zip(pclass_list, pneg_list)]
fpfp = [(1 - p_class) * p_neg for p_class, p_neg in zip(pclass_list, pneg_list)]
if args.w > 0:
loss = sum(args.w * fn + (1 - args.w) * fp for fn, fp in zip(fnfn, fpfp))
else:
loss = -metric(pclass_list, fnfn, fpfp)
opt.zero_grad()
loss.backward()
opt.step()
utils.eval(model)
if (i + 1) % args.log_every == 0:
log(model, i + 1)
|
normal
|
{
"blob_id": "92bcfff733e5f305ad1276ceb39a72a8f0fcb214",
"index": 8038,
"step-1": "<mask token>\n\n\ndef log(model, i):\n mmm = []\n for loader in (train_loader, val_loader, test_loader):\n y, y_bar = infer(loader, model)\n a = th.sum(y == y_bar).item() / len(y)\n fnfn = utils.fn_mc(y, y_bar, n_classes)\n fpfp = utils.fp_mc(y, y_bar, n_classes)\n m = metric(pclass_list, fnfn, fpfp)\n mmm.append([a, m])\n tagg = ['a', args.metric]\n placeholder = '0' * (len(str(args.ni)) - len(str(i)))\n xx = ['/'.join([('%0.2f' % m) for m in mm]) for mm in zip(*mmm)]\n x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))\n print('[iteration %s%d]%s' % (placeholder, i, x))\n if args.tb:\n for writer, mm in zip([train_writer, val_writer, test_writer], mmm):\n for tag, m in zip(tagg, mm):\n writer.add_scalar(tag, m, i)\n\n\n<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('--bst', nargs='+', type=int, help=\n 'Batch Size for Training')\nparser.add_argument('--bsi', type=int, help='Batch Size for Inference')\nparser.add_argument('--ds', type=str, help='DataSet')\nparser.add_argument('--gpu', type=int, help='GPU')\nparser.add_argument('--id', type=str, help='IDentifier')\nparser.add_argument('--log-every', type=int, help=\n 'LOG statistics EVERY _ iterations')\nparser.add_argument('--loss', type=str, help='LOSS')\nparser.add_argument('--lr', type=float, help='Learning Rate')\nparser.add_argument('--metric', type=str, help='METRIC')\nparser.add_argument('--model', type=str, help='MODEL')\nparser.add_argument('--ni', type=int, help='Number of Iterations')\nparser.add_argument('--opt', type=str, help='OPTimizer')\nparser.add_argument('--ptt', nargs='+', type=int, help='ParTiTion')\nparser.add_argument('--tb', action='store_true', help='TensorBoard')\nparser.add_argument('--w', type=float, help='Weight')\nparser.add_argument('--wd', type=float, help='Weight Decay')\n<mask token>\nif len(args.bst) == n_classes:\n bs_list = args.bst\nelif len(args.bst) == 1:\n bs_list = [args.bst[0]] * n_classes\nelse:\n raise RuntimeError()\n<mask token>\nif args.model == 'linear':\n model = th.nn.Linear(train_x.size(1), n_classes)\nelif args.model == 'mlp':\n model = mlp.MLP([train_x.size(1), 64, 64, 64, n_classes], th.relu, bn=True)\nelif args.model == 'resnet':\n model = resnet.ResNet(18, n_classes)[args.model]\nelse:\n raise RuntimeError()\n<mask token>\nif args.tb:\n path = 'tb/%s' % args.id\n writer = tb.SummaryWriter(path)\n train_writer = tb.SummaryWriter(path + '/a')\n val_writer = tb.SummaryWriter(path + '/b')\n test_writer = tb.SummaryWriter(path + '/c')\n\n\ndef infer(loader, model):\n yy = []\n y_barr = []\n for x, y in loader:\n x, y = x.to(dev), y.to(dev)\n y_bar = th.max(model(x), 1)[1]\n yy.append(y)\n y_barr.append(y_bar)\n y = th.cat(yy)\n y_bar = th.cat(y_barr)\n return y, y_bar\n\n\ndef log(model, i):\n mmm = []\n for loader in (train_loader, val_loader, test_loader):\n y, y_bar = infer(loader, model)\n a = th.sum(y == y_bar).item() / len(y)\n fnfn = utils.fn_mc(y, y_bar, n_classes)\n fpfp = utils.fp_mc(y, y_bar, n_classes)\n m = metric(pclass_list, fnfn, fpfp)\n mmm.append([a, m])\n tagg = ['a', args.metric]\n placeholder = '0' * (len(str(args.ni)) - len(str(i)))\n xx = ['/'.join([('%0.2f' % m) for m in mm]) for mm in zip(*mmm)]\n x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))\n print('[iteration %s%d]%s' % (placeholder, i, x))\n if args.tb:\n for writer, mm in zip([train_writer, val_writer, test_writer], mmm):\n for tag, m in zip(tagg, mm):\n writer.add_scalar(tag, m, i)\n\n\nutils.eval(model)\nlog(model, 0)\nfor i in range(args.ni):\n xx = [next(loader)[0].to(dev) for loader in train_loaders]\n x = th.cat(xx)\n utils.train(model)\n z = F.softmax(model(x), 1)\n zz = th.split(z, [len(x) for x in xx])\n pneg_list = [(1 - th.mean(z[:, i])) for i, z in enumerate(zz)]\n fnfn = [(p_class * p_neg) for p_class, p_neg in zip(pclass_list, pneg_list)\n ]\n fpfp = [((1 - p_class) * p_neg) for p_class, p_neg in zip(pclass_list,\n pneg_list)]\n if args.w > 0:\n loss = sum(args.w * fn + (1 - args.w) * fp for fn, fp in zip(fnfn,\n fpfp))\n else:\n loss = -metric(pclass_list, fnfn, fpfp)\n opt.zero_grad()\n loss.backward()\n opt.step()\n utils.eval(model)\n if (i + 1) % args.log_every == 0:\n log(model, i + 1)\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser()\nparser.add_argument('--bst', nargs='+', type=int, help=\n 'Batch Size for Training')\nparser.add_argument('--bsi', type=int, help='Batch Size for Inference')\nparser.add_argument('--ds', type=str, help='DataSet')\nparser.add_argument('--gpu', type=int, help='GPU')\nparser.add_argument('--id', type=str, help='IDentifier')\nparser.add_argument('--log-every', type=int, help=\n 'LOG statistics EVERY _ iterations')\nparser.add_argument('--loss', type=str, help='LOSS')\nparser.add_argument('--lr', type=float, help='Learning Rate')\nparser.add_argument('--metric', type=str, help='METRIC')\nparser.add_argument('--model', type=str, help='MODEL')\nparser.add_argument('--ni', type=int, help='Number of Iterations')\nparser.add_argument('--opt', type=str, help='OPTimizer')\nparser.add_argument('--ptt', nargs='+', type=int, help='ParTiTion')\nparser.add_argument('--tb', action='store_true', help='TensorBoard')\nparser.add_argument('--w', type=float, help='Weight')\nparser.add_argument('--wd', type=float, help='Weight Decay')\nargs = parser.parse_args()\nx, y = {'adult': data.load_adult, 'cifar10': data.load_multi_cifar10,\n 'cifar100': data.load_multi_cifar100, 'covtype': data.load_covtype,\n 'kddcup08': data.load_kddcup08, 'letter': data.load_multi_letter,\n 'mnist': data.load_multi_mnist}[args.ds]()\nx, y = data.shuffle(x, y)\n[[train_xx, train_yy], [val_xx, val_yy], [test_xx, test_yy]] = data.partition(x\n , y, args.ptt)\ntrain_x, val_x, test_x = th.cat(train_xx), th.cat(val_xx), th.cat(test_xx)\ntrain_y, val_y, test_y = th.cat(train_yy), th.cat(val_yy), th.cat(test_yy)\ntrain_x, val_x, test_x = data.normalize([train_x, val_x, test_x])\ntrain_xx = th.split(train_x, [len(x) for x in train_xx])\ntrain_datasets = [D.TensorDataset(x) for x in train_xx]\ntrain_loader = D.DataLoader(D.TensorDataset(train_x, train_y), args.bsi)\nval_loader = D.DataLoader(D.TensorDataset(val_x, val_y), args.bsi)\ntest_loader = D.DataLoader(D.TensorDataset(test_x, test_y), args.bsi)\npclass_list = [(len(y) / len(train_y)) for y in train_yy]\nn_classes = len(train_yy)\nif len(args.bst) == n_classes:\n bs_list = args.bst\nelif len(args.bst) == 1:\n bs_list = [args.bst[0]] * n_classes\nelse:\n raise RuntimeError()\ntrain_loaders = [utils.cycle(D.DataLoader(ds, bs, shuffle=True)) for ds, bs in\n zip(train_datasets, bs_list)]\nif args.model == 'linear':\n model = th.nn.Linear(train_x.size(1), n_classes)\nelif args.model == 'mlp':\n model = mlp.MLP([train_x.size(1), 64, 64, 64, n_classes], th.relu, bn=True)\nelif args.model == 'resnet':\n model = resnet.ResNet(18, n_classes)[args.model]\nelse:\n raise RuntimeError()\ndev = th.device('cpu') if args.gpu < 0 else th.device('cuda:%d' % args.gpu)\nmodel = model.to(dev)\nparams = list(model.parameters())\nkwargs = {'params': params, 'lr': args.lr, 'weight_decay': args.wd}\nopt = {'sgd': optim.SGD(**kwargs), 'adam': optim.Adam(amsgrad=True, **kwargs)}[\n args.opt]\nmetric = getattr(utils, args.metric)\nif args.tb:\n path = 'tb/%s' % args.id\n writer = tb.SummaryWriter(path)\n train_writer = tb.SummaryWriter(path + '/a')\n val_writer = tb.SummaryWriter(path + '/b')\n test_writer = tb.SummaryWriter(path + '/c')\n\n\ndef infer(loader, model):\n yy = []\n y_barr = []\n for x, y in loader:\n x, y = x.to(dev), y.to(dev)\n y_bar = th.max(model(x), 1)[1]\n yy.append(y)\n y_barr.append(y_bar)\n y = th.cat(yy)\n y_bar = th.cat(y_barr)\n return y, y_bar\n\n\ndef log(model, i):\n mmm = []\n for loader in (train_loader, val_loader, 
test_loader):\n y, y_bar = infer(loader, model)\n a = th.sum(y == y_bar).item() / len(y)\n fnfn = utils.fn_mc(y, y_bar, n_classes)\n fpfp = utils.fp_mc(y, y_bar, n_classes)\n m = metric(pclass_list, fnfn, fpfp)\n mmm.append([a, m])\n tagg = ['a', args.metric]\n placeholder = '0' * (len(str(args.ni)) - len(str(i)))\n xx = ['/'.join([('%0.2f' % m) for m in mm]) for mm in zip(*mmm)]\n x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))\n print('[iteration %s%d]%s' % (placeholder, i, x))\n if args.tb:\n for writer, mm in zip([train_writer, val_writer, test_writer], mmm):\n for tag, m in zip(tagg, mm):\n writer.add_scalar(tag, m, i)\n\n\nutils.eval(model)\nlog(model, 0)\nfor i in range(args.ni):\n xx = [next(loader)[0].to(dev) for loader in train_loaders]\n x = th.cat(xx)\n utils.train(model)\n z = F.softmax(model(x), 1)\n zz = th.split(z, [len(x) for x in xx])\n pneg_list = [(1 - th.mean(z[:, i])) for i, z in enumerate(zz)]\n fnfn = [(p_class * p_neg) for p_class, p_neg in zip(pclass_list, pneg_list)\n ]\n fpfp = [((1 - p_class) * p_neg) for p_class, p_neg in zip(pclass_list,\n pneg_list)]\n if args.w > 0:\n loss = sum(args.w * fn + (1 - args.w) * fp for fn, fp in zip(fnfn,\n fpfp))\n else:\n loss = -metric(pclass_list, fnfn, fpfp)\n opt.zero_grad()\n loss.backward()\n opt.step()\n utils.eval(model)\n if (i + 1) % args.log_every == 0:\n log(model, i + 1)\n",
"step-4": "import argparse\nimport tensorboardX as tb\nimport torch as th\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport torch.utils.data as D\nimport data\nimport mlp\nimport resnet\nimport utils\nparser = argparse.ArgumentParser()\nparser.add_argument('--bst', nargs='+', type=int, help=\n 'Batch Size for Training')\nparser.add_argument('--bsi', type=int, help='Batch Size for Inference')\nparser.add_argument('--ds', type=str, help='DataSet')\nparser.add_argument('--gpu', type=int, help='GPU')\nparser.add_argument('--id', type=str, help='IDentifier')\nparser.add_argument('--log-every', type=int, help=\n 'LOG statistics EVERY _ iterations')\nparser.add_argument('--loss', type=str, help='LOSS')\nparser.add_argument('--lr', type=float, help='Learning Rate')\nparser.add_argument('--metric', type=str, help='METRIC')\nparser.add_argument('--model', type=str, help='MODEL')\nparser.add_argument('--ni', type=int, help='Number of Iterations')\nparser.add_argument('--opt', type=str, help='OPTimizer')\nparser.add_argument('--ptt', nargs='+', type=int, help='ParTiTion')\nparser.add_argument('--tb', action='store_true', help='TensorBoard')\nparser.add_argument('--w', type=float, help='Weight')\nparser.add_argument('--wd', type=float, help='Weight Decay')\nargs = parser.parse_args()\nx, y = {'adult': data.load_adult, 'cifar10': data.load_multi_cifar10,\n 'cifar100': data.load_multi_cifar100, 'covtype': data.load_covtype,\n 'kddcup08': data.load_kddcup08, 'letter': data.load_multi_letter,\n 'mnist': data.load_multi_mnist}[args.ds]()\nx, y = data.shuffle(x, y)\n[[train_xx, train_yy], [val_xx, val_yy], [test_xx, test_yy]] = data.partition(x\n , y, args.ptt)\ntrain_x, val_x, test_x = th.cat(train_xx), th.cat(val_xx), th.cat(test_xx)\ntrain_y, val_y, test_y = th.cat(train_yy), th.cat(val_yy), th.cat(test_yy)\ntrain_x, val_x, test_x = data.normalize([train_x, val_x, test_x])\ntrain_xx = th.split(train_x, [len(x) for x in train_xx])\ntrain_datasets = [D.TensorDataset(x) for x in train_xx]\ntrain_loader = D.DataLoader(D.TensorDataset(train_x, train_y), args.bsi)\nval_loader = D.DataLoader(D.TensorDataset(val_x, val_y), args.bsi)\ntest_loader = D.DataLoader(D.TensorDataset(test_x, test_y), args.bsi)\npclass_list = [(len(y) / len(train_y)) for y in train_yy]\nn_classes = len(train_yy)\nif len(args.bst) == n_classes:\n bs_list = args.bst\nelif len(args.bst) == 1:\n bs_list = [args.bst[0]] * n_classes\nelse:\n raise RuntimeError()\ntrain_loaders = [utils.cycle(D.DataLoader(ds, bs, shuffle=True)) for ds, bs in\n zip(train_datasets, bs_list)]\nif args.model == 'linear':\n model = th.nn.Linear(train_x.size(1), n_classes)\nelif args.model == 'mlp':\n model = mlp.MLP([train_x.size(1), 64, 64, 64, n_classes], th.relu, bn=True)\nelif args.model == 'resnet':\n model = resnet.ResNet(18, n_classes)[args.model]\nelse:\n raise RuntimeError()\ndev = th.device('cpu') if args.gpu < 0 else th.device('cuda:%d' % args.gpu)\nmodel = model.to(dev)\nparams = list(model.parameters())\nkwargs = {'params': params, 'lr': args.lr, 'weight_decay': args.wd}\nopt = {'sgd': optim.SGD(**kwargs), 'adam': optim.Adam(amsgrad=True, **kwargs)}[\n args.opt]\nmetric = getattr(utils, args.metric)\nif args.tb:\n path = 'tb/%s' % args.id\n writer = tb.SummaryWriter(path)\n train_writer = tb.SummaryWriter(path + '/a')\n val_writer = tb.SummaryWriter(path + '/b')\n test_writer = tb.SummaryWriter(path + '/c')\n\n\ndef infer(loader, model):\n yy = []\n y_barr = []\n for x, y in loader:\n x, y = x.to(dev), y.to(dev)\n y_bar = 
th.max(model(x), 1)[1]\n yy.append(y)\n y_barr.append(y_bar)\n y = th.cat(yy)\n y_bar = th.cat(y_barr)\n return y, y_bar\n\n\ndef log(model, i):\n mmm = []\n for loader in (train_loader, val_loader, test_loader):\n y, y_bar = infer(loader, model)\n a = th.sum(y == y_bar).item() / len(y)\n fnfn = utils.fn_mc(y, y_bar, n_classes)\n fpfp = utils.fp_mc(y, y_bar, n_classes)\n m = metric(pclass_list, fnfn, fpfp)\n mmm.append([a, m])\n tagg = ['a', args.metric]\n placeholder = '0' * (len(str(args.ni)) - len(str(i)))\n xx = ['/'.join([('%0.2f' % m) for m in mm]) for mm in zip(*mmm)]\n x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))\n print('[iteration %s%d]%s' % (placeholder, i, x))\n if args.tb:\n for writer, mm in zip([train_writer, val_writer, test_writer], mmm):\n for tag, m in zip(tagg, mm):\n writer.add_scalar(tag, m, i)\n\n\nutils.eval(model)\nlog(model, 0)\nfor i in range(args.ni):\n xx = [next(loader)[0].to(dev) for loader in train_loaders]\n x = th.cat(xx)\n utils.train(model)\n z = F.softmax(model(x), 1)\n zz = th.split(z, [len(x) for x in xx])\n pneg_list = [(1 - th.mean(z[:, i])) for i, z in enumerate(zz)]\n fnfn = [(p_class * p_neg) for p_class, p_neg in zip(pclass_list, pneg_list)\n ]\n fpfp = [((1 - p_class) * p_neg) for p_class, p_neg in zip(pclass_list,\n pneg_list)]\n if args.w > 0:\n loss = sum(args.w * fn + (1 - args.w) * fp for fn, fp in zip(fnfn,\n fpfp))\n else:\n loss = -metric(pclass_list, fnfn, fpfp)\n opt.zero_grad()\n loss.backward()\n opt.step()\n utils.eval(model)\n if (i + 1) % args.log_every == 0:\n log(model, i + 1)\n",
"step-5": "import argparse\nimport tensorboardX as tb\nimport torch as th\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport torch.utils.data as D\nimport data\nimport mlp\nimport resnet\nimport utils\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--bst', nargs='+', type=int, help='Batch Size for Training')\nparser.add_argument('--bsi', type=int, help='Batch Size for Inference')\nparser.add_argument('--ds', type=str, help='DataSet')\nparser.add_argument('--gpu', type=int, help='GPU')\nparser.add_argument('--id', type=str, help='IDentifier')\nparser.add_argument('--log-every', type=int, help='LOG statistics EVERY _ iterations')\nparser.add_argument('--loss', type=str, help='LOSS')\nparser.add_argument('--lr', type=float, help='Learning Rate')\nparser.add_argument('--metric', type=str, help='METRIC')\nparser.add_argument('--model', type=str, help='MODEL')\nparser.add_argument('--ni', type=int, help='Number of Iterations')\nparser.add_argument('--opt', type=str, help='OPTimizer')\nparser.add_argument('--ptt', nargs='+', type=int, help='ParTiTion')\nparser.add_argument('--tb', action='store_true', help='TensorBoard')\nparser.add_argument('--w', type=float, help='Weight')\nparser.add_argument('--wd', type=float, help='Weight Decay')\nargs = parser.parse_args()\n\nx, y = {'adult' : data.load_adult,\n 'cifar10' : data.load_multi_cifar10,\n 'cifar100' : data.load_multi_cifar100,\n 'covtype' : data.load_covtype,\n 'kddcup08' : data.load_kddcup08,\n 'letter' : data.load_multi_letter,\n 'mnist' : data.load_multi_mnist}[args.ds]()\nx, y = data.shuffle(x, y)\n[[train_xx, train_yy],\n [val_xx, val_yy],\n [test_xx, test_yy]] = data.partition(x, y, args.ptt)\ntrain_x, val_x, test_x = th.cat(train_xx), th.cat(val_xx), th.cat(test_xx)\ntrain_y, val_y, test_y = th.cat(train_yy), th.cat(val_yy), th.cat(test_yy)\ntrain_x, val_x, test_x = data.normalize([train_x, val_x, test_x])\ntrain_xx = th.split(train_x, [len(x) for x in train_xx])\ntrain_datasets = [D.TensorDataset(x) for x in train_xx]\ntrain_loader = D.DataLoader(D.TensorDataset(train_x, train_y), args.bsi)\nval_loader = D.DataLoader(D.TensorDataset(val_x, val_y), args.bsi)\ntest_loader = D.DataLoader(D.TensorDataset(test_x, test_y), args.bsi)\npclass_list = [len(y) / len(train_y) for y in train_yy]\n\nn_classes = len(train_yy)\nif len(args.bst) == n_classes:\n bs_list = args.bst\nelif len(args.bst) == 1:\n bs_list = [args.bst[0]] * n_classes\nelse:\n raise RuntimeError()\ntrain_loaders = [utils.cycle(D.DataLoader(ds, bs, shuffle=True)) \\\n for ds, bs in zip(train_datasets, bs_list)]\n\nif args.model == 'linear':\n model = th.nn.Linear(train_x.size(1), n_classes)\nelif args.model == 'mlp':\n model = mlp.MLP([train_x.size(1), 64, 64, 64, n_classes], th.relu, bn=True)\nelif args.model == 'resnet':\n model = resnet.ResNet(18, n_classes)[args.model]\nelse:\n raise RuntimeError()\ndev = th.device('cpu') if args.gpu < 0 else th.device('cuda:%d' % args.gpu)\nmodel = model.to(dev)\nparams = list(model.parameters())\nkwargs = {'params' : params, 'lr' : args.lr, 'weight_decay' : args.wd}\nopt = {'sgd' : optim.SGD(**kwargs),\n 'adam' : optim.Adam(amsgrad=True, **kwargs)}[args.opt]\nmetric = getattr(utils, args.metric)\n\nif args.tb:\n path = 'tb/%s' % args.id\n writer = tb.SummaryWriter(path)\n train_writer = tb.SummaryWriter(path + '/a')\n val_writer = tb.SummaryWriter(path + '/b')\n test_writer = tb.SummaryWriter(path + '/c')\n\ndef infer(loader, model):\n yy = []\n y_barr = []\n for x, y in loader:\n x, y = x.to(dev), y.to(dev)\n 
y_bar = th.max(model(x), 1)[1]\n yy.append(y)\n y_barr.append(y_bar)\n y = th.cat(yy)\n y_bar = th.cat(y_barr)\n return y, y_bar\n\ndef log(model, i):\n mmm = []\n for loader in train_loader, val_loader, test_loader:\n y, y_bar = infer(loader, model)\n\n a = th.sum(y == y_bar).item() / len(y)\n fnfn = utils.fn_mc(y, y_bar, n_classes)\n fpfp = utils.fp_mc(y, y_bar, n_classes)\n m = metric(pclass_list, fnfn, fpfp)\n\n mmm.append([a, m])\n\n tagg = ['a', args.metric]\n\n placeholder = '0' * (len(str(args.ni)) - len(str(i)))\n xx = ['/'.join(['%0.2f' % m for m in mm]) for mm in zip(*mmm)]\n x = ' | '.join('%s %s' % (tag, mm) for tag, mm in zip(tagg, xx))\n print('[iteration %s%d]%s' % ((placeholder, i, x)))\n\n if args.tb:\n for writer, mm in zip([train_writer, val_writer, test_writer], mmm):\n for tag, m in zip(tagg, mm):\n writer.add_scalar(tag, m, i)\n\nutils.eval(model)\nlog(model, 0)\n\nfor i in range(args.ni):\n xx = [next(loader)[0].to(dev) for loader in train_loaders]\n x = th.cat(xx)\n utils.train(model)\n z = F.softmax(model(x), 1)\n zz = th.split(z, [len(x) for x in xx])\n pneg_list = [1 - th.mean(z[:, i]) for i, z in enumerate(zz)]\n fnfn = [p_class * p_neg for p_class, p_neg in zip(pclass_list, pneg_list)]\n fpfp = [(1 - p_class) * p_neg for p_class, p_neg in zip(pclass_list, pneg_list)]\n\n if args.w > 0:\n loss = sum(args.w * fn + (1 - args.w) * fp for fn, fp in zip(fnfn, fpfp))\n else:\n loss = -metric(pclass_list, fnfn, fpfp)\n\n opt.zero_grad()\n loss.backward()\n opt.step()\n\n utils.eval(model)\n if (i + 1) % args.log_every == 0:\n log(model, i + 1)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# Databricks notebook source
#import and create sparksession object
from pyspark.sql import SparkSession
spark=SparkSession.builder.appName('rc').getOrCreate()
# COMMAND ----------
#import the required functions and libraries
from pyspark.sql.functions import *
# COMMAND ----------
# Convert csv file to Spark DataFrame (Databricks version)
def loadDataFrame(fileName, fileSchema):
return (spark.read.format("csv")
.schema(fileSchema)
.option("header", "true")
.option("mode", "DROPMALFORMED")
.csv("/FileStore/tables/%s" % (fileName)))
# COMMAND ----------
from pyspark.sql.types import *
movieRatingSchema = StructType([
StructField("userId", IntegerType(), True),
StructField("movieId", IntegerType(), True),
StructField("rating", FloatType(), True),
StructField("timestamp", StringType(), True)])
movieSchema = StructType([
StructField("movieId", IntegerType(), True),
StructField("title", StringType(), True),
StructField("genres", StringType(), True)])
MovieRatingsDF = loadDataFrame("ratings.csv", movieRatingSchema).cache()
MoviesDF = loadDataFrame("movies.csv", movieSchema).cache()
# COMMAND ----------
#load the dataset and create spark dataframe
df = MovieRatingsDF.join(MoviesDF, 'movieId').select(['userId', 'title', 'rating'])
#df=spark.read.csv('movie_ratings_df.csv',inferSchema=True,header=True)
# COMMAND ----------
#validate the shape of the data
print((df.count(),len(df.columns)))
# COMMAND ----------
#check columns in dataframe
df.printSchema()
# COMMAND ----------
#validate few rows of dataframe in random order
df.orderBy(rand()).show(10,False)
# COMMAND ----------
#check number of ratings by each user (most active users first)
df.groupBy('userId').count().orderBy('count',ascending=False).show(10,False)
# COMMAND ----------
#check number of ratings by each user (least active users first)
df.groupBy('userId').count().orderBy('count',ascending=True).show(10,False)
# COMMAND ----------
#number of times each movie has been rated
df.groupBy('title').count().orderBy('count',ascending=False).show(10,False)
# COMMAND ----------
df.groupBy('title').count().orderBy('count',ascending=True).show(10,False)
# COMMAND ----------
#import String indexer to convert string values to numeric values
from pyspark.ml.feature import StringIndexer,IndexToString
# COMMAND ----------
#creating string indexer to convert the movie title column values into numerical values
stringIndexer = StringIndexer(inputCol="title", outputCol="title_new")
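#note: StringIndexer orders labels by frequency by default, so the most
#frequently rated title will receive index 0.0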
# COMMAND ----------
#applying stringindexer object on dataframe movie title column
model = stringIndexer.fit(df)
# COMMAND ----------
#creating new dataframe with transformed values
indexed = model.transform(df)
# COMMAND ----------
#validate the numerical title values
indexed.show(10)
# COMMAND ----------
#number of times each numerical movie title has been rated
indexed.groupBy('title_new').count().orderBy('count',ascending=False).show(10,False)
# COMMAND ----------
#split the data into training and test datasets
train,test=indexed.randomSplit([0.75,0.25])
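#randomSplit also accepts a seed for reproducible splits,
#e.g. indexed.randomSplit([0.75,0.25], seed=42)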
# COMMAND ----------
#count number of records in train set
train.count()
# COMMAND ----------
#count number of records in test set
test.count()
# COMMAND ----------
#import ALS recommender function from pyspark ml library
from pyspark.ml.recommendation import ALS
# COMMAND ----------
#Training the recommender model using the train dataset
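#nonnegative=True constrains the learned latent factors to be >= 0, and
#coldStartStrategy="drop" discards NaN predictions for users/items unseen
#during training so the RMSE computed below stays defined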
rec=ALS(maxIter=10,regParam=0.01,userCol='userId',itemCol='title_new',ratingCol='rating',nonnegative=True,coldStartStrategy="drop")
# COMMAND ----------
#fit the model on train set
rec_model=rec.fit(train)
# COMMAND ----------
#making predictions on test set
predicted_ratings=rec_model.transform(test)
# COMMAND ----------
#columns in predicted ratings dataframe
predicted_ratings.printSchema()
# COMMAND ----------
#predicted vs actual ratings for test set
predicted_ratings.orderBy(rand()).show(10)
# COMMAND ----------
#importing Regression Evaluator to measure RMSE
from pyspark.ml.evaluation import RegressionEvaluator
# COMMAND ----------
#create RegressionEvaluator object for measuring accuracy
evaluator=RegressionEvaluator(metricName='rmse',predictionCol='prediction',labelCol='rating')
# COMMAND ----------
#apply the RE on predictions dataframe to calculate RMSE
rmse=evaluator.evaluate(predicted_ratings)
# COMMAND ----------
#print RMSE error
print(rmse)
# COMMAND ----------
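#the same RegressionEvaluator API supports other metrics as well; a minimal
#sketch reusing the predicted_ratings dataframe from above
mae_evaluator=RegressionEvaluator(metricName='mae',predictionCol='prediction',labelCol='rating')
print(mae_evaluator.evaluate(predicted_ratings))

# COMMAND ----------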
#Recommend top movies which the user might like
# COMMAND ----------
#create dataset of all distinct movies
unique_movies=indexed.select('title_new').distinct()
# COMMAND ----------
#number of unique movies
unique_movies.count()
# COMMAND ----------
#assigning alias name 'a' to unique movies df
a = unique_movies.alias('a')
# COMMAND ----------
user_id=85
# COMMAND ----------
#creating another dataframe which contains movies already watched by the active user
watched_movies=indexed.filter(indexed['userId'] == user_id).select('title_new').distinct()
# COMMAND ----------
#number of movies already rated
watched_movies.count()
# COMMAND ----------
#assigning alias name 'b' to watched movies df
b=watched_movies.alias('b')
# COMMAND ----------
#joining both tables on left join
total_movies = a.join(b, a.title_new == b.title_new,how='left')
# COMMAND ----------
total_movies.show(10,False)
# COMMAND ----------
#selecting movies which the active user is yet to rate or watch
remaining_movies=total_movies.where(col("b.title_new").isNull()).select(a.title_new).distinct()
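#equivalently, a left anti join selects the unwatched titles in one step:
#remaining_movies=a.join(b, a.title_new == b.title_new, how='left_anti').select('title_new').distinct()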
# COMMAND ----------
#number of movies the user is yet to rate
remaining_movies.count()
# COMMAND ----------
#adding new column with the user_Id of the active user to remaining movies df
remaining_movies=remaining_movies.withColumn("userId",lit(int(user_id)))
# COMMAND ----------
remaining_movies.show(10,False)
# COMMAND ----------
#making recommendations using ALS recommender model and selecting only top 'n' movies
recommendations=rec_model.transform(remaining_movies).orderBy('prediction',ascending=False)
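#note: ALSModel also exposes recommendForAllUsers(n) and, in Spark 2.3+,
#recommendForUserSubset(df, n) for batch top-N recommendations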
# COMMAND ----------
recommendations.show(5,False)
# COMMAND ----------
#converting title_new values back to movie titles
movie_title = IndexToString(inputCol="title_new", outputCol="title",labels=model.labels)
final_recommendations=movie_title.transform(recommendations)
# COMMAND ----------
final_recommendations.show(10,False)
# COMMAND ----------
#create function to recommend top 'n' movies to any particular user
def top_movies(user_id,n):
"""
This function returns the top 'n' movies that user has not seen yet but might like
"""
#assigning alias name 'a' to unique movies df
a = unique_movies.alias('a')
    #creating another dataframe which contains movies already watched by the active user
watched_movies=indexed.filter(indexed['userId'] == user_id).select('title_new')
#assigning alias name 'b' to watched movies df
b=watched_movies.alias('b')
#joining both tables on left join
total_movies = a.join(b, a.title_new == b.title_new,how='left')
    #selecting movies which the active user is yet to rate or watch
remaining_movies=total_movies.where(col("b.title_new").isNull()).select(a.title_new).distinct()
    #adding new column with the user_Id of the active user to remaining movies df
remaining_movies=remaining_movies.withColumn("userId",lit(int(user_id)))
#making recommendations using ALS recommender model and selecting only top 'n' movies
recommendations=rec_model.transform(remaining_movies).orderBy('prediction',ascending=False).limit(n)
#adding columns of movie titles in recommendations
movie_title = IndexToString(inputCol="title_new", outputCol="title",labels=model.labels)
final_recommendations=movie_title.transform(recommendations)
#return the recommendations to active user
return final_recommendations.show(n,False)
# COMMAND ----------
top_movies(85,10)
# COMMAND ----------
|
normal
|
{
"blob_id": "d22ebe24605065452ae35c44367ee21a726ae7a1",
"index": 1892,
"step-1": "<mask token>\n\n\ndef loadDataFrame(fileName, fileSchema):\n return spark.read.format('csv').schema(fileSchema).option('header', 'true'\n ).option('mode', 'DROPMALFORMED').csv('/FileStore/tables/%s' % fileName\n )\n\n\n<mask token>\n\n\ndef top_movies(user_id, n):\n \"\"\"\n This function returns the top 'n' movies that user has not seen yet but might like \n \n \"\"\"\n a = unique_movies.alias('a')\n watched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new')\n b = watched_movies.alias('b')\n total_movies = a.join(b, a.title_new == b.title_new, how='left')\n remaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\n remaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\n recommendations = rec_model.transform(remaining_movies).orderBy(\n 'prediction', ascending=False).limit(n)\n movie_title = IndexToString(inputCol='title_new', outputCol='title',\n labels=model.labels)\n final_recommendations = movie_title.transform(recommendations)\n return final_recommendations.show(n, False)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef loadDataFrame(fileName, fileSchema):\n return spark.read.format('csv').schema(fileSchema).option('header', 'true'\n ).option('mode', 'DROPMALFORMED').csv('/FileStore/tables/%s' % fileName\n )\n\n\n<mask token>\nprint((df.count(), len(df.columns)))\ndf.printSchema()\ndf.orderBy(rand()).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=True).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=True).show(10, False)\n<mask token>\nindexed.show(10)\nindexed.groupBy('title_new').count().orderBy('count', ascending=False).show(\n 10, False)\n<mask token>\ntrain.count()\ntest.count()\n<mask token>\npredicted_ratings.printSchema()\npredicted_ratings.orderBy(rand()).show(10)\n<mask token>\nprint(rmse)\n<mask token>\nunique_movies.count()\n<mask token>\nwatched_movies.count()\n<mask token>\ntotal_movies.show(10, False)\n<mask token>\nremaining_movies.count()\n<mask token>\nremaining_movies.show(10, False)\n<mask token>\nrecommendations.show(5, False)\n<mask token>\nfinal_recommendations.show(10, False)\n\n\ndef top_movies(user_id, n):\n \"\"\"\n This function returns the top 'n' movies that user has not seen yet but might like \n \n \"\"\"\n a = unique_movies.alias('a')\n watched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new')\n b = watched_movies.alias('b')\n total_movies = a.join(b, a.title_new == b.title_new, how='left')\n remaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\n remaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\n recommendations = rec_model.transform(remaining_movies).orderBy(\n 'prediction', ascending=False).limit(n)\n movie_title = IndexToString(inputCol='title_new', outputCol='title',\n labels=model.labels)\n final_recommendations = movie_title.transform(recommendations)\n return final_recommendations.show(n, False)\n\n\ntop_movies(85, 10)\n",
"step-3": "<mask token>\nspark = SparkSession.builder.appName('rc').getOrCreate()\n<mask token>\n\n\ndef loadDataFrame(fileName, fileSchema):\n return spark.read.format('csv').schema(fileSchema).option('header', 'true'\n ).option('mode', 'DROPMALFORMED').csv('/FileStore/tables/%s' % fileName\n )\n\n\n<mask token>\nmovieRatingSchema = StructType([StructField('userId', IntegerType(), True),\n StructField('movieId', IntegerType(), True), StructField('rating',\n FloatType(), True), StructField('timestamp', StringType(), True)])\nmovieSchema = StructType([StructField('movieId', IntegerType(), True),\n StructField('title', StringType(), True), StructField('genres',\n StringType(), True)])\nMovieRatingsDF = loadDataFrame('ratings.csv', movieRatingSchema).cache()\nMoviesDF = loadDataFrame('movies.csv', movieSchema).cache()\ndf = MovieRatingsDF.join(MoviesDF, 'movieId').select(['userId', 'title',\n 'rating'])\nprint((df.count(), len(df.columns)))\ndf.printSchema()\ndf.orderBy(rand()).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=True).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=True).show(10, False)\n<mask token>\nstringIndexer = StringIndexer(inputCol='title', outputCol='title_new')\nmodel = stringIndexer.fit(df)\nindexed = model.transform(df)\nindexed.show(10)\nindexed.groupBy('title_new').count().orderBy('count', ascending=False).show(\n 10, False)\ntrain, test = indexed.randomSplit([0.75, 0.25])\ntrain.count()\ntest.count()\n<mask token>\nrec = ALS(maxIter=10, regParam=0.01, userCol='userId', itemCol='title_new',\n ratingCol='rating', nonnegative=True, coldStartStrategy='drop')\nrec_model = rec.fit(train)\npredicted_ratings = rec_model.transform(test)\npredicted_ratings.printSchema()\npredicted_ratings.orderBy(rand()).show(10)\n<mask token>\nevaluator = RegressionEvaluator(metricName='rmse', predictionCol=\n 'prediction', labelCol='rating')\nrmse = evaluator.evaluate(predicted_ratings)\nprint(rmse)\nunique_movies = indexed.select('title_new').distinct()\nunique_movies.count()\na = unique_movies.alias('a')\nuser_id = 85\nwatched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new').distinct()\nwatched_movies.count()\nb = watched_movies.alias('b')\ntotal_movies = a.join(b, a.title_new == b.title_new, how='left')\ntotal_movies.show(10, False)\nremaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\nremaining_movies.count()\nremaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\nremaining_movies.show(10, False)\nrecommendations = rec_model.transform(remaining_movies).orderBy('prediction',\n ascending=False)\nrecommendations.show(5, False)\nmovie_title = IndexToString(inputCol='title_new', outputCol='title', labels\n =model.labels)\nfinal_recommendations = movie_title.transform(recommendations)\nfinal_recommendations.show(10, False)\n\n\ndef top_movies(user_id, n):\n \"\"\"\n This function returns the top 'n' movies that user has not seen yet but might like \n \n \"\"\"\n a = unique_movies.alias('a')\n watched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new')\n b = watched_movies.alias('b')\n total_movies = a.join(b, a.title_new == b.title_new, how='left')\n remaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\n 
remaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\n recommendations = rec_model.transform(remaining_movies).orderBy(\n 'prediction', ascending=False).limit(n)\n movie_title = IndexToString(inputCol='title_new', outputCol='title',\n labels=model.labels)\n final_recommendations = movie_title.transform(recommendations)\n return final_recommendations.show(n, False)\n\n\ntop_movies(85, 10)\n",
"step-4": "from pyspark.sql import SparkSession\nspark = SparkSession.builder.appName('rc').getOrCreate()\nfrom pyspark.sql.functions import *\n\n\ndef loadDataFrame(fileName, fileSchema):\n return spark.read.format('csv').schema(fileSchema).option('header', 'true'\n ).option('mode', 'DROPMALFORMED').csv('/FileStore/tables/%s' % fileName\n )\n\n\nfrom pyspark.sql.types import *\nmovieRatingSchema = StructType([StructField('userId', IntegerType(), True),\n StructField('movieId', IntegerType(), True), StructField('rating',\n FloatType(), True), StructField('timestamp', StringType(), True)])\nmovieSchema = StructType([StructField('movieId', IntegerType(), True),\n StructField('title', StringType(), True), StructField('genres',\n StringType(), True)])\nMovieRatingsDF = loadDataFrame('ratings.csv', movieRatingSchema).cache()\nMoviesDF = loadDataFrame('movies.csv', movieSchema).cache()\ndf = MovieRatingsDF.join(MoviesDF, 'movieId').select(['userId', 'title',\n 'rating'])\nprint((df.count(), len(df.columns)))\ndf.printSchema()\ndf.orderBy(rand()).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('userId').count().orderBy('count', ascending=True).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=False).show(10, False)\ndf.groupBy('title').count().orderBy('count', ascending=True).show(10, False)\nfrom pyspark.ml.feature import StringIndexer, IndexToString\nstringIndexer = StringIndexer(inputCol='title', outputCol='title_new')\nmodel = stringIndexer.fit(df)\nindexed = model.transform(df)\nindexed.show(10)\nindexed.groupBy('title_new').count().orderBy('count', ascending=False).show(\n 10, False)\ntrain, test = indexed.randomSplit([0.75, 0.25])\ntrain.count()\ntest.count()\nfrom pyspark.ml.recommendation import ALS\nrec = ALS(maxIter=10, regParam=0.01, userCol='userId', itemCol='title_new',\n ratingCol='rating', nonnegative=True, coldStartStrategy='drop')\nrec_model = rec.fit(train)\npredicted_ratings = rec_model.transform(test)\npredicted_ratings.printSchema()\npredicted_ratings.orderBy(rand()).show(10)\nfrom pyspark.ml.evaluation import RegressionEvaluator\nevaluator = RegressionEvaluator(metricName='rmse', predictionCol=\n 'prediction', labelCol='rating')\nrmse = evaluator.evaluate(predicted_ratings)\nprint(rmse)\nunique_movies = indexed.select('title_new').distinct()\nunique_movies.count()\na = unique_movies.alias('a')\nuser_id = 85\nwatched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new').distinct()\nwatched_movies.count()\nb = watched_movies.alias('b')\ntotal_movies = a.join(b, a.title_new == b.title_new, how='left')\ntotal_movies.show(10, False)\nremaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\nremaining_movies.count()\nremaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\nremaining_movies.show(10, False)\nrecommendations = rec_model.transform(remaining_movies).orderBy('prediction',\n ascending=False)\nrecommendations.show(5, False)\nmovie_title = IndexToString(inputCol='title_new', outputCol='title', labels\n =model.labels)\nfinal_recommendations = movie_title.transform(recommendations)\nfinal_recommendations.show(10, False)\n\n\ndef top_movies(user_id, n):\n \"\"\"\n This function returns the top 'n' movies that user has not seen yet but might like \n \n \"\"\"\n a = unique_movies.alias('a')\n watched_movies = indexed.filter(indexed['userId'] == user_id).select(\n 'title_new')\n b = watched_movies.alias('b')\n 
total_movies = a.join(b, a.title_new == b.title_new, how='left')\n remaining_movies = total_movies.where(col('b.title_new').isNull()).select(a\n .title_new).distinct()\n remaining_movies = remaining_movies.withColumn('userId', lit(int(user_id)))\n recommendations = rec_model.transform(remaining_movies).orderBy(\n 'prediction', ascending=False).limit(n)\n movie_title = IndexToString(inputCol='title_new', outputCol='title',\n labels=model.labels)\n final_recommendations = movie_title.transform(recommendations)\n return final_recommendations.show(n, False)\n\n\ntop_movies(85, 10)\n",
"step-5": "# Databricks notebook source\n#import and create sparksession object\nfrom pyspark.sql import SparkSession \nspark=SparkSession.builder.appName('rc').getOrCreate()\n\n# COMMAND ----------\n\n#import the required functions and libraries\nfrom pyspark.sql.functions import *\n\n# COMMAND ----------\n\n# Convert csv file to Spark DataFrame (Databricks version)\ndef loadDataFrame(fileName, fileSchema):\n return (spark.read.format(\"csv\")\n .schema(fileSchema)\n .option(\"header\", \"true\")\n .option(\"mode\", \"DROPMALFORMED\")\n .csv(\"/FileStore/tables/%s\" % (fileName)))\n\n# COMMAND ----------\n\nfrom pyspark.sql.types import *\n\nmovieRatingSchema = StructType([\n StructField(\"userId\", IntegerType(), True),\n StructField(\"movieId\", IntegerType(), True),\n StructField(\"rating\", FloatType(), True),\n StructField(\"timestamp\", StringType(), True)])\n\nmovieSchema = StructType([\n StructField(\"movieId\", IntegerType(), True),\n StructField(\"title\", StringType(), True),\n StructField(\"genres\", StringType(), True)])\n\nMovieRatingsDF = loadDataFrame(\"ratings.csv\", movieRatingSchema).cache()\nMoviesDF = loadDataFrame(\"movies.csv\", movieSchema).cache()\n\n# COMMAND ----------\n\n#load the dataset and create sprk dataframe\ndf = MovieRatingsDF.join(MoviesDF, 'movieId').select(['userId', 'title', 'rating'])\n\n\n#df=spark.read.csv('movie_ratings_df.csv',inferSchema=True,header=True)\n\n# COMMAND ----------\n\n#validate the shape of the data \nprint((df.count(),len(df.columns)))\n\n# COMMAND ----------\n\n#check columns in dataframe\ndf.printSchema()\n\n# COMMAND ----------\n\n#validate few rows of dataframe in random order\ndf.orderBy(rand()).show(10,False)\n\n# COMMAND ----------\n\n#check number of ratings by each user\ndf.groupBy('userId').count().orderBy('count',ascending=False).show(10,False)\n\n# COMMAND ----------\n\n#check number of ratings by each user\ndf.groupBy('userId').count().orderBy('count',ascending=True).show(10,False)\n\n# COMMAND ----------\n\n#number of times movie been rated \ndf.groupBy('title').count().orderBy('count',ascending=False).show(10,False)\n\n# COMMAND ----------\n\ndf.groupBy('title').count().orderBy('count',ascending=True).show(10,False)\n\n# COMMAND ----------\n\n#import String indexer to convert string values to numeric values\nfrom pyspark.ml.feature import StringIndexer,IndexToString\n\n# COMMAND ----------\n\n#creating string indexer to convert the movie title column values into numerical values\nstringIndexer = StringIndexer(inputCol=\"title\", outputCol=\"title_new\")\n\n# COMMAND ----------\n\n#applying stringindexer object on dataframe movie title column\nmodel = stringIndexer.fit(df)\n\n# COMMAND ----------\n\n#creating new dataframe with transformed values\nindexed = model.transform(df)\n\n# COMMAND ----------\n\n#validate the numerical title values\nindexed.show(10)\n\n# COMMAND ----------\n\n#number of times each numerical movie title has been rated \nindexed.groupBy('title_new').count().orderBy('count',ascending=False).show(10,False)\n\n# COMMAND ----------\n\n#split the data into training and test datatset\ntrain,test=indexed.randomSplit([0.75,0.25])\n\n# COMMAND ----------\n\n#count number of records in train set\ntrain.count()\n\n# COMMAND ----------\n\n#count number of records in test set\ntest.count()\n\n# COMMAND ----------\n\n#import ALS recommender function from pyspark ml library\nfrom pyspark.ml.recommendation import ALS\n\n# COMMAND ----------\n\n#Training the recommender model using train 
datatset\nrec=ALS(maxIter=10,regParam=0.01,userCol='userId',itemCol='title_new',ratingCol='rating',nonnegative=True,coldStartStrategy=\"drop\")\n\n# COMMAND ----------\n\n#fit the model on train set\nrec_model=rec.fit(train)\n\n# COMMAND ----------\n\n#making predictions on test set \npredicted_ratings=rec_model.transform(test)\n\n# COMMAND ----------\n\n#columns in predicted ratings dataframe\npredicted_ratings.printSchema()\n\n# COMMAND ----------\n\n#predicted vs actual ratings for test set \npredicted_ratings.orderBy(rand()).show(10)\n\n# COMMAND ----------\n\n#importing Regression Evaluator to measure RMSE\nfrom pyspark.ml.evaluation import RegressionEvaluator\n\n# COMMAND ----------\n\n#create Regressor evaluator object for measuring accuracy\nevaluator=RegressionEvaluator(metricName='rmse',predictionCol='prediction',labelCol='rating')\n\n# COMMAND ----------\n\n#apply the RE on predictions dataframe to calculate RMSE\nrmse=evaluator.evaluate(predicted_ratings)\n\n# COMMAND ----------\n\n#print RMSE error\nprint(rmse)\n\n# COMMAND ----------\n\n#Recommend top movies which user might like \n\n# COMMAND ----------\n\n#create dataset of all distinct movies \nunique_movies=indexed.select('title_new').distinct()\n\n# COMMAND ----------\n\n#number of unique movies\nunique_movies.count()\n\n# COMMAND ----------\n\n#assigning alias name 'a' to unique movies df\na = unique_movies.alias('a')\n\n# COMMAND ----------\n\nuser_id=85\n\n# COMMAND ----------\n\n#creating another dataframe which contains already watched movie by active user \nwatched_movies=indexed.filter(indexed['userId'] == user_id).select('title_new').distinct()\n\n# COMMAND ----------\n\n#number of movies already rated \nwatched_movies.count()\n\n# COMMAND ----------\n\n#assigning alias name 'b' to watched movies df\nb=watched_movies.alias('b')\n\n# COMMAND ----------\n\n#joining both tables on left join \ntotal_movies = a.join(b, a.title_new == b.title_new,how='left')\n\n\n# COMMAND ----------\n\ntotal_movies.show(10,False)\n\n# COMMAND ----------\n\n#selecting movies which active user is yet to rate or watch\nremaining_movies=total_movies.where(col(\"b.title_new\").isNull()).select(a.title_new).distinct()\n\n# COMMAND ----------\n\n#number of movies user is yet to rate \nremaining_movies.count()\n\n# COMMAND ----------\n\n#adding new column of user_Id of active useer to remaining movies df \nremaining_movies=remaining_movies.withColumn(\"userId\",lit(int(user_id)))\n\n\n# COMMAND ----------\n\nremaining_movies.show(10,False)\n\n# COMMAND ----------\n\n#making recommendations using ALS recommender model and selecting only top 'n' movies\nrecommendations=rec_model.transform(remaining_movies).orderBy('prediction',ascending=False)\n\n# COMMAND ----------\n\nrecommendations.show(5,False)\n\n# COMMAND ----------\n\n#converting title_new values back to movie titles\nmovie_title = IndexToString(inputCol=\"title_new\", outputCol=\"title\",labels=model.labels)\n\nfinal_recommendations=movie_title.transform(recommendations)\n\n\n# COMMAND ----------\n\nfinal_recommendations.show(10,False)\n\n# COMMAND ----------\n\n#create function to recommend top 'n' movies to any particular user\ndef top_movies(user_id,n):\n \"\"\"\n This function returns the top 'n' movies that user has not seen yet but might like \n \n \"\"\"\n #assigning alias name 'a' to unique movies df\n a = unique_movies.alias('a')\n \n #creating another dataframe which contains already watched movie by active user \n watched_movies=indexed.filter(indexed['userId'] == 
user_id).select('title_new')\n \n #assigning alias name 'b' to watched movies df\n b=watched_movies.alias('b')\n \n #joining both tables on left join \n total_movies = a.join(b, a.title_new == b.title_new,how='left')\n \n #selecting movies which active user is yet to rate or watch\n remaining_movies=total_movies.where(col(\"b.title_new\").isNull()).select(a.title_new).distinct()\n \n \n #adding new column of user_Id of active useer to remaining movies df \n remaining_movies=remaining_movies.withColumn(\"userId\",lit(int(user_id)))\n \n \n #making recommendations using ALS recommender model and selecting only top 'n' movies\n recommendations=rec_model.transform(remaining_movies).orderBy('prediction',ascending=False).limit(n)\n \n \n #adding columns of movie titles in recommendations\n movie_title = IndexToString(inputCol=\"title_new\", outputCol=\"title\",labels=model.labels)\n final_recommendations=movie_title.transform(recommendations)\n \n #return the recommendations to active user\n return final_recommendations.show(n,False)\n\n# COMMAND ----------\n\ntop_movies(85,10)\n\n# COMMAND ----------\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
# Define a function to compute spatially binned color features
# Pass the color_space flag as 3-letter all caps string
# like 'HSV' or 'LUV' etc.
# KEEP IN MIND IF YOU DECIDE TO USE THIS FUNCTION LATER
# IN YOUR PROJECT THAT IF YOU READ THE IMAGE WITH
# cv2.imread() INSTEAD YOU START WITH BGR COLOR!
def bin_spatial(img, color_space='RGB', size=(32, 32)):
    # Conversion codes below assume the input image is RGB
    # (e.g. read with mpimg.imread, as in __main__ below)
    colour_dict = { 'BGR':cv2.COLOR_RGB2BGR,
                    'HLS':cv2.COLOR_RGB2HLS,
                    'HSV':cv2.COLOR_RGB2HSV,
                    'LUV':cv2.COLOR_RGB2LUV,
                    'YUV': cv2.COLOR_RGB2YUV,
                    'YCrCb': cv2.COLOR_RGB2YCrCb
                  }

    # Convert to the requested colour space; unknown names (and 'RGB')
    # fall through to a plain copy instead of crashing cv2.cvtColor
    method = colour_dict.get(color_space.upper())
    if method is not None:
        img = cv2.cvtColor(img, method)
    else:
        img = np.copy(img)
small_img = cv2.resize(img, size)
feature_vec = small_img.ravel()
# Return the feature vector
return feature_vec
if __name__ == "__main__":
# You can also read cutout2, 3, 4 etc. to see other examples
image = mpimg.imread('cutout1.jpg')
feature_vec = bin_spatial(image, color_space='HSV', size=(32, 32))
# Plot features
plt.plot(feature_vec)
plt.title('Spatially Binned Features')
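
    # The cv2.imread() caveat above as a minimal sketch: imread returns BGR,
    # so convert to RGB before calling bin_spatial (same file as above)
    bgr = cv2.imread('cutout1.jpg')
    if bgr is not None:
        rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
        feature_vec_cv2 = bin_spatial(rgb, color_space='HSV', size=(32, 32))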
##
## Solution
##
# Define a function to compute color histogram features
# Pass the color_space flag as 3-letter all caps string
# like 'HSV' or 'LUV' etc.
# def bin_spatial(img, color_space='RGB', size=(32, 32)):
# # Convert image to new color space (if specified)
# if color_space != 'RGB':
# if color_space == 'HSV':
# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
# elif color_space == 'LUV':
# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2LUV)
# elif color_space == 'HLS':
# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
# elif color_space == 'YUV':
# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2YUV)
# elif color_space == 'YCrCb':
# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2YCrCb)
# else: feature_image = np.copy(img)
# # Use cv2.resize().ravel() to create the feature vector
# features = cv2.resize(feature_image, size).ravel()
# # Return the feature vector
# return features
|
normal
|
{
"blob_id": "f178ae70ce54244624c2254d0d6256b83144db33",
"index": 5085,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef bin_spatial(img, color_space='RGB', size=(32, 32)):\n colour_dict = {'RGB': 'RGB', 'BGR': cv2.COLOR_BGR2RGB, 'HLS': cv2.\n COLOR_BGR2HLS, 'HSV': cv2.COLOR_BGR2HSV, 'LUV': cv2.COLOR_BGR2LUV,\n 'YUV': cv2.COLOR_RGB2YUV, 'YCrCb': cv2.COLOR_RGB2YCrCb}\n if color_space.upper() != 'RGB':\n method = colour_dict.get(color_space, 'RGB')\n img = cv2.cvtColor(img, method)\n else:\n img = np.copy(img)\n small_img = cv2.resize(img, size)\n feature_vec = small_img.ravel()\n return feature_vec\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef bin_spatial(img, color_space='RGB', size=(32, 32)):\n colour_dict = {'RGB': 'RGB', 'BGR': cv2.COLOR_BGR2RGB, 'HLS': cv2.\n COLOR_BGR2HLS, 'HSV': cv2.COLOR_BGR2HSV, 'LUV': cv2.COLOR_BGR2LUV,\n 'YUV': cv2.COLOR_RGB2YUV, 'YCrCb': cv2.COLOR_RGB2YCrCb}\n if color_space.upper() != 'RGB':\n method = colour_dict.get(color_space, 'RGB')\n img = cv2.cvtColor(img, method)\n else:\n img = np.copy(img)\n small_img = cv2.resize(img, size)\n feature_vec = small_img.ravel()\n return feature_vec\n\n\nif __name__ == '__main__':\n image = mpimg.imread('cutout1.jpg')\n feature_vec = bin_spatial(image, color_space='HSV', size=(32, 32))\n plt.plot(feature_vec)\n plt.title('Spatially Binned Features')\n",
"step-4": "import numpy as np\nimport cv2\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\n\n\ndef bin_spatial(img, color_space='RGB', size=(32, 32)):\n colour_dict = {'RGB': 'RGB', 'BGR': cv2.COLOR_BGR2RGB, 'HLS': cv2.\n COLOR_BGR2HLS, 'HSV': cv2.COLOR_BGR2HSV, 'LUV': cv2.COLOR_BGR2LUV,\n 'YUV': cv2.COLOR_RGB2YUV, 'YCrCb': cv2.COLOR_RGB2YCrCb}\n if color_space.upper() != 'RGB':\n method = colour_dict.get(color_space, 'RGB')\n img = cv2.cvtColor(img, method)\n else:\n img = np.copy(img)\n small_img = cv2.resize(img, size)\n feature_vec = small_img.ravel()\n return feature_vec\n\n\nif __name__ == '__main__':\n image = mpimg.imread('cutout1.jpg')\n feature_vec = bin_spatial(image, color_space='HSV', size=(32, 32))\n plt.plot(feature_vec)\n plt.title('Spatially Binned Features')\n",
"step-5": "import numpy as np\nimport cv2\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\n\n\n# Define a function to compute color histogram features \n# Pass the color_space flag as 3-letter all caps string\n# like 'HSV' or 'LUV' etc.\n# KEEP IN MIND IF YOU DECIDE TO USE THIS FUNCTION LATER\n# IN YOUR PROJECT THAT IF YOU READ THE IMAGE WITH \n# cv2.imread() INSTEAD YOU START WITH BGR COLOR!\ndef bin_spatial(img, color_space='RGB', size=(32, 32)):\n colour_dict = { 'RGB':'RGB',\n 'BGR':cv2.COLOR_BGR2RGB,\n 'HLS':cv2.COLOR_BGR2HLS,\n 'HSV':cv2.COLOR_BGR2HSV,\n 'LUV':cv2.COLOR_BGR2LUV,\n 'YUV': cv2.COLOR_RGB2YUV,\n 'YCrCb': cv2.COLOR_RGB2YCrCb\n }\n \n # If someother Colour Space\n if color_space.upper() != 'RGB':\n method = colour_dict.get(color_space, 'RGB')\n img = cv2.cvtColor(img, method)\n else:\n img = np.copy(img)\n\n small_img = cv2.resize(img, size)\n feature_vec = small_img.ravel()\n # Return the feature vector\n return feature_vec\n\nif __name__ == \"__main__\": \n # You can also read cutout2, 3, 4 etc. to see other examples\n image = mpimg.imread('cutout1.jpg')\n feature_vec = bin_spatial(image, color_space='HSV', size=(32, 32))\n\n # Plot features\n plt.plot(feature_vec)\n plt.title('Spatially Binned Features')\n\n\n##\n## Solution\n##\n# Define a function to compute color histogram features \n# Pass the color_space flag as 3-letter all caps string\n# like 'HSV' or 'LUV' etc.\n# def bin_spatial(img, color_space='RGB', size=(32, 32)):\n# # Convert image to new color space (if specified)\n# if color_space != 'RGB':\n# if color_space == 'HSV':\n# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)\n# elif color_space == 'LUV':\n# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2LUV)\n# elif color_space == 'HLS':\n# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)\n# elif color_space == 'YUV':\n# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2YUV)\n# elif color_space == 'YCrCb':\n# feature_image = cv2.cvtColor(img, cv2.COLOR_RGB2YCrCb)\n# else: feature_image = np.copy(img) \n# # Use cv2.resize().ravel() to create the feature vector\n# features = cv2.resize(feature_image, size).ravel() \n# # Return the feature vector\n# return features",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
# encoding=utf-8
"""
@Author : Don
@Date : 9/16/2020 1:40 PM
@Desc :
"""
import os
import yaml
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config.yaml")
with open(config_path, "r", encoding="utf-8") as f:
conf = yaml.load(f.read(), Loader=yaml.FullLoader)
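
# A minimal usage sketch: conf is now a plain dict. The "timeout" key is
# hypothetical and depends on what config.yaml actually defines.
# timeout = conf.get("timeout", 30)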
|
normal
|
{
"blob_id": "8834548f6180fc864d73a71194125b22d230a393",
"index": 6882,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(config_path, 'r', encoding='utf-8') as f:\n conf = yaml.load(f.read(), Loader=yaml.FullLoader)\n",
"step-3": "<mask token>\nconfig_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'config.yaml')\nwith open(config_path, 'r', encoding='utf-8') as f:\n conf = yaml.load(f.read(), Loader=yaml.FullLoader)\n",
"step-4": "<mask token>\nimport os\nimport yaml\nconfig_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'config.yaml')\nwith open(config_path, 'r', encoding='utf-8') as f:\n conf = yaml.load(f.read(), Loader=yaml.FullLoader)\n",
"step-5": "#!/usr/bin/python\n# encoding=utf-8\n\n\"\"\"\n@Author : Don\n@Date : 9/16/2020 1:40 PM\n@Desc : \n\"\"\"\nimport os\n\nimport yaml\n\nconfig_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"config.yaml\")\n\nwith open(config_path, \"r\", encoding=\"utf-8\") as f:\n conf = yaml.load(f.read(), Loader=yaml.FullLoader)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from nose.tools import *
from packt_offer import *
from bs4 import BeautifulSoup
class TestPacktOffer:
def setUp(self):
self.proper_soup = BeautifulSoup(
""""
<div id="deal-of-the-day" class="cf">
<div class="dotd-main-book cf">
<div class="section-inner">
<div class="dotd-main-book-image float-left">
<a href="/application-development/github-essentials">
<noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url"/>
</noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" data-original="//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url" style="opacity: 1;">
</a>
</div>
<div class="dotd-main-book-summary float-left">
<div class="dotd-title">
<h2>Example title</h2>
</div>
<br>
<div>
An example description of book offered by Packtpub.
<ul>
<li>First reason why you should read this book.</li>
<li>Second reason why you should read this book.</li>
</ul>
</div>
<div class="dotd-main-book-form cf">
<div class="dots-main-book-price float-left"></div>
<div class="float-left free-ebook"></div>
</div>
</div>
</div>
</div>
</div>""", "html.parser")
for linebreak in self.proper_soup.find_all('br'):
linebreak.extract()
self.improper_soup = BeautifulSoup("""
<div id="deal-of-the-day" class="cf">
<div class="dotd-main-book cf">
<div class="section-inner">
<div class="dotd-main-book-summary float-left">
<div class="dotd-title">
</div>
<br>
</div>
</div>
</div>
</div>""", "html.parser")
for linebreak in self.improper_soup.find_all('br'):
linebreak.extract()
def test_offer_image_url_extracter_proper(self):
result = offer_image_url_extracter(self.proper_soup)
assert_equals(result,
'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')
def test_offer_image_url_extracter_no_content(self):
"""Case when <div> with a given image class is not present in a given page."""
result = offer_image_url_extracter(self.improper_soup)
assert_equals(result, '')
def test_offer_title_extracter_proper(self):
result = offer_title_extracter(self.proper_soup)
assert_equals(result, 'Example title')
def test_offer_title_extracter_no_content(self):
result = offer_title_extracter(self.improper_soup)
assert_equals(result, '')
def test_offer_description_extracter_proper(self):
result = offer_description_extracter(self.proper_soup)
assert_equals(result, """<div>
An example description of book offered by Packtpub.
<ul>
<li>First reason why you should read this book.</li>
<li>Second reason why you should read this book.</li>
</ul>
</div>
""")
def test_offer_description_extracter_no_content(self):
result = offer_description_extracter(self.improper_soup)
assert_equals(result, '')
def test_message_creator_all_proper(self):
msg = message_creator(b'000000', 'www.image.com/image.jpg', 'Offer title', 'Offer description',
'[email protected]', ['[email protected]'])
assert_in(
"""\
MIME-Version: 1.0
Subject: Packt offer: Offer title
From: [email protected]
To: [email protected]
This is a multi-part message in MIME format.""", msg)
assert_in(
"""\
<div><h2>New Packtpub offer:</h2></div>
</br>
<div>
<img src="cid:image1">
</div>
<div><h2>Offer title</h2></div>
</br>
<div>Offer description</div>
</br>
<a href="https://www.packtpub.com/packt/offers/free-learning">Get it!</a>""", msg)
assert_in(
"""\
Content-Type: image/jpeg
MIME-Version: 1.0
Content-Transfer-Encoding: base64
Content-ID: <image1>
Content-Disposition: inline; filename="www.image.com/image.jpg"\
""", msg)
@raises(AttributeError)
def test_message_creator_wrong_image_url(self):
msg = message_creator(b'000000', 'www.image.com', 'Offer title', 'Offer description',
'[email protected]', ['[email protected]'])
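
# The suite targets nose; assuming this file is saved as test_packt_offer.py,
# it can be run with: nosetests test_packt_offer.py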
|
normal
|
{
"blob_id": "a29f89750ef3a55116959b217b8c9100b294c66c",
"index": 3766,
"step-1": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n <mask token>\n <mask token>\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n <mask token>\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n <mask token>\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', '[email protected]', [\n '[email protected]'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: [email protected]\nTo: [email protected]\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\\\"\"\"\"\n , msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', '[email protected]', ['[email protected]'])\n",
"step-3": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', '[email protected]', [\n '[email protected]'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: [email protected]\nTo: [email protected]\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\\\"\"\"\"\n , msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', '[email protected]', ['[email protected]'])\n",
"step-4": "from nose.tools import *\nfrom packt_offer import *\nfrom bs4 import BeautifulSoup\n\n\nclass TestPacktOffer:\n\n def setUp(self):\n self.proper_soup = BeautifulSoup(\n \"\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-image float-left\">\n <a href=\"/application-development/github-essentials\">\n <noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\"/>\n </noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" data-original=\"//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\" style=\"opacity: 1;\">\t\t\t\t\t\t\n </a>\n </div>\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n <h2>Example title</h2>\n </div>\n <br>\n <div>\n An example description of book offered by Packtpub.\n <ul>\n <li>First reason why you should read this book.</li>\n <li>Second reason why you should read this book.</li>\n </ul>\n </div>\n <div class=\"dotd-main-book-form cf\">\n <div class=\"dots-main-book-price float-left\"></div>\n <div class=\"float-left free-ebook\"></div>\n </div>\n </div>\n \n </div>\n </div>\n </div>\"\"\"\n , 'html.parser')\n for linebreak in self.proper_soup.find_all('br'):\n linebreak.extract()\n self.improper_soup = BeautifulSoup(\n \"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n </div>\n <br>\n </div>\n\n </div>\n </div>\n </div>\"\"\"\n , 'html.parser')\n for linebreak in self.improper_soup.find_all('br'):\n linebreak.extract()\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n\n def test_offer_title_extracter_no_content(self):\n result = offer_title_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', '[email protected]', [\n '[email protected]'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: [email protected]\nTo: [email protected]\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n 
<img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\\\"\"\"\"\n , msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', '[email protected]', ['[email protected]'])\n",
"step-5": "from nose.tools import *\nfrom packt_offer import *\nfrom bs4 import BeautifulSoup\n\n\nclass TestPacktOffer:\n def setUp(self):\n self.proper_soup = BeautifulSoup(\n \"\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-image float-left\">\n <a href=\"/application-development/github-essentials\">\n <noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\"/>\n </noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" data-original=\"//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\" style=\"opacity: 1;\">\t\t\t\t\t\t\n </a>\n </div>\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n <h2>Example title</h2>\n </div>\n <br>\n <div>\n An example description of book offered by Packtpub.\n <ul>\n <li>First reason why you should read this book.</li>\n <li>Second reason why you should read this book.</li>\n </ul>\n </div>\n <div class=\"dotd-main-book-form cf\">\n <div class=\"dots-main-book-price float-left\"></div>\n <div class=\"float-left free-ebook\"></div>\n </div>\n </div>\n \n </div>\n </div>\n </div>\"\"\", \"html.parser\")\n for linebreak in self.proper_soup.find_all('br'):\n linebreak.extract()\n\n self.improper_soup = BeautifulSoup(\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n </div>\n <br>\n </div>\n\n </div>\n </div>\n </div>\"\"\", \"html.parser\")\n\n for linebreak in self.improper_soup.find_all('br'):\n linebreak.extract()\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n\n def test_offer_title_extracter_no_content(self):\n result = offer_title_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result, \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\")\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg', 'Offer title', 'Offer description',\n '[email protected]', ['[email protected]'])\n assert_in(\n \"\"\"\\\nMIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: [email protected]\nTo: [email protected]\n\nThis is a multi-part message in MIME format.\"\"\", msg)\n\n assert_in(\n \"\"\"\\\n <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img 
src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\", msg)\n\n assert_in(\n \"\"\"\\\nContent-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\"\\\n\"\"\", msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title', 'Offer description',\n '[email protected]', ['[email protected]'])\n",
"step-ids": [
3,
7,
8,
11,
12
]
}
|
[
3,
7,
8,
11,
12
] |
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_table as dt
import plotly.express as px
import pandas as pd
import plotly.graph_objects as go
import numpy as np
from datetime import datetime as dat
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
from sklearn.feature_selection import f_regression
from sklearn.feature_selection import SelectKBest
# TODO: 
# The model doesn't really take the opponent into consideration when calculating the win percentage. As you would expect, this is not ideal and needs to be fixed.
# 
# The bar charts only graph 2019 data. Letting the user choose the year would be an easy addition. Future implementations could also offer a "previous X games" option instead.
# 
# The bar chart only graphs stats correctly if they are selected in order. For example, if the set of possible stats is ['assists', 'rebounds', 'blocks'], they must all be selected
# to show all the values correctly. If the user selects only 'assists' and 'blocks', then 'assists' graphs correctly; 'blocks' is graphed but is given the value assigned
# to 'rebounds' because it occupies the second position in the array of stats to be graphed. 
#
# The model doesn't run well (and generally fails) for small schools due to a lack of data for those teams. Error checking needs to be implemented to eliminate this problem.
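# A minimal sketch of one possible fix for the stat-ordering issue above,
# assuming the entries of stat_names match the boxscore column names (an
# assumption; the UI may pass display labels instead):
#
#   stats_to_be_graphed = [stat_names[i] for i in stats]
#   teamVals = go.Bar(
#       x=stats_to_be_graphed,
#       y=teamStatValues[stats_to_be_graphed].mean(),  # y aligned with the selection
#       name=...,
#   )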
def getStatsByYear(teamID, year, data):
    ''' Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 '''
    # Filter by team, then by season. Returns an empty DataFrame when the team
    # has no recorded games for that year.
    teamStats = data[data["team_id"] == teamID]
    return teamStats[teamStats["season"] == year]
def generate_bar_chart(team, opponent, stats, stat_names, data):
    ''' Generates a bar chart for the user-selected team, opponent and stats '''
    teamStats = getStatsByYear(team, 2019, data)
    opponentStats = getStatsByYear(opponent, 2019, data)
    # Full set of boxscore columns, shared by both teams.
    boxscore_columns = ["assists", "assists_turnover_ratio", "blocked_att", "blocks", "defensive_rebounds", "fast_break_pts",
                        "field_goals_att", "field_goals_pct", "field_goals_made", "free_throws_att",
                        "free_throws_pct", "free_throws_made", "offensive_rebounds", "personal_fouls",
                        "points", "points_against", "points_in_paint", "points_off_turnovers",
                        "rebounds", "second_chance_pts", "steals", "team_rebounds", "three_points_att",
                        "three_points_pct", "three_points_made", "turnovers", "two_points_att",
                        "two_points_pct", "two_points_made"]
    teamStatValues = teamStats[boxscore_columns]
    opponentStatValues = opponentStats[boxscore_columns]
stats_to_be_graphed = []
for i in range(len(stat_names)):
if i in stats:
stats_to_be_graphed.append(stat_names[i])
# Graphs average stat values for the user's chosen team
teamVals = go.Bar(
x = stats_to_be_graphed,
y = teamStatValues.mean(),
name = data[(data.team_id == team)]['market'].iloc[0]
)
# Graphs average stat values for the opponent's team
opponentVals = go.Bar(
x = stats_to_be_graphed,
y = opponentStatValues.mean(),
name = data[(data.team_id == opponent)]['market'].iloc[0]
)
    chart_data = [teamVals, opponentVals]
    layout = go.Layout(barmode = 'group')
    fig = go.Figure(data = chart_data, layout = layout)
return fig
def getAllTeamMatchRecords(teamID, df):
''' Returns all game records for a given teamID '''
return df[df["team_id"] == teamID]
def select_features(X_train, y_train, X_test):
''' Selects features '''
# configure to select all features
fs = SelectKBest(score_func=f_regression, k='all')
# learn relationship from training data
fs.fit(X_train, y_train)
# transform train input data
X_train_fs = fs.transform(X_train)
# transform test input data
X_test_fs = fs.transform(X_test)
return X_train_fs, X_test_fs, fs
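# Note: with k='all', SelectKBest keeps every feature; only fs.scores_ (the
# univariate F-statistics from f_regression) are used downstream to rank them.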
def overallFeatures(df):
''' Return list of top four features '''
    datasetForFS = df.fillna(0)  # fillna returns a copy, so keep the result
X1 = datasetForFS[["assists","blocks","defensive_rebounds","opponent_drb","fast_break_pts","points_in_paint","points_off_turnovers","rebounds","steals","turnovers","efg","tov_pct","orb_pct","ftr"]]
y1 = datasetForFS['win']
X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=0.2, random_state=0)
X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)
colList = X1.columns.values.tolist()
statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score': pd.Series(fs.scores_.tolist())})
statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)
return statScoreDF.head(n=4)['Stat'].tolist()
def avgDataRow(df):
''' Returns the average values of a dataFrame '''
df1 = dict()
for (columnName, columnData) in df.iteritems():
df1[columnName] = [df[columnName].mean()]
return pd.DataFrame(df1)
def updateWinPct(dfMain, dfWin, reg):
''' Return new win percentage '''
dfPred = reg.predict(dfMain)
return pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.around(dfPred), 'Predicted (float)': dfPred})
def filterRowsFS(df):
''' Return dataframe with selected features '''
return df[["assists","blocks","defensive_rebounds","opponent_drb","fast_break_pts","points_in_paint","points_off_turnovers","rebounds","steals","turnovers","efg","tov_pct","orb_pct","ftr"]]
def learn(dataset):
    ''' Trains the model; falls back to the bundled CSV when no dataset is given '''
    if dataset is None:
        dataset = pd.read_csv("team_boxscores_v3.csv")
    dataset = dataset.fillna(0)
# Shuffle
dataset = dataset.sample(frac = 1)
X1 = dataset[["assists","blocks","defensive_rebounds","opponent_drb","fast_break_pts","points_in_paint","points_off_turnovers","rebounds","steals","turnovers","efg","tov_pct","orb_pct","ftr"]]
y1 = dataset['win']
# No shuffle
# X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=0.2, random_state=0)
# W/ shuffle
X_train = X1[int(len(X1)/5):]
X_test = X1[:int(len(X1)/5)]
y_train = y1[int(len(y1)/5):]
y_test = y1[:int(len(y1)/5)]
regressor = LinearRegression()
regressor.fit(X_train, y_train)
    # per-feature coefficients, computed for inspection (not returned)
    coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=['Coefficient'])
y_pred = regressor.predict(X_test)
y_pred_round = np.around(regressor.predict(X_test))
return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)': y_pred_round, 'Predicted (float)': y_pred})
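# Illustrative evaluation of the fitted model using the metrics module imported
# above (team_boxscores_v3.csv is the file learn() falls back to):
#
#   reg, results = learn(pd.read_csv("team_boxscores_v3.csv"))
#   mae = metrics.mean_absolute_error(results['Actual'], results['Predicted (float)'])
#   acc = (results['Actual'] == results['Predicted (int)']).mean()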
def calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data):
''' Calculates the win percentage for a team and the 4 selected stat values '''
    temp = getAllTeamMatchRecords(team, data)
    # rank the team's top four features once instead of four times
    changed_stat1, changed_stat2, changed_stat3, changed_stat4 = overallFeatures(temp)
average_team_stats = avgDataRow(filterRowsFS(temp))
dfWin = temp["win"]
dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.around(regressor.predict(average_team_stats)), 'Predicted (float)': regressor.predict(average_team_stats)})
origWinPct = dfFinal.at[0, 'Predicted (float)']
average_team_stats.at[0, changed_stat1] = stat1
average_team_stats.at[0, changed_stat2] = stat2
average_team_stats.at[0, changed_stat3] = stat3
average_team_stats.at[0, changed_stat4] = stat4
win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[0,'Predicted (float)']
# Makes sure you can't have a win percentage of > 100% or < 0%
if win_percentage > 1:
win_percentage = 1
elif win_percentage < 0:
win_percentage = 0
win_percentage = win_percentage * 100
win_percentage = round(win_percentage, 2)
win_percentage_text = "Projected Win Percentage: " + str(win_percentage) + "%"
return win_percentage_text
def get_default_slider_values(team, data):
    ''' Gets the values each of the 4 sliders should display. These values are what the model estimates the team will get in the matchup '''
    numSliders = 4
    stat_column_names = []
    stat_column_values = []
    team_records = getAllTeamMatchRecords(team, data)
    estimated_stat_values = avgDataRow(filterRowsFS(team_records))
    top_features = overallFeatures(team_records)  # rank the features once, not once per slider
    for i in range(numSliders):
        stat_column_names.append(top_features[i])
        stat_column_values.append(estimated_stat_values.at[0, stat_column_names[i]])
    return stat_column_names, stat_column_values
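# Illustrative end-to-end usage (assumes `boxscores` is the full boxscore
# DataFrame and `team_id` a valid team identifier):
#
#   reg, _ = learn(boxscores)
#   names, values = get_default_slider_values(team_id, boxscores)
#   print(calculate_win_percentage(team_id, *values, reg, boxscores))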
|
normal
|
{
"blob_id": "9b581df505765e895047584c5bb586faef95295f",
"index": 453,
"step-1": "<mask token>\n\n\ndef getStatsByYear(teamID, year, data):\n \"\"\" Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 \"\"\"\n teamStats = data[data['team_id'] == teamID]\n for index, row in teamStats.iterrows():\n if row['season'] == year:\n teamStatsForGivenYear = teamStats[data['season'] == row['season']]\n return teamStatsForGivenYear\n\n\ndef generate_bar_chart(team, opponent, stats, stat_names, data):\n \"\"\" Generates a bar chart for a the user selected team, opponent and stats \"\"\"\n teamStats = getStatsByYear(team, 2019, data)\n opponentStats = getStatsByYear(opponent, 2019, data)\n teamStatValues = teamStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n opponentStatValues = opponentStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n stats_to_be_graphed = []\n for i in range(len(stat_names)):\n if i in stats:\n stats_to_be_graphed.append(stat_names[i])\n teamVals = go.Bar(x=stats_to_be_graphed, y=teamStatValues.mean(), name=\n data[data.team_id == team]['market'].iloc[0])\n opponentVals = go.Bar(x=stats_to_be_graphed, y=opponentStatValues.mean(\n ), name=data[data.team_id == opponent]['market'].iloc[0])\n data = [teamVals, opponentVals]\n layout = go.Layout(barmode='group')\n fig = go.Figure(data=data, layout=layout)\n return fig\n\n\n<mask token>\n\n\ndef select_features(X_train, y_train, X_test):\n \"\"\" Selects features \"\"\"\n fs = SelectKBest(score_func=f_regression, k='all')\n fs.fit(X_train, y_train)\n X_train_fs = fs.transform(X_train)\n X_test_fs = fs.transform(X_test)\n return X_train_fs, X_test_fs, fs\n\n\ndef overallFeatures(df):\n \"\"\" Return list of top four features \"\"\"\n datasetForFS = df\n datasetForFS.fillna(0)\n X1 = datasetForFS[['assists', 'blocks', 'defensive_rebounds',\n 'opponent_drb', 'fast_break_pts', 'points_in_paint',\n 'points_off_turnovers', 'rebounds', 'steals', 'turnovers', 'efg',\n 'tov_pct', 'orb_pct', 'ftr']]\n y1 = datasetForFS['win']\n X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=\n 0.2, random_state=0)\n X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)\n colList = X1.columns.values.tolist()\n statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score':\n pd.Series(fs.scores_.tolist())})\n statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)\n return statScoreDF.head(n=4)['Stat'].tolist()\n\n\n<mask token>\n\n\ndef filterRowsFS(df):\n \"\"\" Return dataframe with selected features \"\"\"\n return df[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 
'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n\n\ndef learn(dataset):\n \"\"\" Trains the model \"\"\"\n dataset = pd.read_csv('team_boxscores_v3.csv')\n dataset = dataset.fillna(0)\n dataset = dataset.sample(frac=1)\n X1 = dataset[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n y1 = dataset['win']\n X_train = X1[int(len(X1) / 5):]\n X_test = X1[:int(len(X1) / 5)]\n y_train = y1[int(len(y1) / 5):]\n y_test = y1[:int(len(y1) / 5)]\n regressor = LinearRegression()\n regressor.fit(X_train, y_train)\n coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=[\n 'Coefficient'])\n y_pred = regressor.predict(X_test)\n y_pred_round = np.around(regressor.predict(X_test))\n return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)':\n y_pred_round, 'Predicted (float)': y_pred})\n\n\ndef calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data\n ):\n \"\"\" Calculates the win percentage for a team and the 4 selected stat values \"\"\"\n temp = getAllTeamMatchRecords(team, data)\n changed_stat1 = overallFeatures(temp)[0]\n changed_stat2 = overallFeatures(temp)[1]\n changed_stat3 = overallFeatures(temp)[2]\n changed_stat4 = overallFeatures(temp)[3]\n average_team_stats = avgDataRow(filterRowsFS(temp))\n dfWin = temp['win']\n dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(regressor.predict(average_team_stats)), 'Predicted (float)':\n regressor.predict(average_team_stats)})\n origWinPct = dfFinal.at[0, 'Predicted (float)']\n average_team_stats.at[0, changed_stat1] = stat1\n average_team_stats.at[0, changed_stat2] = stat2\n average_team_stats.at[0, changed_stat3] = stat3\n average_team_stats.at[0, changed_stat4] = stat4\n win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[\n 0, 'Predicted (float)']\n if win_percentage > 1:\n win_percentage = 1\n elif win_percentage < 0:\n win_percentage = 0\n win_percentage = win_percentage * 100\n win_percentage = round(win_percentage, 2)\n win_percentage_text = 'Projected Win Percentage: ' + str(win_percentage\n ) + '%'\n return win_percentage_text\n\n\ndef get_default_slider_values(team, data):\n \"\"\" Gets the values the each of the 4 sliders should display. These values are what the model estimates the team will get in the matchup \"\"\"\n numSliders = 4\n stat_column_names = []\n stat_column_values = []\n estimated_stat_values = avgDataRow(filterRowsFS(getAllTeamMatchRecords(\n team, data)))\n for i in range(numSliders):\n stat_column_names.append(overallFeatures(getAllTeamMatchRecords(\n team, data))[i])\n stat_column_values.append(estimated_stat_values.at[0,\n stat_column_names[i]])\n return stat_column_names, stat_column_values\n",
"step-2": "<mask token>\n\n\ndef getStatsByYear(teamID, year, data):\n \"\"\" Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 \"\"\"\n teamStats = data[data['team_id'] == teamID]\n for index, row in teamStats.iterrows():\n if row['season'] == year:\n teamStatsForGivenYear = teamStats[data['season'] == row['season']]\n return teamStatsForGivenYear\n\n\ndef generate_bar_chart(team, opponent, stats, stat_names, data):\n \"\"\" Generates a bar chart for a the user selected team, opponent and stats \"\"\"\n teamStats = getStatsByYear(team, 2019, data)\n opponentStats = getStatsByYear(opponent, 2019, data)\n teamStatValues = teamStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n opponentStatValues = opponentStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n stats_to_be_graphed = []\n for i in range(len(stat_names)):\n if i in stats:\n stats_to_be_graphed.append(stat_names[i])\n teamVals = go.Bar(x=stats_to_be_graphed, y=teamStatValues.mean(), name=\n data[data.team_id == team]['market'].iloc[0])\n opponentVals = go.Bar(x=stats_to_be_graphed, y=opponentStatValues.mean(\n ), name=data[data.team_id == opponent]['market'].iloc[0])\n data = [teamVals, opponentVals]\n layout = go.Layout(barmode='group')\n fig = go.Figure(data=data, layout=layout)\n return fig\n\n\n<mask token>\n\n\ndef select_features(X_train, y_train, X_test):\n \"\"\" Selects features \"\"\"\n fs = SelectKBest(score_func=f_regression, k='all')\n fs.fit(X_train, y_train)\n X_train_fs = fs.transform(X_train)\n X_test_fs = fs.transform(X_test)\n return X_train_fs, X_test_fs, fs\n\n\ndef overallFeatures(df):\n \"\"\" Return list of top four features \"\"\"\n datasetForFS = df\n datasetForFS.fillna(0)\n X1 = datasetForFS[['assists', 'blocks', 'defensive_rebounds',\n 'opponent_drb', 'fast_break_pts', 'points_in_paint',\n 'points_off_turnovers', 'rebounds', 'steals', 'turnovers', 'efg',\n 'tov_pct', 'orb_pct', 'ftr']]\n y1 = datasetForFS['win']\n X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=\n 0.2, random_state=0)\n X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)\n colList = X1.columns.values.tolist()\n statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score':\n pd.Series(fs.scores_.tolist())})\n statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)\n return statScoreDF.head(n=4)['Stat'].tolist()\n\n\n<mask token>\n\n\ndef updateWinPct(dfMain, dfWin, reg):\n \"\"\" Return new win percentage \"\"\"\n dfPred = reg.predict(dfMain)\n return pd.DataFrame({'Actual': 
dfWin.mean(), 'Predicted (int)': np.\n around(dfPred), 'Predicted (float)': dfPred})\n\n\ndef filterRowsFS(df):\n \"\"\" Return dataframe with selected features \"\"\"\n return df[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n\n\ndef learn(dataset):\n \"\"\" Trains the model \"\"\"\n dataset = pd.read_csv('team_boxscores_v3.csv')\n dataset = dataset.fillna(0)\n dataset = dataset.sample(frac=1)\n X1 = dataset[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n y1 = dataset['win']\n X_train = X1[int(len(X1) / 5):]\n X_test = X1[:int(len(X1) / 5)]\n y_train = y1[int(len(y1) / 5):]\n y_test = y1[:int(len(y1) / 5)]\n regressor = LinearRegression()\n regressor.fit(X_train, y_train)\n coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=[\n 'Coefficient'])\n y_pred = regressor.predict(X_test)\n y_pred_round = np.around(regressor.predict(X_test))\n return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)':\n y_pred_round, 'Predicted (float)': y_pred})\n\n\ndef calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data\n ):\n \"\"\" Calculates the win percentage for a team and the 4 selected stat values \"\"\"\n temp = getAllTeamMatchRecords(team, data)\n changed_stat1 = overallFeatures(temp)[0]\n changed_stat2 = overallFeatures(temp)[1]\n changed_stat3 = overallFeatures(temp)[2]\n changed_stat4 = overallFeatures(temp)[3]\n average_team_stats = avgDataRow(filterRowsFS(temp))\n dfWin = temp['win']\n dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(regressor.predict(average_team_stats)), 'Predicted (float)':\n regressor.predict(average_team_stats)})\n origWinPct = dfFinal.at[0, 'Predicted (float)']\n average_team_stats.at[0, changed_stat1] = stat1\n average_team_stats.at[0, changed_stat2] = stat2\n average_team_stats.at[0, changed_stat3] = stat3\n average_team_stats.at[0, changed_stat4] = stat4\n win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[\n 0, 'Predicted (float)']\n if win_percentage > 1:\n win_percentage = 1\n elif win_percentage < 0:\n win_percentage = 0\n win_percentage = win_percentage * 100\n win_percentage = round(win_percentage, 2)\n win_percentage_text = 'Projected Win Percentage: ' + str(win_percentage\n ) + '%'\n return win_percentage_text\n\n\ndef get_default_slider_values(team, data):\n \"\"\" Gets the values the each of the 4 sliders should display. These values are what the model estimates the team will get in the matchup \"\"\"\n numSliders = 4\n stat_column_names = []\n stat_column_values = []\n estimated_stat_values = avgDataRow(filterRowsFS(getAllTeamMatchRecords(\n team, data)))\n for i in range(numSliders):\n stat_column_names.append(overallFeatures(getAllTeamMatchRecords(\n team, data))[i])\n stat_column_values.append(estimated_stat_values.at[0,\n stat_column_names[i]])\n return stat_column_names, stat_column_values\n",
"step-3": "<mask token>\n\n\ndef getStatsByYear(teamID, year, data):\n \"\"\" Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 \"\"\"\n teamStats = data[data['team_id'] == teamID]\n for index, row in teamStats.iterrows():\n if row['season'] == year:\n teamStatsForGivenYear = teamStats[data['season'] == row['season']]\n return teamStatsForGivenYear\n\n\ndef generate_bar_chart(team, opponent, stats, stat_names, data):\n \"\"\" Generates a bar chart for a the user selected team, opponent and stats \"\"\"\n teamStats = getStatsByYear(team, 2019, data)\n opponentStats = getStatsByYear(opponent, 2019, data)\n teamStatValues = teamStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n opponentStatValues = opponentStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n stats_to_be_graphed = []\n for i in range(len(stat_names)):\n if i in stats:\n stats_to_be_graphed.append(stat_names[i])\n teamVals = go.Bar(x=stats_to_be_graphed, y=teamStatValues.mean(), name=\n data[data.team_id == team]['market'].iloc[0])\n opponentVals = go.Bar(x=stats_to_be_graphed, y=opponentStatValues.mean(\n ), name=data[data.team_id == opponent]['market'].iloc[0])\n data = [teamVals, opponentVals]\n layout = go.Layout(barmode='group')\n fig = go.Figure(data=data, layout=layout)\n return fig\n\n\ndef getAllTeamMatchRecords(teamID, df):\n \"\"\" Returns all game records for a given teamID \"\"\"\n return df[df['team_id'] == teamID]\n\n\ndef select_features(X_train, y_train, X_test):\n \"\"\" Selects features \"\"\"\n fs = SelectKBest(score_func=f_regression, k='all')\n fs.fit(X_train, y_train)\n X_train_fs = fs.transform(X_train)\n X_test_fs = fs.transform(X_test)\n return X_train_fs, X_test_fs, fs\n\n\ndef overallFeatures(df):\n \"\"\" Return list of top four features \"\"\"\n datasetForFS = df\n datasetForFS.fillna(0)\n X1 = datasetForFS[['assists', 'blocks', 'defensive_rebounds',\n 'opponent_drb', 'fast_break_pts', 'points_in_paint',\n 'points_off_turnovers', 'rebounds', 'steals', 'turnovers', 'efg',\n 'tov_pct', 'orb_pct', 'ftr']]\n y1 = datasetForFS['win']\n X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=\n 0.2, random_state=0)\n X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)\n colList = X1.columns.values.tolist()\n statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score':\n pd.Series(fs.scores_.tolist())})\n statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)\n return statScoreDF.head(n=4)['Stat'].tolist()\n\n\ndef avgDataRow(df):\n \"\"\" Returns the 
average values of a dataFrame \"\"\"\n df1 = dict()\n for columnName, columnData in df.iteritems():\n df1[columnName] = [df[columnName].mean()]\n return pd.DataFrame(df1)\n\n\ndef updateWinPct(dfMain, dfWin, reg):\n \"\"\" Return new win percentage \"\"\"\n dfPred = reg.predict(dfMain)\n return pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(dfPred), 'Predicted (float)': dfPred})\n\n\ndef filterRowsFS(df):\n \"\"\" Return dataframe with selected features \"\"\"\n return df[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n\n\ndef learn(dataset):\n \"\"\" Trains the model \"\"\"\n dataset = pd.read_csv('team_boxscores_v3.csv')\n dataset = dataset.fillna(0)\n dataset = dataset.sample(frac=1)\n X1 = dataset[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n y1 = dataset['win']\n X_train = X1[int(len(X1) / 5):]\n X_test = X1[:int(len(X1) / 5)]\n y_train = y1[int(len(y1) / 5):]\n y_test = y1[:int(len(y1) / 5)]\n regressor = LinearRegression()\n regressor.fit(X_train, y_train)\n coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=[\n 'Coefficient'])\n y_pred = regressor.predict(X_test)\n y_pred_round = np.around(regressor.predict(X_test))\n return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)':\n y_pred_round, 'Predicted (float)': y_pred})\n\n\ndef calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data\n ):\n \"\"\" Calculates the win percentage for a team and the 4 selected stat values \"\"\"\n temp = getAllTeamMatchRecords(team, data)\n changed_stat1 = overallFeatures(temp)[0]\n changed_stat2 = overallFeatures(temp)[1]\n changed_stat3 = overallFeatures(temp)[2]\n changed_stat4 = overallFeatures(temp)[3]\n average_team_stats = avgDataRow(filterRowsFS(temp))\n dfWin = temp['win']\n dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(regressor.predict(average_team_stats)), 'Predicted (float)':\n regressor.predict(average_team_stats)})\n origWinPct = dfFinal.at[0, 'Predicted (float)']\n average_team_stats.at[0, changed_stat1] = stat1\n average_team_stats.at[0, changed_stat2] = stat2\n average_team_stats.at[0, changed_stat3] = stat3\n average_team_stats.at[0, changed_stat4] = stat4\n win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[\n 0, 'Predicted (float)']\n if win_percentage > 1:\n win_percentage = 1\n elif win_percentage < 0:\n win_percentage = 0\n win_percentage = win_percentage * 100\n win_percentage = round(win_percentage, 2)\n win_percentage_text = 'Projected Win Percentage: ' + str(win_percentage\n ) + '%'\n return win_percentage_text\n\n\ndef get_default_slider_values(team, data):\n \"\"\" Gets the values the each of the 4 sliders should display. These values are what the model estimates the team will get in the matchup \"\"\"\n numSliders = 4\n stat_column_names = []\n stat_column_values = []\n estimated_stat_values = avgDataRow(filterRowsFS(getAllTeamMatchRecords(\n team, data)))\n for i in range(numSliders):\n stat_column_names.append(overallFeatures(getAllTeamMatchRecords(\n team, data))[i])\n stat_column_values.append(estimated_stat_values.at[0,\n stat_column_names[i]])\n return stat_column_names, stat_column_values\n",
"step-4": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\nimport dash_table as dt\nimport plotly.express as px\nimport pandas as pd\nimport plotly.graph_objects as go\nimport numpy as np\nfrom datetime import datetime as dat\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn import metrics\nfrom sklearn.feature_selection import f_regression\nfrom sklearn.feature_selection import SelectKBest\n\n\ndef getStatsByYear(teamID, year, data):\n \"\"\" Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 \"\"\"\n teamStats = data[data['team_id'] == teamID]\n for index, row in teamStats.iterrows():\n if row['season'] == year:\n teamStatsForGivenYear = teamStats[data['season'] == row['season']]\n return teamStatsForGivenYear\n\n\ndef generate_bar_chart(team, opponent, stats, stat_names, data):\n \"\"\" Generates a bar chart for a the user selected team, opponent and stats \"\"\"\n teamStats = getStatsByYear(team, 2019, data)\n opponentStats = getStatsByYear(opponent, 2019, data)\n teamStatValues = teamStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n opponentStatValues = opponentStats[['assists', 'assists_turnover_ratio',\n 'blocked_att', 'blocks', 'defensive_rebounds', 'fast_break_pts',\n 'field_goals_att', 'field_goals_pct', 'field_goals_made',\n 'free_throws_att', 'free_throws_pct', 'free_throws_made',\n 'offensive_rebounds', 'personal_fouls', 'points', 'points_against',\n 'points_in_paint', 'points_off_turnovers', 'rebounds',\n 'second_chance_pts', 'steals', 'team_rebounds', 'three_points_att',\n 'three_points_pct', 'three_points_made', 'turnovers',\n 'two_points_att', 'two_points_pct', 'two_points_made']]\n stats_to_be_graphed = []\n for i in range(len(stat_names)):\n if i in stats:\n stats_to_be_graphed.append(stat_names[i])\n teamVals = go.Bar(x=stats_to_be_graphed, y=teamStatValues.mean(), name=\n data[data.team_id == team]['market'].iloc[0])\n opponentVals = go.Bar(x=stats_to_be_graphed, y=opponentStatValues.mean(\n ), name=data[data.team_id == opponent]['market'].iloc[0])\n data = [teamVals, opponentVals]\n layout = go.Layout(barmode='group')\n fig = go.Figure(data=data, layout=layout)\n return fig\n\n\ndef getAllTeamMatchRecords(teamID, df):\n \"\"\" Returns all game records for a given teamID \"\"\"\n return df[df['team_id'] == teamID]\n\n\ndef select_features(X_train, y_train, X_test):\n \"\"\" Selects features \"\"\"\n fs = SelectKBest(score_func=f_regression, k='all')\n fs.fit(X_train, y_train)\n X_train_fs = fs.transform(X_train)\n X_test_fs = fs.transform(X_test)\n return X_train_fs, X_test_fs, fs\n\n\ndef overallFeatures(df):\n \"\"\" Return list of top four features \"\"\"\n datasetForFS = df\n datasetForFS.fillna(0)\n X1 = datasetForFS[['assists', 'blocks', 'defensive_rebounds',\n 'opponent_drb', 'fast_break_pts', 'points_in_paint',\n 'points_off_turnovers', 'rebounds', 'steals', 'turnovers', 'efg',\n 'tov_pct', 'orb_pct', 'ftr']]\n y1 = datasetForFS['win']\n 
X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=\n 0.2, random_state=0)\n X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)\n colList = X1.columns.values.tolist()\n statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score':\n pd.Series(fs.scores_.tolist())})\n statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)\n return statScoreDF.head(n=4)['Stat'].tolist()\n\n\ndef avgDataRow(df):\n \"\"\" Returns the average values of a dataFrame \"\"\"\n df1 = dict()\n for columnName, columnData in df.iteritems():\n df1[columnName] = [df[columnName].mean()]\n return pd.DataFrame(df1)\n\n\ndef updateWinPct(dfMain, dfWin, reg):\n \"\"\" Return new win percentage \"\"\"\n dfPred = reg.predict(dfMain)\n return pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(dfPred), 'Predicted (float)': dfPred})\n\n\ndef filterRowsFS(df):\n \"\"\" Return dataframe with selected features \"\"\"\n return df[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n\n\ndef learn(dataset):\n \"\"\" Trains the model \"\"\"\n dataset = pd.read_csv('team_boxscores_v3.csv')\n dataset = dataset.fillna(0)\n dataset = dataset.sample(frac=1)\n X1 = dataset[['assists', 'blocks', 'defensive_rebounds', 'opponent_drb',\n 'fast_break_pts', 'points_in_paint', 'points_off_turnovers',\n 'rebounds', 'steals', 'turnovers', 'efg', 'tov_pct', 'orb_pct', 'ftr']]\n y1 = dataset['win']\n X_train = X1[int(len(X1) / 5):]\n X_test = X1[:int(len(X1) / 5)]\n y_train = y1[int(len(y1) / 5):]\n y_test = y1[:int(len(y1) / 5)]\n regressor = LinearRegression()\n regressor.fit(X_train, y_train)\n coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=[\n 'Coefficient'])\n y_pred = regressor.predict(X_test)\n y_pred_round = np.around(regressor.predict(X_test))\n return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)':\n y_pred_round, 'Predicted (float)': y_pred})\n\n\ndef calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data\n ):\n \"\"\" Calculates the win percentage for a team and the 4 selected stat values \"\"\"\n temp = getAllTeamMatchRecords(team, data)\n changed_stat1 = overallFeatures(temp)[0]\n changed_stat2 = overallFeatures(temp)[1]\n changed_stat3 = overallFeatures(temp)[2]\n changed_stat4 = overallFeatures(temp)[3]\n average_team_stats = avgDataRow(filterRowsFS(temp))\n dfWin = temp['win']\n dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.\n around(regressor.predict(average_team_stats)), 'Predicted (float)':\n regressor.predict(average_team_stats)})\n origWinPct = dfFinal.at[0, 'Predicted (float)']\n average_team_stats.at[0, changed_stat1] = stat1\n average_team_stats.at[0, changed_stat2] = stat2\n average_team_stats.at[0, changed_stat3] = stat3\n average_team_stats.at[0, changed_stat4] = stat4\n win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[\n 0, 'Predicted (float)']\n if win_percentage > 1:\n win_percentage = 1\n elif win_percentage < 0:\n win_percentage = 0\n win_percentage = win_percentage * 100\n win_percentage = round(win_percentage, 2)\n win_percentage_text = 'Projected Win Percentage: ' + str(win_percentage\n ) + '%'\n return win_percentage_text\n\n\ndef get_default_slider_values(team, data):\n \"\"\" Gets the values the each of the 4 sliders should display. 
These values are what the model estimates the team will get in the matchup \"\"\"\n numSliders = 4\n stat_column_names = []\n stat_column_values = []\n estimated_stat_values = avgDataRow(filterRowsFS(getAllTeamMatchRecords(\n team, data)))\n for i in range(numSliders):\n stat_column_names.append(overallFeatures(getAllTeamMatchRecords(\n team, data))[i])\n stat_column_values.append(estimated_stat_values.at[0,\n stat_column_names[i]])\n return stat_column_names, stat_column_values\n",
"step-5": "import dash\r\nimport dash_core_components as dcc\r\nimport dash_html_components as html\r\nimport dash_table as dt\r\nimport plotly.express as px\r\nimport pandas as pd\r\nimport plotly.graph_objects as go\r\nimport numpy as np\r\nfrom datetime import datetime as dat\r\nfrom sklearn.model_selection import train_test_split\r\nfrom sklearn.linear_model import LinearRegression\r\nfrom sklearn import metrics\r\nfrom sklearn.feature_selection import f_regression\r\nfrom sklearn.feature_selection import SelectKBest\r\n\r\n# TODO: \r\n# The model doesn't really take the opponent into consideration when calculating the win percentage. As you would expect, this is not ideal and is something that needs to be fixed\r\n# \r\n# The bar charts only graph 2019 data. Allowing the user to choose the year would be an easy addition. Future implemenatations could also include a previous X games option instead\r\n# \r\n# The bar chart only graphs stats correctly if they are selected in order. For example, if the set of possible stats are ['assists', 'rebounds', 'blocks'], they must all be selected\r\n# in order to show all the values correctly. If the user selects only 'assists' and 'blocks' then 'assists' graphs correctly. 'blocks' is graphed but is given the value assigned\r\n# to 'rebounds' because it assumes the second position in the array of stats to be graphed. \r\n#\r\n# The model doesn't run well (and generally fails) for small schools due to a lack of data for those teams. Error checking needs to be implemented to eliminate this problem.\r\n\r\n\r\ndef getStatsByYear(teamID, year, data):\r\n ''' Returns the stats for a chosen team for a specific year. Choices are 2016 - 2019 '''\r\n teamStats = data[data[\"team_id\"] == teamID]\r\n \r\n for index, row in teamStats.iterrows():\r\n if (row[\"season\"] == year): \r\n teamStatsForGivenYear = teamStats[data[\"season\"] == row[\"season\"]]\r\n return teamStatsForGivenYear\r\n\r\ndef generate_bar_chart(team, opponent, stats, stat_names, data):\r\n ''' Generates a bar chart for a the user selected team, opponent and stats ''' \r\n\r\n teamStats = getStatsByYear(team, 2019, data)\r\n opponentStats = getStatsByYear(opponent, 2019, data)\r\n\r\n teamStatValues = teamStats[[\"assists\", \"assists_turnover_ratio\", \"blocked_att\", \"blocks\", \"defensive_rebounds\", \"fast_break_pts\",\r\n \"field_goals_att\", \"field_goals_pct\", \"field_goals_made\", \"free_throws_att\",\r\n \"free_throws_pct\", \"free_throws_made\", \"offensive_rebounds\", \"personal_fouls\", \r\n \"points\", \"points_against\", \"points_in_paint\", \"points_off_turnovers\",\r\n \"rebounds\", \"second_chance_pts\", \"steals\", \"team_rebounds\", \"three_points_att\",\r\n \"three_points_pct\", \"three_points_made\", \"turnovers\", \"two_points_att\",\r\n \"two_points_pct\", \"two_points_made\"\r\n ]]\r\n\r\n opponentStatValues = opponentStats[[\"assists\", \"assists_turnover_ratio\", \"blocked_att\", \"blocks\", \"defensive_rebounds\", \"fast_break_pts\",\r\n \"field_goals_att\", \"field_goals_pct\", \"field_goals_made\", \"free_throws_att\",\r\n \"free_throws_pct\", \"free_throws_made\", \"offensive_rebounds\", \"personal_fouls\", \r\n \"points\", \"points_against\", \"points_in_paint\", \"points_off_turnovers\",\r\n \"rebounds\", \"second_chance_pts\", \"steals\", \"team_rebounds\", \"three_points_att\",\r\n \"three_points_pct\", \"three_points_made\", \"turnovers\", \"two_points_att\",\r\n \"two_points_pct\", \"two_points_made\"\r\n ]]\r\n\r\n stats_to_be_graphed = []\r\n\r\n 
for i in range(len(stat_names)):\r\n if i in stats:\r\n stats_to_be_graphed.append(stat_names[i])\r\n\r\n # Graphs average stat values for the user's chosen team\r\n teamVals = go.Bar(\r\n x = stats_to_be_graphed,\r\n y = teamStatValues.mean(),\r\n name = data[(data.team_id == team)]['market'].iloc[0]\r\n )\r\n\r\n # Graphs average stat values for the opponent's team\r\n opponentVals = go.Bar(\r\n x = stats_to_be_graphed,\r\n y = opponentStatValues.mean(),\r\n name = data[(data.team_id == opponent)]['market'].iloc[0]\r\n )\r\n \r\n data = [teamVals, opponentVals]\r\n layout = go.Layout(barmode = 'group')\r\n fig = go.Figure(data = data, layout = layout)\r\n \r\n return fig\r\n\r\ndef getAllTeamMatchRecords(teamID, df):\r\n ''' Returns all game records for a given teamID '''\r\n return df[df[\"team_id\"] == teamID]\r\n\r\ndef select_features(X_train, y_train, X_test):\r\n ''' Selects features '''\r\n # configure to select all features\r\n fs = SelectKBest(score_func=f_regression, k='all')\r\n # learn relationship from training data\r\n fs.fit(X_train, y_train)\r\n # transform train input data\r\n X_train_fs = fs.transform(X_train)\r\n # transform test input data\r\n X_test_fs = fs.transform(X_test)\r\n return X_train_fs, X_test_fs, fs\r\n\r\ndef overallFeatures(df):\r\n ''' Return list of top four features '''\r\n datasetForFS = df\r\n datasetForFS.fillna(0)\r\n\r\n X1 = datasetForFS[[\"assists\",\"blocks\",\"defensive_rebounds\",\"opponent_drb\",\"fast_break_pts\",\"points_in_paint\",\"points_off_turnovers\",\"rebounds\",\"steals\",\"turnovers\",\"efg\",\"tov_pct\",\"orb_pct\",\"ftr\"]]\r\n y1 = datasetForFS['win']\r\n\r\n X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=0.2, random_state=0)\r\n X_train_fs, X_test_fs, fs = select_features(X_train, y_train, X_test)\r\n\r\n colList = X1.columns.values.tolist()\r\n statScoreDF = pd.DataFrame(data={'Stat': pd.Series(colList), 'Score': pd.Series(fs.scores_.tolist())})\r\n statScoreDF = statScoreDF.sort_values(by=['Score'], ascending=False)\r\n \r\n return statScoreDF.head(n=4)['Stat'].tolist()\r\n\r\ndef avgDataRow(df):\r\n ''' Returns the average values of a dataFrame '''\r\n df1 = dict()\r\n for (columnName, columnData) in df.iteritems():\r\n df1[columnName] = [df[columnName].mean()]\r\n \r\n return pd.DataFrame(df1)\r\n\r\ndef updateWinPct(dfMain, dfWin, reg):\r\n ''' Return new win percentage '''\r\n dfPred = reg.predict(dfMain)\r\n return pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.around(dfPred), 'Predicted (float)': dfPred})\r\n\r\ndef filterRowsFS(df):\r\n ''' Return dataframe with selected features '''\r\n return df[[\"assists\",\"blocks\",\"defensive_rebounds\",\"opponent_drb\",\"fast_break_pts\",\"points_in_paint\",\"points_off_turnovers\",\"rebounds\",\"steals\",\"turnovers\",\"efg\",\"tov_pct\",\"orb_pct\",\"ftr\"]]\r\n\r\ndef learn(dataset):\r\n ''' Trains the model '''\r\n dataset = pd.read_csv(\"team_boxscores_v3.csv\")\r\n dataset = dataset.fillna(0)\r\n \r\n # Shuffle\r\n dataset = dataset.sample(frac = 1) \r\n \r\n X1 = dataset[[\"assists\",\"blocks\",\"defensive_rebounds\",\"opponent_drb\",\"fast_break_pts\",\"points_in_paint\",\"points_off_turnovers\",\"rebounds\",\"steals\",\"turnovers\",\"efg\",\"tov_pct\",\"orb_pct\",\"ftr\"]]\r\n y1 = dataset['win']\r\n \r\n # No shuffle\r\n # X_train, X_test, y_train, y_test = train_test_split(X1, y1, test_size=0.2, random_state=0)\r\n \r\n # W/ shuffle\r\n X_train = X1[int(len(X1)/5):]\r\n X_test = X1[:int(len(X1)/5)]\r\n \r\n y_train = 
y1[int(len(y1)/5):]\r\n y_test = y1[:int(len(y1)/5)]\r\n \r\n regressor = LinearRegression()\r\n regressor.fit(X_train, y_train)\r\n \r\n coeff_df = pd.DataFrame(regressor.coef_, X1.columns, columns=['Coefficient'])\r\n \r\n y_pred = regressor.predict(X_test)\r\n y_pred_round = np.around(regressor.predict(X_test))\r\n \r\n return regressor, pd.DataFrame({'Actual': y_test, 'Predicted (int)': y_pred_round, 'Predicted (float)': y_pred})\r\n\r\ndef calculate_win_percentage(team, stat1, stat2, stat3, stat4, regressor, data):\r\n ''' Calculates the win percentage for a team and the 4 selected stat values '''\r\n temp = getAllTeamMatchRecords(team, data)\r\n changed_stat1 = overallFeatures(temp)[0]\r\n changed_stat2 = overallFeatures(temp)[1]\r\n changed_stat3 = overallFeatures(temp)[2]\r\n changed_stat4 = overallFeatures(temp)[3]\r\n average_team_stats = avgDataRow(filterRowsFS(temp))\r\n dfWin = temp[\"win\"]\r\n\r\n dfFinal = pd.DataFrame({'Actual': dfWin.mean(), 'Predicted (int)': np.around(regressor.predict(average_team_stats)), 'Predicted (float)': regressor.predict(average_team_stats)})\r\n origWinPct = dfFinal.at[0, 'Predicted (float)']\r\n\r\n average_team_stats.at[0, changed_stat1] = stat1\r\n average_team_stats.at[0, changed_stat2] = stat2\r\n average_team_stats.at[0, changed_stat3] = stat3\r\n average_team_stats.at[0, changed_stat4] = stat4\r\n\r\n win_percentage = updateWinPct(average_team_stats, dfWin, regressor).at[0,'Predicted (float)']\r\n\r\n # Makes sure you can't have a win percentage of > 100% or < 0%\r\n if win_percentage > 1:\r\n win_percentage = 1\r\n elif win_percentage < 0:\r\n win_percentage = 0\r\n\r\n win_percentage = win_percentage * 100\r\n win_percentage = round(win_percentage, 2)\r\n\r\n win_percentage_text = \"Projected Win Percentage: \" + str(win_percentage) + \"%\"\r\n\r\n return win_percentage_text\r\n\r\ndef get_default_slider_values(team, data):\r\n ''' Gets the values the each of the 4 sliders should display. These values are what the model estimates the team will get in the matchup '''\r\n numSliders = 4\r\n stat_column_names = []\r\n stat_column_values = []\r\n\r\n estimated_stat_values = avgDataRow(filterRowsFS(getAllTeamMatchRecords(team, data)))\r\n\r\n for i in range(numSliders):\r\n stat_column_names.append(overallFeatures(getAllTeamMatchRecords(team, data))[i])\r\n stat_column_values.append(estimated_stat_values.at[0, stat_column_names[i]])\r\n\r\n return stat_column_names, stat_column_values",
"step-ids": [
8,
9,
11,
12,
13
]
}
|
[
8,
9,
11,
12,
13
] |
from flask import Flask
from flask import request
from flask import session
from flask import jsonify
from flask import make_response
import mariadb
import datetime
import json
import scad_utils
testing: bool = True
if testing:
fake_datetime = datetime.datetime(2020, 8, 7, 15, 10)
app = Flask(__name__)
app.config["SECRET_KEY"] = "clave ultra secreta"
app.permanent_session_lifetime = datetime.timedelta(minutes=20)
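# teachers may mark attendance up to this long after a course's scheduled start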
teacher_time_tolerance = datetime.timedelta(minutes=20)
db = mariadb.ConnectionPool(
user="brocolio",
password="brocolio",
host="localhost",
pool_name="pul",
pool_size=20,
database="scad",
)
# tmp_cursor: mysql.cursor.MySQLCursor = db.cursor()
# tmp_cursor.execute("SET lc_time_names = 'es_PE';")
# tmp_cursor.close()
spanish_days: dict = {
"Monday": "lunes",
"Tuesday": "martes",
"Wednesday": "miércoles",
"Thursday": "jueves",
"Friday": "viernes",
"Saturday": "sábado",
"Sunday": "domingo",
}
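# serialize datetime/date values as ISO-8601 strings in JSON responses; any
# other non-serializable object falls back to str()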
json.JSONEncoder.default = lambda self, obj: (
obj.isoformat()
if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date)
else str(obj)
)
@app.route("/login", methods=["POST"])
def login() -> dict:
db_connection = db.get_connection()
db_cursor = db_connection.cursor(named_tuple=True)
data: dict = request.get_json()
    # check against the database whether the username and password are valid
    # first, look in the Docente (teacher) table
query: str = (
"select DocenteDNI, Nombre, Apellido, Usuario "
"from Docente "
"where Usuario=? and Contrasena=?"
)
db_cursor.execute(query, (data["Usuario"], data["Contrasena"]))
rows = db_cursor.fetchall()
if len(rows) == 1:
session.permanent = True
session["account_type"] = "Docente"
session["DocenteDNI"] = rows[0].DocenteDNI
session["Nombre"] = rows[0].Nombre
session["Apellido"] = rows[0].Apellido
session["Usuario"] = rows[0].Usuario
db_cursor.close()
db_connection.close()
return make_response({"account_type": session["account_type"]}, 200)
else:
        # then look in the Administrador table
query: str = (
"select Usuario,Contrasena "
"from Administrador "
"where Usuario=? and Contrasena=?"
)
db_cursor.execute(query, (data["Usuario"], data["Contrasena"]))
rows = db_cursor.fetchall()
if len(rows) == 1:
session.permanent = True
session["account_type"] = "Administrador"
session["Usuario"] = rows[0].Usuario
db_cursor.close()
db_connection.close()
return make_response({"account_type": session["account_type"]}, 200)
        # nothing matched in either table
else:
db_cursor.close()
db_connection.close()
return make_response("pos a lo mejor se equivoco?", 401)
@app.route("/teacher_fullname", methods=["GET"])
def teacherFullname() -> dict:
if "account_type" not in session:
return make_response("pa que quieres saber eso jaja salu2", 401)
elif session["account_type"] == "Docente":
return {"Nombre": session["Nombre"], "Apellido": session["Apellido"]}
elif session["account_type"] == "Administrador":
return make_response("wey no!!!", 400)
@app.route("/time", methods=["GET"])
def time() -> dict:
if testing:
current_datetime = fake_datetime
else:
current_datetime = datetime.datetime.now()
return {
"date": current_datetime.strftime("%d/%m/%Y"),
"time": current_datetime.strftime("%H,%M,%S"),
}
@app.route("/teacher_course_list", methods=["GET"])
def teacherCourseList() -> list:
    # check the session
if "account_type" not in session:
        # not logged in
return make_response("nope", 401)
elif session["account_type"] == "Docente":
        # fetch the course list and whether each course has been marked;
        # a marked course is identified by a non-NULL Hora value in the
        # Marcacion table
if testing:
current_datetime = fake_datetime
else:
current_datetime = datetime.datetime.now()
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
db_cursor.execute("SET lc_time_names = 'es_PE'")
query: str = (
"select AsignacionCursoID, a.CursoNombre, a.HoraInicio, a.HoraFin, s.Pabellon, s.Numero "
"from AsignacionCurso a "
"inner join Salon s using(SalonID) "
"where Dia=dayname(?) and DocenteDNI=? "
)
db_cursor.execute(
query, (current_datetime.strftime("%Y/%m/%d"), session["DocenteDNI"])
)
today_assigned_courses: list = db_cursor.fetchall()
        # format the course rows as dicts
today_assigned_courses = scad_utils.rowToDict(
(
"AsignacionCursoID",
"CursoNombre",
"HoraInicio",
"HoraFin",
"Pabellon",
"Numero",
),
today_assigned_courses,
)
if len(today_assigned_courses) > 0:
existence_check_query: str = (
"select * from Marcacion " "where Fecha=? and AsignacionCursoID=?"
)
for course in today_assigned_courses:
db_cursor.execute(
existence_check_query,
(
current_datetime.strftime("%Y/%m/%d"),
course["AsignacionCursoID"],
),
)
if len(db_cursor.fetchall()) > 0:
course["state"] = "marked"
else:
if current_datetime >= scad_utils.timeToDatetime(
course["HoraInicio"], current_datetime
):
if (
current_datetime
- scad_utils.timeToDatetime(
course["HoraInicio"], current_datetime
)
<= teacher_time_tolerance
):
course["state"] = "mark_now"
else:
course["state"] = "not_marked"
else:
course["state"] = "waiting"
db_cursor.close()
db_connection.close()
return jsonify(today_assigned_courses)
elif session["account_type"] == "Administrador":
        # the administrator should not use this endpoint
return make_response("ya nos jakiaron", 400)
@app.route("/teacher_mark", methods=["POST"])
def teacherMark() -> dict:
    # check whether the course register can still be marked
if "account_type" not in session:
        # not logged in
return make_response("stap", 401)
elif session["account_type"] == "Docente":
if testing:
current_datetime = fake_datetime
else:
current_datetime = datetime.datetime.now()
        # look for a course that can be marked right now
        course_to_mark: list
db_connection = db.get_connection()
db_cursor = db_connection.cursor(named_tuple=True)
db_cursor.execute("SET lc_time_names = 'es_PE'")
query: str = (
"select AsignacionCursoID,SalonID "
"from AsignacionCurso "
"where DocenteDNI=? "
"and Dia=dayname(?) "
"and HoraInicio <=? "
"and timediff(?,HoraInicio)<=?;"
)
db_cursor.execute(
query,
(
session["DocenteDNI"],
current_datetime.strftime("%Y/%m/%d"),
current_datetime.strftime("%H:%M:%S"),
current_datetime.strftime("%H:%M:%S"),
str(teacher_time_tolerance),
),
)
course_to_mark = db_cursor.fetchall()
if len(course_to_mark) == 1:
            insertion_query: str = "insert into Marcacion() values(?,?,?,?);"
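            # assumption inferred from the bindings below: Marcacion has
            # exactly the four columns being inserted (AsignacionCursoID,
            # Fecha, Hora, SalonID), which the empty column list relies on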
db_cursor.execute(
insertion_query,
(
int(course_to_mark[0].AsignacionCursoID),
current_datetime.strftime("%Y/%m/%d"),
current_datetime.strftime("%H:%M:%S"),
int(course_to_mark[0].SalonID),
),
)
            # commit explicitly: the mariadb connector leaves autocommit off
            # by default, so the insert would otherwise be rolled back
            db_connection.commit()
            db_cursor.close()
            db_connection.close()
return make_response("se marco la asistencia", 200)
else:
db_cursor.close()
db_connection.close()
return make_response("ya es tarde", 406)
elif session["account_type"] == "Administrador":
return make_response(
"papu, si ya nos jakiaste por lo menos usa los servicios correctos no?", 400
)
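# Illustrative client call (hypothetical host/port), reusing the session
# cookie obtained from /login:
#   curl -X POST -b cookies.txt http://localhost:5000/teacher_mark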
@app.route("/admin_get_report", methods=["GET"])
def adminGetReport() -> list:
if "account_type" not in session:
        # not logged in
return make_response("nope", 401)
elif session["account_type"] == "Administrador":
        # note: this endpoint expects a JSON body even though it is a GET
        time_range = request.get_json()["time_range"]
if testing:
current_datetime = fake_datetime
else:
current_datetime = datetime.datetime.now()
db_connection = db.get_connection()
db_cursor = db_connection.cursor(named_tuple=True)
db_cursor.execute("SET lc_time_names = 'es_PE'")
report: list
if time_range == "today":
query: str = (
"select a.AsignacionCursoID,d.DocenteDNI,d.Nombre,d.Apellido, "
"a.CursoNombre, a.HoraInicio, a.HoraFin, s.Pabellon, s.Numero "
"from AsignacionCurso a "
"inner join Salon s using(SalonID) "
"inner join Docente d using(DocenteDNI) "
"where Dia=dayname(?) and a.HoraInicio<? "
)
db_cursor.execute(
query,
(
current_datetime.strftime("%Y-%m-%d"),
current_datetime.strftime("%H:%M:%S"),
),
)
report = db_cursor.fetchall()
            # format the report rows
report = scad_utils.rowToDict(
(
"AsignacionCursoID",
"DocenteDNI",
"Nombre",
"Apellido",
"CursoNombre",
"HoraInicio",
"HoraFin",
"Pabellon",
"Numero",
),
report,
)
if len(report) > 0:
                existence_check_query: str = (
                    "select * from Marcacion where Fecha=? and AsignacionCursoID=?"
                )
for assignment in report:
db_cursor.execute(
existence_check_query,
(
current_datetime.strftime("%Y-%m-%d"),
assignment["AsignacionCursoID"],
),
)
if len(db_cursor.fetchall()) > 0:
assignment["state"] = "marked"
else:
assignment["state"] = "not_marked"
db_cursor.close()
db_connection.close()
return make_response(jsonify(report), 200)
elif time_range == "yesterday":
query: str = (
"select a.AsignacionCursoID,d.DocenteDNI,d.Nombre,d.Apellido, "
"a.CursoNombre, a.HoraInicio, a.HoraFin, s.Pabellon, s.Numero "
"from AsignacionCurso a "
"inner join Salon s using(SalonID) "
"inner join Docente d using(DocenteDNI) "
"where Dia=dayname(?)"
)
current_datetime -= datetime.timedelta(days=1)
db_cursor.execute(
query, (current_datetime.strftime("%Y-%m-%d"),),
)
report = db_cursor.fetchall()
            # format the report rows
report = scad_utils.rowToDict(
(
"AsignacionCursoID",
"DocenteDNI",
"Nombre",
"Apellido",
"CursoNombre",
"HoraInicio",
"HoraFin",
"Pabellon",
"Numero",
),
report,
)
if len(report) > 0:
                existence_check_query: str = (
                    "select * from Marcacion where Fecha=? and AsignacionCursoID=?"
                )
for assignment in report:
db_cursor.execute(
existence_check_query,
(
current_datetime.strftime("%Y-%m-%d"),
assignment["AsignacionCursoID"],
),
)
if len(db_cursor.fetchall()) > 0:
assignment["state"] = "marked"
else:
assignment["state"] = "not_marked"
db_cursor.close()
db_connection.close()
return make_response(jsonify(report), 200)
        elif time_range in ("this_week", "this_month", "all"):
            # not implemented yet; return 501 instead of falling through to
            # an implicit None, which would crash the view
            return make_response("no implementado", 501)
        else:
            return make_response("peticion invalida", 406)
elif session["account_type"] == "Docente":
        # a teacher should not use this admin service
return make_response("ya nos jakiaron", 400)
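# Hypothetical sketch for the unimplemented ranges in adminGetReport: a
# "this_week" report could reuse the dayname(?) query from the "yesterday"
# branch once per day, e.g.:
#
#   for offset in range(current_datetime.weekday() + 1):
#       day = current_datetime - datetime.timedelta(days=offset)
#       # execute the per-day query with `day` and merge the rows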
@app.route("/admin_add_teacher", methods=["POST"])
def adminAddTeacher() -> dict:
if "account_type" not in session:
return make_response("", 401)
elif session["account_type"] == "Administrador":
data = request.get_json()
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
query: str = ("insert into Docente() values(?,?,?,?,?)")
db_cursor.execute(
query,
(
data["DocenteDNI"],
data["Nombre"],
data["Apellido"],
data["Usuario"],
data["Contrasena"],
),
)
        # commit explicitly: the mariadb connector leaves autocommit off
        # by default, so the insert would otherwise be rolled back
        db_connection.commit()
        db_cursor.close()
        db_connection.close()
return make_response("se agrego la entrada", 200)
elif session["account_type"] == "Docente":
return make_response("", 401)
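# Example request body this endpoint expects (field names taken from the
# bindings above; values are illustrative):
#   {"DocenteDNI": "12345678", "Nombre": "Ana", "Apellido": "Diaz",
#    "Usuario": "adiaz", "Contrasena": "secreto"}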
@app.route("/admin_get_teacher_table", methods=["GET"])
def adminGetTeacherTable() -> dict:
if "account_type" not in session:
return make_response("", 401)
elif session["account_type"] == "Administrador":
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
query: str = ("select * from Docente")
db_cursor.execute(query)
teacher_table = scad_utils.rowToDict(
("DocenteDNI", "Nombre", "Apellido", "Usuario", "Contrasena"),
db_cursor.fetchall(),
)
db_cursor.close()
db_connection.close()
return make_response(jsonify(teacher_table), 200)
elif session["account_type"] == "Docente":
return make_response("", 401)
@app.route("/admin_get_course_table", methods=["GET"])
def adminGetCourseTable() -> dict:
if "account_type" not in session:
return make_response("", 401)
elif session["account_type"] == "Administrador":
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
query: str = ("select * from Curso")
db_cursor.execute(query)
course_table = scad_utils.rowToDict(
("CursoNombre", "FechaInicio", "FechaFin"), db_cursor.fetchall(),
)
for course in course_table:
course["FechaInicio"] = course["FechaInicio"].isoformat()
course["FechaFin"] = course["FechaFin"].isoformat()
db_cursor.close()
db_connection.close()
return make_response(jsonify(course_table), 200)
elif session["account_type"] == "Docente":
return make_response("", 401)
@app.route("/admin_get_classroom_table", methods=["GET"])
def adminGetClassroomTable() -> dict:
if "account_type" not in session:
return make_response("", 401)
elif session["account_type"] == "Administrador":
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
query: str = ("select Pabellon,Numero from Salon")
db_cursor.execute(query)
classroom_table = scad_utils.rowToDict(
("Pabellon", "Numero"), db_cursor.fetchall(),
)
db_cursor.close()
db_connection.close()
return make_response(jsonify(classroom_table), 200)
elif session["account_type"] == "Docente":
return make_response("", 401)
@app.route("/admin_get_course_assignment_table", methods=["GET"])
def adminGetCourseAssignmentTable() -> dict:
if "account_type" not in session:
return make_response("", 401)
elif session["account_type"] == "Administrador":
db_connection = db.get_connection()
db_cursor = db_connection.cursor()
query: str = (
"select d.DocenteDNI, d.Nombre, d.Apellido,"
"a.CursoNombre, s.Pabellon,s.Numero, a.HoraInicio, a.HoraFin,a.Dia "
"from AsignacionCurso a "
"inner join Salon s using(SalonID) "
"inner join Docente d using(DocenteDNI)"
)
db_cursor.execute(query)
course_assignment_table = scad_utils.rowToDict(
(
"DocenteDNI",
"Nombre",
"Apellido",
"CursoNombre",
"Pabellon",
"Numero",
"HoraInicio",
"HoraFin",
"Dia",
),
db_cursor.fetchall(),
)
db_cursor.close()
db_connection.close()
return make_response(jsonify(course_assignment_table), 200)
elif session["account_type"] == "Docente":
return make_response("", 401)
@app.route("/logout", methods=["DELETE"])
def logout() -> dict:
if "account_type" not in session:
return make_response("primero inicia session broz", 301)
else:
if session["account_type"] == "Docente":
session.pop("Usuario")
session.pop("Nombre")
session.pop("Apellido")
return make_response("hasta luego prosor", 200)
elif session["account_type"] == "Administrador":
session.pop("Usuario")
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
return make_response("espero haberle sido util, hasta luego", 200)
and a.HoraInicio<? \"\n )\n db_cursor.execute(\n query,\n (\n current_datetime.strftime(\"%Y-%m-%d\"),\n current_datetime.strftime(\"%H:%M:%S\"),\n ),\n )\n report = db_cursor.fetchall()\n # se formatea la lista de cursos\n report = scad_utils.rowToDict(\n (\n \"AsignacionCursoID\",\n \"DocenteDNI\",\n \"Nombre\",\n \"Apellido\",\n \"CursoNombre\",\n \"HoraInicio\",\n \"HoraFin\",\n \"Pabellon\",\n \"Numero\",\n ),\n report,\n )\n if len(report) > 0:\n existence_check_query: str = (\n \"select * from Marcacion \" \"where Fecha=? and AsignacionCursoID=?\"\n )\n for assignment in report:\n db_cursor.execute(\n existence_check_query,\n (\n current_datetime.strftime(\"%Y-%m-%d\"),\n assignment[\"AsignacionCursoID\"],\n ),\n )\n if len(db_cursor.fetchall()) > 0:\n assignment[\"state\"] = \"marked\"\n else:\n assignment[\"state\"] = \"not_marked\"\n\n db_cursor.close()\n db_connection.close()\n return make_response(jsonify(report), 200)\n elif time_range == \"yesterday\":\n query: str = (\n \"select a.AsignacionCursoID,d.DocenteDNI,d.Nombre,d.Apellido, \"\n \"a.CursoNombre, a.HoraInicio, a.HoraFin, s.Pabellon, s.Numero \"\n \"from AsignacionCurso a \"\n \"inner join Salon s using(SalonID) \"\n \"inner join Docente d using(DocenteDNI) \"\n \"where Dia=dayname(?)\"\n )\n current_datetime -= datetime.timedelta(days=1)\n db_cursor.execute(\n query, (current_datetime.strftime(\"%Y-%m-%d\"),),\n )\n report = db_cursor.fetchall()\n # se formatea la lista de cursos\n report = scad_utils.rowToDict(\n (\n \"AsignacionCursoID\",\n \"DocenteDNI\",\n \"Nombre\",\n \"Apellido\",\n \"CursoNombre\",\n \"HoraInicio\",\n \"HoraFin\",\n \"Pabellon\",\n \"Numero\",\n ),\n report,\n )\n if len(report) > 0:\n existence_check_query: str = (\n \"select * from Marcacion \" \"where Fecha=? 
and AsignacionCursoID=?\"\n )\n for assignment in report:\n db_cursor.execute(\n existence_check_query,\n (\n current_datetime.strftime(\"%Y-%m-%d\"),\n assignment[\"AsignacionCursoID\"],\n ),\n )\n if len(db_cursor.fetchall()) > 0:\n assignment[\"state\"] = \"marked\"\n else:\n assignment[\"state\"] = \"not_marked\"\n db_cursor.close()\n db_connection.close()\n return make_response(jsonify(report), 200)\n elif time_range == \"this_week\":\n pass\n elif time_range == \"this_month\":\n pass\n elif time_range == \"all\":\n pass\n else:\n return make_response(\"peticion invalida\", 406)\n elif session[\"account_type\"] == \"Docente\":\n # el administrador no deberia usar este servicio\n return make_response(\"ya nos jakiaron\", 400)\n\n\[email protected](\"/admin_add_teacher\", methods=[\"POST\"])\ndef adminAddTeacher() -> dict:\n if \"account_type\" not in session:\n return make_response(\"\", 401)\n elif session[\"account_type\"] == \"Administrador\":\n data = request.get_json()\n db_connection = db.get_connection()\n db_cursor = db_connection.cursor()\n\n query: str = (\"insert into Docente() values(?,?,?,?,?)\")\n db_cursor.execute(\n query,\n (\n data[\"DocenteDNI\"],\n data[\"Nombre\"],\n data[\"Apellido\"],\n data[\"Usuario\"],\n data[\"Contrasena\"],\n ),\n )\n db_cursor.close()\n db_connection.close()\n return make_response(\"se agrego la entrada\", 200)\n elif session[\"account_type\"] == \"Docente\":\n return make_response(\"\", 401)\n\n\[email protected](\"/admin_get_teacher_table\", methods=[\"GET\"])\ndef adminGetTeacherTable() -> dict:\n if \"account_type\" not in session:\n return make_response(\"\", 401)\n elif session[\"account_type\"] == \"Administrador\":\n db_connection = db.get_connection()\n db_cursor = db_connection.cursor()\n\n query: str = (\"select * from Docente\")\n db_cursor.execute(query)\n teacher_table = scad_utils.rowToDict(\n (\"DocenteDNI\", \"Nombre\", \"Apellido\", \"Usuario\", \"Contrasena\"),\n db_cursor.fetchall(),\n )\n db_cursor.close()\n db_connection.close()\n return make_response(jsonify(teacher_table), 200)\n elif session[\"account_type\"] == \"Docente\":\n return make_response(\"\", 401)\n\n\[email protected](\"/admin_get_course_table\", methods=[\"GET\"])\ndef adminGetCourseTable() -> dict:\n if \"account_type\" not in session:\n return make_response(\"\", 401)\n elif session[\"account_type\"] == \"Administrador\":\n db_connection = db.get_connection()\n db_cursor = db_connection.cursor()\n\n query: str = (\"select * from Curso\")\n db_cursor.execute(query)\n course_table = scad_utils.rowToDict(\n (\"CursoNombre\", \"FechaInicio\", \"FechaFin\"), db_cursor.fetchall(),\n )\n for course in course_table:\n course[\"FechaInicio\"] = course[\"FechaInicio\"].isoformat()\n course[\"FechaFin\"] = course[\"FechaFin\"].isoformat()\n\n db_cursor.close()\n db_connection.close()\n return make_response(jsonify(course_table), 200)\n elif session[\"account_type\"] == \"Docente\":\n return make_response(\"\", 401)\n\n\[email protected](\"/admin_get_classroom_table\", methods=[\"GET\"])\ndef adminGetClassroomTable() -> dict:\n if \"account_type\" not in session:\n return make_response(\"\", 401)\n elif session[\"account_type\"] == \"Administrador\":\n db_connection = db.get_connection()\n db_cursor = db_connection.cursor()\n\n query: str = (\"select Pabellon,Numero from Salon\")\n db_cursor.execute(query)\n classroom_table = scad_utils.rowToDict(\n (\"Pabellon\", \"Numero\"), db_cursor.fetchall(),\n )\n db_cursor.close()\n db_connection.close()\n return 
make_response(jsonify(classroom_table), 200)\n elif session[\"account_type\"] == \"Docente\":\n return make_response(\"\", 401)\n\n\[email protected](\"/admin_get_course_assignment_table\", methods=[\"GET\"])\ndef adminGetCourseAssignmentTable() -> dict:\n if \"account_type\" not in session:\n return make_response(\"\", 401)\n elif session[\"account_type\"] == \"Administrador\":\n db_connection = db.get_connection()\n db_cursor = db_connection.cursor()\n\n query: str = (\n \"select d.DocenteDNI, d.Nombre, d.Apellido,\"\n \"a.CursoNombre, s.Pabellon,s.Numero, a.HoraInicio, a.HoraFin,a.Dia \"\n \"from AsignacionCurso a \"\n \"inner join Salon s using(SalonID) \"\n \"inner join Docente d using(DocenteDNI)\"\n )\n db_cursor.execute(query)\n course_assignment_table = scad_utils.rowToDict(\n (\n \"DocenteDNI\",\n \"Nombre\",\n \"Apellido\",\n \"CursoNombre\",\n \"Pabellon\",\n \"Numero\",\n \"HoraInicio\",\n \"HoraFin\",\n \"Dia\",\n ),\n db_cursor.fetchall(),\n )\n\n db_cursor.close()\n db_connection.close()\n return make_response(jsonify(course_assignment_table), 200)\n elif session[\"account_type\"] == \"Docente\":\n return make_response(\"\", 401)\n\n\[email protected](\"/logout\", methods=[\"DELETE\"])\ndef logout() -> dict:\n if \"account_type\" not in session:\n return make_response(\"primero inicia session broz\", 301)\n else:\n if session[\"account_type\"] == \"Docente\":\n session.pop(\"Usuario\")\n session.pop(\"Nombre\")\n session.pop(\"Apellido\")\n return make_response(\"hasta luego prosor\", 200)\n elif session[\"account_type\"] == \"Administrador\":\n session.pop(\"Usuario\")\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n return make_response(\"espero haberle sido util, hasta luego\", 200)\n",
"step-ids": [
10,
11,
12,
14,
16
]
}
|
[
10,
11,
12,
14,
16
] |
import xml.etree.ElementTree as ET  # imported but unused in this snippet
from typing import Tuple
class Stage:
def __init__(self, costumes, sounds, variables, blocks, scripts, sprites):
self.costumes = costumes
self.sounds = sounds
self.variables = variables
self.blocks = blocks
self.scripts = scripts
self.sprites = sprites
class Sprite:
    def __init__(self, name: str, index: str, xCoord: int, yCoord: int,
        heading: int, scale: float, volume: int, pan: int, rotation: int,
        draggable: bool, hidden: bool, costumes: str,
        color: Tuple[float, float, float], pen: str, id: int):
self.name = name
self.index = index
self.coords = xCoord, yCoord
self.heading = heading
self.scale = scale
self.volume = volume
self.pan = pan
self.rotation = rotation
self.draggable = draggable
self.hidden = hidden
self.costumes = costumes
self.color = color
self.pen = pen
self.id = id
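
# --- Usage sketch (added for illustration; not part of the original file) ---
# The keyword arguments mirror the __init__ signatures above; all literal
# values below are assumptions chosen only to show the construction order.
if __name__ == "__main__":
    cat = Sprite(name="Cat", index="1", xCoord=0, yCoord=0, heading=90,
                 scale=1.0, volume=100, pan=0, rotation=1, draggable=False,
                 hidden=False, costumes="cat.png", color=(0.0, 0.0, 1.0),
                 pen="up", id=1)
    stage = Stage(costumes=[], sounds=[], variables={}, blocks=[],
                  scripts=[], sprites=[cat])
    print(stage.sprites[0].name, cat.coords)  # -> Cat (0, 0)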
|
normal
|
{
"blob_id": "575768c200ad81f878c132d68569c84f497091f2",
"index": 8137,
"step-1": "<mask token>\n\n\nclass Sprite:\n\n def __init__(self, name: str, index: str, xCoord: int, yCoord: int,\n heading: int, scale: float, volume: int, pan: int, rotation: int,\n draggable: bool, hidden: bool, costumes: str, color: (float, float,\n float), pen: str, id: int):\n self.name = name\n self.index = index\n self.coords = xCoord, yCoord\n self.heading = heading\n self.scale = scale\n self.volume = volume\n self.pan = pan\n self.rotation = rotation\n self.draggable = draggable\n self.hidden = hidden\n self.costumes = costumes\n self.color = color\n self.pen = pen\n self.id = id\n",
"step-2": "<mask token>\n\n\nclass Stage:\n <mask token>\n\n\nclass Sprite:\n\n def __init__(self, name: str, index: str, xCoord: int, yCoord: int,\n heading: int, scale: float, volume: int, pan: int, rotation: int,\n draggable: bool, hidden: bool, costumes: str, color: (float, float,\n float), pen: str, id: int):\n self.name = name\n self.index = index\n self.coords = xCoord, yCoord\n self.heading = heading\n self.scale = scale\n self.volume = volume\n self.pan = pan\n self.rotation = rotation\n self.draggable = draggable\n self.hidden = hidden\n self.costumes = costumes\n self.color = color\n self.pen = pen\n self.id = id\n",
"step-3": "<mask token>\n\n\nclass Stage:\n\n def __init__(self, costumes, sounds, variables, blocks, scripts, sprites):\n self.costumes = costumes\n self.sounds = sounds\n self.variables = variables\n self.blocks = blocks\n self.scripts = scripts\n self.sprites = sprites\n\n\nclass Sprite:\n\n def __init__(self, name: str, index: str, xCoord: int, yCoord: int,\n heading: int, scale: float, volume: int, pan: int, rotation: int,\n draggable: bool, hidden: bool, costumes: str, color: (float, float,\n float), pen: str, id: int):\n self.name = name\n self.index = index\n self.coords = xCoord, yCoord\n self.heading = heading\n self.scale = scale\n self.volume = volume\n self.pan = pan\n self.rotation = rotation\n self.draggable = draggable\n self.hidden = hidden\n self.costumes = costumes\n self.color = color\n self.pen = pen\n self.id = id\n",
"step-4": "import xml.etree.ElementTree as ET\n\n\nclass Stage:\n\n def __init__(self, costumes, sounds, variables, blocks, scripts, sprites):\n self.costumes = costumes\n self.sounds = sounds\n self.variables = variables\n self.blocks = blocks\n self.scripts = scripts\n self.sprites = sprites\n\n\nclass Sprite:\n\n def __init__(self, name: str, index: str, xCoord: int, yCoord: int,\n heading: int, scale: float, volume: int, pan: int, rotation: int,\n draggable: bool, hidden: bool, costumes: str, color: (float, float,\n float), pen: str, id: int):\n self.name = name\n self.index = index\n self.coords = xCoord, yCoord\n self.heading = heading\n self.scale = scale\n self.volume = volume\n self.pan = pan\n self.rotation = rotation\n self.draggable = draggable\n self.hidden = hidden\n self.costumes = costumes\n self.color = color\n self.pen = pen\n self.id = id\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import socket
comms_socket1 = socket.socket()
comms_socket2 = socket.socket()
comms_socket1.bind(("120.79.26.97",55000))
comms_socket2.bind(("120.79.26.97",55001))
comms_socket1.listen()
user1,address1 = comms_socket1.accept()
comms_socket2.listen()
user2,address2 = comms_socket2.accept()
while True:
send_date = user1.recv(4096).decode("UTF-8")
user2.send(bytes(send_data,"UTF-8"))
send_date = user2.recv(4096).decode("UTF-8")
user1.send(bytes(send_data,"UTF-8"))
|
normal
|
{
"blob_id": "8981d53641d22430efb2dd43401fab562b8a95ed",
"index": 3262,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncomms_socket1.bind(('120.79.26.97', 55000))\ncomms_socket2.bind(('120.79.26.97', 55001))\ncomms_socket1.listen()\n<mask token>\ncomms_socket2.listen()\n<mask token>\nwhile True:\n send_date = user1.recv(4096).decode('UTF-8')\n user2.send(bytes(send_data, 'UTF-8'))\n send_date = user2.recv(4096).decode('UTF-8')\n user1.send(bytes(send_data, 'UTF-8'))\n",
"step-3": "<mask token>\ncomms_socket1 = socket.socket()\ncomms_socket2 = socket.socket()\ncomms_socket1.bind(('120.79.26.97', 55000))\ncomms_socket2.bind(('120.79.26.97', 55001))\ncomms_socket1.listen()\nuser1, address1 = comms_socket1.accept()\ncomms_socket2.listen()\nuser2, address2 = comms_socket2.accept()\nwhile True:\n send_date = user1.recv(4096).decode('UTF-8')\n user2.send(bytes(send_data, 'UTF-8'))\n send_date = user2.recv(4096).decode('UTF-8')\n user1.send(bytes(send_data, 'UTF-8'))\n",
"step-4": "import socket\ncomms_socket1 = socket.socket()\ncomms_socket2 = socket.socket()\ncomms_socket1.bind(('120.79.26.97', 55000))\ncomms_socket2.bind(('120.79.26.97', 55001))\ncomms_socket1.listen()\nuser1, address1 = comms_socket1.accept()\ncomms_socket2.listen()\nuser2, address2 = comms_socket2.accept()\nwhile True:\n send_date = user1.recv(4096).decode('UTF-8')\n user2.send(bytes(send_data, 'UTF-8'))\n send_date = user2.recv(4096).decode('UTF-8')\n user1.send(bytes(send_data, 'UTF-8'))\n",
"step-5": "import socket\n\ncomms_socket1 = socket.socket()\ncomms_socket2 = socket.socket()\ncomms_socket1.bind((\"120.79.26.97\",55000))\ncomms_socket2.bind((\"120.79.26.97\",55001))\ncomms_socket1.listen()\nuser1,address1 = comms_socket1.accept()\ncomms_socket2.listen()\nuser2,address2 = comms_socket2.accept()\n\nwhile True:\n send_date = user1.recv(4096).decode(\"UTF-8\")\n user2.send(bytes(send_data,\"UTF-8\"))\n send_date = user2.recv(4096).decode(\"UTF-8\")\n user1.send(bytes(send_data,\"UTF-8\"))\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import tensorflow as tf
import random
from tqdm import tqdm
import spacy
import ujson as json
from collections import Counter
import numpy as np
import os.path
nlp = spacy.blank("en")
def word_tokenize(sent):
doc = nlp(sent)
return [token.text for token in doc]
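
# Example: spacy.blank("en") still applies the English tokenizer rules, so
#   word_tokenize("How are you?") -> ["How", "are", "you", "?"]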
def convert_idx(text, tokens):
current = 0
spans = []
for token in tokens:
current = text.find(token, current)
if current < 0:
print("Token {} cannot be found".format(token))
raise Exception()
spans.append((current, current + len(token)))
current += len(token)
return spans
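
# Example: convert_idx("ab cd", ["ab", "cd"]) -> [(0, 2), (3, 5)]
# (convert_idx is not called elsewhere in this file; it is the usual helper
# for mapping tokens back to character offsets in the source text.)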
def process_file(filename, data_type, word_counter, char_counter, shuffle=False):
print("Generating {} examples...".format(data_type))
examples = []
total = 0
with open(filename, "r") as fh:
for l in fh:
ques, ans, label = l.strip().split("\t")
ques_tokens = word_tokenize(ques)
ques_chars = [list(token) for token in ques_tokens]
ans_tokens = word_tokenize(ans)
ans_chars = [list(token) for token in ans_tokens]
label = int(label)
total += 1
for token in ques_tokens:
word_counter[token.lower()] += 1
for char in token:
char_counter[char] += 1
for token in ans_tokens:
word_counter[token.lower()] += 1
for char in token:
char_counter[char] += 1
example = {"ans_tokens": ans_tokens,
"ans_chars": ans_chars, "ques_tokens": ques_tokens,
"ques_chars": ques_chars, "y":label, "id": total}
examples.append(example)
    if shuffle:
        random.shuffle(examples)
print("{} questions in total".format(len(examples)))
return examples
def get_embedding(counter, data_type, limit=-1, emb_file=None, size=None, vec_size=None, token2idx_dict=None):
print("Generating {} embedding...".format(data_type))
embedding_dict = {}
filtered_elements = [k for k, v in counter.items() if v > limit]
if emb_file is not None:
assert size is not None
assert vec_size is not None
with open(emb_file, "r", encoding="utf-8") as fh:
for line in tqdm(fh, total=size):
array = line.split()
word = "".join(array[0:-vec_size])
vector = list(map(float, array[-vec_size:]))
if word in counter and counter[word] > limit:
embedding_dict[word] = vector
print("{} / {} tokens have corresponding {} embedding vector".format(
len(embedding_dict), len(filtered_elements), data_type))
else:
assert vec_size is not None
for token in filtered_elements:
embedding_dict[token] = [np.random.normal(
scale=0.01) for _ in range(vec_size)]
print("{} tokens have corresponding embedding vector".format(
len(filtered_elements)))
NULL = "--NULL--"
OOV = "--OOV--"
token2idx_dict = {token: idx for idx, token in enumerate(
embedding_dict.keys(), 2)} if token2idx_dict is None else token2idx_dict
token2idx_dict[NULL] = 0
token2idx_dict[OOV] = 1
embedding_dict[NULL] = [0. for _ in range(vec_size)]
embedding_dict[OOV] = [0. for _ in range(vec_size)]
idx2emb_dict = {idx: embedding_dict[token]
for token, idx in token2idx_dict.items()}
emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]
return emb_mat, token2idx_dict
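
# Layout of the returned matrix: row 0 is the --NULL-- (padding) vector,
# row 1 is --OOV--, and rows >= 2 hold the vectors of kept tokens, in the
# order given by token2idx_dict.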
def build_features_SemEval(config, examples, data_type, out_file, word2idx_dict, char2idx_dict, is_test=False):
ans_limit = config.test_para_limit if is_test else config.para_limit
ques_limit = config.test_ques_limit if is_test else config.ques_limit
char_limit = config.char_limit
def filter_func(example, is_test=False):
return len(example["ans_tokens"]) > ans_limit or len(example["ques_tokens"]) > ques_limit
print("Processing {} examples...".format(data_type))
writer = tf.python_io.TFRecordWriter(out_file)
total = 0
total_ = 0
meta = {}
for example in tqdm(examples):
total_ += 1
        # length-based filtering is currently disabled; filter_func above is unused
        # if filter_func(example, is_test):
        #     continue
total += 1
context_idxs = np.zeros([ans_limit], dtype=np.int32)
context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)
ques_idxs = np.zeros([ques_limit], dtype=np.int32)
ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)
y = 0
def _get_word(word):
for each in (word, word.lower(), word.capitalize(), word.upper()):
if each in word2idx_dict:
return word2idx_dict[each]
return 1
def _get_char(char):
if char in char2idx_dict:
return char2idx_dict[char]
return 1
for i, token in enumerate(example["ans_tokens"][:ans_limit]):
context_idxs[i] = _get_word(token)
for i, token in enumerate(example["ques_tokens"][:ques_limit]):
ques_idxs[i] = _get_word(token)
for i, token in enumerate(example["ans_chars"][:ans_limit]):
for j, char in enumerate(token):
if j == char_limit:
break
context_char_idxs[i, j] = _get_char(char)
for i, token in enumerate(example["ques_chars"][:ques_limit]):
for j, char in enumerate(token):
if j == char_limit:
break
ques_char_idxs[i, j] = _get_char(char)
label = example["y"]
y = float(label)
record = tf.train.Example(features=tf.train.Features(feature={
"ans_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_idxs.tostring()])),
"ques_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring()])),
"ans_char_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_char_idxs.tostring()])),
"ques_char_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_char_idxs.tostring()])),
"y": tf.train.Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).tostring()])),
"id": tf.train.Feature(int64_list=tf.train.Int64List(value=[example["id"]]))
}))
writer.write(record.SerializeToString())
print("Build {} / {} instances of features in total".format(total, total_))
meta["total"] = total
writer.close()
return meta
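
# --- Read-back sketch (added for illustration; assumes TF 1.x, matching the
# tf.python_io writer above). The feature keys and dtypes mirror the schema
# written in the loop, and the reshape sizes must equal the *_limit values
# used at write time.
def parse_record(serialized, ans_limit, ques_limit, char_limit):
    feats = tf.parse_single_example(serialized, features={
        "ans_idxs": tf.FixedLenFeature([], tf.string),
        "ques_idxs": tf.FixedLenFeature([], tf.string),
        "ans_char_idxs": tf.FixedLenFeature([], tf.string),
        "ques_char_idxs": tf.FixedLenFeature([], tf.string),
        "y": tf.FixedLenFeature([], tf.string),
        "id": tf.FixedLenFeature([], tf.int64)})
    ans_idxs = tf.reshape(tf.decode_raw(feats["ans_idxs"], tf.int32), [ans_limit])
    ques_idxs = tf.reshape(tf.decode_raw(feats["ques_idxs"], tf.int32), [ques_limit])
    ans_char_idxs = tf.reshape(
        tf.decode_raw(feats["ans_char_idxs"], tf.int32), [ans_limit, char_limit])
    ques_char_idxs = tf.reshape(
        tf.decode_raw(feats["ques_char_idxs"], tf.int32), [ques_limit, char_limit])
    y = tf.decode_raw(feats["y"], tf.float64)  # np.array([y]) is written as float64
    return ans_idxs, ques_idxs, ans_char_idxs, ques_char_idxs, y, feats["id"]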
def save(filename, obj, message=None):
if message is not None:
print("Saving {}...".format(message))
with open(filename, "w") as fh:
json.dump(obj, fh)
def preproSemEval(config):
word_counter, char_counter = Counter(), Counter()
train_examples = process_file(
config.SemEval_train_file, "train", word_counter, char_counter, shuffle=True)
dev_examples = process_file(
config.SemEval_dev_file, "dev", word_counter, char_counter)
test_examples = process_file(
config.SemEval_test_file, "test", word_counter, char_counter)
word_emb_file = config.fasttext_file if config.fasttext else config.glove_word_file
char_emb_file = config.glove_char_file if config.pretrained_char else None
char_emb_size = config.glove_char_size if config.pretrained_char else None
char_emb_dim = config.glove_dim if config.pretrained_char else config.char_dim
word2idx_dict = None
if os.path.isfile(config.word2idx_file):
with open(config.word2idx_file, "r") as fh:
word2idx_dict = json.load(fh)
word_emb_mat, word2idx_dict = get_embedding(word_counter, "word", emb_file=word_emb_file,
size=config.glove_word_size, vec_size=config.glove_dim, token2idx_dict=word2idx_dict)
char2idx_dict = None
if os.path.isfile(config.char2idx_file):
with open(config.char2idx_file, "r") as fh:
char2idx_dict = json.load(fh)
char_emb_mat, char2idx_dict = get_embedding(
char_counter, "char", emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim, token2idx_dict=char2idx_dict)
build_features_SemEval(config, train_examples, "train",
config.train_record_file, word2idx_dict, char2idx_dict)
dev_meta = build_features_SemEval(config, dev_examples, "dev",
config.dev_record_file, word2idx_dict, char2idx_dict)
test_meta = build_features_SemEval(config, test_examples, "test",
config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)
save(config.word_emb_file, word_emb_mat, message="word embedding")
save(config.char_emb_file, char_emb_mat, message="char embedding")
save(config.dev_meta, dev_meta, message="dev meta")
save(config.word2idx_file, word2idx_dict, message="word2idx")
save(config.char2idx_file, char2idx_dict, message="char2idx")
save(config.test_meta, test_meta, message="test meta")
save("data/test.json", dev_examples, message="test example")
|
normal
|
{
"blob_id": "5cd9d4fe9889c4d53b50d86fa78ae84d0c242536",
"index": 3693,
"step-1": "<mask token>\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\n<mask token>\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 
'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-2": "<mask token>\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n 
context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, 
dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-3": "<mask token>\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n 
total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n 
train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-4": "<mask token>\nnlp = spacy.blank('en')\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for 
example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 
'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-5": "import tensorflow as tf\nimport random\nfrom tqdm import tqdm\nimport spacy\nimport ujson as json\nfrom collections import Counter\nimport numpy as np\nimport os.path\n\nnlp = spacy.blank(\"en\")\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print(\"Token {} cannot be found\".format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False):\n print(\"Generating {} examples...\".format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, \"r\") as fh:\n for l in fh:\n ques, ans, label = l.strip().split(\"\\t\")\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {\"ans_tokens\": ans_tokens,\n \"ans_chars\": ans_chars, \"ques_tokens\": ques_tokens,\n \"ques_chars\": ques_chars, \"y\":label, \"id\": total}\n \n examples.append(example)\n if random:\n random.shuffle(examples)\n print(\"{} questions in total\".format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None, vec_size=None, token2idx_dict=None):\n print(\"Generating {} embedding...\".format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, \"r\", encoding=\"utf-8\") as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = \"\".join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print(\"{} / {} tokens have corresponding {} embedding vector\".format(\n len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(\n scale=0.01) for _ in range(vec_size)]\n print(\"{} tokens have corresponding embedding vector\".format(\n len(filtered_elements)))\n\n NULL = \"--NULL--\"\n OOV = \"--OOV--\"\n token2idx_dict = {token: idx for idx, token in enumerate(\n embedding_dict.keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [0. for _ in range(vec_size)]\n embedding_dict[OOV] = [0. 
for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token]\n for token, idx in token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file, word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example[\"ans_tokens\"]) > ans_limit or len(example[\"ques_tokens\"]) > ques_limit\n\n print(\"Processing {} examples...\".format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n\n #if filter_func(example, is_test):\n # continue\n\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n \n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n\n for i, token in enumerate(example[\"ans_tokens\"][:ans_limit]):\n context_idxs[i] = _get_word(token)\n\n for i, token in enumerate(example[\"ques_tokens\"][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n\n for i, token in enumerate(example[\"ans_chars\"][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n\n for i, token in enumerate(example[\"ques_chars\"][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n\n label = example[\"y\"]\n y = float(label)\n\n record = tf.train.Example(features=tf.train.Features(feature={\n \"ans_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_idxs.tostring()])),\n \"ques_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring()])),\n \"ans_char_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_char_idxs.tostring()])),\n \"ques_char_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_char_idxs.tostring()])),\n \"y\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).tostring()])),\n \"id\": tf.train.Feature(int64_list=tf.train.Int64List(value=[example[\"id\"]]))\n }))\n writer.write(record.SerializeToString())\n print(\"Build {} / {} instances of features in total\".format(total, total_))\n meta[\"total\"] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print(\"Saving {}...\".format(message))\n with open(filename, \"w\") as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(\n config.SemEval_train_file, \"train\", word_counter, char_counter, shuffle=True)\n dev_examples = process_file(\n config.SemEval_dev_file, \"dev\", word_counter, char_counter)\n test_examples = process_file(\n config.SemEval_test_file, \"test\", word_counter, char_counter)\n\n word_emb_file = config.fasttext_file if config.fasttext else config.glove_word_file\n char_emb_file = 
config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = config.glove_dim if config.pretrained_char else config.char_dim\n\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, \"r\") as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, \"word\", emb_file=word_emb_file,\n size=config.glove_word_size, vec_size=config.glove_dim, token2idx_dict=word2idx_dict)\n\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, \"r\") as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(\n char_counter, \"char\", emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim, token2idx_dict=char2idx_dict)\n\n build_features_SemEval(config, train_examples, \"train\",\n config.train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, \"dev\",\n config.dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, \"test\",\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n\n save(config.word_emb_file, word_emb_mat, message=\"word embedding\")\n save(config.char_emb_file, char_emb_mat, message=\"char embedding\")\n save(config.dev_meta, dev_meta, message=\"dev meta\")\n save(config.word2idx_file, word2idx_dict, message=\"word2idx\")\n save(config.char2idx_file, char2idx_dict, message=\"char2idx\")\n save(config.test_meta, test_meta, message=\"test meta\")\n save(\"data/test.json\", dev_examples, message=\"test example\") \n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
import boring.dialog
import boring.form
FORMSTRING = '''
Project name@string
Width@int|Height@int
Background color@color
Fullscreen@check
'''
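# form spec (layout inferred from the labels and the initial_values order below):
# one field per line as label@type; '|' appears to place two fields on one row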
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
def body(self, master):
initial_values = [
'',
640, 480,
'#dadada',
False
]
if self._dict:
initial_values = [
self._dict.get('name'),
self._dict.get('width'), self._dict.get('height'),
self._dict.get('bgcolor'),
self._dict.get('fullscreen')
]
self.form = boring.form.FormFrame(master, FORMSTRING, initial_values=initial_values, title='%s Project' % ('Edit' if self._dict else 'New'))
self.form.grid(pady=10, padx=10)
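        # hand back the first input so the dialog gives it initial focus
        # (the usual tkinter dialog body() convention)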
return self.form.inputs[0]
def apply(self):
'''
called when ok button is pressed
'''
self.output = {
'name': self.form.values[0],
'width': self.form.values[1],
'height': self.form.values[2],
'bgcolor': self.form.values[3],
'fullscreen': self.form.values[4]
}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self,
title='Wrong data',
message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self,
title='Project title',
message='Invalid project name')
return False
return True
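
# Usage sketch (assumes a Tk root window from the boring toolkit; the dialog is modal):
#   win = NewProjectWindow(root)
#   if win.output:
#       print(win.output['name'], win.output['width'], win.output['height'])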
|
normal
|
{
"blob_id": "76420ec1b37d4b9b85f35764a7f8a0e1f19a15dd",
"index": 5745,
"step-1": "<mask token>\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n <mask token>\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-2": "<mask token>\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-3": "<mask token>\nFORMSTRING = \"\"\"\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n\"\"\"\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-4": "import boring.dialog\nimport boring.form\nFORMSTRING = \"\"\"\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n\"\"\"\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-5": "import boring.dialog\nimport boring.form\n\nFORMSTRING = '''\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n'''\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = [\n '',\n 640, 480,\n '#dadada',\n False\n ]\n if self._dict:\n initial_values = [\n self._dict.get('name'),\n self._dict.get('width'), self._dict.get('height'),\n self._dict.get('bgcolor'),\n self._dict.get('fullscreen')\n ]\n self.form = boring.form.FormFrame(master, FORMSTRING, initial_values=initial_values, title='%s Project' % ('Edit' if self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n\n return self.form.inputs[0]\n\n def apply(self):\n '''\n called when ok button is pressed\n '''\n self.output = {\n 'name': self.form.values[0],\n 'width': self.form.values[1],\n 'height': self.form.values[2],\n 'bgcolor': self.form.values[3],\n 'fullscreen': self.form.values[4]\n }\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self,\n title='Wrong data',\n message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self,\n title='Project title',\n message='Invalid project name')\n return False\n return True",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.contrib import admin
from django_summernote.admin import SummernoteModelAdmin
from .models import ArticlePost
# Register your models here.
class SomeModelAdmin(SummernoteModelAdmin): # instead of ModelAdmin
summernote_fields = '__all__'
admin.site.register(ArticlePost, SomeModelAdmin)
|
normal
|
{
"blob_id": "a86b64ccd0dab4ab70ca9c2b7625fb34afec3794",
"index": 63,
"step-1": "<mask token>\n\n\nclass SomeModelAdmin(SummernoteModelAdmin):\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SomeModelAdmin(SummernoteModelAdmin):\n summernote_fields = '__all__'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SomeModelAdmin(SummernoteModelAdmin):\n summernote_fields = '__all__'\n\n\nadmin.site.register(ArticlePost, SummernoteModelAdmin)\n",
"step-4": "from django.contrib import admin\nfrom django_summernote.admin import SummernoteModelAdmin\nfrom .models import ArticlePost\n\n\nclass SomeModelAdmin(SummernoteModelAdmin):\n summernote_fields = '__all__'\n\n\nadmin.site.register(ArticlePost, SummernoteModelAdmin)\n",
"step-5": "from django.contrib import admin\nfrom django_summernote.admin import SummernoteModelAdmin\nfrom .models import ArticlePost\n# Register your models here.\n\nclass SomeModelAdmin(SummernoteModelAdmin): # instead of ModelAdmin\n summernote_fields = '__all__'\n\nadmin.site.register(ArticlePost, SummernoteModelAdmin)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from __future__ import with_statement # this is to work with python2.5
from pyps import workspace, module
def invoke_function(fu, ws):
return fu._get_code(activate = module.print_code_out_regions)
if __name__=="__main__":
workspace.delete('paws_out_regions')
with workspace('paws_out_regions.c',name='paws_out_regions',deleteOnClose=True) as ws:
for fu in ws.fun:
print invoke_function(fu, ws)
|
normal
|
{
"blob_id": "299432b095f16c3cb4949319705800d06f534cf9",
"index": 1017,
"step-1": "from __future__ import with_statement # this is to work with python2.5\nfrom pyps import workspace, module\n\ndef invoke_function(fu, ws):\n return fu._get_code(activate = module.print_code_out_regions)\n\nif __name__==\"__main__\":\n\tworkspace.delete('paws_out_regions')\n\twith workspace('paws_out_regions.c',name='paws_out_regions',deleteOnClose=True) as ws:\n \tfor fu in ws.fun:\n \tprint invoke_function(fu, ws)\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""Generic utilities module"""
from . import average
from . import extract_ocean_scalar
from . import git
from . import gmeantools
from . import merge
from . import netcdf
from . import xrtools
__all__ = [
"average",
"extract_ocean_scalar",
"git",
"gmeantools",
"merge",
"netcdf",
"xrtools",
]
|
normal
|
{
"blob_id": "ab6450ee9038e0c58ca8becf6d2518d5e00b9c90",
"index": 9393,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['average', 'extract_ocean_scalar', 'git', 'gmeantools', 'merge',\n 'netcdf', 'xrtools']\n",
"step-3": "<mask token>\nfrom . import average\nfrom . import extract_ocean_scalar\nfrom . import git\nfrom . import gmeantools\nfrom . import merge\nfrom . import netcdf\nfrom . import xrtools\n__all__ = ['average', 'extract_ocean_scalar', 'git', 'gmeantools', 'merge',\n 'netcdf', 'xrtools']\n",
"step-4": "\"\"\"Generic utilities module\"\"\"\n\nfrom . import average\nfrom . import extract_ocean_scalar\nfrom . import git\nfrom . import gmeantools\nfrom . import merge\nfrom . import netcdf\nfrom . import xrtools\n\n__all__ = [\n \"average\",\n \"extract_ocean_scalar\",\n \"git\",\n \"gmeantools\",\n \"merge\",\n \"netcdf\",\n \"xrtools\",\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import logging
import search_yelp
import uuid
from apiclient import errors
from google.appengine.api import taskqueue
def insert_worker(mirror_service, food_type=None):
logging.info('zip1 food_type %s' % food_type)
try:
location = mirror_service.locations().get(id='latest').execute()
latlong = '%s,%s' % (location.get('latitude'), location.get('longitude'))
except errors.HttpError, e:
latlong = None
logging.info('location %s' % latlong)
response = search_yelp.make_request(latlong, term=food_type)
body = {
'menuItems': [
{'action':'DELETE'}
]
}
is_first = True
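    # the first card acts as the bundle cover; later cards get call/navigate menu items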
for i in xrange(4):
#body['bundleId'] = str(uuid.uuid1()).replace('-','')
body['bundleId'] = 'bundle6'
#body['bundleId'] = 'bundleId3'
body['isBundleCover'] = is_first
if is_first:
body['html'] = '<article class=\"photo\">\n<img src=\"https://glasseats.appspot.com/static/images/GlassHomeRestaurantResults.png\" width=\"100%\" height=\"100%\">\n <div class=\"photo-overlay\"/>\n <section>\n</section>\n</article>\n'
else:
body['menuItems'] = [
{'action':'VOICE_CALL'},
{'action':'NAVIGATE'}
]
resto = response.values()[2][i]
try:
body['creator'] = {}
body['creator']['phoneNumber'] = resto['phone']
except KeyError:
logging.info('no phone_number')
try:
body['location'] = {}
body['location']['address'] = resto['location']['postal_code']
except KeyError:
logging.info('no location')
try:
image_url = resto['image_url'].replace('ms.jpg', 'l.jpg')
except KeyError:
image_url = None
try:
address = resto['location']['display_address'][0] +','+resto['location']['city']
except KeyError:
address = ''
try:
category = resto['categories'][0][0]
except KeyError:
category = ''
try:
phone_number = resto['phone']
except KeyError:
phone_number = ''
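        # note: address, category and phone_number are collected but never used below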
try:
rating_url = resto['rating_img_url']
except KeyError:
rating_url = ''
        # food_type does not change the card markup, so the duplicated branches
        # collapse to a single assignment per case
        if image_url:
            body['html'] = '<article class=\"photo\">\n<img src=\"' + image_url + '\" width=\"100%\" height=\"100%\">\n <div class=\"photo-overlay\"/>\n <section>\n <p class=\"align-center text-auto-size\">' + resto['name'] + '<br /><img src=\"'+rating_url+'\" /></p>\n </section>\n</article>\n'
        else:
            body['html'] = '<article class=\"photo\">\n <div class=\"photo-overlay\"/>\n <section>\n <p class=\"align-center text-auto-size\">' + resto['name'] + '<br /><img src=\"'+rating_url+'\" /></p>\n </section>\n</article>\n'
is_first = False
mirror_service.timeline().insert(body=body).execute()
try:
del body['html']
except KeyError:
pass
try:
del body['text']
except KeyError:
pass
logging.info('zip3')
def insert_handler(food_type, user_id):
'''Inserting the yelp bundle into the timeline'''
taskqueue.add(url='/yelp_item', params={'user_id':user_id, 'food_type':food_type})
return 'The bundle item has been inserted'
|
normal
|
{
"blob_id": "22c0b8c8d598bb91bb2333343aad285bbcb4ee5b",
"index": 2669,
"step-1": "import logging\nimport search_yelp\nimport uuid\nfrom apiclient import errors\nfrom google.appengine.api import taskqueue\n\n\n\ndef insert_worker(mirror_service, food_type=None):\n\n logging.info('zip1 food_type %s' % food_type)\n try:\n location = mirror_service.locations().get(id='latest').execute()\n latlong = '%s,%s' % (location.get('latitude'), location.get('longitude'))\n except errors.HttpError, e:\n latlong = None\n\n logging.info('location %s' % latlong)\n\n response = search_yelp.make_request(latlong, term=food_type)\n\n body = { \n 'menuItems': [\n {'action':'DELETE'}\n ]\n }\n\n is_first = True\n\n\n\n for i in xrange(4):\n #body['bundleId'] = str(uuid.uuid1()).replace('-','')\n body['bundleId'] = 'bundle6'\n\n #body['bundleId'] = 'bundleId3'\n body['isBundleCover'] = is_first\n\n\n if is_first:\n body['html'] = '<article class=\\\"photo\\\">\\n<img src=\\\"https://glasseats.appspot.com/static/images/GlassHomeRestaurantResults.png\\\" width=\\\"100%\\\" height=\\\"100%\\\">\\n <div class=\\\"photo-overlay\\\"/>\\n <section>\\n</section>\\n</article>\\n'\n\n else:\n body['menuItems'] = [\n {'action':'VOICE_CALL'},\n {'action':'NAVIGATE'}\n ]\n resto = response.values()[2][i]\n\n try:\n body['creator'] = {}\n body['creator']['phoneNumber'] = resto['phone']\n except KeyError:\n logging.info('no phone_number')\n\n try:\n body['location'] = {}\n body['location']['address'] = resto['location']['postal_code']\n except KeyError:\n logging.info('no location')\n\n\n try:\n image_url = resto['image_url'].replace('ms.jpg', 'l.jpg')\n except KeyError:\n image_url = None\n try:\n address = resto['location']['display_address'][0] +','+resto['location']['city']\n except KeyError:\n address = ''\n\n try:\n category = resto['categories'][0][0]\n except KeyError:\n category = ''\n\n try:\n phone_number = resto['phone']\n except KeyError:\n phone_number = ''\n\n try:\n rating_url = resto['rating_img_url']\n except KeyError:\n rating_url = ''\n\n if image_url:\n if food_type:\n body['html'] = '<article class=\\\"photo\\\">\\n<img src=\\\"' + image_url + '\\\" width=\\\"100%\\\" height=\\\"100%\\\">\\n <div class=\\\"photo-overlay\\\"/>\\n <section>\\n <p class=\\\"align-center text-auto-size\\\">' + resto['name'] + '<br /><img src=\\\"'+rating_url+'\\\" /></p>\\n </section>\\n</article>\\n'\n\n else: \n body['html'] = '<article class=\\\"photo\\\">\\n<img src=\\\"' + image_url + '\\\" width=\\\"100%\\\" height=\\\"100%\\\">\\n <div class=\\\"photo-overlay\\\"/>\\n <section>\\n <p class=\\\"align-center text-auto-size\\\">' + resto['name'] + '<br /><img src=\\\"'+rating_url+'\\\" /></p>\\n </section>\\n</article>\\n'\n else:\n if food_type:\n body['html'] = '<article class=\\\"photo\\\">\\n <div class=\\\"photo-overlay\\\"/>\\n <section>\\n <p class=\\\"align-center text-auto-size\\\">' + resto['name'] + '<br /><img src=\\\"'+rating_url+'\\\" /></p>\\n </section>\\n</article>\\n'\n\n else: \n body['html'] = '<article class=\\\"photo\\\">\\n <div class=\\\"photo-overlay\\\"/>\\n <section>\\n <p class=\\\"align-center text-auto-size\\\">' + resto['name'] + '<br /><img src=\\\"'+rating_url+'\\\" /></p>\\n </section>\\n</article>\\n'\n\n\n\n is_first = False\n\n mirror_service.timeline().insert(body=body).execute()\n\n try:\n del body['html']\n except KeyError:\n pass\n try:\n del body['text']\n except KeyError:\n pass\n\n\n logging.info('zip3')\n\n\n\ndef insert_handler(food_type, user_id):\n '''Inserting the yelp bundle into the timeline'''\n taskqueue.add(url='/yelp_item', 
params={'user_id':user_id, 'food_type':food_type})\n\n return 'The bundle item has been inserted'\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import numdifftools as nd
from scipy import stats
from scipy import optimize
from functools import partial
class TCRPowerCalculator:
def __init__(self, pcmodel):
self.pcmodel = pcmodel
self.predict_variance = self.pcmodel.predict_variance
self.predict_mean = self.pcmodel.predict_mean
self.get_prediction_interval = self.pcmodel.get_prediction_interval
self.predict_detection_probability = self.pcmodel.predict_detection_probability
	#possible TODO: Parse this method out into a new 2-step model class
def predict_detection_probability_2step(self, tcr_frequency, num_reads, num_cells, detect_thresh = 1):
"""
2-step detection probability model where
1) Num_cells_TCR is sampled first from the blood (Poisson model)
2) The RNA detection probability is calculated (Negbin model).
The num_cells_TCR is marginalized with the num_cells parameter as the upper limit
on the number of cells that could be sampled for a given TCR.
"""
mu_cells = tcr_frequency*num_cells
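        # chance that the blood sample contains zero cells of this clone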
p0_poisson = stats.poisson.pmf(0, mu_cells)
num_cells_TCR = np.arange(1, num_cells + 1)[:,np.newaxis]
#Step 1 Poisson
p1 = stats.poisson.pmf(num_cells_TCR, mu_cells)
#Get rid of 0 probability cell counts
num_cells_TCR = num_cells_TCR[p1 >0]
p1 = p1[p1 >0]
#Step 2 Negbin
mu_reads = self.pcmodel.predict_mean(num_cells_TCR/num_cells, num_reads)
p2 = np.zeros(p1.shape)
for i in np.arange(detect_thresh):
p2 += self.pcmodel.pmf(mu_reads, count = i)
p0_2step = np.dot(p1.squeeze(), p2.squeeze())
#If 0 cells from Poisson model then automatically get 0 reads
return 1.0 - p0_poisson - p0_2step
def get_limit_of_detection_tcrfreq(self, num_reads, conf_level = 0.95):
opt_f = partial(self.pcmodel.predict_detection_probability, num_reads = num_reads)
opt_res = optimize.root_scalar(lambda freq: opt_f(freq) - conf_level,
method = "brentq",
bracket = [1.0e-16, 1])
return opt_res.root
def get_limit_of_detection_nreads(self, tcr_freq, conf_level = 0.95):
opt_nreads = partial(self.pcmodel.predict_detection_probability, tcr_frequencies = tcr_freq)
opt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads = nreads) - conf_level,
method = "secant",
x0 = 1.0e-16,
x1 = 1)
return int(np.around(opt_res.root))
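
# Usage sketch (assumes a fitted read-count model `pcmodel` exposing the methods above):
#   calc = TCRPowerCalculator(pcmodel)
#   freq_lod = calc.get_limit_of_detection_tcrfreq(num_reads=10**6)
#   reads_lod = calc.get_limit_of_detection_nreads(tcr_freq=1e-6)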
|
normal
|
{
"blob_id": "d327151c9659078e12e4aca46631de33e7ca4dcf",
"index": 167,
"step-1": "<mask token>\n\n\nclass TCRPowerCalculator:\n <mask token>\n\n def predict_detection_probability_2step(self, tcr_frequency, num_reads,\n num_cells, detect_thresh=1):\n \"\"\"\n\t\t2-step detection probability model where \n\t\t\n\t\t1) Num_cells_TCR is sampled first from the blood (Poisson model)\n\t\t2) The RNA detection probability is calculated (Negbin model).\n\t\t\n\t\tThe num_cells_TCR is marginalized with the num_cells parameter as the upper limit \n\t\ton the number of cells that could be sampled for a given TCR.\n\t\t\"\"\"\n mu_cells = tcr_frequency * num_cells\n p0_poisson = stats.poisson.pmf(0, mu_cells)\n num_cells_TCR = np.arange(1, num_cells + 1)[:, np.newaxis]\n p1 = stats.poisson.pmf(num_cells_TCR, mu_cells)\n num_cells_TCR = num_cells_TCR[p1 > 0]\n p1 = p1[p1 > 0]\n mu_reads = self.pcmodel.predict_mean(num_cells_TCR / num_cells,\n num_reads)\n p2 = np.zeros(p1.shape)\n for i in np.arange(detect_thresh):\n p2 += self.pcmodel.pmf(mu_reads, count=i)\n p0_2step = np.dot(p1.squeeze(), p2.squeeze())\n return 1.0 - p0_poisson - p0_2step\n <mask token>\n\n def get_limit_of_detection_nreads(self, tcr_freq, conf_level=0.95):\n opt_nreads = partial(self.pcmodel.predict_detection_probability,\n tcr_frequencies=tcr_freq)\n opt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads=\n nreads) - conf_level, method='secant', x0=1e-16, x1=1)\n return int(np.around(opt_res.root))\n",
"step-2": "<mask token>\n\n\nclass TCRPowerCalculator:\n\n def __init__(self, pcmodel):\n self.pcmodel = pcmodel\n self.predict_variance = self.pcmodel.predict_variance\n self.predict_mean = self.pcmodel.predict_mean\n self.get_prediction_interval = self.pcmodel.get_prediction_interval\n self.predict_detection_probability = (self.pcmodel.\n predict_detection_probability)\n\n def predict_detection_probability_2step(self, tcr_frequency, num_reads,\n num_cells, detect_thresh=1):\n \"\"\"\n\t\t2-step detection probability model where \n\t\t\n\t\t1) Num_cells_TCR is sampled first from the blood (Poisson model)\n\t\t2) The RNA detection probability is calculated (Negbin model).\n\t\t\n\t\tThe num_cells_TCR is marginalized with the num_cells parameter as the upper limit \n\t\ton the number of cells that could be sampled for a given TCR.\n\t\t\"\"\"\n mu_cells = tcr_frequency * num_cells\n p0_poisson = stats.poisson.pmf(0, mu_cells)\n num_cells_TCR = np.arange(1, num_cells + 1)[:, np.newaxis]\n p1 = stats.poisson.pmf(num_cells_TCR, mu_cells)\n num_cells_TCR = num_cells_TCR[p1 > 0]\n p1 = p1[p1 > 0]\n mu_reads = self.pcmodel.predict_mean(num_cells_TCR / num_cells,\n num_reads)\n p2 = np.zeros(p1.shape)\n for i in np.arange(detect_thresh):\n p2 += self.pcmodel.pmf(mu_reads, count=i)\n p0_2step = np.dot(p1.squeeze(), p2.squeeze())\n return 1.0 - p0_poisson - p0_2step\n <mask token>\n\n def get_limit_of_detection_nreads(self, tcr_freq, conf_level=0.95):\n opt_nreads = partial(self.pcmodel.predict_detection_probability,\n tcr_frequencies=tcr_freq)\n opt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads=\n nreads) - conf_level, method='secant', x0=1e-16, x1=1)\n return int(np.around(opt_res.root))\n",
"step-3": "<mask token>\n\n\nclass TCRPowerCalculator:\n\n def __init__(self, pcmodel):\n self.pcmodel = pcmodel\n self.predict_variance = self.pcmodel.predict_variance\n self.predict_mean = self.pcmodel.predict_mean\n self.get_prediction_interval = self.pcmodel.get_prediction_interval\n self.predict_detection_probability = (self.pcmodel.\n predict_detection_probability)\n\n def predict_detection_probability_2step(self, tcr_frequency, num_reads,\n num_cells, detect_thresh=1):\n \"\"\"\n\t\t2-step detection probability model where \n\t\t\n\t\t1) Num_cells_TCR is sampled first from the blood (Poisson model)\n\t\t2) The RNA detection probability is calculated (Negbin model).\n\t\t\n\t\tThe num_cells_TCR is marginalized with the num_cells parameter as the upper limit \n\t\ton the number of cells that could be sampled for a given TCR.\n\t\t\"\"\"\n mu_cells = tcr_frequency * num_cells\n p0_poisson = stats.poisson.pmf(0, mu_cells)\n num_cells_TCR = np.arange(1, num_cells + 1)[:, np.newaxis]\n p1 = stats.poisson.pmf(num_cells_TCR, mu_cells)\n num_cells_TCR = num_cells_TCR[p1 > 0]\n p1 = p1[p1 > 0]\n mu_reads = self.pcmodel.predict_mean(num_cells_TCR / num_cells,\n num_reads)\n p2 = np.zeros(p1.shape)\n for i in np.arange(detect_thresh):\n p2 += self.pcmodel.pmf(mu_reads, count=i)\n p0_2step = np.dot(p1.squeeze(), p2.squeeze())\n return 1.0 - p0_poisson - p0_2step\n\n def get_limit_of_detection_tcrfreq(self, num_reads, conf_level=0.95):\n opt_f = partial(self.pcmodel.predict_detection_probability,\n num_reads=num_reads)\n opt_res = optimize.root_scalar(lambda freq: opt_f(freq) -\n conf_level, method='brentq', bracket=[1e-16, 1])\n return opt_res.root\n\n def get_limit_of_detection_nreads(self, tcr_freq, conf_level=0.95):\n opt_nreads = partial(self.pcmodel.predict_detection_probability,\n tcr_frequencies=tcr_freq)\n opt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads=\n nreads) - conf_level, method='secant', x0=1e-16, x1=1)\n return int(np.around(opt_res.root))\n",
"step-4": "import numpy as np\nimport numdifftools as nd\nfrom scipy import stats\nfrom scipy import optimize\nfrom functools import partial\n\n\nclass TCRPowerCalculator:\n\n def __init__(self, pcmodel):\n self.pcmodel = pcmodel\n self.predict_variance = self.pcmodel.predict_variance\n self.predict_mean = self.pcmodel.predict_mean\n self.get_prediction_interval = self.pcmodel.get_prediction_interval\n self.predict_detection_probability = (self.pcmodel.\n predict_detection_probability)\n\n def predict_detection_probability_2step(self, tcr_frequency, num_reads,\n num_cells, detect_thresh=1):\n \"\"\"\n\t\t2-step detection probability model where \n\t\t\n\t\t1) Num_cells_TCR is sampled first from the blood (Poisson model)\n\t\t2) The RNA detection probability is calculated (Negbin model).\n\t\t\n\t\tThe num_cells_TCR is marginalized with the num_cells parameter as the upper limit \n\t\ton the number of cells that could be sampled for a given TCR.\n\t\t\"\"\"\n mu_cells = tcr_frequency * num_cells\n p0_poisson = stats.poisson.pmf(0, mu_cells)\n num_cells_TCR = np.arange(1, num_cells + 1)[:, np.newaxis]\n p1 = stats.poisson.pmf(num_cells_TCR, mu_cells)\n num_cells_TCR = num_cells_TCR[p1 > 0]\n p1 = p1[p1 > 0]\n mu_reads = self.pcmodel.predict_mean(num_cells_TCR / num_cells,\n num_reads)\n p2 = np.zeros(p1.shape)\n for i in np.arange(detect_thresh):\n p2 += self.pcmodel.pmf(mu_reads, count=i)\n p0_2step = np.dot(p1.squeeze(), p2.squeeze())\n return 1.0 - p0_poisson - p0_2step\n\n def get_limit_of_detection_tcrfreq(self, num_reads, conf_level=0.95):\n opt_f = partial(self.pcmodel.predict_detection_probability,\n num_reads=num_reads)\n opt_res = optimize.root_scalar(lambda freq: opt_f(freq) -\n conf_level, method='brentq', bracket=[1e-16, 1])\n return opt_res.root\n\n def get_limit_of_detection_nreads(self, tcr_freq, conf_level=0.95):\n opt_nreads = partial(self.pcmodel.predict_detection_probability,\n tcr_frequencies=tcr_freq)\n opt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads=\n nreads) - conf_level, method='secant', x0=1e-16, x1=1)\n return int(np.around(opt_res.root))\n",
"step-5": "import numpy as np \nimport numdifftools as nd\nfrom scipy import stats\nfrom scipy import optimize\nfrom functools import partial\n\nclass TCRPowerCalculator:\n\tdef __init__(self, pcmodel):\n\t\tself.pcmodel = pcmodel\n\t\tself.predict_variance = self.pcmodel.predict_variance\n\t\tself.predict_mean = self.pcmodel.predict_mean\n\t\tself.get_prediction_interval = self.pcmodel.get_prediction_interval\n\t\tself.predict_detection_probability = self.pcmodel.predict_detection_probability\n\n\t#possivle TODO: Parse this method out into a new 2-step model class\n\tdef predict_detection_probability_2step(self, tcr_frequency, num_reads, num_cells, detect_thresh = 1):\t\t\n\t\t\"\"\"\n\t\t2-step detection probability model where \n\t\t\n\t\t1) Num_cells_TCR is sampled first from the blood (Poisson model)\n\t\t2) The RNA detection probability is calculated (Negbin model).\n\t\t\n\t\tThe num_cells_TCR is marginalized with the num_cells parameter as the upper limit \n\t\ton the number of cells that could be sampled for a given TCR.\n\t\t\"\"\"\n\n\t\tmu_cells = tcr_frequency*num_cells\n\t\tp0_poisson = stats.poisson.pmf(0, mu_cells)\n\t\t\n\t\tnum_cells_TCR = np.arange(1, num_cells + 1)[:,np.newaxis]\n\t\t\n\t\t#Step 1 Poisson\n\t\tp1 = stats.poisson.pmf(num_cells_TCR, mu_cells)\n\n\t\t#Get rid of 0 probability cell counts\n\t\tnum_cells_TCR = num_cells_TCR[p1 >0]\n\t\tp1 = p1[p1 >0]\n\n\t\t#Step 2 Negbin\n\t\tmu_reads = self.pcmodel.predict_mean(num_cells_TCR/num_cells, num_reads)\n\t\t\t\t\n\t\tp2 = np.zeros(p1.shape)\n\t\tfor i in np.arange(detect_thresh):\n\t\t\tp2 += self.pcmodel.pmf(mu_reads, count = i)\n\n\t\tp0_2step = np.dot(p1.squeeze(), p2.squeeze())\n\n\t\t#If 0 cells from Poisson model then automatically get 0 reads\n\t\treturn 1.0 - p0_poisson - p0_2step\n\t\n\tdef get_limit_of_detection_tcrfreq(self, num_reads, conf_level = 0.95):\n\t\topt_f = partial(self.pcmodel.predict_detection_probability, num_reads = num_reads) \n\n\t\topt_res = optimize.root_scalar(lambda freq: opt_f(freq) - conf_level,\n\t\t \t\t\t\t\t\t\t\tmethod = \"brentq\",\n\t\t \t\t\t\t\t\t\t\tbracket = [1.0e-16, 1])\n\t\treturn opt_res.root\n\n\tdef get_limit_of_detection_nreads(self, tcr_freq, conf_level = 0.95):\n\t\topt_nreads = partial(self.pcmodel.predict_detection_probability, tcr_frequencies = tcr_freq) \n\n\t\topt_res = optimize.root_scalar(lambda nreads: opt_nreads(num_reads = nreads) - conf_level,\n\t\t\t\t\t\t\t\t\t\tmethod = \"secant\",\n\t\t\t\t\t\t\t\t\t\tx0 = 1.0e-16,\n\t\t\t\t\t\t\t\t\t\tx1 = 1)\n\t\t\n\t\treturn int(np.around(opt_res.root))",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import csv
import json
from urllib import request
from urllib.error import HTTPError
from urllib.parse import urljoin, urlparse, quote_plus
from optparse import OptionParser
HEADER = ["id", "module", "channel", "type", "value", "datetime"]
def parse_options():
parser = OptionParser()
parser.add_option("-H", "--host")
parser.add_option("-t", "--token")
parser.add_option("-r", "--recursive", action="store_true", default=False)
return parser.parse_args()
def write_csv(url, recursive=False, writer=None, token=""):
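    # fetch one page; in recursive mode, keep following the pagination cursor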
response = fetch(url)
if recursive:
write_rows(writer, response)
cursor = next_cursor(response)
if cursor is not None:
print(f"next cursor exists...{cursor}")
ret = urlparse(url)
next_url = f"{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}"
write_csv(next_url, recursive=True, writer=writer, token=token)
else:
write_rows(writer, response)
def fetch(url):
print(f"url...{url}\n")
urlData = request.urlopen(url)
data = urlData.read()
encoding = urlData.info().get_content_charset("utf-8")
return json.loads(data.decode(encoding))
def write_rows(writer, response):
for msg in response["results"]:
values = [msg[k] for k in HEADER]
writer.writerow(values)
def next_cursor(response):
return response["meta"]["cursor"]
if __name__ == "__main__":
opt, args = parse_options()
if opt.host is not None:
url = urljoin(f"https://{opt.host}",
f"datastore/v1/channels?token={opt.token}")
else:
url = f"https://api.sakura.io/datastore/v1/channels?token={opt.token}"
f = open('./datastore.csv', 'w')
writer = csv.writer(f, lineterminator="\n")
# write header
writer.writerow(HEADER)
write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)
f.close()
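
# Example invocation (script filename hypothetical):
#   python datastore_dump.py -t YOUR_TOKEN -r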
|
normal
|
{
"blob_id": "b47f15a79f7a82304c2be6af00a5854ff0f6ad3e",
"index": 6987,
"step-1": "<mask token>\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\n<mask token>\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-3": "<mask token>\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-4": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-5": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\n\nHEADER = [\"id\", \"module\", \"channel\", \"type\", \"value\", \"datetime\"]\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option(\"-H\", \"--host\")\n parser.add_option(\"-t\", \"--token\")\n parser.add_option(\"-r\", \"--recursive\", action=\"store_true\", default=False)\n return parser.parse_args()\n\ndef write_csv(url, recursive=False, writer=None, token=\"\"):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f\"next cursor exists...{cursor}\")\n ret = urlparse(url)\n next_url = f\"{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}\"\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\ndef fetch(url):\n print(f\"url...{url}\\n\")\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset(\"utf-8\")\n return json.loads(data.decode(encoding))\n\ndef write_rows(writer, response):\n for msg in response[\"results\"]:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\ndef next_cursor(response):\n return response[\"meta\"][\"cursor\"]\n\nif __name__ == \"__main__\":\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f\"https://{opt.host}\",\n f\"datastore/v1/channels?token={opt.token}\")\n else:\n url = f\"https://api.sakura.io/datastore/v1/channels?token={opt.token}\"\n f = open('./datastore.csv', 'w')\n\n writer = csv.writer(f, lineterminator=\"\\n\")\n # write header\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
#!ipython3
pi_f = 0.1415926
pi = []
for i in range(10):
pi.append(str(pi_f * i*16)[0])
print(pi)
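# (early attempt: scaling by i*16 gives leading decimal digits, not the hex
#  digits of pi; the repeated remainder-times-16 expression further down is right)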
def convertBase(digits, baseA, baseB, precisionB):
    # stub -- the conversion logic was never written, so 'output' is undefined here
    return output
#0.56 b8 to b10
#(1/base) ^ (i+1) *x
to10('56')
test = list(str(56))
test
27 9 3
33
0.3212 * 3
4*1.5
0.3212* 4/6
3*3**-1
2*3**-2
1*3**-3
2*3**-4
# 2*10
# 16+4 = 0x14
# 0x16 = 16 + 6 = 22
# 0x22 / 0xa = 2 r 2
16*2+2
#34/10 = 3 r 4
30%16
#14
# 1*16 + 14
# 14 = 0xE
# 1*16+14 = 0x1E
0x3/0xA
# 3/10 = 0.3
# 3/10 = 0 r 3
# Keep dividing by the new base until only an indivisible remainder is left.
# Each division produces a whole-number quotient.
# That quotient is the next number, which is divided again in turn.
# to base-3
0x2BA
16**3
#schema
4096 256 16 1
# only 256 and below are of interest here
2*256 + 0xB*16 + 0xA*1
11*16
512+ 10+ 176 = 698
0x2BA = 2*256 + B*16 + A*1 = 698
698/3
0x2BA%0x03
0x2B8/0x03
232/16
16*14+8
0xe8%3
0xe7/3
77%3
75/3
25%3
24/3
8%3
6/3
2%3
0/3
# mod's above order:
# 212122
# reversed, true order:
# 221212
# base-8 to base-10
0o72
0o72%10
0o62/10
0o5%10
0o0/10
0o0.56
0o12
56%12
48/12
4%12
0/12
0.5/0.12
5*8**-1
6*8**-2
7*8**1
2*8**0
0o56 to 0x...
0.625/16
= 0.0390625
import math
def runMult(fraction, baseB=16):
output = []
return mult(fraction, baseB, output)
def mult(fraction, baseB, output):
'''
only base-16 now!
'''
prod = fraction * float(baseB)
print("prod: ",prod)
int_prod = int(math.floor(prod))
if int_prod >= 1:
output.append(int_prod)
radix_right = prod - int_prod
print("radix_right: ", radix_right)
if radix_right == 0.0:
print("output: ", output)
return output
else:
        return mult(radix_right, baseB, output)
runMult(0.5)
runMult(0.56)
runMult(0.71875, 8)
runMult(0.1415926535)
p = math.pi-3
p
(((((((((((((((((((((((((p*16)-2)*16)-4)*16)-3)*16)-15)*16)-6)*16)-10)*16)-8)*16)-8)*16)-8)*16)-5)*16)-10)*16)-3)*16)
0.56
d = 5*8**-1 + 6*8**-2
((((d*16)-11)*16)-8)
11 = b
8 = 8
0o0.56 == 0x0.b8
#b16 to b26
0x0.243f6a8885a3
0.3HIGBEBOHK
def toDec(digits, baseA):
'''
takes fractional part as list
Example:
0.56 = [5,6]
toDec(56, 8)
out: 0.71875
'''
# digit_list = list(str(digits))
digit_list = digits
dec_list = []
# print(digit_list)
for i, d in enumerate(digit_list):
dec_d = float(d)*baseA**-(i+1)
dec_list.append(dec_d)
# print(dec_list)
output = 0.0
for i in dec_list:
output += i
return output
toDec([5,6], 8)
toDec([2,4,3,15,6,10,8,8,8,5,10,3], 16)
def toBase(input, baseA, baseB):
    # unfinished: only the to-decimal half exists; the decimal -> baseB step is missing
    dec = toDec(input, baseA)
0.3212 *3
0.9636
1.4040
0.404 *3
2.020
0.02 *3
0.1
0.1 *3
0.3 *3
1.3
1.3 *3
120011111...
# CORRECT !!! ################################################################
0.56 #base-8 multiplication, 10b10 = 12b8
0.56*12
50 60
5.6
0.75
6.2
7.14
0.14 * 12
0.04 *10 / 8
40 -> 50
0.50
0.1 *10 / 8
10 -> 12
1.2
1.2 + 0.5
1.7
0.7 * 12
7.0
10.6
0.6 *12
7.4
0.4*12
5.0
0.71875
|
normal
|
{
"blob_id": "cffc64970cb82072e5fb949f62e9778942b2be96",
"index": 8269,
"step-1": "#!ipython3\n\npi_f = 0.1415926\npi = []\nfor i in range(10):\n pi.append(str(pi_f * i*16)[0])\n\nprint(pi)\n\n\ndef convertBase(digits, baseA, baseB, precisionB):\n return output\n\n#0.56 b8 to b10\n#(1/base) ^ (i+1) *x\n\n\nto10('56')\n\ntest = list(str(56))\ntest\n\n27 9 3\n 33\n\n\n0.3212 * 3\n4*1.5\n0.3212* 4/6\n\n3*3**-1\n2*3**-2\n1*3**-3\n2*3**-4\n\n# 2*10 \n# 16+4 = 0x14\n# 0x16 = 16 + 6 = 22\n# 0x22 / 0xa = 2 r 2 \n16*2+2\n#34/10 = 3 r 4\n30%16\n#14\n# 1*16 + 14\n# 14 = 0xE\n# 1*16+14 = 0x1E\n\n0x3/0xA\n# 3/10 = 0.3\n# 3/10 = 0 r 3\n\n# Solange durch die neue basis teilen, bis ein unteilbarer rest übrig ist.\n# Diese Teilung bringt ganze Zahlen bei der Division hervor.\n# Diese ist die nächste Zahl, welche widerum geteilt wird.\n\n# to base-3\n0x2BA\n\n16**3\n#schema\n4096 256 16 1\n#hier nur 256 und weniger von interesse\n2*256 + 0xB*16 + 0xA*1\n11*16\n512+ 10+ 176 = 698\n0x2BA = 2*256 + B*16 + A*1 = 698\n\n698/3\n0x2BA%0x03\n0x2B8/0x03\n232/16\n16*14+8\n0xe8%3\n0xe7/3\n77%3\n75/3\n25%3\n24/3\n8%3\n6/3\n2%3\n0/3\n\n# mod's above order:\n# 212122\n# reversed, true order:\n# 221212\n\n# base-8 to base-10\n0o72\n\n0o72%10\n0o62/10\n0o5%10\n0o0/10\n\n\n0o0.56\n0o12\n56%12\n48/12\n4%12\n0/12\n\n0.5/0.12\n\n5*8**-1\n6*8**-2\n\n7*8**1\n2*8**0\n\n0o56 to 0x...\n\n0.625/16\n= 0.0390625\n\nimport math\ndef runMult(fraction, baseB=16):\n output = []\n return mult(fraction, baseB, output)\n\ndef mult(fraction, baseB, output):\n '''\n only base-16 now!\n '''\n prod = fraction * float(baseB)\n print(\"prod: \",prod)\n int_prod = int(math.floor(prod))\n if int_prod >= 1:\n output.append(int_prod)\n radix_right = prod - int_prod\n print(\"radix_right: \", radix_right)\n if radix_right == 0.0:\n print(\"output: \", output)\n return output\n else:\n mult(radix_right, baseB, output)\n\n(mult(0.5))\n(mult(0.56))\nrunMult(0.71875, 8)\nrunMult(0.1415926535)\n\np = math.pi-3\np\n(((((((((((((((((((((((((p*16)-2)*16)-4)*16)-3)*16)-15)*16)-6)*16)-10)*16)-8)*16)-8)*16)-8)*16)-5)*16)-10)*16)-3)*16)\n\n0.56\nd = 5*8**-1 + 6*8**-2\n((((d*16)-11)*16)-8)\n11 = b\n8 = 8\n\n0o0.56 == 0x0.b8\n\n#b16 to b26\n0x0.243f6a8885a3\n0.3HIGBEBOHK\n\n\ndef toDec(digits, baseA):\n '''\n takes fractional part as list\n Example:\n 0.56 = [5,6]\n toDec(56, 8)\n out: 0.71875\n '''\n # digit_list = list(str(digits))\n digit_list = digits\n dec_list = []\n # print(digit_list)\n for i, d in enumerate(digit_list):\n dec_d = float(d)*baseA**-(i+1)\n dec_list.append(dec_d)\n # print(dec_list)\n output = 0.0\n for i in dec_list:\n output += i\n return output\n\ntoDec([5,6], 8)\ntoDec([2,4,3,15,6,10,8,8,8,5,10,3], 16)\n\ndef toBase(input, baseA, baseB):\n dec = toDec(input, baseA)\n\n\n\n0.3212 *3\n0.9636\n1.4040\n0.404 *3\n2.020\n0.02 *3\n0.1\n0.1 *3\n0.3 *3\n1.3\n1.3 *3\n\n120011111...\n\n# CORRECT !!! ################################################################\n0.56 #base-8 multiplication, 10b10 = 12b8 \n0.56*12\n 50 60\n5.6\n0.75\n6.2\n7.14\n0.14 * 12\n 0.04 *10 / 8\n 40 -> 50\n 0.50\n 0.1 *10 / 8\n 10 -> 12\n 1.2\n 1.2 + 0.5\n 1.7\n0.7 * 12\n7.0\n10.6\n0.6 *12\n7.4\n0.4*12\n5.0\n\n0.71875\n\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# auther : xiaojinsong([email protected])
parts = ['Is', 'Chicago', 'Not', 'Chicago?']
data = ['ACME', 50, 91.1]
print(' '.join(parts))
def generate_str():
print(','.join(str(d) for d in data))
def sample():
yield 'Is'
yield 'Chicago'
yield 'Not'
yield 'Chicago?'
def combine(source, maxsize):
parts = []
size = 0
for part in source:
parts.append(part)
size += len(part)
if size > maxsize:
yield ''.join(parts)
parts=[]
size = 0
yield ''.join(parts)
if __name__ == '__main__':
generate_str()
text = ','.join(sample())
print(text)
with open('combine.txt', 'w') as f:
for part in combine(sample(), 32768):
f.write(part)
|
normal
|
{
"blob_id": "4ce1e802831f09e503d18fd287cb35400986e3c8",
"index": 8095,
"step-1": "<mask token>\n\n\ndef generate_str():\n print(','.join(str(d) for d in data))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_str():\n print(','.join(str(d) for d in data))\n\n\ndef sample():\n yield 'Is'\n yield 'Chicago'\n yield 'Not'\n yield 'Chicago?'\n\n\n<mask token>\n",
"step-3": "<mask token>\nprint(' '.join(parts))\n\n\ndef generate_str():\n print(','.join(str(d) for d in data))\n\n\ndef sample():\n yield 'Is'\n yield 'Chicago'\n yield 'Not'\n yield 'Chicago?'\n\n\ndef combine(source, maxsize):\n parts = []\n size = 0\n for part in source:\n parts.append(part)\n size += len(part)\n if size > maxsize:\n yield ''.join(parts)\n parts = []\n size = 0\n yield ''.join(parts)\n\n\nif __name__ == '__main__':\n generate_str()\n text = ','.join(sample())\n print(text)\n with open('combine.txt', 'w') as f:\n for part in combine(sample(), 32768):\n f.write(part)\n",
"step-4": "parts = ['Is', 'Chicago', 'Not', 'Chicago?']\ndata = ['ACME', 50, 91.1]\nprint(' '.join(parts))\n\n\ndef generate_str():\n print(','.join(str(d) for d in data))\n\n\ndef sample():\n yield 'Is'\n yield 'Chicago'\n yield 'Not'\n yield 'Chicago?'\n\n\ndef combine(source, maxsize):\n parts = []\n size = 0\n for part in source:\n parts.append(part)\n size += len(part)\n if size > maxsize:\n yield ''.join(parts)\n parts = []\n size = 0\n yield ''.join(parts)\n\n\nif __name__ == '__main__':\n generate_str()\n text = ','.join(sample())\n print(text)\n with open('combine.txt', 'w') as f:\n for part in combine(sample(), 32768):\n f.write(part)\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\n# auther : xiaojinsong([email protected])\n\n\nparts = ['Is', 'Chicago', 'Not', 'Chicago?']\ndata = ['ACME', 50, 91.1]\nprint(' '.join(parts))\n\n\ndef generate_str():\n print(','.join(str(d) for d in data))\n\n\ndef sample():\n yield 'Is'\n yield 'Chicago'\n yield 'Not'\n yield 'Chicago?'\n\n\ndef combine(source, maxsize):\n parts = []\n size = 0\n for part in source:\n parts.append(part)\n size += len(part)\n if size > maxsize:\n yield ''.join(parts)\n parts=[]\n size = 0\n yield ''.join(parts)\n\n\nif __name__ == '__main__':\n generate_str()\n text = ','.join(sample())\n print(text)\n with open('combine.txt', 'w') as f:\n for part in combine(sample(), 32768):\n f.write(part)",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
from functiona import *
total = totalMarks(85, 67, 56, 45, 78)
avg = average(total)
grade = findGrade(avg)
print(grade)
print(total)
print(avg)
|
normal
|
{
"blob_id": "05f77472625e902b66c4a97a4c640835826bd494",
"index": 3635,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(grade)\nprint(total)\nprint(avg)\n",
"step-3": "<mask token>\ntotal = totalMarks(85, 67, 56, 45, 78)\navg = average(total)\ngrade = findGrade(avg)\nprint(grade)\nprint(total)\nprint(avg)\n",
"step-4": "from functiona import *\ntotal = totalMarks(85, 67, 56, 45, 78)\navg = average(total)\ngrade = findGrade(avg)\nprint(grade)\nprint(total)\nprint(avg)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |